[base] Avoid unnecessary long division.

This applies to `FT_MulDiv' but not to `FT_DivFix', where overflows or
lack thereof are predicted accurately.

* src/base/ftcalc.c (ft_div64by32): Improve readability.
(FT_MulDiv, FT_MulDiv_No_Round) [!FT_LONG64]: Use straight division
when multiplication stayed within 32 bits.
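The gist of the `FT_MulDiv' change: without a native 64-bit type (`!FT_LONG64'), the intermediate `a * b + c/2' is kept in a pair of 32-bit words, and after this patch the 32-step shift-and-subtract loop of `ft_div64by32' is entered only when the high word is non-zero; otherwise a single hardware division does the job. The standalone sketch below is not the FreeType code -- the names `div_64_by_32' and `mul_div_rounded' are invented for illustration, and `uint64_t' is used only to construct the hi/lo pair that a !FT_LONG64 build would assemble by hand.

/*
 * Standalone sketch of the fast-path decision described above.
 * NOT FreeType code; all names are hypothetical.
 */

#include <stdint.h>
#include <stdio.h>


/* classic shift-and-subtract long division of a 64-bit hi:lo value */
/* by a 32-bit divisor -- the expensive path                        */
static uint32_t
div_64_by_32( uint32_t  hi,
              uint32_t  lo,
              uint32_t  y )
{
  uint32_t  r = hi;
  uint32_t  q = 0;
  int       i;


  if ( r >= y )
    return 0x7FFFFFFFUL;          /* quotient would overflow */

  for ( i = 0; i < 32; i++ )
  {
    q <<= 1;
    r   = ( r << 1 ) | ( lo >> 31 );  lo <<= 1;   /* left 64-bit shift */

    if ( r >= y )
    {
      r -= y;
      q |= 1;
    }
  }

  return q;
}


/* rounded ( a * b + c/2 ) / c, taking the cheap path when possible */
static uint32_t
mul_div_rounded( uint32_t  a,
                 uint32_t  b,
                 uint32_t  c )
{
  uint64_t  t  = (uint64_t)a * b + ( c >> 1 );
  uint32_t  hi = (uint32_t)( t >> 32 );
  uint32_t  lo = (uint32_t)t;


  /* if the product stayed within 32 bits, one hardware division */
  /* replaces the 32-iteration loop above                        */
  return hi == 0 ? lo / c
                 : div_64_by_32( hi, lo, c );
}


int
main( void )
{
  /* fast path: 1000 * 3000 fits in 32 bits */
  printf( "%lu\n", (unsigned long)mul_div_rounded( 1000, 3000, 7 ) );

  /* slow path: 100000 * 100000 spills into the high word */
  printf( "%lu\n", (unsigned long)mul_div_rounded( 100000, 100000, 7 ) );

  return 0;
}

`FT_DivFix' is left untouched because, as the log entry says, overflow or the lack of it is predicted there exactly, so its long division is never gratuitous.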
diff --git a/ChangeLog b/ChangeLog
index 808ca3a..f5666bc 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,14 @@
+2014-09-25 Alexei Podtelezhnikov <apodtele@gmail.com>
+
+ [base] Avoid unnecessary long division.
+
+ This applies to `FT_MulDiv' but not to `FT_DivFix', where overflows or
+ lack thereof are predicted accurately.
+
+ * src/base/ftcalc.c (ft_div64by32): Improve readability.
+ (FT_MulDiv, FT_MulDiv_No_Round) [!FT_LONG64]: Use straight division
+ when multiplication stayed within 32 bits.
+
2014-09-24 Werner Lemberg <wl@gnu.org>
[autofit] Minor clean-ups.
diff --git a/src/base/ftcalc.c b/src/base/ftcalc.c
index be65a7a..6a39477 100644
--- a/src/base/ftcalc.c
+++ b/src/base/ftcalc.c
@@ -303,16 +303,14 @@
i = 32;
do
{
- r <<= 1;
q <<= 1;
- r |= lo >> 31;
+ r = ( r << 1 ) | ( lo >> 31 ); lo <<= 1; /* left 64-bit shift */
if ( r >= y )
{
r -= y;
q |= 1;
}
- lo <<= 1;
} while ( --i );
return q;
@@ -416,7 +414,10 @@
temp2.hi = 0;
temp2.lo = (FT_UInt32)(c >> 1);
FT_Add64( &temp, &temp2, &temp );
- a = ft_div64by32( temp.hi, temp.lo, (FT_Int32)c );
+
+ /* last attempt to ditch long division */
+ a = temp.hi == 0 ? temp.lo / c
+ : ft_div64by32( temp.hi, temp.lo, (FT_Int32)c );
}
return ( s < 0 ? -a : a );
@@ -450,7 +451,10 @@
ft_multo64( (FT_Int32)a, (FT_Int32)b, &temp );
- a = ft_div64by32( temp.hi, temp.lo, (FT_Int32)c );
+
+ /* last attempt to ditch long division */
+ a = temp.hi == 0 ? temp.lo / c
+ : ft_div64by32( temp.hi, temp.lo, (FT_Int32)c );
}
return ( s < 0 ? -a : a );
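
As for the `ft_div64by32' hunk, the combined statement is just the 64-bit left shift of the `r':`lo' register pair written on one line, and `lo <<= 1' may move ahead of the conditional because that conditional never reads or writes `lo'. A tiny check of the equivalence (hypothetical code, not part of the patch):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

int
main( void )
{
  uint32_t  hi  = 0x89ABCDEFu;
  uint32_t  lo  = 0x01234567u;

  uint32_t  r1  = hi, lo1 = lo;
  uint32_t  r2  = hi, lo2 = lo;


  /* old formulation: shift `r', merge the top bit of `lo', shift `lo' */
  r1  <<= 1;
  r1   |= lo1 >> 31;
  lo1 <<= 1;

  /* new formulation: the same 64-bit left shift on one line */
  r2 = ( r2 << 1 ) | ( lo2 >> 31 );  lo2 <<= 1;

  assert( r1 == r2 && lo1 == lo2 );
  puts( "equivalent" );

  return 0;
}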