Fix more invalid left-shifts.

* src/pfr/pfrgload.c (pfr_glyph_load_compound): Use multiplication,
not left-shift.

* src/truetype/ttgxvar.c (ft_var_load_avar, ft_var_load_gvar,
tt_face_vary_cvt, TT_Vary_Apply_Glyph_Deltas): Use multiplication,
not left-shift.
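Background on the change: the shifted operands are signed shorts that can be negative, and left-shifting a negative signed value is undefined behavior in C, which is what clang's `-fsanitize=undefined' reports; multiplying by the same power of two yields the identical value but is well defined. A minimal standalone sketch, not part of the patch, using stand-in typedefs rather than FreeType's real headers:

#include <stdio.h>

typedef signed short  FT_Short;   /* stand-in for FreeType's FT_Short */
typedef signed long   FT_Fixed;   /* stand-in for FreeType's FT_Fixed */


int
main( void )
{
  FT_Short  frac = -0x2000;             /* -0.5 as a 2.14 fraction     */

  /* `frac << 2' is undefined behavior when `frac' is negative;        */
  /* `frac * 4' is the well-defined form the patch switches to         */
  FT_Fixed  fixed = frac * 4;           /* -0.5 as a 16.16 fixed value */

  printf( "%.4f\n", fixed / 65536.0 );  /* prints -0.5000              */

  return 0;
}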
diff --git a/ChangeLog b/ChangeLog
index efb8dd0..024544c 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,14 @@
+2015-08-02 Werner Lemberg <wl@gnu.org>
+
+ Fix more invalid left-shifts.
+
+ * src/pfr/pfrgload.c (pfr_glyph_load_compound): Use multiplication,
+ not left-shift.
+
+ * src/truetype/ttgxvar.c (ft_var_load_avar, ft_var_load_gvar,
+ tt_face_vary_cvt, TT_Vary_Apply_Glyph_Deltas): Use multiplication,
+ not left-shift.
+
2015-07-31 Werner Lemberg <wl@gnu.org>

 Fix some bugs found by clang's `-fsanitize=undefined' (#45661).
diff --git a/src/pfr/pfrgload.c b/src/pfr/pfrgload.c
index 1cd13c2..88df06a 100644
--- a/src/pfr/pfrgload.c
+++ b/src/pfr/pfrgload.c
@@ -632,14 +632,14 @@
if ( format & PFR_SUBGLYPH_XSCALE )
{
PFR_CHECK( 2 );
- subglyph->x_scale = PFR_NEXT_SHORT( p ) << 4;
+ subglyph->x_scale = PFR_NEXT_SHORT( p ) * 16;
}
subglyph->y_scale = 0x10000L;
if ( format & PFR_SUBGLYPH_YSCALE )
{
PFR_CHECK( 2 );
- subglyph->y_scale = PFR_NEXT_SHORT( p ) << 4;
+ subglyph->y_scale = PFR_NEXT_SHORT( p ) * 16;
}
/* read offset */
diff --git a/src/truetype/ttgxvar.c b/src/truetype/ttgxvar.c
index 7a8c46c..dd9e250 100644
--- a/src/truetype/ttgxvar.c
+++ b/src/truetype/ttgxvar.c
@@ -357,8 +357,8 @@
for ( j = 0; j < segment->pairCount; j++ )
{
/* convert to Fixed */
- segment->correspondence[j].fromCoord = FT_GET_SHORT() << 2;
- segment->correspondence[j].toCoord = FT_GET_SHORT() << 2;
+ segment->correspondence[j].fromCoord = FT_GET_SHORT() * 4;
+ segment->correspondence[j].toCoord = FT_GET_SHORT() * 4;
FT_TRACE5(( " mapping %.4f to %.4f\n",
segment->correspondence[j].fromCoord / 65536.0,
@@ -514,7 +514,7 @@
for ( j = 0 ; j < (FT_UInt)gvar_head.axisCount; j++ )
{
blend->tuplecoords[i * gvar_head.axisCount + j] =
- FT_GET_SHORT() << 2; /* convert to FT_Fixed */
+ FT_GET_SHORT() * 4; /* convert to FT_Fixed */
FT_TRACE5(( "%.4f ",
blend->tuplecoords[i * gvar_head.axisCount + j] / 65536.0 ));
}
@@ -1379,7 +1379,7 @@
if ( tupleIndex & GX_TI_EMBEDDED_TUPLE_COORD )
{
for ( j = 0; j < blend->num_axis; j++ )
- tuple_coords[j] = FT_GET_SHORT() << 2; /* convert from */
+ tuple_coords[j] = FT_GET_SHORT() * 4; /* convert from */
/* short frac to fixed */
}
else
@@ -1397,9 +1397,9 @@
if ( tupleIndex & GX_TI_INTERMEDIATE_TUPLE )
{
for ( j = 0; j < blend->num_axis; j++ )
- im_start_coords[j] = FT_GET_SHORT() << 2;
+ im_start_coords[j] = FT_GET_SHORT() * 4;
for ( j = 0; j < blend->num_axis; j++ )
- im_end_coords[j] = FT_GET_SHORT() << 2;
+ im_end_coords[j] = FT_GET_SHORT() * 4;
}
apply = ft_var_apply_tuple( blend,
@@ -1850,7 +1850,7 @@
if ( tupleIndex & GX_TI_EMBEDDED_TUPLE_COORD )
{
for ( j = 0; j < blend->num_axis; j++ )
- tuple_coords[j] = FT_GET_SHORT() << 2; /* convert from */
+ tuple_coords[j] = FT_GET_SHORT() * 4; /* convert from */
/* short frac to fixed */
}
else if ( ( tupleIndex & GX_TI_TUPLE_INDEX_MASK ) >= blend->tuplecount )
@@ -1867,9 +1867,9 @@
if ( tupleIndex & GX_TI_INTERMEDIATE_TUPLE )
{
for ( j = 0; j < blend->num_axis; j++ )
- im_start_coords[j] = FT_GET_SHORT() << 2;
+ im_start_coords[j] = FT_GET_SHORT() * 4;
for ( j = 0; j < blend->num_axis; j++ )
- im_end_coords[j] = FT_GET_SHORT() << 2;
+ im_end_coords[j] = FT_GET_SHORT() * 4;
}
apply = ft_var_apply_tuple( blend,
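
The shorts read in the ttgxvar.c hunks above are 2.14 fractions from the `avar' and `gvar' tables; multiplying by 4 rescales them to FreeType's 16.16 FT_Fixed, so 1.0 (0x4000 in 2.14) becomes 0x10000. A hypothetical helper, not part of the patch and again using stand-in typedefs, that spells out this conversion:

#include <assert.h>

typedef signed short  FT_Short;   /* stand-ins for FreeType's types */
typedef signed long   FT_Fixed;


/* hypothetical helper: rescale a 2.14 fraction, as stored in the */
/* `avar' and `gvar' tables, to a 16.16 FT_Fixed value            */
static FT_Fixed
f2dot14_to_fixed( FT_Short  frac )
{
  return (FT_Fixed)frac * 4;
}


int
main( void )
{
  assert( f2dot14_to_fixed(  0x4000 ) ==  0x10000L );   /*  1.0 */
  assert( f2dot14_to_fixed( -0x4000 ) == -0x10000L );   /* -1.0 */
  assert( f2dot14_to_fixed(  0x2000 ) ==  0x08000L );   /*  0.5 */

  return 0;
}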