Fix ubsan in gimple-fold.c (PR tree-optimization/82491).
author: Martin Liska <mliska@suse.cz>
Mon, 19 Feb 2018 18:29:52 +0000 (19:29 +0100)
committer: Martin Liska <marxin@gcc.gnu.org>
Mon, 19 Feb 2018 18:29:52 +0000 (18:29 +0000)
2018-02-19  Martin Liska  <mliska@suse.cz>
    Richard Sandiford  <richard.sandiford@linaro.org>

PR tree-optimization/82491
* gimple-fold.c (get_base_constructor): Make earlier bail out
to prevent ubsan.

Co-Authored-By: Richard Sandiford <richard.sandiford@linaro.org>
From-SVN: r257816

gcc/ChangeLog
gcc/gimple-fold.c

index 6b5972278f38a9c9de1d95735b5b0a2fc04d1a2d..7a37db734da6afe97213e6727f6ebd7f8b82a9bf 100644 (file)
@@ -1,3 +1,10 @@
+2018-02-19  Martin Liska  <mliska@suse.cz>
+           Richard Sandiford  <richard.sandiford@linaro.org>
+
+       PR tree-optimization/82491
+       * gimple-fold.c (get_base_constructor): Make earlier bail out
+       to prevent ubsan.
+
 2018-02-19  Carl Love  <cel@us.ibm.com>
 
        * config/rs6000/rs6000-builtin.def: Change NEG macro expansions from
index e556f050e43cbda185069d436c1127acc48e1b95..c9dad6f42d13638995db9019a0861cc7501b811a 100644 (file)
@@ -6442,13 +6442,9 @@ get_base_constructor (tree base, poly_int64_pod *bit_offset,
 
   if (TREE_CODE (base) == MEM_REF)
     {
-      if (!integer_zerop (TREE_OPERAND (base, 1)))
-       {
-         if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
-           return NULL_TREE;
-         *bit_offset += (mem_ref_offset (base).force_shwi ()
-                         * BITS_PER_UNIT);
-       }
+      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
+      if (!boff.to_shwi (bit_offset))
+       return NULL_TREE;
 
       if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)