diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index df2a13867d1f5da397cd02f87631b70c1d7a6b68..9adf55d18cf24bc23ee03f1d159deb81c8885f10 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -580,15 +580,14 @@ phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
 
   slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
                                   new_pair->hashcode, INSERT);
-  if (*slot)
-    free (*slot);
+  free (*slot);
   *slot = (void *) new_pair;
 }
 
 
 /* Add expression E to the expression set of value id V.  */
 
-void
+static void
 add_to_value (unsigned int v, pre_expr e)
 {
   bitmap_set_t set;
@@ -1030,9 +1029,27 @@ debug_bitmap_set (bitmap_set_t set)
   print_bitmap_set (stderr, set, "debug", 0);
 }
 
+void debug_bitmap_sets_for (basic_block);
+
+DEBUG_FUNCTION void
+debug_bitmap_sets_for (basic_block bb)
+{
+  print_bitmap_set (stderr, AVAIL_OUT (bb), "avail_out", bb->index);
+  if (!in_fre)
+    {
+      print_bitmap_set (stderr, EXP_GEN (bb), "exp_gen", bb->index);
+      print_bitmap_set (stderr, PHI_GEN (bb), "phi_gen", bb->index);
+      print_bitmap_set (stderr, TMP_GEN (bb), "tmp_gen", bb->index);
+      print_bitmap_set (stderr, ANTIC_IN (bb), "antic_in", bb->index);
+      if (do_partial_partial)
+       print_bitmap_set (stderr, PA_IN (bb), "pa_in", bb->index);
+      print_bitmap_set (stderr, NEW_SETS (bb), "new_sets", bb->index);
+    }
+}
+
 /* Print out the expressions that have VAL to OUTFILE.  */
 
-void
+static void
 print_value_expressions (FILE *outfile, unsigned int val)
 {
   bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
@@ -1151,14 +1168,6 @@ fully_constant_expression (pre_expr e)
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        switch (TREE_CODE_CLASS (nary->opcode))
          {
-         case tcc_expression:
-           if (nary->opcode == TRUTH_NOT_EXPR)
-             goto do_unary;
-           if (nary->opcode != TRUTH_AND_EXPR
-               && nary->opcode != TRUTH_OR_EXPR
-               && nary->opcode != TRUTH_XOR_EXPR)
-             return e;
-           /* Fallthrough.  */
          case tcc_binary:
          case tcc_comparison:
            {
@@ -1200,7 +1209,6 @@ fully_constant_expression (pre_expr e)
              return e;
            /* Fallthrough.  */
          case tcc_unary:
-do_unary:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
@@ -1409,7 +1417,7 @@ get_representative_for (const pre_expr e)
   if (!pretemp || exprtype != TREE_TYPE (pretemp))
     {
       pretemp = create_tmp_reg (exprtype, "pretmp");
-      get_var_ann (pretemp);
+      add_referenced_var (pretemp);
     }
 
   name = make_ssa_name (pretemp, gimple_build_nop ());
@@ -1453,20 +1461,18 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
        unsigned int i;
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
-       struct vn_nary_op_s newnary;
-       /* The NARY structure is only guaranteed to have been
-          allocated to the nary->length operands.  */
-       memcpy (&newnary, nary, (sizeof (struct vn_nary_op_s)
-                                - sizeof (tree) * (4 - nary->length)));
+       vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
+                                          sizeof_vn_nary_op (nary->length));
+       memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
 
-       for (i = 0; i < newnary.length; i++)
+       for (i = 0; i < newnary->length; i++)
          {
-           if (TREE_CODE (newnary.op[i]) != SSA_NAME)
+           if (TREE_CODE (newnary->op[i]) != SSA_NAME)
              continue;
            else
              {
                 pre_expr leader, result;
-               unsigned int op_val_id = VN_INFO (newnary.op[i])->value_id;
+               unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                 result = phi_translate (leader, set1, set2, pred, phiblock);
                if (result && result != leader)
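
The hunk above replaces the old fixed-size on-stack copy (a plain struct
vn_nary_op_s with a 4-entry operand array) with an alloca'd copy sized by
sizeof_vn_nary_op, since the NARY structure is only allocated to
nary->length operands.  A minimal standalone sketch of the same idiom,
with invented names standing in for the GCC types:

    #include <alloca.h>
    #include <stddef.h>
    #include <string.h>

    /* Stand-in for vn_nary_op_s: a fixed header plus a trailing operand
       array allocated to exactly LENGTH entries.  */
    struct nary_like
    {
      unsigned length;
      int op[1];                        /* really op[length] */
    };

    static size_t
    sizeof_nary_like (unsigned length)
    {
      return sizeof (struct nary_like) + sizeof (int) * (length - 1);
    }

    static int
    translate_ops (const struct nary_like *nary)
    {
      /* Stack-allocate a writable copy sized to the real operand count,
         as XALLOCAVAR + sizeof_vn_nary_op do above, so the hashed
         original stays untouched.  */
      struct nary_like *copy = alloca (sizeof_nary_like (nary->length));
      int changed = 0;
      unsigned i;

      memcpy (copy, nary, sizeof_nary_like (nary->length));
      for (i = 0; i < copy->length; i++)
        {
          int repl = copy->op[i] + 1;   /* placeholder for phi-translation */
          changed |= repl != nary->op[i];
          copy->op[i] = repl;
        }
      return changed;
    }
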
@@ -1474,12 +1480,12 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                    tree name = get_representative_for (result);
                    if (!name)
                      return NULL;
-                   newnary.op[i] = name;
+                   newnary->op[i] = name;
                  }
                else if (!result)
                  return NULL;
 
-               changed |= newnary.op[i] != nary->op[i];
+               changed |= newnary->op[i] != nary->op[i];
              }
          }
        if (changed)
@@ -1487,13 +1493,10 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
            pre_expr constant;
            unsigned int new_val_id;
 
-           tree result = vn_nary_op_lookup_pieces (newnary.length,
-                                                   newnary.opcode,
-                                                   newnary.type,
-                                                   newnary.op[0],
-                                                   newnary.op[1],
-                                                   newnary.op[2],
-                                                   newnary.op[3],
+           tree result = vn_nary_op_lookup_pieces (newnary->length,
+                                                   newnary->opcode,
+                                                   newnary->type,
+                                                   &newnary->op[0],
                                                    &nary);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);
@@ -1517,13 +1520,10 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                VEC_safe_grow_cleared (bitmap_set_t, heap,
                                       value_expressions,
                                       get_max_value_id() + 1);
-               nary = vn_nary_op_insert_pieces (newnary.length,
-                                                newnary.opcode,
-                                                newnary.type,
-                                                newnary.op[0],
-                                                newnary.op[1],
-                                                newnary.op[2],
-                                                newnary.op[3],
+               nary = vn_nary_op_insert_pieces (newnary->length,
+                                                newnary->opcode,
+                                                newnary->type,
+                                                &newnary->op[0],
                                                 result, new_val_id);
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
@@ -1545,7 +1545,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
        tree newvuse = vuse;
        VEC (vn_reference_op_s, heap) *newoperands = NULL;
        bool changed = false, same_valid = true;
-       unsigned int i, j;
+       unsigned int i, j, n;
        vn_reference_op_t operand;
        vn_reference_t newref;
 
@@ -1554,100 +1554,83 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
          {
            pre_expr opresult;
            pre_expr leader;
-           tree oldop0 = operand->op0;
-           tree oldop1 = operand->op1;
-           tree oldop2 = operand->op2;
-           tree op0 = oldop0;
-           tree op1 = oldop1;
-           tree op2 = oldop2;
+           tree op[3];
            tree type = operand->type;
            vn_reference_op_s newop = *operand;
-
-           if (op0 && TREE_CODE (op0) == SSA_NAME)
+           op[0] = operand->op0;
+           op[1] = operand->op1;
+           op[2] = operand->op2;
+           for (n = 0; n < 3; ++n)
              {
-               unsigned int op_val_id = VN_INFO (op0)->value_id;
-               leader = find_leader_in_sets (op_val_id, set1, set2);
-               opresult = phi_translate (leader, set1, set2, pred, phiblock);
-               if (opresult && opresult != leader)
+               unsigned int op_val_id;
+               if (!op[n])
+                 continue;
+               if (TREE_CODE (op[n]) != SSA_NAME)
                  {
-                   tree name = get_representative_for (opresult);
-                   if (!name)
+                   /* We can't possibly insert these.  */
+                   if (n != 0
+                       && !is_gimple_min_invariant (op[n]))
                      break;
-                   op0 = name;
+                   continue;
                  }
-               else if (!opresult)
-                 break;
-             }
-           changed |= op0 != oldop0;
-
-           if (op1 && TREE_CODE (op1) == SSA_NAME)
-             {
-               unsigned int op_val_id = VN_INFO (op1)->value_id;
+               op_val_id = VN_INFO (op[n])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
-               opresult = phi_translate (leader, set1, set2, pred, phiblock);
-               if (opresult && opresult != leader)
+               if (!leader)
+                 break;
+               /* Make sure we do not recursively translate ourselves
+                  like for translating a[n_1] with the leader for
+                  n_1 being a[n_1].  */
+               if (get_expression_id (leader) != get_expression_id (expr))
                  {
-                   tree name = get_representative_for (opresult);
-                   if (!name)
+                   opresult = phi_translate (leader, set1, set2,
+                                             pred, phiblock);
+                   if (!opresult)
                      break;
-                   op1 = name;
+                   if (opresult != leader)
+                     {
+                       tree name = get_representative_for (opresult);
+                       if (!name)
+                         break;
+                       changed |= name != op[n];
+                       op[n] = name;
+                     }
                  }
-               else if (!opresult)
-                 break;
              }
-           /* We can't possibly insert these.  */
-           else if (op1 && !is_gimple_min_invariant (op1))
-             break;
-           changed |= op1 != oldop1;
-           if (op2 && TREE_CODE (op2) == SSA_NAME)
+           if (n != 3)
              {
-               unsigned int op_val_id = VN_INFO (op2)->value_id;
-               leader = find_leader_in_sets (op_val_id, set1, set2);
-               opresult = phi_translate (leader, set1, set2, pred, phiblock);
-               if (opresult && opresult != leader)
-                 {
-                   tree name = get_representative_for (opresult);
-                   if (!name)
-                     break;
-                   op2 = name;
-                 }
-               else if (!opresult)
-                 break;
+               if (newoperands)
+                 VEC_free (vn_reference_op_s, heap, newoperands);
+               return NULL;
              }
-           /* We can't possibly insert these.  */
-           else if (op2 && !is_gimple_min_invariant (op2))
-             break;
-           changed |= op2 != oldop2;
-
            if (!newoperands)
              newoperands = VEC_copy (vn_reference_op_s, heap, operands);
            /* We may have changed from an SSA_NAME to a constant */
-           if (newop.opcode == SSA_NAME && TREE_CODE (op0) != SSA_NAME)
-             newop.opcode = TREE_CODE (op0);
+           if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
+             newop.opcode = TREE_CODE (op[0]);
            newop.type = type;
-           newop.op0 = op0;
-           newop.op1 = op1;
-           newop.op2 = op2;
+           newop.op0 = op[0];
+           newop.op1 = op[1];
+           newop.op2 = op[2];
            /* If it transforms a non-constant ARRAY_REF into a constant
               one, adjust the constant offset.  */
            if (newop.opcode == ARRAY_REF
                && newop.off == -1
-               && TREE_CODE (op0) == INTEGER_CST
-               && TREE_CODE (op1) == INTEGER_CST
-               && TREE_CODE (op2) == INTEGER_CST)
+               && TREE_CODE (op[0]) == INTEGER_CST
+               && TREE_CODE (op[1]) == INTEGER_CST
+               && TREE_CODE (op[2]) == INTEGER_CST)
              {
-               double_int off = tree_to_double_int (op0);
+               double_int off = tree_to_double_int (op[0]);
                off = double_int_add (off,
                                      double_int_neg
-                                       (tree_to_double_int (op1)));
-               off = double_int_mul (off, tree_to_double_int (op2));
+                                       (tree_to_double_int (op[1])));
+               off = double_int_mul (off, tree_to_double_int (op[2]));
                if (double_int_fits_in_shwi_p (off))
                  newop.off = off.low;
              }
            VEC_replace (vn_reference_op_s, newoperands, j, &newop);
            /* If it transforms from an SSA_NAME to an address, fold with
               a preceding indirect reference.  */
-           if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
+           if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
                && VEC_index (vn_reference_op_s,
                              newoperands, j - 1)->opcode == MEM_REF)
              vn_reference_fold_indirect (&newoperands, &j);
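
A worked instance of the constant-offset adjustment above, with values
invented for illustration: if translation turns the ARRAY_REF index op[0]
into the constant 7 while the low bound op[1] is 0 and the element size
op[2] is 4 bytes, the reference is given

    off = (7 - 0) * 4 = 28

as its constant byte offset, provided the result fits in a signed
HOST_WIDE_INT (the double_int_fits_in_shwi_p check).
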
@@ -1676,7 +1659,6 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
          {
            unsigned int new_val_id;
            pre_expr constant;
-           bool converted = false;
 
            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
@@ -1685,12 +1667,29 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
            if (result)
              VEC_free (vn_reference_op_s, heap, newoperands);
 
-           if (result
-               && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
+           /* We can always insert constants, so if we have a partially
+              redundant constant load of another type try to translate it
+              to a constant of the appropriate type.  */
+           if (result && is_gimple_min_invariant (result))
              {
-               result = fold_build1 (VIEW_CONVERT_EXPR, ref->type, result);
-               converted = true;
+               tree tem = result;
+               if (!useless_type_conversion_p (ref->type, TREE_TYPE (result)))
+                 {
+                   tem = fold_unary (VIEW_CONVERT_EXPR, ref->type, result);
+                   if (tem && !is_gimple_min_invariant (tem))
+                     tem = NULL_TREE;
+                 }
+               if (tem)
+                 return get_or_alloc_expr_for_constant (tem);
              }
+
+           /* If we'd have to convert things we would need to validate
+              if we can insert the translated expression.  So fail
+              here for now - we cannot insert an alias with a different
+              type in the VN tables either, as that would assert.  */
+           if (result
+               && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
+             return NULL;
            else if (!result && newref
                     && !useless_type_conversion_p (ref->type, newref->type))
              {
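
One invented source shape that exercises the constant path above: a load
that is partially redundant with a store of a constant of a different
type.  The VIEW_CONVERT_EXPR folding lets PRE substitute the
reinterpreted constant instead of giving up on the translation:

    /* Sketch only; whether PRE sees exactly this depends on earlier
       passes.  On the path where x->i was stored, the later load of
       x->f has a known constant value of a different type.  */
    union u { int i; float f; };

    float
    f (union u *x, int flag)
    {
      if (flag)
        x->i = 0;       /* int constant; same bit pattern as 0.0f */
      return x->f;
    }
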
@@ -1698,64 +1697,11 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                return NULL;
              }
 
-           if (result && is_gimple_min_invariant (result))
-             {
-               gcc_assert (!newoperands);
-               return get_or_alloc_expr_for_constant (result);
-             }
-
            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = REFERENCE;
            expr->id = 0;
 
-           if (converted)
-             {
-               vn_nary_op_t nary;
-               tree nresult;
-
-               gcc_assert (CONVERT_EXPR_P (result)
-                           || TREE_CODE (result) == VIEW_CONVERT_EXPR);
-
-               nresult = vn_nary_op_lookup_pieces (1, TREE_CODE (result),
-                                                   TREE_TYPE (result),
-                                                   TREE_OPERAND (result, 0),
-                                                   NULL_TREE, NULL_TREE,
-                                                   NULL_TREE,
-                                                   &nary);
-               if (nresult && is_gimple_min_invariant (nresult))
-                 return get_or_alloc_expr_for_constant (nresult);
-
-               expr->kind = NARY;
-               if (nary)
-                 {
-                   PRE_EXPR_NARY (expr) = nary;
-                   constant = fully_constant_expression (expr);
-                   if (constant != expr)
-                     return constant;
-
-                   new_val_id = nary->value_id;
-                   get_or_alloc_expression_id (expr);
-                 }
-               else
-                 {
-                   new_val_id = get_next_value_id ();
-                   VEC_safe_grow_cleared (bitmap_set_t, heap,
-                                          value_expressions,
-                                          get_max_value_id() + 1);
-                   nary = vn_nary_op_insert_pieces (1, TREE_CODE (result),
-                                                    TREE_TYPE (result),
-                                                    TREE_OPERAND (result, 0),
-                                                    NULL_TREE, NULL_TREE,
-                                                    NULL_TREE, NULL_TREE,
-                                                    new_val_id);
-                   PRE_EXPR_NARY (expr) = nary;
-                   constant = fully_constant_expression (expr);
-                   if (constant != expr)
-                     return constant;
-                   get_or_alloc_expression_id (expr);
-                 }
-             }
-           else if (newref)
+           if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
@@ -2052,57 +1998,19 @@ value_dies_in_block_x (pre_expr expr, basic_block block)
 }
 
 
-#define union_contains_value(SET1, SET2, VAL)                  \
-  (bitmap_set_contains_value ((SET1), (VAL))                   \
-   || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
+/* Determine if OP is valid in SET1 U SET2, which it is when the union
+   contains its value-id.  */
 
-/* Determine if vn_reference_op_t VRO is legal in SET1 U SET2.
- */
 static bool
-vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2,
-                  vn_reference_op_t vro)
+op_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, tree op)
 {
-  if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
+  if (op && TREE_CODE (op) == SSA_NAME)
     {
-      struct pre_expr_d temp;
-      temp.kind = NAME;
-      temp.id = 0;
-      PRE_EXPR_NAME (&temp) = vro->op0;
-      temp.id = lookup_expression_id (&temp);
-      if (temp.id == 0)
-       return false;
-      if (!union_contains_value (set1, set2,
-                                get_expr_value_id (&temp)))
+      unsigned int value_id = VN_INFO (op)->value_id;
+      if (!(bitmap_set_contains_value (set1, value_id)
+           || (set2 && bitmap_set_contains_value  (set2, value_id))))
        return false;
     }
-  if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
-    {
-      struct pre_expr_d temp;
-      temp.kind = NAME;
-      temp.id = 0;
-      PRE_EXPR_NAME (&temp) = vro->op1;
-      temp.id = lookup_expression_id (&temp);
-      if (temp.id == 0)
-       return false;
-      if (!union_contains_value (set1, set2,
-                                get_expr_value_id (&temp)))
-       return false;
-    }
-
-  if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
-    {
-      struct pre_expr_d temp;
-      temp.kind = NAME;
-      temp.id = 0;
-      PRE_EXPR_NAME (&temp) = vro->op2;
-      temp.id = lookup_expression_id (&temp);
-      if (temp.id == 0)
-       return false;
-      if (!union_contains_value (set1, set2,
-                                get_expr_value_id (&temp)))
-       return false;
-    }
-
   return true;
 }
 
@@ -2125,28 +2033,8 @@ valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        for (i = 0; i < nary->length; i++)
-         {
-           if (TREE_CODE (nary->op[i]) == SSA_NAME)
-             {
-               struct pre_expr_d temp;
-               temp.kind = NAME;
-               temp.id = 0;
-               PRE_EXPR_NAME (&temp) = nary->op[i];
-               temp.id = lookup_expression_id (&temp);
-               if (temp.id == 0)
-                 return false;
-               if (!union_contains_value (set1, set2,
-                                          get_expr_value_id (&temp)))
-                 return false;
-             }
-         }
-       /* If the NARY may trap make sure the block does not contain
-          a possible exit point.
-          ???  This is overly conservative if we translate AVAIL_OUT
-          as the available expression might be after the exit point.  */
-       if (BB_MAY_NOTRETURN (block)
-           && vn_nary_may_trap (nary))
-         return false;
+         if (!op_valid_in_sets (set1, set2, nary->op[i]))
+           return false;
        return true;
       }
       break;
@@ -2158,19 +2046,12 @@ valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
 
        FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro)
          {
-           if (!vro_valid_in_sets (set1, set2, vro))
+           if (!op_valid_in_sets (set1, set2, vro->op0)
+               || !op_valid_in_sets (set1, set2, vro->op1)
+               || !op_valid_in_sets (set1, set2, vro->op2))
              return false;
          }
-       if (ref->vuse)
-         {
-           gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
-           if (!gimple_nop_p (def_stmt)
-               && gimple_bb (def_stmt) != block
-               && !dominated_by_p (CDI_DOMINATORS,
-                                   block, gimple_bb (def_stmt)))
-             return false;
-         }
-       return !value_dies_in_block_x (expr, block);
+       return true;
       }
     default:
       gcc_unreachable ();
@@ -2217,6 +2098,47 @@ clean (bitmap_set_t set, basic_block block)
   VEC_free (pre_expr, heap, exprs);
 }
 
+/* Clean the set of expressions that are no longer valid in SET because
+   they are clobbered in BLOCK or because they trap and may not be executed.  */
+
+static void
+prune_clobbered_mems (bitmap_set_t set, basic_block block)
+{
+  bitmap_iterator bi;
+  unsigned i;
+
+  FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
+    {
+      pre_expr expr = expression_for_id (i);
+      if (expr->kind == REFERENCE)
+       {
+         vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
+         if (ref->vuse)
+           {
+             gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
+             if (!gimple_nop_p (def_stmt)
+                 && ((gimple_bb (def_stmt) != block
+                      && !dominated_by_p (CDI_DOMINATORS,
+                                          block, gimple_bb (def_stmt)))
+                     || (gimple_bb (def_stmt) == block
+                         && value_dies_in_block_x (expr, block))))
+               bitmap_remove_from_set (set, expr);
+           }
+       }
+      else if (expr->kind == NARY)
+       {
+         vn_nary_op_t nary = PRE_EXPR_NARY (expr);
+         /* If the NARY may trap make sure the block does not contain
+            a possible exit point.
+            ???  This is overly conservative if we translate AVAIL_OUT
+            as the available expression might be after the exit point.  */
+         if (BB_MAY_NOTRETURN (block)
+             && vn_nary_may_trap (nary))
+           bitmap_remove_from_set (set, expr);
+       }
+    }
+}
+
 static sbitmap has_abnormal_preds;
 
 /* List of blocks that may have changed during ANTIC computation and
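
Roughly the situation the new prune_clobbered_mems handles, as an
invented C fragment: the load of *p is anticipated at the join block,
but it must be pruned from the "then" block's ANTIC_IN because the call
may clobber *p before the block's exit:

    extern void bar (void);

    int
    f (int flag, int *p)
    {
      if (flag)
        bar ();         /* may clobber *p */
      return *p;        /* anticipable at the join, but not at the top
                           of the block containing bar ()  */
    }
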
@@ -2358,6 +2280,10 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
       VEC_free (basic_block, heap, worklist);
     }
 
+  /* Prune expressions that are clobbered in block and thus become
+     invalid if translated from ANTIC_OUT to ANTIC_IN.  */
+  prune_clobbered_mems (ANTIC_OUT, block);
+
   /* Generate ANTIC_OUT - TMP_GEN.  */
   S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
 
@@ -2512,6 +2438,10 @@ compute_partial_antic_aux (basic_block block,
       VEC_free (basic_block, heap, worklist);
     }
 
+  /* Prune expressions that are clobbered in block and thus become
+     invalid if translated from PA_OUT to PA_IN.  */
+  prune_clobbered_mems (PA_OUT, block);
+
   /* PA_IN starts with PA_OUT - TMP_GEN.
      Then we subtract things from ANTIC_IN.  */
   PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
@@ -2656,17 +2586,6 @@ compute_antic (void)
   sbitmap_free (changed_blocks);
 }
 
-/* Return true if we can value number the call in STMT.  This is true
-   if we have a pure or constant call.  */
-
-static bool
-can_value_number_call (gimple stmt)
-{
-  if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
-    return true;
-  return false;
-}
-
 /* Return true if OP is a tree which we can perform PRE on.
    This may not match the operations we can value number, but in
    a perfect world would.  */
@@ -2770,7 +2689,7 @@ create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
            gcc_assert (base);
            offset = int_const_binop (PLUS_EXPR, offset,
                                      build_int_cst (TREE_TYPE (offset),
-                                                    off), 0);
+                                                    off));
            baseop = build_fold_addr_expr (base);
          }
        return fold_build2 (MEM_REF, currop->type, baseop, offset);
@@ -2828,6 +2747,23 @@ create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
        return folded;
       }
       break;
+    case WITH_SIZE_EXPR:
+      {
+       tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
+                                                       stmts, domstmt);
+       pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
+       tree genop1;
+
+       if (!genop0)
+         return NULL_TREE;
+
+       genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
+       if (!genop1)
+         return NULL_TREE;
+
+       return fold_build2 (currop->opcode, currop->type, genop0, genop1);
+      }
+      break;
     case BIT_FIELD_REF:
       {
        tree folded;
@@ -3095,50 +3031,53 @@ create_expression_by_pieces (basic_block block, pre_expr expr,
     case NARY:
       {
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
-       switch (nary->length)
+       tree genop[4];
+       unsigned i;
+       for (i = 0; i < nary->length; ++i)
          {
-         case 2:
-           {
-             pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
-             pre_expr op2 = get_or_alloc_expr_for (nary->op[1]);
-             tree genop1 = find_or_generate_expression (block, op1,
-                                                        stmts, domstmt);
-             tree genop2 = find_or_generate_expression (block, op2,
-                                                        stmts, domstmt);
-             if (!genop1 || !genop2)
-               return NULL_TREE;
-             /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR.  It
-                may be a constant with the wrong type.  */
-             if (nary->opcode == POINTER_PLUS_EXPR)
-               {
-                 genop1 = fold_convert (nary->type, genop1);
-                 genop2 = fold_convert (sizetype, genop2);
-               }
-             else
-               {
-                 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
-                 genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
-               }
-
-             folded = fold_build2 (nary->opcode, nary->type,
-                                   genop1, genop2);
-           }
-           break;
-         case 1:
-           {
-             pre_expr op1 = get_or_alloc_expr_for (nary->op[0]);
-             tree genop1 = find_or_generate_expression (block, op1,
-                                                        stmts, domstmt);
-             if (!genop1)
-               return NULL_TREE;
-             genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
-
-             folded = fold_build1 (nary->opcode, nary->type,
-                                   genop1);
-           }
-           break;
-         default:
-           return NULL_TREE;
+           pre_expr op = get_or_alloc_expr_for (nary->op[i]);
+           genop[i] = find_or_generate_expression (block, op,
+                                                   stmts, domstmt);
+           if (!genop[i])
+             return NULL_TREE;
+           /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR.  It
+              may have conversions stripped.  */
+           if (nary->opcode == POINTER_PLUS_EXPR)
+             {
+               if (i == 0)
+                 genop[i] = fold_convert (nary->type, genop[i]);
+               else if (i == 1)
+                 genop[i] = convert_to_ptrofftype (genop[i]);
+             }
+           else
+             genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
+         }
+       if (nary->opcode == CONSTRUCTOR)
+         {
+           VEC(constructor_elt,gc) *elts = NULL;
+           for (i = 0; i < nary->length; ++i)
+             CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
+           folded = build_constructor (nary->type, elts);
+         }
+       else
+         {
+           switch (nary->length)
+             {
+             case 1:
+               folded = fold_build1 (nary->opcode, nary->type,
+                                     genop[0]);
+               break;
+             case 2:
+               folded = fold_build2 (nary->opcode, nary->type,
+                                     genop[0], genop[1]);
+               break;
+             case 3:
+               folded = fold_build3 (nary->opcode, nary->type,
+                                     genop[0], genop[1], genop[2]);
+               break;
+             default:
+               gcc_unreachable ();
+             }
          }
       }
       break;
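
The POINTER_PLUS_EXPR handling above reflects a GIMPLE invariant: the
first operand is the pointer and the second operand must have the
pointer-offset type (sizetype), so regenerated operands are reconverted
with fold_convert and convert_to_ptrofftype.  An invented source-level
example of the shape that needs this:

    int *
    advance (int *p, long n)
    {
      /* Gimplified roughly as p + (sizetype) (n * 4); the offset operand
         of the POINTER_PLUS_EXPR is unsigned sizetype even though n is
         signed.  */
      return p + n;
    }
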
@@ -3178,7 +3117,6 @@ create_expression_by_pieces (basic_block block, pre_expr expr,
                bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
              bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
            }
-         mark_symbols_for_renaming (stmt);
        }
       gimple_seq_add_seq (stmts, forced_stmts);
     }
@@ -3186,10 +3124,7 @@ create_expression_by_pieces (basic_block block, pre_expr expr,
   /* Build and insert the assignment of the end result to the temporary
      that we will return.  */
   if (!pretemp || exprtype != TREE_TYPE (pretemp))
-    {
-      pretemp = create_tmp_reg (exprtype, "pretmp");
-      get_var_ann (pretemp);
-    }
+    pretemp = create_tmp_reg (exprtype, "pretmp");
 
   temp = pretemp;
   add_referenced_var (temp);
@@ -3202,8 +3137,10 @@ create_expression_by_pieces (basic_block block, pre_expr expr,
   gimple_seq_add_stmt (stmts, newstmt);
   bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
 
-  /* All the symbols in NEWEXPR should be put into SSA form.  */
-  mark_symbols_for_renaming (newstmt);
+  /* Fold the last statement.  */
+  gsi = gsi_last (*stmts);
+  if (fold_stmt_inplace (&gsi))
+    update_stmt (gsi_stmt (gsi));
 
   /* Add a value number to the temporary.
      The value may already exist in either NEW_SETS, or AVAIL_OUT, because
@@ -3256,6 +3193,10 @@ inhibit_phi_insertion (basic_block bb, pre_expr expr)
     {
       switch (op->opcode)
        {
+       case CALL_EXPR:
+         /* Calls are not a problem.  */
+         return false;
+
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (TREE_CODE (op->op0) != SSA_NAME)
@@ -3309,13 +3250,6 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
   tree temp;
   gimple phi;
 
-  if (dump_file && (dump_flags & TDF_DETAILS))
-    {
-      fprintf (dump_file, "Found partial redundancy for expression ");
-      print_pre_expr (dump_file, expr);
-      fprintf (dump_file, " (%04d)\n", val);
-    }
-
   /* Make sure we aren't creating an induction variable.  */
   if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
     {
@@ -3449,10 +3383,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
 
   /* Now build a phi for the new variable.  */
   if (!prephitemp || TREE_TYPE (prephitemp) != type)
-    {
-      prephitemp = create_tmp_var (type, "prephitmp");
-      get_var_ann (prephitemp);
-    }
+    prephitemp = create_tmp_var (type, "prephitmp");
 
   temp = prephitemp;
   add_referenced_var (temp);
@@ -3633,11 +3564,21 @@ do_regular_insertion (basic_block block, basic_block dom)
                               "optimized for speed edge\n", val);
                    }
                }
-             else if (dbg_cnt (treepre_insert)
-                      && insert_into_preds_of_block (block,
-                                                     get_expression_id (expr),
-                                                     avail))
-               new_stuff = true;
+             else if (dbg_cnt (treepre_insert))
+               {
+                 if (dump_file && (dump_flags & TDF_DETAILS))
+                   {
+                     fprintf (dump_file, "Found partial redundancy for "
+                              "expression ");
+                     print_pre_expr (dump_file, expr);
+                     fprintf (dump_file, " (%04d)\n",
+                              get_expr_value_id (expr));
+                   }
+                 if (insert_into_preds_of_block (block,
+                                                 get_expression_id (expr),
+                                                 avail))
+                   new_stuff = true;
+               }
            }
          /* If all edges produce the same value and that value is
             an invariant, then the PHI has the same value on all
@@ -3756,20 +3697,59 @@ do_partial_partial_insertion (basic_block block, basic_block dom)
                }
              else
                avail[bprime->index] = edoubleprime;
-
            }
 
          /* If we can insert it, it's not the same value
             already existing along every predecessor, and
             it's defined by some predecessor, it is
             partially redundant.  */
-         if (!cant_insert && by_all && dbg_cnt (treepre_insert))
+         if (!cant_insert && by_all)
            {
-             pre_stats.pa_insert++;
-             if (insert_into_preds_of_block (block, get_expression_id (expr),
-                                             avail))
-               new_stuff = true;
-           }
+             edge succ;
+             bool do_insertion = false;
+
+             /* Insert only if we can remove a later expression on a path
+                that we want to optimize for speed.
+                The phi node that we will be inserting in BLOCK is not free,
+                and inserting it for the sake of !optimize_for_speed successor
+                may cause regressions on the speed path.  */
+             FOR_EACH_EDGE (succ, ei, block->succs)
+               {
+                 if (bitmap_set_contains_value (PA_IN (succ->dest), val))
+                   {
+                     if (optimize_edge_for_speed_p (succ))
+                       do_insertion = true;
+                   }
+               }
+
+             if (!do_insertion)
+               {
+                 if (dump_file && (dump_flags & TDF_DETAILS))
+                   {
+                     fprintf (dump_file, "Skipping partial partial redundancy "
+                              "for expression ");
+                     print_pre_expr (dump_file, expr);
+                     fprintf (dump_file, " (%04d), not partially anticipated "
+                              "on any to be optimized for speed edges\n", val);
+                   }
+               }
+             else if (dbg_cnt (treepre_insert))
+               {
+                 pre_stats.pa_insert++;
+                 if (dump_file && (dump_flags & TDF_DETAILS))
+                   {
+                     fprintf (dump_file, "Found partial partial redundancy "
+                              "for expression ");
+                     print_pre_expr (dump_file, expr);
+                     fprintf (dump_file, " (%04d)\n",
+                              get_expr_value_id (expr));
+                   }
+                 if (insert_into_preds_of_block (block,
+                                                 get_expression_id (expr),
+                                                 avail))
+                   new_stuff = true;
+               }          
+           } 
          free (avail);
        }
     }
@@ -3839,6 +3819,8 @@ insert (void)
   while (new_stuff)
     {
       num_iterations++;
+      if (dump_file && dump_flags & TDF_DETAILS)
+       fprintf (dump_file, "Starting insert iteration %d\n", num_iterations);
       new_stuff = insert_aux (ENTRY_BLOCK_PTR);
     }
   statistics_histogram_event (cfun, "insert iterations", num_iterations);
@@ -3976,8 +3958,7 @@ compute_avail (void)
             or control flow.
             If this isn't a call or it is the last stmt in the
             basic-block then the CFG represents things correctly.  */
-         if (is_gimple_call (stmt)
-             && !stmt_ends_bb_p (stmt))
+         if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
            {
              /* Non-looping const functions always return normally.
                 Otherwise the call might not return or have side-effects
@@ -3999,8 +3980,7 @@ compute_avail (void)
              bitmap_value_insert_into_set (AVAIL_OUT (block), e);
            }
 
-         if (gimple_has_volatile_ops (stmt)
-             || stmt_could_throw_p (stmt))
+         if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
            continue;
 
          switch (gimple_code (stmt))
@@ -4018,7 +3998,8 @@ compute_avail (void)
                pre_expr result = NULL;
                VEC(vn_reference_op_s, heap) *ops = NULL;
 
-               if (!can_value_number_call (stmt))
+               /* We can value number only calls to real functions.  */
+               if (gimple_call_internal_p (stmt))
                  continue;
 
                copy_reference_ops_from_call (stmt, &ops);
@@ -4040,15 +4021,26 @@ compute_avail (void)
                    if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
                      add_to_exp_gen (block, vro->op2);
                  }
-               result = (pre_expr) pool_alloc (pre_expr_pool);
-               result->kind = REFERENCE;
-               result->id = 0;
-               PRE_EXPR_REFERENCE (result) = ref;
 
-               get_or_alloc_expression_id (result);
-               add_to_value (get_expr_value_id (result), result);
-               if (!in_fre)
-                 bitmap_value_insert_into_set (EXP_GEN (block), result);
+               /* If the value of the call is not invalidated in
+                  this block until it is computed, add the expression
+                  to EXP_GEN.  */
+               if (!gimple_vuse (stmt)
+                   || gimple_code
+                        (SSA_NAME_DEF_STMT (gimple_vuse (stmt))) == GIMPLE_PHI
+                   || gimple_bb (SSA_NAME_DEF_STMT
+                                   (gimple_vuse (stmt))) != block)
+                 {
+                   result = (pre_expr) pool_alloc (pre_expr_pool);
+                   result->kind = REFERENCE;
+                   result->id = 0;
+                   PRE_EXPR_REFERENCE (result) = ref;
+
+                   get_or_alloc_expression_id (result);
+                   add_to_value (get_expr_value_id (result), result);
+                   if (!in_fre)
+                     bitmap_value_insert_into_set (EXP_GEN (block), result);
+                 }
                continue;
              }
 
@@ -4067,9 +4059,8 @@ compute_avail (void)
                      vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
                                                gimple_assign_rhs_code (stmt),
                                                gimple_expr_type (stmt),
-                                               gimple_assign_rhs1 (stmt),
-                                               gimple_assign_rhs2 (stmt),
-                                               NULL_TREE, NULL_TREE, &nary);
+                                               gimple_assign_rhs1_ptr (stmt),
+                                               &nary);
 
                      if (!nary)
                        continue;
@@ -4078,6 +4069,13 @@ compute_avail (void)
                        if (TREE_CODE (nary->op[i]) == SSA_NAME)
                          add_to_exp_gen (block, nary->op[i]);
 
+                     /* If the NARY traps and there was a preceding
+                        point in the block that might not return avoid
+                        adding the nary to EXP_GEN.  */
+                     if (BB_MAY_NOTRETURN (block)
+                         && vn_nary_may_trap (nary))
+                       continue;
+
                      result = (pre_expr) pool_alloc (pre_expr_pool);
                      result->kind = NARY;
                      result->id = 0;
@@ -4109,6 +4107,32 @@ compute_avail (void)
                          if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
                            add_to_exp_gen (block, vro->op2);
                        }
+
+                     /* If the value of the reference is not invalidated in
+                        this block until it is computed, add the expression
+                        to EXP_GEN.  */
+                     if (gimple_vuse (stmt))
+                       {
+                         gimple def_stmt;
+                         bool ok = true;
+                         def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
+                         while (!gimple_nop_p (def_stmt)
+                                && gimple_code (def_stmt) != GIMPLE_PHI
+                                && gimple_bb (def_stmt) == block)
+                           {
+                             if (stmt_may_clobber_ref_p
+                                   (def_stmt, gimple_assign_rhs1 (stmt)))
+                               {
+                                 ok = false;
+                                 break;
+                               }
+                             def_stmt
+                               = SSA_NAME_DEF_STMT (gimple_vuse (def_stmt));
+                           }
+                         if (!ok)
+                           continue;
+                       }
+
                      result = (pre_expr) pool_alloc (pre_expr_pool);
                      result->kind = REFERENCE;
                      result->id = 0;
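
The vuse walk above keeps a load out of EXP_GEN when something earlier
in the same block may already have clobbered it, since the expression
would then not represent the value at block entry.  An invented example
of what gets rejected:

    extern int g;
    extern void h (void);

    int
    f (void)
    {
      h ();             /* may clobber g */
      return g;         /* the load's vuse is h's vdef in this block, so
                           the walk finds a clobber and the expression is
                           not added to EXP_GEN  */
    }
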
@@ -4185,6 +4209,7 @@ static unsigned int
 eliminate (void)
 {
   VEC (gimple, heap) *to_remove = NULL;
+  VEC (gimple, heap) *to_update = NULL;
   basic_block b;
   unsigned int todo = 0;
   gimple_stmt_iterator gsi;
@@ -4195,27 +4220,40 @@ eliminate (void)
     {
       for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
        {
+         tree lhs = NULL_TREE;
+         tree rhs = NULL_TREE;
+
          stmt = gsi_stmt (gsi);
 
+         if (gimple_has_lhs (stmt))
+           lhs = gimple_get_lhs (stmt);
+
+         if (gimple_assign_single_p (stmt))
+           rhs = gimple_assign_rhs1 (stmt);
+
          /* Lookup the RHS of the expression, see if we have an
             available computation for it.  If so, replace the RHS with
-            the available computation.  */
+            the available computation.
+
+            See PR43491.
+            We don't replace a global register variable when it is the RHS of
+            a single assign.  We do replace local register variables since gcc
+            does not guarantee local variables are allocated in registers.  */
          if (gimple_has_lhs (stmt)
-             && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
+             && TREE_CODE (lhs) == SSA_NAME
              && !gimple_assign_ssa_name_copy_p (stmt)
              && (!gimple_assign_single_p (stmt)
-                 || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+                 || (!is_gimple_min_invariant (rhs)
+                      && (gimple_assign_rhs_code (stmt) != VAR_DECL
+                          || !is_global_var (rhs)
+                          || !DECL_HARD_REGISTER (rhs))))
              && !gimple_has_volatile_ops  (stmt)
-             && !has_zero_uses (gimple_get_lhs (stmt)))
+             && !has_zero_uses (lhs))
            {
-             tree lhs = gimple_get_lhs (stmt);
-             tree rhs = NULL_TREE;
              tree sprime = NULL;
              pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
              pre_expr sprimeexpr;
-
-             if (gimple_assign_single_p (stmt))
-               rhs = gimple_assign_rhs1 (stmt);
+             gimple orig_stmt = stmt;
 
              sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
                                               get_expr_value_id (lhsexpr),
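
The PR43491 guard above applies to single assignments whose RHS is a
global register variable.  An invented example of the shape it leaves
alone (the register name is made up and target-specific):

    register long watchdog asm ("r13");  /* hypothetical global reg var */

    long
    read_twice (void)
    {
      long a = watchdog;
      long b = watchdog; /* stays an actual read of the hard register;
                            the elimination above does not replace it
                            with the earlier SSA name  */
      return a + b;
    }
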
@@ -4253,6 +4291,16 @@ eliminate (void)
                  propagate_tree_value_into_stmt (&gsi, sprime);
                  stmt = gsi_stmt (gsi);
                  update_stmt (stmt);
+
+                 /* If we removed EH side-effects from the statement, clean
+                    its EH information.  */
+                 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
+                   {
+                     bitmap_set_bit (need_eh_cleanup,
+                                     gimple_bb (stmt)->index);
+                     if (dump_file && (dump_flags & TDF_DETAILS))
+                       fprintf (dump_file, "  Removed EH side-effects.\n");
+                   }
                  continue;
                }
 
@@ -4308,7 +4356,7 @@ eliminate (void)
 
                  /* If we removed EH side-effects from the statement, clean
                     its EH information.  */
-                 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
+                 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
                    {
                      bitmap_set_bit (need_eh_cleanup,
                                      gimple_bb (stmt)->index);
@@ -4331,11 +4379,11 @@ eliminate (void)
             has the same value number as its rhs.  If so, the store is
             dead.  */
          else if (gimple_assign_single_p (stmt)
+                  && !gimple_has_volatile_ops (stmt)
                   && !is_gimple_reg (gimple_assign_lhs (stmt))
-                  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
-                      || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
+                  && (TREE_CODE (rhs) == SSA_NAME
+                      || is_gimple_min_invariant (rhs)))
            {
-             tree rhs = gimple_assign_rhs1 (stmt);
              tree val;
              val = vn_reference_lookup (gimple_assign_lhs (stmt),
                                         gimple_vuse (stmt), VN_WALK, NULL);
@@ -4384,6 +4432,8 @@ eliminate (void)
            {
              tree orig_fn = gimple_call_fn (stmt);
              tree fn;
+             if (!orig_fn)
+               continue;
              if (TREE_CODE (orig_fn) == SSA_NAME)
                fn = VN_INFO (orig_fn)->valnum;
              else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
@@ -4408,7 +4458,7 @@ eliminate (void)
                    }
 
                  gimple_call_set_fn (stmt, fn);
-                 update_stmt (stmt);
+                 VEC_safe_push (gimple, heap, to_update, stmt);
 
                  /* When changing a call into a noreturn call, cfg cleanup
                     is needed to fix up the noreturn call.  */
@@ -4550,9 +4600,8 @@ eliminate (void)
          basic_block bb = gimple_bb (stmt);
          gsi = gsi_for_stmt (stmt);
          unlink_stmt_vdef (stmt);
-         gsi_remove (&gsi, true);
-         if (gimple_purge_dead_eh_edges (bb))
-           todo |= TODO_cleanup_cfg;
+         if (gsi_remove (&gsi, true))
+           bitmap_set_bit (need_eh_cleanup, bb->index);
          if (TREE_CODE (lhs) == SSA_NAME)
            bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
          release_defs (stmt);
@@ -4560,6 +4609,13 @@ eliminate (void)
     }
   VEC_free (gimple, heap, to_remove);
 
+  /* We cannot update call statements with virtual operands during
+     SSA walk.  This might remove them which in turn makes our
+     VN lattice invalid.  */
+  FOR_EACH_VEC_ELT (gimple, to_update, i, stmt)
+    update_stmt (stmt);
+  VEC_free (gimple, heap, to_update);
+
   return todo;
 }
 
@@ -4820,6 +4876,9 @@ init_pre (bool do_fre)
 static void
 fini_pre (bool do_fre)
 {
+  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
+  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
+
   free (postorder);
   VEC_free (bitmap_set_t, heap, value_expressions);
   BITMAP_FREE (inserted_exprs);
@@ -4835,22 +4894,18 @@ fini_pre (bool do_fre)
 
   free_dominance_info (CDI_POST_DOMINATORS);
 
-  if (!bitmap_empty_p (need_eh_cleanup))
-    {
-      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
-      cleanup_tree_cfg ();
-    }
+  if (do_eh_cleanup)
+    gimple_purge_all_dead_eh_edges (need_eh_cleanup);
 
-  BITMAP_FREE (need_eh_cleanup);
-
-  if (!bitmap_empty_p (need_ab_cleanup))
-    {
-      gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
-      cleanup_tree_cfg ();
-    }
+  if (do_ab_cleanup)
+    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
 
+  BITMAP_FREE (need_eh_cleanup);
   BITMAP_FREE (need_ab_cleanup);
 
+  if (do_eh_cleanup || do_ab_cleanup)
+    cleanup_tree_cfg ();
+
   if (!do_fre)
     loop_optimizer_finalize ();
 }
@@ -4863,7 +4918,8 @@ execute_pre (bool do_fre)
 {
   unsigned int todo = 0;
 
-  do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);
+  do_partial_partial =
+    flag_tree_partial_pre && optimize_function_for_speed_p (cfun);
 
   /* This has to happen before SCCVN runs because
      loop_optimizer_init may create new phis, etc.  */
@@ -4924,7 +4980,6 @@ execute_pre (bool do_fre)
   statistics_counter_event (cfun, "Constified", pre_stats.constified);
 
   clear_expression_ids ();
-  free_scc_vn ();
   if (!do_fre)
     {
       remove_dead_inserted_code ();
@@ -4934,6 +4989,17 @@ execute_pre (bool do_fre)
   scev_finalize ();
   fini_pre (do_fre);
 
+  if (!do_fre)
+    /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
+       case we can merge the block with the remaining predecessor of the block.
+       It should either:
+       - call merge_blocks after each tail merge iteration
+       - call merge_blocks after all tail merge iterations
+       - mark TODO_cleanup_cfg when necessary
+       - share the cfg cleanup with fini_pre.  */
+    todo |= tail_merge_optimize (todo);
+  free_scc_vn ();
+
   return todo;
 }
 
@@ -4967,7 +5033,7 @@ struct gimple_opt_pass pass_pre =
   0,                                   /* properties_provided */
   0,                                   /* properties_destroyed */
   TODO_rebuild_alias,                  /* todo_flags_start */
-  TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect
+  TODO_update_ssa_only_virtuals  | TODO_ggc_collect
   | TODO_verify_ssa /* todo_flags_finish */
  }
 };
@@ -5002,6 +5068,6 @@ struct gimple_opt_pass pass_fre =
   0,                                   /* properties_provided */
   0,                                   /* properties_destroyed */
   0,                                   /* todo_flags_start */
-  TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
+  TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
  }
 };