Silence overactive sanity check with -fprofile-partial-training
[gcc.git] / gcc / tree-inline.c
index 6b6d489f6ca49631a0e31fe47e0b13313ca2e5a1..720f50eefecda2b7b1b03e87ee9988209015d83d 100644 (file)
@@ -1,5 +1,5 @@
 /* Tree inlining.
-   Copyright (C) 2001-2017 Free Software Foundation, Inc.
+   Copyright (C) 2001-2019 Free Software Foundation, Inc.
    Contributed by Alexandre Oliva <aoliva@redhat.com>
 
 This file is part of GCC.
@@ -56,8 +56,11 @@ along with GCC; see the file COPYING3.  If not see
 #include "value-prof.h"
 #include "cfgloop.h"
 #include "builtins.h"
-#include "tree-chkp.h"
-
+#include "stringpool.h"
+#include "attribs.h"
+#include "sreal.h"
+#include "tree-cfgcleanup.h"
+#include "tree-ssa-live.h"
 
 /* I'm not real happy about this, but we need to handle gimple and
    non-gimple trees.  */
@@ -119,18 +122,16 @@ eni_weights eni_time_weights;
 
 /* Prototypes.  */
 
-static tree declare_return_variable (copy_body_data *, tree, tree, tree,
+static tree declare_return_variable (copy_body_data *, tree, tree,
                                     basic_block);
 static void remap_block (tree *, copy_body_data *);
 static void copy_bind_expr (tree *, int *, copy_body_data *);
 static void declare_inline_vars (tree, tree);
 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
 static void prepend_lexical_block (tree current_block, tree new_block);
-static tree copy_decl_to_var (tree, copy_body_data *);
 static tree copy_result_decl_to_var (tree, copy_body_data *);
 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
-static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
 
 /* Insert a tree->tree mapping for ID.  Although the name suggests
@@ -189,7 +190,21 @@ remap_ssa_name (tree name, copy_body_data *id)
 
   n = id->decl_map->get (name);
   if (n)
-    return unshare_expr (*n);
+    {
+      /* When we perform edge redirection as part of CFG copy, IPA-SRA can
+        remove an unused LHS from a call statement.  Such an LHS can however
+        still appear in debug statements, but its value is lost in this
+        function and we do not want to map it.  */
+      if (id->killed_new_ssa_names
+         && id->killed_new_ssa_names->contains (*n))
+       {
+         gcc_assert (processing_debug_stmt);
+         processing_debug_stmt = -1;
+         return name;
+       }
+
+      return unshare_expr (*n);
+    }
 
   if (processing_debug_stmt)
     {
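
A minimal sketch of the bookkeeping introduced above, in stand-in C rather than GCC's types (not GCC API): a lazily allocated set records names whose defining call lost its LHS, and lookups during debug-statement remapping decide whether to drop the value instead of reusing a stale mapping.

/* Hypothetical stand-in, not GCC API: lazily allocated set of killed
   names, consulted while remapping operands of debug statements.  */
#include <stddef.h>
#include <stdlib.h>

struct killed_set { const void **names; size_t n, cap; };

static void
killed_add (struct killed_set **s, const void *name)
{
  if (!*s)                              /* allocate on first kill */
    *s = (struct killed_set *) calloc (1, sizeof (**s));
  if ((*s)->n == (*s)->cap)
    {
      (*s)->cap = (*s)->cap ? 2 * (*s)->cap : 16;
      (*s)->names = (const void **) realloc ((*s)->names,
                                             (*s)->cap * sizeof (void *));
    }
  (*s)->names[(*s)->n++] = name;
}

static int
killed_contains (const struct killed_set *s, const void *name)
{
  for (size_t i = 0; s && i < s->n; i++)
    if (s->names[i] == name)
      return 1;                         /* value lost; reset the debug stmt */
  return 0;
}
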
@@ -206,17 +221,22 @@ remap_ssa_name (tree name, copy_body_data *id)
          n = id->decl_map->get (val);
          if (n != NULL)
            val = *n;
-         if (TREE_CODE (val) != PARM_DECL)
+         if (TREE_CODE (val) != PARM_DECL
+             && !(VAR_P (val) && DECL_ABSTRACT_ORIGIN (val)))
            {
              processing_debug_stmt = -1;
              return name;
            }
+         n = id->decl_map->get (val);
+         if (n && TREE_CODE (*n) == DEBUG_EXPR_DECL)
+           return *n;
          def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (name);
          SET_DECL_MODE (vexpr, DECL_MODE (SSA_NAME_VAR (name)));
          gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
+         insert_decl_map (id, val, vexpr);
          return vexpr;
        }
 
@@ -251,6 +271,11 @@ remap_ssa_name (tree name, copy_body_data *id)
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
+      /* So can range-info.  */
+      if (!POINTER_TYPE_P (TREE_TYPE (name))
+         && SSA_NAME_RANGE_INFO (name))
+       duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
+                                      SSA_NAME_RANGE_INFO (name));
       return new_tree;
     }
 
@@ -284,6 +309,11 @@ remap_ssa_name (tree name, copy_body_data *id)
          struct ptr_info_def *new_pi = get_ptr_info (new_tree);
          new_pi->pt = pi->pt;
        }
+      /* So can range-info.  */
+      if (!POINTER_TYPE_P (TREE_TYPE (name))
+         && SSA_NAME_RANGE_INFO (name))
+       duplicate_ssa_name_range_info (new_tree, SSA_NAME_RANGE_TYPE (name),
+                                      SSA_NAME_RANGE_INFO (name));
       if (SSA_NAME_IS_DEFAULT_DEF (name))
        {
          /* By inlining function having uninitialized variable, we might
@@ -516,11 +546,27 @@ remap_type_1 (tree type, copy_body_data *id)
 
       if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
        {
-         gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
+         gcc_checking_assert (TYPE_DOMAIN (type)
+                              == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
          TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
        }
       else
-       TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
+        {
+         TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
+         /* For array bounds where we have decided not to copy over the bounds
+            variable which isn't used in the OpenMP/OpenACC region, change them
+            to an uninitialized VAR_DECL temporary.  */
+         if (TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) == error_mark_node
+             && id->adjust_array_error_bounds
+             && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
+           {
+             tree v = create_tmp_var (TREE_TYPE (TYPE_DOMAIN (new_tree)));
+             DECL_ATTRIBUTES (v)
+               = tree_cons (get_identifier ("omp dummy var"), NULL_TREE,
+                            DECL_ATTRIBUTES (v));
+             TYPE_MAX_VALUE (TYPE_DOMAIN (new_tree)) = v;
+           }
+        }
       break;
 
     case RECORD_TYPE:
@@ -553,9 +599,16 @@ remap_type_1 (tree type, copy_body_data *id)
   /* All variants of type share the same size, so use the already remapped data.  */
   if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
     {
-      gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
-      gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
-
+      tree s = TYPE_SIZE (type);
+      tree mvs = TYPE_SIZE (TYPE_MAIN_VARIANT (type));
+      tree su = TYPE_SIZE_UNIT (type);
+      tree mvsu = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type));
+      gcc_checking_assert ((TREE_CODE (s) == PLACEHOLDER_EXPR
+                           && (TREE_CODE (mvs) == PLACEHOLDER_EXPR))
+                          || s == mvs);
+      gcc_checking_assert ((TREE_CODE (su) == PLACEHOLDER_EXPR
+                           && (TREE_CODE (mvsu) == PLACEHOLDER_EXPR))
+                          || su == mvsu);
       TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
       TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
     }
@@ -568,6 +621,92 @@ remap_type_1 (tree type, copy_body_data *id)
   return new_tree;
 }
 
+/* Helper function for remap_type_2, called through walk_tree.  */
+
+static tree
+remap_type_3 (tree *tp, int *walk_subtrees, void *data)
+{
+  copy_body_data *id = (copy_body_data *) data;
+
+  if (TYPE_P (*tp))
+    *walk_subtrees = 0;
+
+  else if (DECL_P (*tp) && remap_decl (*tp, id) != *tp)
+    return *tp;
+
+  return NULL_TREE;
+}
+
+/* Return true if TYPE needs to be remapped because remap_decl on any
+   needed embedded decl returns something other than that decl.  */
+
+static bool
+remap_type_2 (tree type, copy_body_data *id)
+{
+  tree t;
+
+#define RETURN_TRUE_IF_VAR(T) \
+  do                                                           \
+    {                                                          \
+      tree _t = (T);                                           \
+      if (_t)                                                  \
+       {                                                       \
+         if (DECL_P (_t) && remap_decl (_t, id) != _t)         \
+           return true;                                        \
+         if (!TYPE_SIZES_GIMPLIFIED (type)                     \
+             && walk_tree (&_t, remap_type_3, id, NULL))       \
+           return true;                                        \
+       }                                                       \
+    }                                                          \
+  while (0)
+
+  switch (TREE_CODE (type))
+    {
+    case POINTER_TYPE:
+    case REFERENCE_TYPE:
+    case FUNCTION_TYPE:
+    case METHOD_TYPE:
+      return remap_type_2 (TREE_TYPE (type), id);
+
+    case INTEGER_TYPE:
+    case REAL_TYPE:
+    case FIXED_POINT_TYPE:
+    case ENUMERAL_TYPE:
+    case BOOLEAN_TYPE:
+      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
+      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
+      return false;
+
+    case ARRAY_TYPE:
+      if (remap_type_2 (TREE_TYPE (type), id)
+         || (TYPE_DOMAIN (type) && remap_type_2 (TYPE_DOMAIN (type), id)))
+       return true;
+      break;
+
+    case RECORD_TYPE:
+    case UNION_TYPE:
+    case QUAL_UNION_TYPE:
+      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
+       if (TREE_CODE (t) == FIELD_DECL)
+         {
+           RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
+           RETURN_TRUE_IF_VAR (DECL_SIZE (t));
+           RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
+           if (TREE_CODE (type) == QUAL_UNION_TYPE)
+             RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
+         }
+      break;
+
+    default:
+      return false;
+    }
+
+  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
+  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
+  return false;
+#undef RETURN_TRUE_IF_VAR
+}
+
 tree
 remap_type (tree type, copy_body_data *id)
 {
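
For illustration (an assumed typical input, not taken from this patch), the variably modified types that remap_type_2 screens look like VLAs, whose array domains embed artificial decls:

/* Illustrative only: 'a' has a variably modified type; the domain's
   upper bound is an artificial decl derived from 'n'.  When a body
   using 'a' is copied, remap_type must remap that embedded decl, but
   remap_type_2 lets it skip the copy whenever remap_decl would return
   every embedded decl unchanged.  */
void
f (int n)
{
  int a[n];                     /* type is roughly int[0:n-1] */
  a[0] = 0;
  (void) a[0];
}
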
@@ -583,7 +722,10 @@ remap_type (tree type, copy_body_data *id)
     return *node;
 
   /* The type only needs remapping if it's variably modified.  */
-  if (! variably_modified_type_p (type, id->src_fn))
+  if (! variably_modified_type_p (type, id->src_fn)
+      /* Don't remap if copy_decl method doesn't always return a new
+        decl and for all embedded decls returns the passed in decl.  */
+      || (id->dont_remap_vla_if_no_change && !remap_type_2 (type, id)))
     {
       insert_decl_map (id, type, type);
       return type;
@@ -601,7 +743,7 @@ remap_type (tree type, copy_body_data *id)
 static bool
 can_be_nonlocal (tree decl, copy_body_data *id)
 {
-  /* We can not duplicate function decls.  */
+  /* We cannot duplicate function decls.  */
   if (TREE_CODE (decl) == FUNCTION_DECL)
     return true;
 
@@ -690,7 +832,7 @@ remap_block (tree *block, copy_body_data *id)
   old_block = *block;
   new_block = make_node (BLOCK);
   TREE_USED (new_block) = TREE_USED (old_block);
-  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
+  BLOCK_ABSTRACT_ORIGIN (new_block) = BLOCK_ORIGIN (old_block);
   BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
   BLOCK_NONLOCALIZED_VARS (new_block)
     = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
@@ -709,6 +851,7 @@ remap_block (tree *block, copy_body_data *id)
 }
 
 /* Copy the whole block tree and root it in id->block.  */
+
 static tree
 remap_blocks (tree block, copy_body_data *id)
 {
@@ -729,6 +872,7 @@ remap_blocks (tree block, copy_body_data *id)
 }
 
 /* Remap the block tree rooted at BLOCK to nothing.  */
+
 static void
 remap_blocks_to_null (tree block, copy_body_data *id)
 {
@@ -738,6 +882,27 @@ remap_blocks_to_null (tree block, copy_body_data *id)
     remap_blocks_to_null (t, id);
 }
 
+/* Remap the location info pointed to by LOCUS.  */
+
+static location_t
+remap_location (location_t locus, copy_body_data *id)
+{
+  if (LOCATION_BLOCK (locus))
+    {
+      tree *n = id->decl_map->get (LOCATION_BLOCK (locus));
+      gcc_assert (n);
+      if (*n)
+       return set_block (locus, *n);
+    }
+
+  locus = LOCATION_LOCUS (locus);
+
+  if (locus != UNKNOWN_LOCATION && id->block)
+    return set_block (locus, id->block);
+
+  return locus;
+}
+
 static void
 copy_statement_list (tree *tp)
 {
@@ -858,7 +1023,12 @@ remap_dependence_clique (copy_body_data *id, unsigned short clique)
   bool existed;
   unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
   if (!existed)
-    newc = ++cfun->last_clique;
+    {
+      /* Clique 1 is reserved for local ones set by PTA.  */
+      if (cfun->last_clique == 0)
+       cfun->last_clique = 1;
+      newc = ++cfun->last_clique;
+    }
   return newc;
 }
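
A hedged stand-alone sketch of the renumbering rule above, with a plain array in place of the dependence_map hash:

/* Hypothetical sketch: remap old clique ids to fresh ones, never
   handing out clique 1, which PTA reserves for function-local
   cliques.  MAP must be zero-initialized and sized past any old id.  */
static unsigned short
remap_clique_sketch (unsigned short old_clique, unsigned short *map,
                     unsigned short *last_clique)
{
  if (map[old_clique] == 0)             /* first time this clique is seen */
    {
      if (*last_clique == 0)
        *last_clique = 1;               /* skip the reserved clique 1 */
      map[old_clique] = ++*last_clique;
    }
  return map[old_clique];
}
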
 
@@ -941,7 +1111,7 @@ remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
        *walk_subtrees = 0;
 
       else if (TREE_CODE (*tp) == INTEGER_CST)
-       *tp = wide_int_to_tree (new_type, *tp);
+       *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
       else
        {
          *tp = copy_node (*tp);
@@ -953,7 +1123,7 @@ remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
       /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
 
-      if (TREE_CODE (*tp) == MEM_REF)
+      if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
@@ -1125,7 +1295,7 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
        *walk_subtrees = 0;
 
       else if (TREE_CODE (*tp) == INTEGER_CST)
-       *tp = wide_int_to_tree (new_type, *tp);
+       *tp = wide_int_to_tree (new_type, wi::to_wide (*tp));
       else
        {
          *tp = copy_node (*tp);
@@ -1179,10 +1349,11 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
              tree type = TREE_TYPE (*tp);
              tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
              tree old = *tp;
-             *tp = gimple_fold_indirect_ref (ptr);
+             *tp = id->do_not_fold ? NULL : gimple_fold_indirect_ref (ptr);
              if (! *tp)
                {
-                 if (TREE_CODE (ptr) == ADDR_EXPR)
+                 type = remap_type (type, id);
+                 if (TREE_CODE (ptr) == ADDR_EXPR && !id->do_not_fold)
                    {
                      *tp
                        = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
@@ -1211,7 +1382,7 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
              return NULL;
            }
        }
-      else if (TREE_CODE (*tp) == MEM_REF)
+      else if (TREE_CODE (*tp) == MEM_REF && !id->do_not_fold)
        {
          /* We need to re-canonicalize MEM_REFs from inline substitutions
             that can happen when a pointer argument is an ADDR_EXPR.
@@ -1283,7 +1454,8 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
 
          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
-         if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
+         if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF
+             && !id->do_not_fold)
            {
              tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
              if (TREE_TYPE (t) != TREE_TYPE (*tp))
@@ -1346,41 +1518,35 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
   gimple_seq stmts = NULL;
 
   if (is_gimple_debug (stmt)
-      && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
-    return stmts;
+      && (gimple_debug_nonbind_marker_p (stmt)
+         ? !DECL_STRUCT_FUNCTION (id->dst_fn)->debug_nonbind_markers
+         : !opt_for_fn (id->dst_fn, flag_var_tracking_assignments)))
+    return NULL;
 
   /* Begin by recognizing trees that we'll completely rewrite for the
      inlining context.  Our output for these trees is completely
-     different from out input (e.g. RETURN_EXPR is deleted, and morphs
+     different from our input (e.g. RETURN_EXPR is deleted and morphs
      into an edge).  Further down, we'll handle trees that get
      duplicated and/or tweaked.  */
 
-  /* When requested, GIMPLE_RETURNs should be transformed to just the
+  /* When requested, GIMPLE_RETURN should be transformed to just the
      contained GIMPLE_ASSIGN.  The branch semantics of the return will
      be handled elsewhere by manipulating the CFG rather than the
      statement.  */
   if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
     {
       tree retval = gimple_return_retval (as_a <greturn *> (stmt));
-      tree retbnd = gimple_return_retbnd (stmt);
-      tree bndslot = id->retbnd;
-
-      if (retbnd && bndslot)
-       {
-         gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
-         memset (&wi, 0, sizeof (wi));
-         wi.info = id;
-         walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
-         gimple_seq_add_stmt (&stmts, bndcopy);
-       }
 
       /* If we're returning something, just turn that into an
-        assignment into the equivalent of the original RESULT_DECL.
+        assignment to the equivalent of the original RESULT_DECL.
         If RETVAL is just the result decl, the result decl has
         already been set (e.g. a recent "foo (&result_decl, ...)");
-        just toss the entire GIMPLE_RETURN.  */
+        just toss the entire GIMPLE_RETURN.  Likewise for when the
+        call doesn't want the return value.  */
       if (retval
          && (TREE_CODE (retval) != RESULT_DECL
+             && (!id->call_stmt
+                 || gimple_call_lhs (id->call_stmt) != NULL_TREE)
              && (TREE_CODE (retval) != SSA_NAME
                  || ! SSA_NAME_VAR (retval)
                  || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
@@ -1390,18 +1556,9 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
                                      retval);
          /* id->retvar is already substituted.  Skip it on later remapping.  */
          skip_first = true;
-
-         /* We need to copy bounds if return structure with pointers into
-            instrumented function.  */
-         if (chkp_function_instrumented_p (id->dst_fn)
-             && !bndslot
-             && !BOUNDED_P (id->retvar)
-             && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
-           id->assign_stmts.safe_push (copy);
-
        }
       else
-       return stmts;
+       return NULL;
     }
   else if (gimple_has_substatements (stmt))
     {
@@ -1495,7 +1652,8 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
 
        case GIMPLE_OMP_TASKGROUP:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
-         copy = gimple_build_omp_taskgroup (s1);
+         copy = gimple_build_omp_taskgroup
+                  (s1, gimple_omp_taskgroup_clauses (stmt));
          break;
 
        case GIMPLE_OMP_ORDERED:
@@ -1505,6 +1663,12 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
                    gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
          break;
 
+       case GIMPLE_OMP_SCAN:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         copy = gimple_build_omp_scan
+                  (s1, gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)));
+         break;
+
        case GIMPLE_OMP_SECTION:
          s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
          copy = gimple_build_omp_section (s1);
@@ -1608,6 +1772,18 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
                return NULL;
            }
        }
+
+      /* We do not allow CLOBBERs of handled components.  In case the
+        returned value is stored via such a handled component, remove
+        the clobber so the stmt verifier is happy.  */
+      if (gimple_clobber_p (stmt)
+         && TREE_CODE (gimple_assign_lhs (stmt)) == RESULT_DECL)
+       {
+         tree remapped = remap_decl (gimple_assign_lhs (stmt), id);
+         if (!DECL_P (remapped)
+             && TREE_CODE (remapped) != MEM_REF)
+           return NULL;
+       }
 
       if (gimple_debug_bind_p (stmt))
        {
@@ -1615,6 +1791,8 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
            = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
                                       gimple_debug_bind_get_value (stmt),
                                       stmt);
+         if (id->reset_location)
+           gimple_set_location (copy, input_location);
          id->debug_stmts.safe_push (copy);
          gimple_seq_add_stmt (&stmts, copy);
          return stmts;
@@ -1625,6 +1803,23 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
                           (gimple_debug_source_bind_get_var (stmt),
                            gimple_debug_source_bind_get_value (stmt),
                            stmt);
+         if (id->reset_location)
+           gimple_set_location (copy, input_location);
+         id->debug_stmts.safe_push (copy);
+         gimple_seq_add_stmt (&stmts, copy);
+         return stmts;
+       }
+      if (gimple_debug_nonbind_marker_p (stmt))
+       {
+         /* If the inlined function has too many debug markers,
+            don't copy them.  */
+         if (id->src_cfun->debug_marker_count
+             > param_max_debug_marker_count)
+           return stmts;
+
+         gdebug *copy = as_a <gdebug *> (gimple_copy (stmt));
+         if (id->reset_location)
+           gimple_set_location (copy, input_location);
          id->debug_stmts.safe_push (copy);
          gimple_seq_add_stmt (&stmts, copy);
          return stmts;
@@ -1663,7 +1858,7 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
          case GIMPLE_CALL:
            {
              tree r, fndecl = gimple_call_fndecl (copy);
-             if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+             if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
                switch (DECL_FUNCTION_CODE (fndecl))
                  {
                  case BUILT_IN_EH_COPY_VALUES:
@@ -1714,22 +1909,36 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
          }
     }
 
-  /* If STMT has a block defined, map it to the newly constructed
-     block.  */
-  if (gimple_block (copy))
+  /* If STMT has a block defined, map it to the newly constructed block.  */
+  if (tree block = gimple_block (copy))
     {
       tree *n;
-      n = id->decl_map->get (gimple_block (copy));
+      n = id->decl_map->get (block);
       gcc_assert (n);
       gimple_set_block (copy, *n);
     }
-
-  if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
+  if (id->param_body_adjs)
     {
-      gimple_seq_add_stmt (&stmts, copy);
-      return stmts;
+      gimple_seq extra_stmts = NULL;
+      id->param_body_adjs->modify_gimple_stmt (&copy, &extra_stmts);
+      if (!gimple_seq_empty_p (extra_stmts))
+       {
+         memset (&wi, 0, sizeof (wi));
+         wi.info = id;
+         for (gimple_stmt_iterator egsi = gsi_start (extra_stmts);
+              !gsi_end_p (egsi);
+              gsi_next (&egsi))
+           walk_gimple_op (gsi_stmt (egsi), remap_gimple_op_r, &wi);
+         gimple_seq_add_seq (&stmts, extra_stmts);
+       }
     }
 
+  if (id->reset_location)
+    gimple_set_location (copy, input_location);
+
+  /* Debug statements ought to be rebuilt and not copied.  */
+  gcc_checking_assert (!is_gimple_debug (copy));
+
   /* Remap all the operands in COPY.  */
   memset (&wi, 0, sizeof (wi));
   wi.info = id;
@@ -1755,15 +1964,16 @@ remap_gimple_stmt (gimple *stmt, copy_body_data *id)
    later  */
 
 static basic_block
-copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
-         gcov_type count_scale)
+copy_bb (copy_body_data *id, basic_block bb,
+         profile_count num, profile_count den)
 {
   gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
   basic_block copy_basic_block;
   tree decl;
-  gcov_type freq;
   basic_block prev;
 
+  profile_count::adjust_for_ipa_scaling (&num, &den);
+
   /* Search for previous copied basic block.  */
   prev = bb->prev_bb;
   while (!prev->aux)
@@ -1772,16 +1982,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
   /* create_basic_block() will append every new block to
      basic_block_info automatically.  */
   copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
-  copy_basic_block->count = apply_scale (bb->count, count_scale);
-
-  /* We are going to rebuild frequencies from scratch.  These values
-     have just small importance to drive canonicalize_loop_headers.  */
-  freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
-
-  /* We recompute frequencies after inlining, so this is quite safe.  */
-  if (freq > BB_FREQ_MAX)
-    freq = BB_FREQ_MAX;
-  copy_basic_block->frequency = freq;
+  copy_basic_block->count = bb->count.apply_scale (num, den);
 
   copy_gsi = gsi_start_bb (copy_basic_block);
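
The num/den pair replaces the old count_scale/frequency arithmetic; a rough stand-alone sketch of the ratio scaling follows (GCC's profile_count also tracks a precision/quality that this ignores):

#include <stdint.h>

/* Hypothetical sketch of scaling a block count by num/den using
   widened, rounded arithmetic; den == 0 is treated as "no
   information", in which case the count is left alone.  */
static uint64_t
scale_count_sketch (uint64_t count, uint64_t num, uint64_t den)
{
  if (den == 0)
    return count;
  return (uint64_t) (((unsigned __int128) count * num + den / 2) / den);
}
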
 
@@ -1869,29 +2070,13 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
              gcall *new_call;
              vec<tree> argarray;
              size_t nargs = gimple_call_num_args (id->call_stmt);
-             size_t n, i, nargs_to_copy;
-             bool remove_bounds = false;
+             size_t n;
 
              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;
 
-             /* Bounds should be removed from arg pack in case
-                we handle not instrumented call in instrumented
-                function.  */
-             nargs_to_copy = nargs;
-             if (gimple_call_with_bounds_p (id->call_stmt)
-                 && !gimple_call_with_bounds_p (stmt))
-               {
-                 for (i = gimple_call_num_args (id->call_stmt) - nargs;
-                      i < gimple_call_num_args (id->call_stmt);
-                      i++)
-                   if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
-                     nargs_to_copy--;
-                 remove_bounds = true;
-               }
-
              /* Create the new array of arguments.  */
-             n = nargs_to_copy + gimple_call_num_args (call_stmt);
+             n = nargs + gimple_call_num_args (call_stmt);
              argarray.create (n);
              argarray.safe_grow_cleared (n);
 
@@ -1900,26 +2085,11 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
                      gimple_call_arg_ptr (call_stmt, 0),
                      gimple_call_num_args (call_stmt) * sizeof (tree));
 
-             if (remove_bounds)
-               {
-                 /* Append the rest of arguments removing bounds.  */
-                 unsigned cur = gimple_call_num_args (call_stmt);
-                 i = gimple_call_num_args (id->call_stmt) - nargs;
-                 for (i = gimple_call_num_args (id->call_stmt) - nargs;
-                      i < gimple_call_num_args (id->call_stmt);
-                      i++)
-                   if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
-                     argarray[cur++] = gimple_call_arg (id->call_stmt, i);
-                 gcc_assert (cur == n);
-               }
-             else
-               {
-                 /* Append the arguments passed in '...'  */
-                 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
-                         gimple_call_arg_ptr (id->call_stmt, 0)
-                         + (gimple_call_num_args (id->call_stmt) - nargs),
-                         nargs * sizeof (tree));
-               }
+             /* Append the arguments passed in '...'  */
+             memcpy (argarray.address () + gimple_call_num_args (call_stmt),
+                     gimple_call_arg_ptr (id->call_stmt, 0)
+                     + (gimple_call_num_args (id->call_stmt) - nargs),
+                     nargs * sizeof (tree));
 
              new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
                                                argarray);
@@ -1930,8 +2100,8 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
                 GF_CALL_VA_ARG_PACK.  */
              gimple_call_copy_flags (new_call, call_stmt);
              gimple_call_set_va_arg_pack (new_call, false);
+             /* Location includes block.  */
              gimple_set_location (new_call, gimple_location (stmt));
-             gimple_set_block (new_call, gimple_block (stmt));
              gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
 
              gsi_replace (&copy_gsi, new_call, false);
@@ -1940,30 +2110,39 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
          else if (call_stmt
                   && id->call_stmt
                   && (decl = gimple_call_fndecl (stmt))
-                  && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
-                  && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN
-                  && ! gimple_call_va_arg_pack_p (id->call_stmt))
+                  && fndecl_built_in_p (decl, BUILT_IN_VA_ARG_PACK_LEN))
            {
              /* __builtin_va_arg_pack_len () should be replaced by
                 the number of anonymous arguments.  */
-             size_t nargs = gimple_call_num_args (id->call_stmt), i;
+             size_t nargs = gimple_call_num_args (id->call_stmt);
              tree count, p;
              gimple *new_stmt;
 
              for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
                nargs--;
 
-             /* For instrumented calls we should ignore bounds.  */
-             for (i = gimple_call_num_args (id->call_stmt) - nargs;
-                  i < gimple_call_num_args (id->call_stmt);
-                  i++)
-               if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
-                 nargs--;
-
-             count = build_int_cst (integer_type_node, nargs);
-             new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
-             gsi_replace (&copy_gsi, new_stmt, false);
-             stmt = new_stmt;
+             if (!gimple_call_lhs (stmt))
+               {
+                 /* Drop unused calls.  */
+                 gsi_remove (&copy_gsi, false);
+                 continue;
+               }
+             else if (!gimple_call_va_arg_pack_p (id->call_stmt))
+               {
+                 count = build_int_cst (integer_type_node, nargs);
+                 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
+                 gsi_replace (&copy_gsi, new_stmt, false);
+                 stmt = new_stmt;
+               }
+             else if (nargs != 0)
+               {
+                 tree newlhs = create_tmp_reg_or_ssa_name (integer_type_node);
+                 count = build_int_cst (integer_type_node, nargs);
+                 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
+                                                 PLUS_EXPR, newlhs, count);
+                 gimple_call_set_lhs (stmt, newlhs);
+                 gsi_insert_after (&copy_gsi, new_stmt, GSI_NEW_STMT);
+               }
            }
          else if (call_stmt
                   && id->call_stmt
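
The cases above match the documented behavior of __builtin_va_arg_pack_len () in always_inline varargs forwarders; a small user-level example of what gets folded:

/* User-level view of the folding above: in an always_inline varargs
   forwarder, __builtin_va_arg_pack_len () becomes the number of
   anonymous arguments at each inline site.  */
extern int do_log (int level, int nargs, ...);

static inline __attribute__ ((always_inline)) int
log1 (int level, ...)
{
  return do_log (level, __builtin_va_arg_pack_len (),
                 __builtin_va_arg_pack ());
}

/* log1 (1, "a", "b") inlines to do_log (1, 2, "a", "b").  With the
   change above, a call whose __builtin_va_arg_pack_len () result is
   unused is simply dropped, and when the caller itself still passes
   a va_arg_pack, the known argument count is added to the caller's
   own pack length instead of being folded to a constant.  */
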
@@ -2001,21 +2180,16 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
                  edge = id->src_node->get_edge (orig_stmt);
                  if (edge)
                    {
-                     int edge_freq = edge->frequency;
-                     int new_freq;
                      struct cgraph_edge *old_edge = edge;
+                     profile_count old_cnt = edge->count;
                      edge = edge->clone (id->dst_node, call_stmt,
                                          gimple_uid (stmt),
-                                         REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
+                                         num, den,
                                          true);
-                     /* We could also just rescale the frequency, but
-                        doing so would introduce roundoff errors and make
-                        verifier unhappy.  */
-                     new_freq  = compute_call_stmt_bb_frequency (id->dst_node->decl,
-                                                                 copy_basic_block);
-
-                     /* Speculative calls consist of two edges - direct and indirect.
-                        Duplicate the whole thing and distribute frequencies accordingly.  */
+
+                     /* Speculative calls consist of two edges - direct and
+                        indirect.  Duplicate the whole thing and distribute
+                        frequencies accordingly.  */
                      if (edge->speculative)
                        {
                          struct cgraph_edge *direct, *indirect;
@@ -2023,40 +2197,22 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
 
                          gcc_assert (!edge->indirect_unknown_callee);
                          old_edge->speculative_call_info (direct, indirect, ref);
+
+                         profile_count indir_cnt = indirect->count;
                          indirect = indirect->clone (id->dst_node, call_stmt,
                                                      gimple_uid (stmt),
-                                                     REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
+                                                     num, den,
                                                      true);
-                         if (old_edge->frequency + indirect->frequency)
-                           {
-                             edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
-                                                          (old_edge->frequency + indirect->frequency)),
-                                                    CGRAPH_FREQ_MAX);
-                             indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
-                                                              (old_edge->frequency + indirect->frequency)),
-                                                        CGRAPH_FREQ_MAX);
-                           }
+
+                         profile_probability prob
+                            = indir_cnt.probability_in (old_cnt + indir_cnt);
+                         indirect->count
+                            = copy_basic_block->count.apply_probability (prob);
+                         edge->count = copy_basic_block->count - indirect->count;
                          id->dst_node->clone_reference (ref, stmt);
                        }
                      else
-                       {
-                         edge->frequency = new_freq;
-                         if (dump_file
-                             && profile_status_for_fn (cfun) != PROFILE_ABSENT
-                             && (edge_freq > edge->frequency + 10
-                                 || edge_freq < edge->frequency - 10))
-                           {
-                             fprintf (dump_file, "Edge frequency estimated by "
-                                      "cgraph %i diverge from inliner's estimate %i\n",
-                                      edge_freq,
-                                      edge->frequency);
-                             fprintf (dump_file,
-                                      "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
-                                      bb->index,
-                                      bb->frequency,
-                                      copy_basic_block->frequency);
-                           }
-                       }
+                       edge->count = copy_basic_block->count;
                    }
                  break;
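
A stand-alone sketch of the arithmetic behind the speculative split above: the copied block's count is divided between the indirect and direct halves in their original proportion, so the parts still sum to the block count.

#include <stdint.h>

/* Hypothetical sketch: distribute BB_COUNT between the direct and
   indirect parts of a speculative call in proportion to their
   original counts; the two results always sum to BB_COUNT.  */
static void
split_speculative_sketch (uint64_t bb_count,
                          uint64_t old_direct, uint64_t old_indirect,
                          uint64_t *new_direct, uint64_t *new_indirect)
{
  uint64_t total = old_direct + old_indirect;
  if (total == 0)
    {
      *new_direct = bb_count;
      *new_indirect = 0;
      return;
    }
  *new_indirect = (uint64_t) ((unsigned __int128) bb_count
                              * old_indirect / total);
  *new_direct = bb_count - *new_indirect;
}
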
 
@@ -2099,15 +2255,10 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
                  if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
                    id->dst_node->create_edge_including_clones
                      (dest, orig_stmt, call_stmt, bb->count,
-                      compute_call_stmt_bb_frequency (id->dst_node->decl,
-                                                      copy_basic_block),
                       CIF_ORIGINALLY_INDIRECT_CALL);
                  else
                    id->dst_node->create_edge (dest, call_stmt,
-                                       bb->count,
-                                       compute_call_stmt_bb_frequency
-                                         (id->dst_node->decl,
-                                          copy_basic_block))->inline_failed
+                                       bb->count)->inline_failed
                      = CIF_ORIGINALLY_INDIRECT_CALL;
                  if (dump_file)
                    {
@@ -2193,6 +2344,79 @@ update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
       }
 }
 
+/* Insert clobbers for automatic variables of inlined ID->src_fn
+   function at the start of basic block ID->eh_landing_pad_dest.  */
+
+static void
+add_clobbers_to_eh_landing_pad (copy_body_data *id)
+{
+  tree var;
+  basic_block bb = id->eh_landing_pad_dest;
+  live_vars_map *vars = NULL;
+  unsigned int cnt = 0;
+  unsigned int i;
+  FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
+    if (VAR_P (var)
+       && !DECL_HARD_REGISTER (var)
+       && !TREE_THIS_VOLATILE (var)
+       && !DECL_HAS_VALUE_EXPR_P (var)
+       && !is_gimple_reg (var)
+       && auto_var_in_fn_p (var, id->src_fn)
+       && !lookup_attribute ("omp simd array", DECL_ATTRIBUTES (var)))
+      {
+       tree *t = id->decl_map->get (var);
+       if (!t)
+         continue;
+       tree new_var = *t;
+       if (VAR_P (new_var)
+           && !DECL_HARD_REGISTER (new_var)
+           && !TREE_THIS_VOLATILE (new_var)
+           && !DECL_HAS_VALUE_EXPR_P (new_var)
+           && !is_gimple_reg (new_var)
+           && auto_var_in_fn_p (new_var, id->dst_fn))
+         {
+           if (vars == NULL)
+             vars = new live_vars_map;
+           vars->put (DECL_UID (var), cnt++);
+         }
+      }
+  if (vars == NULL)
+    return;
+
+  vec<bitmap_head> live = compute_live_vars (id->src_cfun, vars);
+  FOR_EACH_VEC_SAFE_ELT (id->src_cfun->local_decls, i, var)
+    if (VAR_P (var))
+      {
+       edge e;
+       edge_iterator ei;
+       bool needed = false;
+       unsigned int *v = vars->get (DECL_UID (var));
+       if (v == NULL)
+         continue;
+       FOR_EACH_EDGE (e, ei, bb->preds)
+         if ((e->flags & EDGE_EH) != 0
+             && e->src->index >= id->add_clobbers_to_eh_landing_pads)
+           {
+             basic_block src_bb = (basic_block) e->src->aux;
+
+             if (bitmap_bit_p (&live[src_bb->index], *v))
+               {
+                 needed = true;
+                 break;
+               }
+           }
+       if (needed)
+         {
+           tree new_var = *id->decl_map->get (var);
+           gimple_stmt_iterator gsi = gsi_after_labels (bb);
+           tree clobber = build_clobber (TREE_TYPE (new_var));
+           gimple *clobber_stmt = gimple_build_assign (new_var, clobber);
+           gsi_insert_before (&gsi, clobber_stmt, GSI_NEW_STMT);
+         }
+      }
+  destroy_live_vars (live);
+  delete vars;
+}
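
A source-level illustration (an assumed scenario, not part of the patch) of what these landing-pad clobbers buy: once the callee is inlined, its automatics are dead on the exception path, and clobbering them there lets stack-slot sharing reuse the memory.

/* Assumed example; with -fnon-call-exceptions the call below can
   throw.  After inlining callee into caller, the code above emits
   "b = {CLOBBER};" at caller's EH landing pad when 'b' was live at
   the throwing statement, so b's 4k slot is known dead on the
   exception path and can be shared.  */
struct big { char buf[4096]; };
extern void use (struct big *);         /* assumed helper, may throw */

static inline void
callee (void)
{
  struct big b;
  use (&b);
}

void
caller (void)
{
  callee ();
}
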
 
 /* Copy edges from BB into its copy constructed earlier, scale profile
    accordingly.  Edges will be taken care of later.  Assume aux
@@ -2200,14 +2424,14 @@ update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
    debug stmts are left after a statement that must end the basic block.  */
 
 static bool
-copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
-                  basic_block abnormal_goto_dest)
+copy_edges_for_bb (basic_block bb, profile_count num, profile_count den,
+                  basic_block ret_bb, basic_block abnormal_goto_dest,
+                  copy_body_data *id)
 {
   basic_block new_bb = (basic_block) bb->aux;
   edge_iterator ei;
   edge old_edge;
   gimple_stmt_iterator si;
-  int flags;
   bool need_debug_cleanup = false;
 
   /* Use the indices from the original blocks to create edges for the
@@ -2216,22 +2440,33 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
     if (!(old_edge->flags & EDGE_EH))
       {
        edge new_edge;
+       int flags = old_edge->flags;
+       location_t locus = old_edge->goto_locus;
 
-       flags = old_edge->flags;
-
-       /* Return edges do get a FALLTHRU flag when the get inlined.  */
+       /* Return edges do get a FALLTHRU flag when they get inlined.  */
        if (old_edge->dest->index == EXIT_BLOCK
-           && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
+           && !(flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
            && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
          flags |= EDGE_FALLTHRU;
-       new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
-       new_edge->count = apply_scale (old_edge->count, count_scale);
+
+       new_edge
+         = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
        new_edge->probability = old_edge->probability;
+       if (!id->reset_location)
+         new_edge->goto_locus = remap_location (locus, id);
       }
 
   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
     return false;
 
+  /* When doing function splitting, we must decrease the count of the return
+     block which was previously reachable by a block we did not copy.  */
+  if (single_succ_p (bb) && single_succ_edge (bb)->dest->index == EXIT_BLOCK)
+    FOR_EACH_EDGE (old_edge, ei, bb->preds)
+      if (old_edge->src->index != ENTRY_BLOCK
+         && !old_edge->src->aux)
+       new_bb->count -= old_edge->count ().apply_scale (num, den);
+
   for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
     {
       gimple *copy_stmt;
@@ -2255,7 +2490,7 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
          propagation can change an INDIRECT_REF which throws
          into a COMPONENT_REF which doesn't.  If the copy
          can throw, the original could also throw.  */
-      can_throw = stmt_can_throw_internal (copy_stmt);
+      can_throw = stmt_can_throw_internal (cfun, copy_stmt);
       nonlocal_goto
        = (stmt_can_make_abnormal_goto (copy_stmt)
           && !computed_goto_p (copy_stmt));
@@ -2281,10 +2516,48 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
            }
        }
 
+      bool update_probs = false;
+
       if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
-       make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
+       {
+         make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
+         update_probs = true;
+       }
       else if (can_throw)
-       make_eh_edges (copy_stmt);
+       {
+         make_eh_edges (copy_stmt);
+         update_probs = true;
+       }
+
+      /* EH edges may not match old edges.  Copy as much as possible.  */
+      if (update_probs)
+       {
+          edge e;
+          edge_iterator ei;
+         basic_block copy_stmt_bb = gimple_bb (copy_stmt);
+
+          FOR_EACH_EDGE (old_edge, ei, bb->succs)
+            if ((old_edge->flags & EDGE_EH)
+               && (e = find_edge (copy_stmt_bb,
+                                  (basic_block) old_edge->dest->aux))
+               && (e->flags & EDGE_EH))
+             e->probability = old_edge->probability;
+
+          FOR_EACH_EDGE (e, ei, copy_stmt_bb->succs)
+           if (e->flags & EDGE_EH)
+             {
+               if (!e->probability.initialized_p ())
+                 e->probability = profile_probability::never ();
+               if (e->dest->index < id->add_clobbers_to_eh_landing_pads)
+                 {
+                   if (id->eh_landing_pad_dest == NULL)
+                     id->eh_landing_pad_dest = e->dest;
+                   else
+                     gcc_assert (id->eh_landing_pad_dest == e->dest);
+                 }
+             }
+        }
+
 
       /* If the call we inline cannot make abnormal goto do not add
          additional abnormal edges but only retain those already present
@@ -2307,7 +2580,8 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
                   && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
            nonlocal_goto = false;
          else
-           make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
+           make_single_succ_edge (copy_stmt_bb, abnormal_goto_dest,
+                                  EDGE_ABNORMAL);
        }
 
       if ((can_throw || nonlocal_goto)
@@ -2344,50 +2618,54 @@ copy_phis_for_bb (basic_block bb, copy_body_data *id)
       if (!virtual_operand_p (res))
        {
          walk_tree (&new_res, copy_tree_body_r, id, NULL);
-         new_phi = create_phi_node (new_res, new_bb);
-         FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
+         if (EDGE_COUNT (new_bb->preds) == 0)
            {
-             edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
-             tree arg;
-             tree new_arg;
-             edge_iterator ei2;
-             location_t locus;
-
-             /* When doing partial cloning, we allow PHIs on the entry block
-                as long as all the arguments are the same.  Find any input
-                edge to see argument to copy.  */
-             if (!old_edge)
-               FOR_EACH_EDGE (old_edge, ei2, bb->preds)
-                 if (!old_edge->src->aux)
-                   break;
-
-             arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
-             new_arg = arg;
-             walk_tree (&new_arg, copy_tree_body_r, id, NULL);
-             gcc_assert (new_arg);
-             /* With return slot optimization we can end up with
-                non-gimple (foo *)&this->m, fix that here.  */
-             if (TREE_CODE (new_arg) != SSA_NAME
-                 && TREE_CODE (new_arg) != FUNCTION_DECL
-                 && !is_gimple_val (new_arg))
-               {
-                 gimple_seq stmts = NULL;
-                 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
-                 gsi_insert_seq_on_edge (new_edge, stmts);
-                 inserted = true;
-               }
-             locus = gimple_phi_arg_location_from_edge (phi, old_edge);
-             if (LOCATION_BLOCK (locus))
+             /* Technically we'd want an SSA_DEFAULT_DEF here... */
+             SSA_NAME_DEF_STMT (new_res) = gimple_build_nop ();
+           }
+         else
+           {
+             new_phi = create_phi_node (new_res, new_bb);
+             FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
                {
-                 tree *n;
-                 n = id->decl_map->get (LOCATION_BLOCK (locus));
-                 gcc_assert (n);
-                 locus = set_block (locus, *n);
+                 edge old_edge = find_edge ((basic_block) new_edge->src->aux,
+                                            bb);
+                 tree arg;
+                 tree new_arg;
+                 edge_iterator ei2;
+                 location_t locus;
+
+                 /* When doing partial cloning, we allow PHIs on the entry
+                    block as long as all the arguments are the same.
+                    Find any input edge to see argument to copy.  */
+                 if (!old_edge)
+                   FOR_EACH_EDGE (old_edge, ei2, bb->preds)
+                     if (!old_edge->src->aux)
+                       break;
+
+                 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
+                 new_arg = arg;
+                 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
+                 gcc_assert (new_arg);
+                 /* With return slot optimization we can end up with
+                    non-gimple (foo *)&this->m, fix that here.  */
+                 if (TREE_CODE (new_arg) != SSA_NAME
+                     && TREE_CODE (new_arg) != FUNCTION_DECL
+                     && !is_gimple_val (new_arg))
+                   {
+                     gimple_seq stmts = NULL;
+                     new_arg = force_gimple_operand (new_arg, &stmts, true,
+                                                     NULL);
+                     gsi_insert_seq_on_edge (new_edge, stmts);
+                     inserted = true;
+                   }
+                 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
+                 if (id->reset_location)
+                   locus = input_location;
+                 else
+                   locus = remap_location (locus, id);
+                 add_phi_arg (new_phi, new_arg, new_edge, locus);
                }
-             else
-               locus = LOCATION_LOCUS (locus);
-
-             add_phi_arg (new_phi, new_arg, new_edge, locus);
            }
        }
     }
@@ -2412,23 +2690,15 @@ remap_decl_1 (tree decl, void *data)
    the cfun to the function of new_fndecl (and current_function_decl too).  */
 
 static void
-initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
+initialize_cfun (tree new_fndecl, tree callee_fndecl, profile_count count)
 {
   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
-  gcov_type count_scale;
 
   if (!DECL_ARGUMENTS (new_fndecl))
     DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
   if (!DECL_RESULT (new_fndecl))
     DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
 
-  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
-    count_scale
-        = GCOV_COMPUTE_SCALE (count,
-                              ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
-  else
-    count_scale = REG_BR_PROB_BASE;
-
   /* Register specific tree functions.  */
   gimple_register_cfg_hooks ();
 
@@ -2450,6 +2720,7 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
   cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
   cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
   cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
+  cfun->calls_eh_return = src_cfun->calls_eh_return;
   cfun->stdarg = src_cfun->stdarg;
   cfun->after_inlining = src_cfun->after_inlining;
   cfun->can_throw_non_call_exceptions
@@ -2461,16 +2732,17 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
   init_empty_tree_cfg ();
 
   profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
+
+  profile_count num = count;
+  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
+  profile_count::adjust_for_ipa_scaling (&num, &den);
+
   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
-    (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
-     REG_BR_PROB_BASE);
-  ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
-    = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
+    ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
+                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
   EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
-    (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
-     REG_BR_PROB_BASE);
-  EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
-    EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
+    EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count.apply_scale (count,
+                               ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
   if (src_cfun->eh)
     init_eh_for_function ();
 
@@ -2497,7 +2769,7 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
 
   if (gsi_end_p (si)
       || gsi_one_before_end_p (si)
-      || !(stmt_can_throw_internal (gsi_stmt (si))
+      || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
           || stmt_can_make_abnormal_goto (gsi_stmt (si))))
     return;
 
@@ -2519,7 +2791,10 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
              si = ssi;
              gsi_prev (&ssi);
              if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
-               gimple_debug_bind_reset_value (stmt);
+               {
+                 gimple_debug_bind_reset_value (stmt);
+                 gimple_set_location (stmt, UNKNOWN_LOCATION);
+               }
              gsi_remove (&si, false);
              gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
              continue;
@@ -2532,10 +2807,10 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
                {
                  value = gimple_debug_bind_get_value (stmt);
                  value = unshare_expr (value);
+                 new_stmt = gimple_build_debug_bind (var, value, stmt);
                }
              else
-               value = NULL_TREE;
-             new_stmt = gimple_build_debug_bind (var, value, stmt);
+               new_stmt = gimple_build_debug_bind (var, NULL_TREE, NULL);
            }
          else if (gimple_debug_source_bind_p (stmt))
            {
@@ -2543,6 +2818,8 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
              value = gimple_debug_source_bind_get_value (stmt);
              new_stmt = gimple_build_debug_source_bind (var, value, stmt);
            }
+         else if (gimple_debug_nonbind_marker_p (stmt))
+           new_stmt = as_a <gdebug *> (gimple_copy (stmt));
          else
            gcc_unreachable ();
          gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
@@ -2557,15 +2834,15 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
 
 static void
 copy_loops (copy_body_data *id,
-           struct loop *dest_parent, struct loop *src_parent)
+           class loop *dest_parent, class loop *src_parent)
 {
-  struct loop *src_loop = src_parent->inner;
+  class loop *src_loop = src_parent->inner;
   while (src_loop)
     {
       if (!id->blocks_to_copy
          || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
        {
-         struct loop *dest_loop = alloc_loop ();
+         class loop *dest_loop = alloc_loop ();
 
          /* Assign the new loop its header and latch and associate
             those with the new loop.  */
@@ -2579,18 +2856,20 @@ copy_loops (copy_body_data *id,
 
          /* Copy loop meta-data.  */
          copy_loop_info (src_loop, dest_loop);
+         if (dest_loop->unroll)
+           cfun->has_unroll = true;
+         if (dest_loop->force_vectorize)
+           cfun->has_force_vectorize_loops = true;
+         if (id->src_cfun->last_clique != 0)
+           dest_loop->owned_clique
+             = remap_dependence_clique (id,
+                                        src_loop->owned_clique
+                                        ? src_loop->owned_clique : 1);
 
          /* Finally place it into the loop array and the loop tree.  */
          place_new_loop (cfun, dest_loop);
          flow_loop_tree_node_add (dest_parent, dest_loop);
 
-         dest_loop->safelen = src_loop->safelen;
-         dest_loop->dont_vectorize = src_loop->dont_vectorize;
-         if (src_loop->force_vectorize)
-           {
-             dest_loop->force_vectorize = true;
-             cfun->has_force_vectorize_loops = true;
-           }
          if (src_loop->simduid)
            {
              dest_loop->simduid = remap_decl (src_loop->simduid, id);
@@ -2604,7 +2883,7 @@ copy_loops (copy_body_data *id,
     }
 }
 
-/* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
+/* Call redirect_call_stmt_to_callee on all calls in BB.  */
 
 void
 redirect_all_calls (copy_body_data * id, basic_block bb)
@@ -2616,10 +2895,24 @@ redirect_all_calls (copy_body_data * id, basic_block bb)
       gimple *stmt = gsi_stmt (si);
       if (is_gimple_call (stmt))
        {
+         tree old_lhs = gimple_call_lhs (stmt);
          struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
          if (edge)
            {
-             edge->redirect_call_stmt_to_callee ();
+             gimple *new_stmt = edge->redirect_call_stmt_to_callee ();
+             /* If the IPA-SRA transformation, run as part of edge redirection,
+                removed the LHS because it is unused, save it to
+                killed_new_ssa_names so that we can prune it from debug
+                statements.  */
+             if (old_lhs
+                 && TREE_CODE (old_lhs) == SSA_NAME
+                 && !gimple_call_lhs (new_stmt))
+               {
+                 if (!id->killed_new_ssa_names)
+                   id->killed_new_ssa_names = new hash_set<tree> (16);
+                 id->killed_new_ssa_names->add (old_lhs);
+               }
+
              if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
                gimple_purge_dead_eh_edges (bb);
            }
@@ -2627,34 +2920,11 @@ redirect_all_calls (copy_body_data * id, basic_block bb)
     }
 }
 
-/* Convert estimated frequencies into counts for NODE, scaling COUNT
-   with each bb's frequency. Used when NODE has a 0-weight entry
-   but we are about to inline it into a non-zero count call bb.
-   See the comments for handle_missing_profiles() in predict.c for
-   when this can happen for COMDATs.  */
-
-void
-freqs_to_counts (struct cgraph_node *node, gcov_type count)
-{
-  basic_block bb;
-  edge_iterator ei;
-  edge e;
-  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
-
-  FOR_ALL_BB_FN(bb, fn)
-    {
-      bb->count = apply_scale (count,
-                               GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
-      FOR_EACH_EDGE (e, ei, bb->succs)
-        e->count = apply_probability (e->src->count, e->probability);
-    }
-}
-
 /* Make a copy of the body of FN so that it can be inserted inline in
    another function.  Walks FN via CFG, returns new fndecl.  */
 
 static tree
-copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
+copy_cfg_body (copy_body_data * id,
               basic_block entry_block_map, basic_block exit_block_map,
               basic_block new_entry)
 {
@@ -2665,35 +2935,11 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
   basic_block bb;
   tree new_fndecl = NULL;
   bool need_debug_cleanup = false;
-  gcov_type count_scale;
   int last;
-  int incoming_frequency = 0;
-  gcov_type incoming_count = 0;
-
-  /* This can happen for COMDAT routines that end up with 0 counts
-     despite being called (see the comments for handle_missing_profiles()
-     in predict.c as to why). Apply counts to the blocks in the callee
-     before inlining, using the guessed edge frequencies, so that we don't
-     end up with a 0-count inline body which can confuse downstream
-     optimizations such as function splitting.  */
-  if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
-    {
-      /* Apply the larger of the call bb count and the total incoming
-         call edge count to the callee.  */
-      gcov_type in_count = 0;
-      struct cgraph_edge *in_edge;
-      for (in_edge = id->src_node->callers; in_edge;
-           in_edge = in_edge->next_caller)
-        in_count += in_edge->count;
-      freqs_to_counts (id->src_node, count > in_count ? count : in_count);
-    }
-
-  if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
-    count_scale
-        = GCOV_COMPUTE_SCALE (count,
-                              ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
-  else
-    count_scale = REG_BR_PROB_BASE;
+  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count;
+  profile_count num = entry_block_map->count;
+
+  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
 
   /* Register specific tree functions.  */
   gimple_register_cfg_hooks ();
@@ -2707,25 +2953,20 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
     {
       edge e;
       edge_iterator ei;
+      den = profile_count::zero ();
 
       FOR_EACH_EDGE (e, ei, new_entry->preds)
        if (!e->src->aux)
-         {
-           incoming_frequency += EDGE_FREQUENCY (e);
-           incoming_count += e->count;
-         }
-      incoming_count = apply_scale (incoming_count, count_scale);
-      incoming_frequency
-       = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
-      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
-      ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
+         den += e->count ();
+      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = den;
     }
 
+  profile_count::adjust_for_ipa_scaling (&num, &den);
+
   /* Must have a CFG here at this point.  */
   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
              (DECL_STRUCT_FUNCTION (callee_fndecl)));
 
-  cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
 
   ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
   EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
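
Each copied block's count is later scaled by NUM/DEN in copy_bb. A minimal sketch of what the scaling amounts to, assuming plain 64-bit counters; the real profile_count additionally tracks count quality and guards against overflow:

    #include <cassert>
    #include <cstdint>

    // adjust () mirrors the intent of profile_count::adjust_for_ipa_scaling:
    // turn the degenerate zero-denominator case into an explicit 1/1 no-op.
    static void adjust (uint64_t &num, uint64_t &den)
    {
      if (den == 0)
        num = den = 1;
    }

    // Scale a block count by num/den, the ratio of the caller entry count
    // to the callee entry count.
    static uint64_t scale (uint64_t count, uint64_t num, uint64_t den)
    {
      if (num == den)
        return count;            // profile unchanged, skip the division
      return count * num / den;  // proportional scaling, truncating
    }

    int main ()
    {
      uint64_t num = 50, den = 100;
      adjust (num, den);
      assert (scale (8, num, den) == 4);  // inlined copy runs half as often
    }
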
@@ -2741,7 +2982,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
   FOR_EACH_BB_FN (bb, cfun_to_copy)
     if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
       {
-       basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
+       basic_block new_bb = copy_bb (id, bb, num, den);
        bb->aux = new_bb;
        new_bb->aux = bb;
        new_bb->loop_father = entry_block_map->loop_father;
@@ -2764,14 +3005,20 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
   FOR_ALL_BB_FN (bb, cfun_to_copy)
     if (!id->blocks_to_copy
        || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
-      need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
-                                              abnormal_goto_dest);
+      need_debug_cleanup |= copy_edges_for_bb (bb, num, den, exit_block_map,
+                                              abnormal_goto_dest, id);
+
+  if (id->eh_landing_pad_dest)
+    {
+      add_clobbers_to_eh_landing_pad (id);
+      id->eh_landing_pad_dest = NULL;
+    }
 
   if (new_entry)
     {
-      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
-      e->probability = REG_BR_PROB_BASE;
-      e->count = incoming_count;
+      edge e = make_edge (entry_block_map, (basic_block)new_entry->aux,
+                         EDGE_FALLTHRU);
+      e->probability = profile_probability::always ();
     }
 
   /* Duplicate the loop tree, if available and wanted.  */
@@ -2802,7 +3049,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
            && bb->index != ENTRY_BLOCK
            && bb->index != EXIT_BLOCK)
          maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
-       /* Update call edge destinations.  This can not be done before loop
+       /* Update call edge destinations.  This cannot be done before loop
           info is updated, because we may split basic blocks.  */
        if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
            && bb->index != ENTRY_BLOCK
@@ -2820,7 +3067,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
        maybe_move_debug_stmts_to_successors (id,
                                              BASIC_BLOCK_FOR_FN (cfun, last));
       BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
-      /* Update call edge destinations.  This can not be done before loop
+      /* Update call edge destinations.  This cannot be done before loop
         info is updated, because we may split basic blocks.  */
       if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
        redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
@@ -2854,12 +3101,15 @@ copy_debug_stmt (gdebug *stmt, copy_body_data *id)
   tree t, *n;
   struct walk_stmt_info wi;
 
-  if (gimple_block (stmt))
+  if (tree block = gimple_block (stmt))
     {
-      n = id->decl_map->get (gimple_block (stmt));
+      n = id->decl_map->get (block);
       gimple_set_block (stmt, n ? *n : id->block);
     }
 
+  if (gimple_debug_nonbind_marker_p (stmt))
+    return;
+
   /* Remap all the operands in COPY.  */
   memset (&wi, 0, sizeof (wi));
   wi.info = id;
@@ -2868,8 +3118,10 @@ copy_debug_stmt (gdebug *stmt, copy_body_data *id)
 
   if (gimple_debug_source_bind_p (stmt))
     t = gimple_debug_source_bind_get_var (stmt);
-  else
+  else if (gimple_debug_bind_p (stmt))
     t = gimple_debug_bind_get_var (stmt);
+  else
+    gcc_unreachable ();
 
   if (TREE_CODE (t) == PARM_DECL && id->debug_map
       && (n = id->debug_map->get (t)))
@@ -2969,7 +3221,7 @@ copy_tree_body (copy_body_data *id)
    another function.  */
 
 static tree
-copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
+copy_body (copy_body_data *id,
           basic_block entry_block_map, basic_block exit_block_map,
           basic_block new_entry)
 {
@@ -2978,9 +3230,11 @@ copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
 
   /* If this body has a CFG, walk CFG and copy.  */
   gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
-  body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
+  body = copy_cfg_body (id, entry_block_map, exit_block_map,
                        new_entry);
   copy_debug_stmts (id);
+  delete id->killed_new_ssa_names;
+  id->killed_new_ssa_names = NULL;
 
   return body;
 }
@@ -3081,6 +3335,29 @@ insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
     }
 }
 
+/* Deal with mismatched formal/actual parameters, in a rather brute-force way
+   if need be (which should only be necessary for invalid programs).  Attempt
+   to convert VALUE to TYPE and return the result if that is possible;
+   otherwise return a zero constant of the given type.  */
+
+tree
+force_value_to_type (tree type, tree value)
+{
+  /* If we can match up types by promotion/demotion do so.  */
+  if (fold_convertible_p (type, value))
+    return fold_convert (type, value);
+
+  /* ???  For valid programs we should not end up here.
+     Still, if we end up with truly mismatched types here, fall back
+     to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
+     GIMPLE to the following passes.  */
+  if (!is_gimple_reg_type (TREE_TYPE (value))
+      || TYPE_SIZE (type) == TYPE_SIZE (TREE_TYPE (value)))
+    return fold_build1 (VIEW_CONVERT_EXPR, type, value);
+  else
+    return build_zero_cst (type);
+}
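
The three fallbacks in force_value_to_type have a direct source-level analogue: a value conversion where it is meaningful, a same-size bit reinterpretation where it is not (this is what VIEW_CONVERT_EXPR expresses), and a zero constant as the last resort. A hypothetical illustration for one concrete type pair:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Hypothetical analogue of force_value_to_type for float -> uint32_t.
    // `punning` selects the VIEW_CONVERT_EXPR-style path; the zero-constant
    // fallback would apply only when even the sizes disagree.
    static uint32_t force_float_to_u32 (float value, bool punning)
    {
      if (!punning)
        return (uint32_t) value;  // fold_convert-style value conversion

      static_assert (sizeof (float) == sizeof (uint32_t),
                     "bit reinterpretation needs matching sizes");
      uint32_t bits;
      std::memcpy (&bits, &value, sizeof bits);
      return bits;                // same bits, new type
    }

    int main ()
    {
      std::printf ("%u\n", force_float_to_u32 (2.0f, false));  // 2
      std::printf ("%u\n", force_float_to_u32 (2.0f, true));   // 1073741824
    }
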
+
 /* Initialize parameter P with VALUE.  If needed, produce init statement
    at the end of BB.  When BB is NULL, we return init statement to be
    output later.  */
@@ -3097,23 +3374,7 @@ setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
   if (value
       && value != error_mark_node
       && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
-    {
-      /* If we can match up types by promotion/demotion do so.  */
-      if (fold_convertible_p (TREE_TYPE (p), value))
-       rhs = fold_convert (TREE_TYPE (p), value);
-      else
-       {
-         /* ???  For valid programs we should not end up here.
-            Still if we end up with truly mismatched types here, fall back
-            to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
-            GIMPLE to the following passes.  */
-         if (!is_gimple_reg_type (TREE_TYPE (value))
-             || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
-           rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
-         else
-           rhs = build_zero_cst (TREE_TYPE (p));
-       }
-    }
+    rhs = force_value_to_type (TREE_TYPE (p), value);
 
   /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
      here since the type of this decl must be visible to the calling
@@ -3317,14 +3578,12 @@ initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
    was the LHS of the MODIFY_EXPR to which this call is the RHS.
 
-   RETURN_BOUNDS holds a destination for returned bounds.
-
    The return value is a (possibly null) value that holds the result
    as seen by the caller.  */
 
 static tree
 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
-                        tree return_bounds, basic_block entry_bb)
+                        basic_block entry_bb)
 {
   tree callee = id->src_fn;
   tree result = DECL_RESULT (callee);
@@ -3336,7 +3595,9 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
      vs. the call expression.  */
   if (modify_dest)
     caller_type = TREE_TYPE (modify_dest);
-  else
+  else if (return_slot)
+    caller_type = TREE_TYPE (return_slot);
+  else /* No LHS on the call.  */
     caller_type = TREE_TYPE (TREE_TYPE (callee));
 
   /* We don't need to do anything for functions that don't return anything.  */
@@ -3363,6 +3624,7 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
             taken by alias analysis.  */
          gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
          var = return_slot_addr;
+         mark_addressable (return_slot);
        }
       else
        {
@@ -3376,6 +3638,10 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
          && !DECL_GIMPLE_REG_P (result)
          && DECL_P (var))
        DECL_GIMPLE_REG_P (var) = 0;
+
+      if (!useless_type_conversion_p (callee_type, caller_type))
+       var = build1 (VIEW_CONVERT_EXPR, callee_type, var);
+
       use = NULL;
       goto done;
     }
@@ -3396,7 +3662,7 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
       /* ??? If we're assigning to a variable sized type, then we must
         reuse the destination variable, because we've no good way to
         create variable sized temporaries at this point.  */
-      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
+      else if (!poly_int_tree_p (TYPE_SIZE_UNIT (caller_type)))
        use_it = true;
 
       /* If the callee cannot possibly modify MODIFY_DEST, then we can
@@ -3431,7 +3697,7 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
        }
     }
 
-  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
+  gcc_assert (poly_int_tree_p (TYPE_SIZE_UNIT (callee_type)));
 
   var = copy_result_decl_to_var (result, id);
   DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
@@ -3503,20 +3769,6 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
 
   /* Remember this so we can ignore it in remap_decls.  */
   id->retvar = var;
-
-  /* If returned bounds are used, then make var for them.  */
-  if (return_bounds)
-  {
-    tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
-    DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
-    TREE_NO_WARNING (bndtemp) = 1;
-    declare_inline_vars (id->block, bndtemp);
-
-    id->retbnd = bndtemp;
-    insert_init_stmt (id, entry_bb,
-                     gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
-  }
-
   return use;
 }
 
@@ -3562,7 +3814,7 @@ copy_forbidden (struct function *fun)
 static const char *inline_forbidden_reason;
 
 /* A callback for walk_gimple_seq to handle statements.  Returns non-null
-   iff a function can not be inlined.  Also sets the reason why. */
+   iff a function cannot be inlined.  Also sets the reason why.  */
 
 static tree
 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
@@ -3648,7 +3900,7 @@ inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
               return from the function the inline has been inlined into.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
-                  "it uses __builtin_return or __builtin_apply_args");
+                  "it uses %<__builtin_return%> or %<__builtin_apply_args%>");
            *handled_ops_p = true;
            return t;
 
@@ -3729,7 +3981,7 @@ function_attribute_inlinable_p (const_tree fndecl)
 
       for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
        {
-         const_tree name = TREE_PURPOSE (a);
+         const_tree name = get_attribute_name (a);
          int i;
 
          for (i = 0; targetm.attribute_table[i].name != NULL; i++)
@@ -3768,8 +4020,8 @@ tree_inlinable_function_p (tree fn)
       && always_inline == NULL)
     {
       if (do_warning)
-        warning (OPT_Winline, "function %q+F can never be inlined because it "
-                 "is suppressed using -fno-inline", fn);
+       warning (OPT_Winline, "function %q+F can never be inlined because it "
+                "is suppressed using %<-fno-inline%>", fn);
       inlinable = false;
     }
 
@@ -3817,11 +4069,12 @@ estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
 
   if (TREE_CODE (type) == VECTOR_TYPE)
     {
-      machine_mode inner = TYPE_MODE (TREE_TYPE (type));
-      machine_mode simd
-       = targetm.vectorize.preferred_simd_mode (inner);
-      int simd_mode_size = GET_MODE_SIZE (simd);
-      return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
+      scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
+      machine_mode simd = targetm.vectorize.preferred_simd_mode (inner);
+      int orig_mode_size
+       = estimated_poly_value (GET_MODE_SIZE (TYPE_MODE (type)));
+      int simd_mode_size = estimated_poly_value (GET_MODE_SIZE (simd));
+      return ((orig_mode_size + simd_mode_size - 1)
              / simd_mode_size);
     }
 
@@ -3859,10 +4112,10 @@ estimate_operator_cost (enum tree_code code, eni_weights *weights,
 
     case PLUS_EXPR:
     case POINTER_PLUS_EXPR:
+    case POINTER_DIFF_EXPR:
     case MINUS_EXPR:
     case MULT_EXPR:
     case MULT_HIGHPART_EXPR:
-    case FMA_EXPR:
 
     case ADDR_SPACE_CONVERT_EXPR:
     case FIXED_CONVERT_EXPR:
@@ -3873,6 +4126,7 @@ estimate_operator_cost (enum tree_code code, eni_weights *weights,
     case MIN_EXPR:
     case MAX_EXPR:
     case ABS_EXPR:
+    case ABSU_EXPR:
 
     case LSHIFT_EXPR:
     case RSHIFT_EXPR:
@@ -3916,9 +4170,6 @@ estimate_operator_cost (enum tree_code code, eni_weights *weights,
 
     case REALIGN_LOAD_EXPR:
 
-    case REDUC_MAX_EXPR:
-    case REDUC_MIN_EXPR:
-    case REDUC_PLUS_EXPR:
     case WIDEN_SUM_EXPR:
     case WIDEN_MULT_EXPR:
     case DOT_PROD_EXPR:
@@ -3935,11 +4186,16 @@ estimate_operator_cost (enum tree_code code, eni_weights *weights,
     case VEC_UNPACK_LO_EXPR:
     case VEC_UNPACK_FLOAT_HI_EXPR:
     case VEC_UNPACK_FLOAT_LO_EXPR:
+    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
+    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
     case VEC_PACK_TRUNC_EXPR:
     case VEC_PACK_SAT_EXPR:
     case VEC_PACK_FIX_TRUNC_EXPR:
+    case VEC_PACK_FLOAT_EXPR:
     case VEC_WIDEN_LSHIFT_HI_EXPR:
     case VEC_WIDEN_LSHIFT_LO_EXPR:
+    case VEC_DUPLICATE_EXPR:
+    case VEC_SERIES_EXPR:
 
       return 1;
 
@@ -4070,7 +4326,7 @@ estimate_num_insns (gimple *stmt, eni_weights *weights)
        if (gimple_call_internal_p (stmt))
          return 0;
        else if ((decl = gimple_call_fndecl (stmt))
-                && DECL_BUILT_IN (decl))
+                && fndecl_built_in_p (decl))
          {
            /* Do not special case builtins where we see the body.
              This just confuses the inliner.  */
@@ -4140,7 +4396,10 @@ estimate_num_insns (gimple *stmt, eni_weights *weights)
           with very long asm statements.  */
        if (count > 1000)
          count = 1000;
-       return count;
+       /* If this asm is asm inline, count it as the minimum size.  */
+       if (gimple_asm_inline_p (as_a <gasm *> (stmt)))
+         count = MIN (1, count);
+       return MAX (1, count);
       }
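
The MIN/MAX pair pins the size estimate of an asm inline at one instruction, while ordinary asms keep costing roughly one instruction per line, capped at 1000. A sketch of the resulting cost function:

    #include <algorithm>
    #include <cassert>

    // Cost in "instructions" charged for a GIMPLE_ASM given a rough line
    // count; mirrors the clamping above.
    static int asm_size_estimate (int lines, bool asm_inline_p)
    {
      int count = std::min (lines, 1000);  // avoid quadratic behavior on
                                           // very long asm statements
      if (asm_inline_p)
        count = std::min (1, count);       // asm inline: minimum size
      return std::max (1, count);
    }

    int main ()
    {
      assert (asm_size_estimate (5000, false) == 1000);
      assert (asm_size_estimate (5000, true) == 1);
      assert (asm_size_estimate (0, true) == 1);
    }
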
 
     case GIMPLE_RESX:
@@ -4194,6 +4453,7 @@ estimate_num_insns (gimple *stmt, eni_weights *weights)
     case GIMPLE_OMP_MASTER:
     case GIMPLE_OMP_TASKGROUP:
     case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_SCAN:
     case GIMPLE_OMP_SECTION:
     case GIMPLE_OMP_SECTIONS:
     case GIMPLE_OMP_SINGLE:
@@ -4323,7 +4583,7 @@ reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
   if (!VAR_P (*remappedvarp))
     return;
 
-  if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
+  if (*remappedvarp == id->retvar)
     return;
 
   tree tvar = target_for_debug_bind (*remappedvarp);
@@ -4366,7 +4626,8 @@ reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
 
 static bool
-expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
+expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id,
+                   bitmap to_purge)
 {
   tree use_retvar;
   tree fn;
@@ -4374,7 +4635,6 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
   hash_map<tree, tree> *st = NULL;
   tree return_slot;
   tree modify_dest;
-  tree return_bounds = NULL;
   struct cgraph_edge *cg_edge;
   cgraph_inline_failed_t reason;
   basic_block return_block;
@@ -4383,8 +4643,12 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
   bool successfully_inlined = false;
   bool purge_dead_abnormal_edges;
   gcall *call_stmt;
-  unsigned int i;
   unsigned int prop_mask, src_properties;
+  struct function *dst_cfun;
+  tree simduid;
+  use_operand_p use;
+  gimple *simtenter_stmt = NULL;
+  vec<tree> *simtvars_save;
 
   /* The gimplifier uses input_location in too many places, such as
      internal_get_tmp_var ().  */
@@ -4433,7 +4697,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
             be to be able to keep both bodies and use extern inline body
             for inlining, but we can't do that because frontends overwrite
             the body.  */
-         && !cg_edge->callee->local.redefined_extern_inline
+         && !cg_edge->callee->redefined_extern_inline
          /* During early inline pass, report only when optimization is
             not turned on.  */
          && (symtab->global_info_ready
@@ -4442,7 +4706,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
          /* PR 20090218-1_0.c. Body can be provided by another module. */
          && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
        {
-         error ("inlining failed in call to always_inline %q+F: %s", fn,
+         error ("inlining failed in call to %<always_inline%> %q+F: %s", fn,
                 cgraph_inline_failed_string (reason));
          if (gimple_location (stmt) != UNKNOWN_LOCATION)
            inform (gimple_location (stmt), "called from here");
@@ -4461,6 +4725,7 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
               /* Avoid warnings during early inline pass. */
               && symtab->global_info_ready)
        {
+         auto_diagnostic_group d;
          if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
                       fn, _(cgraph_inline_failed_string (reason))))
            {
@@ -4481,17 +4746,16 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
     {
       cgraph_edge *edge;
       tree virtual_offset = NULL;
-      int freq = cg_edge->frequency;
-      gcov_type count = cg_edge->count;
+      profile_count count = cg_edge->count;
       tree op;
       gimple_stmt_iterator iter = gsi_for_stmt (stmt);
 
       cg_edge->remove ();
       edge = id->src_node->callees->clone (id->dst_node, call_stmt,
                                           gimple_uid (stmt),
-                                          REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
+                                          profile_count::one (),
+                                          profile_count::one (),
                                           true);
-      edge->frequency = freq;
       edge->count = count;
       if (id->src_node->thunk.virtual_offset_p)
         virtual_offset = size_int (id->src_node->thunk.virtual_value);
@@ -4502,13 +4766,13 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
                         GSI_NEW_STMT);
       gcc_assert (id->src_node->thunk.this_adjusting);
       op = thunk_adjust (&iter, op, 1, id->src_node->thunk.fixed_offset,
-                        virtual_offset);
+                        virtual_offset, id->src_node->thunk.indirect_offset);
 
       gimple_call_set_arg (stmt, 0, op);
       gimple_call_set_fndecl (stmt, edge->callee->decl);
       update_stmt (stmt);
       id->src_node->remove ();
-      expand_call_inline (bb, stmt, id);
+      expand_call_inline (bb, stmt, id, to_purge);
       maybe_remove_unused_call_args (cfun, stmt);
       return true;
     }
@@ -4520,40 +4784,26 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
 
   /* We will be inlining this callee.  */
   id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
-  id->assign_stmts.create (0);
 
   /* Update the callers EH personality.  */
-  if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
+  if (DECL_FUNCTION_PERSONALITY (fn))
     DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
-      = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
+      = DECL_FUNCTION_PERSONALITY (fn);
 
-  /* Split the block holding the GIMPLE_CALL.  */
-  e = split_block (bb, stmt);
+  /* Split the block before the GIMPLE_CALL.  */
+  stmt_gsi = gsi_for_stmt (stmt);
+  gsi_prev (&stmt_gsi);
+  e = split_block (bb, gsi_end_p (stmt_gsi) ? NULL : gsi_stmt (stmt_gsi));
   bb = e->src;
   return_block = e->dest;
   remove_edge (e);
 
-  /* split_block splits after the statement; work around this by
-     moving the call into the second block manually.  Not pretty,
-     but seems easier than doing the CFG manipulation by hand
-     when the GIMPLE_CALL is in the last statement of BB.  */
-  stmt_gsi = gsi_last_bb (bb);
-  gsi_remove (&stmt_gsi, false);
-
    /* If the GIMPLE_CALL was the last statement of BB, it may have
      been the source of abnormal edges.  In this case, schedule
      the removal of dead abnormal edges.  */
   gsi = gsi_start_bb (return_block);
-  if (gsi_end_p (gsi))
-    {
-      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
-      purge_dead_abnormal_edges = true;
-    }
-  else
-    {
-      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
-      purge_dead_abnormal_edges = false;
-    }
+  gsi_next (&gsi);
+  purge_dead_abnormal_edges = gsi_end_p (gsi);
 
   stmt_gsi = gsi_start_bb (return_block);
 
@@ -4567,36 +4817,59 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
      artificial decls inserted by the compiler itself.  We need to
      either link the inlined blocks into the caller block tree or
      not refer to them in any way to not break GC for locations.  */
-  if (gimple_block (stmt))
-    {
+  if (tree block = gimple_block (stmt))
+    {
+      /* We want to assign a BLOCK_SOURCE_LOCATION other than UNKNOWN_LOCATION
+         to make inlined_function_outer_scope_p return true on this BLOCK.  */
+      location_t loc = LOCATION_LOCUS (gimple_location (stmt));
+      if (loc == UNKNOWN_LOCATION)
+       loc = LOCATION_LOCUS (DECL_SOURCE_LOCATION (fn));
+      if (loc == UNKNOWN_LOCATION)
+       loc = BUILTINS_LOCATION;
       id->block = make_node (BLOCK);
-      BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
-      BLOCK_SOURCE_LOCATION (id->block) 
-       = LOCATION_LOCUS (gimple_location (stmt));
-      prepend_lexical_block (gimple_block (stmt), id->block);
+      BLOCK_ABSTRACT_ORIGIN (id->block) = DECL_ORIGIN (fn);
+      BLOCK_SOURCE_LOCATION (id->block) = loc;
+      prepend_lexical_block (block, id->block);
     }
 
-  /* Local declarations will be replaced by their equivalents in this
-     map.  */
+  /* Local declarations will be replaced by their equivalents in this map.  */
   st = id->decl_map;
   id->decl_map = new hash_map<tree, tree>;
   dst = id->debug_map;
   id->debug_map = NULL;
+  if (flag_stack_reuse != SR_NONE)
+    id->add_clobbers_to_eh_landing_pads = last_basic_block_for_fn (cfun);
 
   /* Record the function we are about to inline.  */
   id->src_fn = fn;
   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
+  id->reset_location = DECL_IGNORED_P (fn);
   id->call_stmt = call_stmt;
 
+  /* When inlining into an OpenMP SIMD-on-SIMT loop, arrange for new automatic
+     variables to be added to IFN_GOMP_SIMT_ENTER argument list.  */
+  dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
+  simtvars_save = id->dst_simt_vars;
+  if (!(dst_cfun->curr_properties & PROP_gimple_lomp_dev)
+      && (simduid = bb->loop_father->simduid) != NULL_TREE
+      && (simduid = ssa_default_def (dst_cfun, simduid)) != NULL_TREE
+      && single_imm_use (simduid, &use, &simtenter_stmt)
+      && is_gimple_call (simtenter_stmt)
+      && gimple_call_internal_p (simtenter_stmt, IFN_GOMP_SIMT_ENTER))
+    vec_alloc (id->dst_simt_vars, 0);
+  else
+    id->dst_simt_vars = NULL;
+
+  if (profile_status_for_fn (id->src_cfun) == PROFILE_ABSENT)
+    profile_status_for_fn (dst_cfun) = PROFILE_ABSENT;
+
   /* If the src function contains an IFN_VA_ARG, then so will the dst
      function after inlining.  Likewise for IFN_GOMP_USE_SIMT.  */
   prop_mask = PROP_gimple_lva | PROP_gimple_lomp_dev;
   src_properties = id->src_cfun->curr_properties & prop_mask;
   if (src_properties != prop_mask)
-    {
-      struct function *dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
-      dst_cfun->curr_properties &= src_properties | ~prop_mask;
-    }
+    dst_cfun->curr_properties &= src_properties | ~prop_mask;
+  dst_cfun->calls_eh_return |= id->src_cfun->calls_eh_return;
 
   gcc_assert (!id->src_cfun->after_inlining);
 
@@ -4609,6 +4882,14 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
                        GSI_NEW_STMT);
     }
   initialize_inlined_parameters (id, stmt, fn, bb);
+  if (debug_nonbind_markers_p && debug_inline_points && id->block
+      && inlined_function_outer_scope_p (id->block))
+    {
+      gimple_stmt_iterator si = gsi_last_bb (bb);
+      gsi_insert_after (&si, gimple_build_debug_inline_entry
+                       (id->block, DECL_SOURCE_LOCATION (id->src_fn)),
+                       GSI_NEW_STMT);
+    }
 
   if (DECL_INITIAL (fn))
     {
@@ -4652,24 +4933,6 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
     {
       modify_dest = gimple_call_lhs (stmt);
 
-      /* Remember where to copy returned bounds.  */
-      if (gimple_call_with_bounds_p (stmt)
-         && TREE_CODE (modify_dest) == SSA_NAME)
-       {
-         gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
-         if (retbnd)
-           {
-             return_bounds = gimple_call_lhs (retbnd);
-             /* If returned bounds are not used then just
-                remove unused call.  */
-             if (!return_bounds)
-               {
-                 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
-                 gsi_remove (&iter, true);
-               }
-           }
-       }
-
       /* The function which we are inlining might not return a value,
         in which case we should issue a warning that the function
         does not return a value.  In that case the optimizers will
@@ -4693,26 +4956,64 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
      we may get confused if the compiler sees that the inlined new
      function returns a pointer which was just deleted.  See bug
      33407.  */
-  if (DECL_IS_OPERATOR_NEW (fn))
+  if (DECL_IS_OPERATOR_NEW_P (fn))
     {
       return_slot = NULL;
       modify_dest = NULL;
     }
 
   /* Declare the return variable for the function.  */
-  use_retvar = declare_return_variable (id, return_slot, modify_dest,
-                                       return_bounds, bb);
+  use_retvar = declare_return_variable (id, return_slot, modify_dest, bb);
 
   /* Add local vars in this inlined callee to caller.  */
   add_local_variables (id->src_cfun, cfun, id);
 
-  if (dump_file && (dump_flags & TDF_DETAILS))
+  if (id->src_node->clone.performed_splits)
     {
-      fprintf (dump_file, "Inlining ");
-      print_generic_expr (dump_file, id->src_fn, 0);
-      fprintf (dump_file, " to ");
-      print_generic_expr (dump_file, id->dst_fn, 0);
-      fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
+      /* Any calls from the inlined function will be turned into calls from the
+        function we inline into.  We must preserve notes about how to split
+        parameters so that such calls can be redirected/updated.  */
+      unsigned len = vec_safe_length (id->src_node->clone.performed_splits);
+      for (unsigned i = 0; i < len; i++)
+       {
+         ipa_param_performed_split ps
+           = (*id->src_node->clone.performed_splits)[i];
+         ps.dummy_decl = remap_decl (ps.dummy_decl, id);
+         vec_safe_push (id->dst_node->clone.performed_splits, ps);
+       }
+
+      if (flag_checking)
+       {
+         len = vec_safe_length (id->dst_node->clone.performed_splits);
+         for (unsigned i = 0; i < len; i++)
+           {
+             ipa_param_performed_split *ps1
+               = &(*id->dst_node->clone.performed_splits)[i];
+             for (unsigned j = i + 1; j < len; j++)
+               {
+                 ipa_param_performed_split *ps2
+                   = &(*id->dst_node->clone.performed_splits)[j];
+                 gcc_assert (ps1->dummy_decl != ps2->dummy_decl
+                             || ps1->unit_offset != ps2->unit_offset);
+               }
+           }
+       }
+    }
+
+  if (dump_enabled_p ())
+    {
+      char buf[128];
+      snprintf (buf, sizeof (buf), "%4.2f",
+               cg_edge->sreal_frequency ().to_double ());
+      dump_printf_loc (MSG_NOTE | MSG_PRIORITY_INTERNALS,
+                      call_stmt,
+                      "Inlining %C to %C with frequency %s\n",
+                      id->src_node, id->dst_node, buf);
+      if (dump_file && (dump_flags & TDF_DETAILS))
+       {
+         id->src_node->dump (dump_file);
+         id->dst_node->dump (dump_file);
+       }
     }
 
   /* This is it.  Duplicate the callee body.  Assume callee is
@@ -4720,16 +5021,50 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
      function in any way before this point, as this CALL_EXPR may be
      a self-referential call; if we're calling ourselves, we need to
      duplicate our body before altering anything.  */
-  copy_body (id, cg_edge->callee->count,
-            GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
-            bb, return_block, NULL);
+  copy_body (id, bb, return_block, NULL);
 
   reset_debug_bindings (id, stmt_gsi);
 
+  if (flag_stack_reuse != SR_NONE)
+    for (tree p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
+      if (!TREE_THIS_VOLATILE (p))
+       {
+         tree *varp = id->decl_map->get (p);
+         if (varp && VAR_P (*varp) && !is_gimple_reg (*varp))
+           {
+             tree clobber = build_clobber (TREE_TYPE (*varp));
+             gimple *clobber_stmt;
+             clobber_stmt = gimple_build_assign (*varp, clobber);
+             gimple_set_location (clobber_stmt, gimple_location (stmt));
+             gsi_insert_before (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
+           }
+       }
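
These clobbers (and the ones added for id->retvar further down) are what lets -fstack-reuse overlap the stack slots of inlined locals whose lifetimes have ended. The effect corresponds to variables living in disjoint scopes; an illustration, with use() as a hypothetical external consumer:

    // Illustration only: the clobber statements play the role of the
    // closing braces here, so 'a' and 'b' may share one stack slot.
    extern void use (void *);

    void two_lifetimes (void)
    {
      {
        char a[4096];
        use (a);
      }  // end of 'a': GIMPLE-level  a = {CLOBBER};
      {
        char b[4096];
        use (b);  // 'b' can reuse the slot previously held by 'a'
      }
    }
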
+
   /* Reset the escaped solution.  */
   if (cfun->gimple_df)
     pt_solution_reset (&cfun->gimple_df->escaped);
 
+  /* Add new automatic variables to IFN_GOMP_SIMT_ENTER arguments.  */
+  if (id->dst_simt_vars && id->dst_simt_vars->length () > 0)
+    {
+      size_t nargs = gimple_call_num_args (simtenter_stmt);
+      vec<tree> *vars = id->dst_simt_vars;
+      auto_vec<tree> newargs (nargs + vars->length ());
+      for (size_t i = 0; i < nargs; i++)
+       newargs.quick_push (gimple_call_arg (simtenter_stmt, i));
+      for (tree *pvar = vars->begin (); pvar != vars->end (); pvar++)
+       {
+         tree ptrtype = build_pointer_type (TREE_TYPE (*pvar));
+         newargs.quick_push (build1 (ADDR_EXPR, ptrtype, *pvar));
+       }
+      gcall *g = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, newargs);
+      gimple_call_set_lhs (g, gimple_call_lhs (simtenter_stmt));
+      gimple_stmt_iterator gsi = gsi_for_stmt (simtenter_stmt);
+      gsi_replace (&gsi, g, false);
+    }
+  vec_free (id->dst_simt_vars);
+  id->dst_simt_vars = simtvars_save;
+
   /* Clean up.  */
   if (id->debug_map)
     {
@@ -4753,14 +5088,25 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
     {
       gimple *old_stmt = stmt;
       stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
+      gimple_set_location (stmt, gimple_location (old_stmt));
       gsi_replace (&stmt_gsi, stmt, false);
       maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
-
-      /* Copy bounds if we copy structure with bounds.  */
-      if (chkp_function_instrumented_p (id->dst_fn)
-         && !BOUNDED_P (use_retvar)
-         && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
-       id->assign_stmts.safe_push (stmt);
+      /* Append a clobber for id->retvar if easily possible.  */
+      if (flag_stack_reuse != SR_NONE
+         && id->retvar
+         && VAR_P (id->retvar)
+         && id->retvar != return_slot
+         && id->retvar != modify_dest
+         && !TREE_THIS_VOLATILE (id->retvar)
+         && !is_gimple_reg (id->retvar)
+         && !stmt_ends_bb_p (stmt))
+       {
+         tree clobber = build_clobber (TREE_TYPE (id->retvar));
+         gimple *clobber_stmt;
+         clobber_stmt = gimple_build_assign (id->retvar, clobber);
+         gimple_set_location (clobber_stmt, gimple_location (old_stmt));
+         gsi_insert_after (&stmt_gsi, clobber_stmt, GSI_SAME_STMT);
+       }
     }
   else
     {
@@ -4793,29 +5139,28 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
              SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
            }
        }
+      /* Replace with a clobber for id->retvar.  */
+      else if (flag_stack_reuse != SR_NONE
+              && id->retvar
+              && VAR_P (id->retvar)
+              && id->retvar != return_slot
+              && id->retvar != modify_dest
+              && !TREE_THIS_VOLATILE (id->retvar)
+              && !is_gimple_reg (id->retvar))
+       {
+         tree clobber = build_clobber (TREE_TYPE (id->retvar));
+         gimple *clobber_stmt;
+         clobber_stmt = gimple_build_assign (id->retvar, clobber);
+         gimple_set_location (clobber_stmt, gimple_location (stmt));
+         gsi_replace (&stmt_gsi, clobber_stmt, false);
+         maybe_clean_or_replace_eh_stmt (stmt, clobber_stmt);
+       }
       else
-        gsi_remove (&stmt_gsi, true);
-    }
-
-  /* Put returned bounds into the correct place if required.  */
-  if (return_bounds)
-    {
-      gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
-      gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
-      gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
-      unlink_stmt_vdef (old_stmt);
-      gsi_replace (&bnd_gsi, new_stmt, false);
-      maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
-      cgraph_update_edges_for_call_stmt (old_stmt,
-                                        gimple_call_fndecl (old_stmt),
-                                        new_stmt);
+       gsi_remove (&stmt_gsi, true);
     }
 
   if (purge_dead_abnormal_edges)
-    {
-      gimple_purge_dead_eh_edges (return_block);
-      gimple_purge_dead_abnormal_call_edges (return_block);
-    }
+    bitmap_set_bit (to_purge, return_block->index);
 
   /* If the value of the new expression is ignored, that's OK.  We
      don't warn about this for CALL_EXPRs, so we shouldn't warn about
@@ -4827,22 +5172,20 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
     }
 
-  /* Copy bounds for all generated assigns that need it.  */
-  for (i = 0; i < id->assign_stmts.length (); i++)
-    chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
-  id->assign_stmts.release ();
+  id->add_clobbers_to_eh_landing_pads = 0;
 
   /* Output the inlining info for this abstract function, since it has been
      inlined.  If we don't do this now, we can lose the information about the
      variables in the function when the blocks get blown away as soon as we
      remove the cgraph node.  */
   if (gimple_block (stmt))
-    (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
+    (*debug_hooks->outlining_inline_function) (fn);
 
   /* Update callgraph if needed.  */
   cg_edge->callee->remove ();
 
   id->block = NULL_TREE;
+  id->retvar = NULL_TREE;
   successfully_inlined = true;
 
  egress:
@@ -4855,7 +5198,8 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
    in a MODIFY_EXPR.  */
 
 static bool
-gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
+gimple_expand_calls_inline (basic_block bb, copy_body_data *id,
+                           bitmap to_purge)
 {
   gimple_stmt_iterator gsi;
   bool inlined = false;
@@ -4867,7 +5211,7 @@ gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
 
       if (is_gimple_call (stmt)
          && !gimple_call_internal_p (stmt))
-       inlined |= expand_call_inline (bb, stmt, id);
+       inlined |= expand_call_inline (bb, stmt, id, to_purge);
     }
 
   return inlined;
@@ -4880,7 +5224,8 @@ gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
 static void
 fold_marked_statements (int first, hash_set<gimple *> *statements)
 {
-  for (; first < n_basic_blocks_for_fn (cfun); first++)
+  auto_bitmap to_purge;
+  for (; first < last_basic_block_for_fn (cfun); first++)
     if (BASIC_BLOCK_FOR_FN (cfun, first))
       {
         gimple_stmt_iterator gsi;
@@ -4891,9 +5236,10 @@ fold_marked_statements (int first, hash_set<gimple *> *statements)
          if (statements->contains (gsi_stmt (gsi)))
            {
              gimple *old_stmt = gsi_stmt (gsi);
-             tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
+             tree old_decl
+               = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
 
-             if (old_decl && DECL_BUILT_IN (old_decl))
+             if (old_decl && fndecl_built_in_p (old_decl))
                {
                  /* Folding builtins can create multiple instructions,
                     so we need to look at all of them.  */
@@ -4935,8 +5281,7 @@ fold_marked_statements (int first, hash_set<gimple *> *statements)
                                 is moot anyway.  */
                              if (maybe_clean_or_replace_eh_stmt (old_stmt,
                                                                  new_stmt))
-                               gimple_purge_dead_eh_edges (
-                                 BASIC_BLOCK_FOR_FN (cfun, first));
+                               bitmap_set_bit (to_purge, first);
                              break;
                            }
                          gsi_next (&i2);
@@ -4956,11 +5301,11 @@ fold_marked_statements (int first, hash_set<gimple *> *statements)
                                                       new_stmt);
 
                  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
-                   gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
-                                                                   first));
+                   bitmap_set_bit (to_purge, first);
                }
            }
       }
+  gimple_purge_all_dead_eh_edges (to_purge);
 }
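
Collecting block indices in a bitmap and purging once at the end, instead of purging inside the loop, is the usual defer-the-mutation pattern: the CFG stays stable while statements are still being folded and use-def chains walked. A generic sketch, with std::set standing in for the bitmap:

    #include <cstdio>
    #include <set>

    int main ()
    {
      std::set<int> to_purge;           // stands in for the auto_bitmap

      for (int bb = 0; bb < 10; ++bb)   // phase 1: fold statements and
        if (bb % 3 == 0)                // only *record* blocks whose EH
          to_purge.insert (bb);         // edges became dead

      for (int bb : to_purge)           // phase 2: mutate the CFG once
        std::printf ("purge dead EH edges in block %d\n", bb);
    }
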
 
 /* Expand calls to inline functions in the body of FN.  */
@@ -5006,8 +5351,9 @@ optimize_inline_calls (tree fn)
      will split id->current_basic_block, and the new blocks will
      follow it; we'll trudge through them, processing their CALL_EXPRs
      along the way.  */
+  auto_bitmap to_purge;
   FOR_EACH_BB_FN (bb, cfun)
-    inlined_p |= gimple_expand_calls_inline (bb, &id);
+    inlined_p |= gimple_expand_calls_inline (bb, &id, to_purge);
 
   pop_gimplify_context (NULL);
 
@@ -5023,9 +5369,25 @@ optimize_inline_calls (tree fn)
     }
 
   /* Fold queued statements.  */
+  update_max_bb_count ();
   fold_marked_statements (last, id.statements_to_fold);
   delete id.statements_to_fold;
 
+  /* Finally purge EH and abnormal edges from the call stmts we inlined.
+     We need to do this after fold_marked_statements since that may walk
+     the SSA use-def chain.  */
+  unsigned i;
+  bitmap_iterator bi;
+  EXECUTE_IF_SET_IN_BITMAP (to_purge, 0, i, bi)
+    {
+      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
+      if (bb)
+       {
+         gimple_purge_dead_eh_edges (bb);
+         gimple_purge_dead_abnormal_call_edges (bb);
+       }
+    }
+
   gcc_assert (!id.debug_stmts.exists ());
 
   /* If we didn't inline into the function there is nothing to do.  */
@@ -5035,7 +5397,8 @@ optimize_inline_calls (tree fn)
   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
   number_blocks (fn);
 
-  delete_unreachable_blocks_update_callgraph (&id);
+  delete_unreachable_blocks_update_callgraph (id.dst_node, false);
+
   if (flag_checking)
     id.dst_node->verify ();
 
@@ -5436,6 +5799,10 @@ copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
   if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
       && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
     SET_DECL_RTL (copy, 0);
+  /* For vector typed decls make sure to update DECL_MODE according
+     to the new function context.  */
+  if (VECTOR_TYPE_P (TREE_TYPE (copy)))
+    SET_DECL_MODE (copy, TYPE_MODE (TREE_TYPE (copy)));
 
   /* These args would always appear unused, if not for this.  */
   TREE_USED (copy) = 1;
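
The DECL_MODE refresh matters because the same vector type can map to different machine modes in different function contexts, for instance when decls move between functions compiled for different effective targets. A hypothetical source-level situation where such a cross-context copy arises:

    // Hypothetical example: a 32-byte vector decl copied between functions
    // whose effective targets differ may need its DECL_MODE recomputed for
    // the destination function.
    typedef int v8si __attribute__ ((vector_size (32)));

    static inline v8si
    vadd (v8si a, v8si b)
    {
      return a + b;  // decls duplicated from here get the caller's mode
    }

    v8si
    caller (v8si x)
    {
      return vadd (x, x);
    }
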
@@ -5453,14 +5820,28 @@ copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
        function.  */
     ;
   else
-    /* Ordinary automatic local variables are now in the scope of the
-       new function.  */
-    DECL_CONTEXT (copy) = id->dst_fn;
+    {
+      /* Ordinary automatic local variables are now in the scope of the
+        new function.  */
+      DECL_CONTEXT (copy) = id->dst_fn;
+      if (VAR_P (copy) && id->dst_simt_vars && !is_gimple_reg (copy))
+       {
+         if (!lookup_attribute ("omp simt private", DECL_ATTRIBUTES (copy)))
+           DECL_ATTRIBUTES (copy)
+             = tree_cons (get_identifier ("omp simt private"), NULL,
+                          DECL_ATTRIBUTES (copy));
+         id->dst_simt_vars->safe_push (copy);
+       }
+    }
 
   return copy;
 }
 
-static tree
+/* Create a new VAR_DECL duplicating DECL in all respects, except that the
+   copy is always a VAR_DECL; DECL itself may be a VAR_DECL, a PARM_DECL or
+   a RESULT_DECL.  The original DECL must come from ID->src_fn and the copy
+   will be part of ID->dst_fn.  */
+
+tree
 copy_decl_to_var (tree decl, copy_body_data *id)
 {
   tree copy, type;
@@ -5543,38 +5924,24 @@ copy_decl_maybe_to_var (tree decl, copy_body_data *id)
     return copy_decl_no_change (decl, id);
 }
 
-/* Return a copy of the function's argument tree.  */
+/* Return a copy of the function's argument tree without any modifications.  */
+
 static tree
-copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
-                              bitmap args_to_skip, tree *vars)
+copy_arguments_nochange (tree orig_parm, copy_body_data * id)
 {
   tree arg, *parg;
   tree new_parm = NULL;
-  int i = 0;
 
   parg = &new_parm;
-
-  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
-    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
-      {
-        tree new_tree = remap_decl (arg, id);
-       if (TREE_CODE (new_tree) != PARM_DECL)
-         new_tree = id->copy_decl (arg, id);
-        lang_hooks.dup_lang_specific_decl (new_tree);
-        *parg = new_tree;
-       parg = &DECL_CHAIN (new_tree);
-      }
-    else if (!id->decl_map->get (arg))
-      {
-       /* Make an equivalent VAR_DECL.  If the argument was used
-          as temporary variable later in function, the uses will be
-          replaced by local variable.  */
-       tree var = copy_decl_to_var (arg, id);
-       insert_decl_map (id, arg, var);
-        /* Declare this new variable.  */
-        DECL_CHAIN (var) = *vars;
-        *vars = var;
-      }
+  for (arg = orig_parm; arg; arg = DECL_CHAIN (arg))
+    {
+      tree new_tree = remap_decl (arg, id);
+      if (TREE_CODE (new_tree) != PARM_DECL)
+       new_tree = id->copy_decl (arg, id);
+      lang_hooks.dup_lang_specific_decl (new_tree);
+      *parg = new_tree;
+      parg = &DECL_CHAIN (new_tree);
+    }
   return new_parm;
 }
 
@@ -5605,84 +5972,23 @@ tree_versionable_function_p (tree fndecl)
          && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
 }
 
-/* Delete all unreachable basic blocks and update callgraph.
-   Doing so is somewhat nontrivial because we need to update all clones and
-   remove inline function that become unreachable.  */
+/* Update clone info after duplication.  */
 
-static bool
-delete_unreachable_blocks_update_callgraph (copy_body_data *id)
+static void
+update_clone_info (copy_body_data * id)
 {
-  bool changed = false;
-  basic_block b, next_bb;
-
-  find_unreachable_blocks ();
-
-  /* Delete all unreachable basic blocks.  */
-
-  for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
-       != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
+  vec<ipa_param_performed_split, va_gc> *cur_performed_splits
+    = id->dst_node->clone.performed_splits;
+  if (cur_performed_splits)
     {
-      next_bb = b->next_bb;
-
-      if (!(b->flags & BB_REACHABLE))
+      unsigned len = cur_performed_splits->length ();
+      for (unsigned i = 0; i < len; i++)
        {
-          gimple_stmt_iterator bsi;
-
-          for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
-           {
-             struct cgraph_edge *e;
-             struct cgraph_node *node;
-
-             id->dst_node->remove_stmt_references (gsi_stmt (bsi));
-
-             if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
-                 &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
-               {
-                 if (!e->inline_failed)
-                   e->callee->remove_symbol_and_inline_clones (id->dst_node);
-                 else
-                   e->remove ();
-               }
-             if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
-                 && id->dst_node->clones)
-               for (node = id->dst_node->clones; node != id->dst_node;)
-                 {
-                   node->remove_stmt_references (gsi_stmt (bsi));
-                   if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
-                       && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
-                     {
-                       if (!e->inline_failed)
-                         e->callee->remove_symbol_and_inline_clones (id->dst_node);
-                       else
-                         e->remove ();
-                     }
-
-                   if (node->clones)
-                     node = node->clones;
-                   else if (node->next_sibling_clone)
-                     node = node->next_sibling_clone;
-                   else
-                     {
-                       while (node != id->dst_node && !node->next_sibling_clone)
-                         node = node->clone_of;
-                       if (node != id->dst_node)
-                         node = node->next_sibling_clone;
-                     }
-                 }
-           }
-         delete_basic_block (b);
-         changed = true;
+         ipa_param_performed_split *ps = &(*cur_performed_splits)[i];
+         ps->dummy_decl = remap_decl (ps->dummy_decl, id);
        }
     }
 
-  return changed;
-}
-
-/* Update clone info after duplication.  */
-
-static void
-update_clone_info (copy_body_data * id)
-{
   struct cgraph_node *node;
   if (!id->dst_node->clones)
     return;
@@ -5696,10 +6002,55 @@ update_clone_info (copy_body_data * id)
            {
              struct ipa_replace_map *replace_info;
              replace_info = (*node->clone.tree_map)[i];
-             walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
              walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
            }
        }
+      if (node->clone.performed_splits)
+       {
+         unsigned len = vec_safe_length (node->clone.performed_splits);
+         for (unsigned i = 0; i < len; i++)
+           {
+             ipa_param_performed_split *ps
+               = &(*node->clone.performed_splits)[i];
+             ps->dummy_decl = remap_decl (ps->dummy_decl, id);
+           }
+       }
+      if (unsigned len = vec_safe_length (cur_performed_splits))
+       {
+         /* We do not want to add the current performed splits when we are
+            saving a copy of the function body for later use during inlining;
+            that would just duplicate all entries.  So have a look at whether
+            anything referring to the first dummy_decl is already present.  */
+         unsigned dst_len = vec_safe_length (node->clone.performed_splits);
+         ipa_param_performed_split *first = &(*cur_performed_splits)[0];
+         for (unsigned i = 0; i < dst_len; i++)
+           if ((*node->clone.performed_splits)[i].dummy_decl
+               == first->dummy_decl)
+             {
+               len = 0;
+               break;
+             }
+
+         for (unsigned i = 0; i < len; i++)
+           vec_safe_push (node->clone.performed_splits,
+                          (*cur_performed_splits)[i]);
+         if (flag_checking)
+           {
+             for (unsigned i = 0; i < dst_len; i++)
+               {
+                 ipa_param_performed_split *ps1
+                   = &(*node->clone.performed_splits)[i];
+                 for (unsigned j = i + 1; j < dst_len; j++)
+                   {
+                     ipa_param_performed_split *ps2
+                       = &(*node->clone.performed_splits)[j];
+                     gcc_assert (ps1->dummy_decl != ps2->dummy_decl
+                                 || ps1->unit_offset != ps2->unit_offset);
+                   }
+               }
+           }
+       }
+
       if (node->clones)
        node = node->clones;
       else if (node->next_sibling_clone)
@@ -5721,19 +6072,18 @@ update_clone_info (copy_body_data * id)
    tree with another tree while duplicating the function's
    body, TREE_MAP represents the mapping between these
    trees. If UPDATE_CLONES is set, the call_stmt fields
-   of edges of clones of the function will be updated.  
+   of edges of clones of the function will be updated.
 
-   If non-NULL ARGS_TO_SKIP determine function parameters to remove
-   from new version.
-   If SKIP_RETURN is true, the new version will return void.
-   If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
+   If non-NULL, PARAM_ADJUSTMENTS determines how the function prototype (i.e.
+   the function parameters and return value) should be modified.
+   If non-NULL, BLOCKS_TO_COPY determines which basic blocks to copy.
    If non-NULL, NEW_ENTRY determines the new entry BB of the clone.
 */
 void
 tree_function_versioning (tree old_decl, tree new_decl,
                          vec<ipa_replace_map *, va_gc> *tree_map,
-                         bool update_clones, bitmap args_to_skip,
-                         bool skip_return, bitmap blocks_to_copy,
+                         ipa_param_adjustments *param_adjustments,
+                         bool update_clones, bitmap blocks_to_copy,
                          basic_block new_entry)
 {
   struct cgraph_node *old_version_node;
@@ -5745,7 +6095,6 @@ tree_function_versioning (tree old_decl, tree new_decl,
   basic_block old_entry_block, bb;
   auto_vec<gimple *, 10> init_stmts;
   tree vars = NULL_TREE;
-  bitmap debug_args_to_skip = args_to_skip;
 
   gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
              && TREE_CODE (new_decl) == FUNCTION_DECL);
@@ -5810,7 +6159,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
   DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
   DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
   initialize_cfun (new_decl, old_decl,
-                  old_entry_block->count);
+                  new_entry ? new_entry->count : old_entry_block->count);
   if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
     DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
       = id.src_cfun->gimple_df->ipa_pta;
@@ -5821,96 +6170,78 @@ tree_function_versioning (tree old_decl, tree new_decl,
     DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
       = copy_static_chain (p, &id);
 
+  auto_vec<int, 16> new_param_indices;
+  ipa_param_adjustments *old_param_adjustments
+    = old_version_node->clone.param_adjustments;
+  if (old_param_adjustments)
+    old_param_adjustments->get_updated_indices (&new_param_indices);
+
   /* If there's a tree_map, prepare for substitution.  */
   if (tree_map)
     for (i = 0; i < tree_map->length (); i++)
       {
        gimple *init;
        replace_info = (*tree_map)[i];
-       if (replace_info->replace_p)
+
+       int p = replace_info->parm_num;
+       if (old_param_adjustments)
+         p = new_param_indices[p];
+
+       tree parm;
+       tree req_type, new_type;
+
+       for (parm = DECL_ARGUMENTS (old_decl); p;
+            parm = DECL_CHAIN (parm))
+         p--;
+       tree old_tree = parm;
+       req_type = TREE_TYPE (parm);
+       new_type = TREE_TYPE (replace_info->new_tree);
+       if (!useless_type_conversion_p (req_type, new_type))
          {
-           int parm_num = -1;
-           if (!replace_info->old_tree)
-             {
-               int p = replace_info->parm_num;
-               tree parm;
-               tree req_type, new_type;
-
-               for (parm = DECL_ARGUMENTS (old_decl); p;
-                    parm = DECL_CHAIN (parm))
-                 p--;
-               replace_info->old_tree = parm;
-               parm_num = replace_info->parm_num;
-               req_type = TREE_TYPE (parm);
-               new_type = TREE_TYPE (replace_info->new_tree);
-               if (!useless_type_conversion_p (req_type, new_type))
-                 {
-                   if (fold_convertible_p (req_type, replace_info->new_tree))
-                     replace_info->new_tree
-                       = fold_build1 (NOP_EXPR, req_type,
-                                      replace_info->new_tree);
-                   else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
-                     replace_info->new_tree
-                       = fold_build1 (VIEW_CONVERT_EXPR, req_type,
-                                      replace_info->new_tree);
-                   else
-                     {
-                       if (dump_file)
-                         {
-                           fprintf (dump_file, "    const ");
-                           print_generic_expr (dump_file,
-                                               replace_info->new_tree, 0);
-                           fprintf (dump_file,
-                                    "  can't be converted to param ");
-                           print_generic_expr (dump_file, parm, 0);
-                           fprintf (dump_file, "\n");
-                         }
-                       replace_info->old_tree = NULL;
-                     }
-                 }
-             }
+           if (fold_convertible_p (req_type, replace_info->new_tree))
+             replace_info->new_tree
+               = fold_build1 (NOP_EXPR, req_type, replace_info->new_tree);
+           else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
+             replace_info->new_tree
+               = fold_build1 (VIEW_CONVERT_EXPR, req_type,
+                              replace_info->new_tree);
            else
-             gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
-           if (replace_info->old_tree)
              {
-               init = setup_one_parameter (&id, replace_info->old_tree,
-                                           replace_info->new_tree, id.src_fn,
-                                           NULL,
-                                           &vars);
-               if (init)
-                 init_stmts.safe_push (init);
-               if (MAY_HAVE_DEBUG_STMTS && args_to_skip)
+               if (dump_file)
                  {
-                   if (parm_num == -1)
-                     {
-                       tree parm;
-                       int p;
-                       for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
-                            parm = DECL_CHAIN (parm), p++)
-                         if (parm == replace_info->old_tree)
-                           {
-                             parm_num = p;
-                             break;
-                           }
-                     }
-                   if (parm_num != -1)
-                     {
-                       if (debug_args_to_skip == args_to_skip)
-                         {
-                           debug_args_to_skip = BITMAP_ALLOC (NULL);
-                           bitmap_copy (debug_args_to_skip, args_to_skip);
-                         }
-                       bitmap_clear_bit (debug_args_to_skip, parm_num);
-                     }
+                   fprintf (dump_file, "    const ");
+                   print_generic_expr (dump_file,
+                                       replace_info->new_tree);
+                   fprintf (dump_file,
+                            "  can't be converted to param ");
+                   print_generic_expr (dump_file, parm);
+                   fprintf (dump_file, "\n");
                  }
+               old_tree = NULL;
              }
          }
+
+       if (old_tree)
+         {
+           init = setup_one_parameter (&id, old_tree, replace_info->new_tree,
+                                       id.src_fn, NULL, &vars);
+           if (init)
+             init_stmts.safe_push (init);
+         }
       }
-  /* Copy the function's arguments.  */
-  if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
+
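+  /* When the prototype is being changed, let the param-body machinery build
+     the new argument chain and prepare the remapping of uses of removed or
+     split parameters in the body; otherwise simply copy the arguments.  */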
+  ipa_param_body_adjustments *param_body_adjs = NULL;
+  if (param_adjustments)
+    {
+      param_body_adjs = new ipa_param_body_adjustments (param_adjustments,
+                                                       new_decl, old_decl,
+                                                       &id, &vars, tree_map);
+      id.param_body_adjs = param_body_adjs;
+      DECL_ARGUMENTS (new_decl) = param_body_adjs->get_new_param_chain ();
+    }
+  else if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
     DECL_ARGUMENTS (new_decl)
-      = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
-                                      args_to_skip, &vars);
+      = copy_arguments_nochange (DECL_ARGUMENTS (old_decl), &id);
 
   DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
   BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
@@ -5923,12 +6254,19 @@ tree_function_versioning (tree old_decl, tree new_decl,
 
   if (DECL_RESULT (old_decl) == NULL_TREE)
     ;
-  else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
+  else if (param_adjustments && param_adjustments->m_skip_return
+          && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
     {
+      tree resdecl_repl = copy_result_decl_to_var (DECL_RESULT (old_decl),
+                                                  &id);
+      declare_inline_vars (NULL, resdecl_repl);
+      insert_decl_map (&id, DECL_RESULT (old_decl), resdecl_repl);
+
       DECL_RESULT (new_decl)
        = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
                      RESULT_DECL, NULL_TREE, void_type_node);
       DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
+      DECL_IS_MALLOC (new_decl) = false;
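+      /* The clone returns void now, so any malloc attribute inherited from
+        the original would be meaningless.  */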
       cfun->returns_struct = 0;
       cfun->returns_pcc_struct = 0;
     }
@@ -5957,8 +6295,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
     }
 
   /* Copy the Function's body.  */
-  copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
-            ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
+  copy_body (&id, ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
             new_entry);
 
   /* Renumber the lexical scoping (non-code) blocks consecutively.  */
@@ -5989,9 +6326,10 @@ tree_function_versioning (tree old_decl, tree new_decl,
   free_dominance_info (CDI_DOMINATORS);
   free_dominance_info (CDI_POST_DOMINATORS);
 
+  update_max_bb_count ();
   fold_marked_statements (0, id.statements_to_fold);
   delete id.statements_to_fold;
-  delete_unreachable_blocks_update_callgraph (&id);
+  delete_unreachable_blocks_update_callgraph (id.dst_node, update_clones);
   if (id.dst_node->definition)
     cgraph_edge::rebuild_references ();
   if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
@@ -6012,42 +6350,39 @@ tree_function_versioning (tree old_decl, tree new_decl,
       for (e = new_version_node->callees; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
-         e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
-                                                        bb);
          e->count = bb->count;
        }
       for (e = new_version_node->indirect_calls; e; e = e->next_callee)
        {
          basic_block bb = gimple_bb (e->call_stmt);
-         e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
-                                                        bb);
          e->count = bb->count;
        }
     }
 
-  if (debug_args_to_skip && MAY_HAVE_DEBUG_STMTS)
+  if (param_body_adjs && MAY_HAVE_DEBUG_BIND_STMTS)
     {
-      tree parm;
       vec<tree, va_gc> **debug_args = NULL;
       unsigned int len = 0;
-      for (parm = DECL_ARGUMENTS (old_decl), i = 0;
-          parm; parm = DECL_CHAIN (parm), i++)
-       if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
-         {
-           tree ddecl;
+      unsigned reset_len = param_body_adjs->m_reset_debug_decls.length ();
 
-           if (debug_args == NULL)
-             {
-               debug_args = decl_debug_args_insert (new_decl);
-               len = vec_safe_length (*debug_args);
-             }
-           ddecl = make_node (DEBUG_EXPR_DECL);
-           DECL_ARTIFICIAL (ddecl) = 1;
-           TREE_TYPE (ddecl) = TREE_TYPE (parm);
-           SET_DECL_MODE (ddecl, DECL_MODE (parm));
-           vec_safe_push (*debug_args, DECL_ORIGIN (parm));
-           vec_safe_push (*debug_args, ddecl);
-         }
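+      /* Parameters that were removed from the prototype but whose values
+        live in gimple registers are collected in m_reset_debug_decls; give
+        each of them an entry in the new decl's debug args so that debug
+        info can still refer to them.  */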
+      for (i = 0; i < reset_len; i++)
+       {
+         tree parm = param_body_adjs->m_reset_debug_decls[i];
+         gcc_assert (is_gimple_reg (parm));
+         tree ddecl;
+
+         if (debug_args == NULL)
+           {
+             debug_args = decl_debug_args_insert (new_decl);
+             len = vec_safe_length (*debug_args);
+           }
+         ddecl = make_node (DEBUG_EXPR_DECL);
+         DECL_ARTIFICIAL (ddecl) = 1;
+         TREE_TYPE (ddecl) = TREE_TYPE (parm);
+         SET_DECL_MODE (ddecl, DECL_MODE (parm));
+         vec_safe_push (*debug_args, DECL_ORIGIN (parm));
+         vec_safe_push (*debug_args, ddecl);
+       }
       if (debug_args != NULL)
        {
          /* On the callee side, add
@@ -6058,11 +6393,11 @@ tree_function_versioning (tree old_decl, tree new_decl,
             in the debug info that var (whose DECL_ORIGIN is the parm
             PARM_DECL) is optimized away, but could be looked up at the
             call site as value of D#X there.  */
-         tree var = vars, vexpr;
+         tree vexpr;
          gimple_stmt_iterator cgsi
            = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
          gimple *def_temp;
-         var = vars;
+         tree var = vars;
          i = vec_safe_length (*debug_args);
          do
            {
@@ -6073,7 +6408,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
              if (var == NULL_TREE)
                break;
              vexpr = make_node (DEBUG_EXPR_DECL);
-             parm = (**debug_args)[i];
+             tree parm = (**debug_args)[i];
              DECL_ARTIFICIAL (vexpr) = 1;
              TREE_TYPE (vexpr) = TREE_TYPE (parm);
              SET_DECL_MODE (vexpr, DECL_MODE (parm));
@@ -6085,9 +6420,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
          while (i > len);
        }
     }
-
-  if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
-    BITMAP_FREE (debug_args_to_skip);
+  delete param_body_adjs;
   free_dominance_info (CDI_DOMINATORS);
   free_dominance_info (CDI_POST_DOMINATORS);
 
@@ -6207,6 +6540,7 @@ copy_fn (tree fn, tree& parms, tree& result)
      since front-end specific mechanisms may rely on sharing.  */
   id.regimplify = false;
   id.do_not_unshare = true;
+  id.do_not_fold = true;
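+  /* Likewise do not fold, since front ends may depend on seeing the
+     original, unfolded trees.  */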
 
   /* We're not inside any EH region.  */
   id.eh_lp_nr = 0;