diff --git a/gcc/ipa-prop.c b/gcc/ipa-prop.c
index 6bdb0dfc735ab3c53e13d2d3d223b1e80e84db32..b5044cf3c5f1772b171411e472cfc8da616d0584 100644
--- a/gcc/ipa-prop.c
+++ b/gcc/ipa-prop.c
@@ -1,5 +1,5 @@
 /* Interprocedural analyses.
-   Copyright (C) 2005-2013 Free Software Foundation, Inc.
+   Copyright (C) 2005-2015 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -20,15 +20,34 @@ along with GCC; see the file COPYING3.  If not see
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
+#include "alias.h"
+#include "symtab.h"
+#include "options.h"
 #include "tree.h"
+#include "fold-const.h"
+#include "predict.h"
+#include "tm.h"
+#include "hard-reg-set.h"
+#include "function.h"
+#include "dominance.h"
+#include "cfg.h"
 #include "basic-block.h"
 #include "tree-ssa-alias.h"
 #include "internal-fn.h"
 #include "gimple-fold.h"
 #include "tree-eh.h"
 #include "gimple-expr.h"
-#include "is-a.h"
 #include "gimple.h"
+#include "rtl.h"
+#include "flags.h"
+#include "insn-config.h"
+#include "expmed.h"
+#include "dojump.h"
+#include "explow.h"
+#include "calls.h"
+#include "emit-rtl.h"
+#include "varasm.h"
+#include "stmt.h"
 #include "expr.h"
 #include "stor-layout.h"
 #include "print-tree.h"
@@ -38,6 +57,9 @@ along with GCC; see the file COPYING3.  If not see
 #include "gimple-walk.h"
 #include "langhooks.h"
 #include "target.h"
+#include "cgraph.h"
+#include "alloc-pool.h"
+#include "symbol-summary.h"
 #include "ipa-prop.h"
 #include "bitmap.h"
 #include "gimple-ssa.h"
@@ -49,7 +71,6 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-pass.h"
 #include "tree-inline.h"
 #include "ipa-inline.h"
-#include "flags.h"
 #include "diagnostic.h"
 #include "gimple-pretty-print.h"
 #include "lto-streamer.h"
@@ -57,28 +78,73 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-streamer.h"
 #include "params.h"
 #include "ipa-utils.h"
+#include "stringpool.h"
+#include "tree-ssanames.h"
+#include "dbgcnt.h"
+#include "domwalk.h"
+#include "builtins.h"
+
+/* Intermediate information that we get from alias analysis about a particular
+   parameter in a particular basic_block.  When a parameter or the memory it
+   references is marked modified, we use that information in all dominated
+   blocks without consulting the alias analysis oracle.  */
+
+struct param_aa_status
+{
+  /* Set when this structure contains meaningful information.  If not, the
+     structure describing a dominating BB should be used instead.  */
+  bool valid;
+
+  /* Whether we have seen something which might have modified the data in
+     question.  PARM is for the parameter itself, REF is for the data it
+     points to, using the alias type of individual accesses, and PT is the
+     same thing but for computing aggregate pass-through functions using a
+     very inclusive ao_ref.  */
+  bool parm_modified, ref_modified, pt_modified;
+};
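
The comment above describes a dominator-based cache: once some dominating block has a valid answer for a parameter, dominated blocks reuse it instead of asking the alias oracle again.  A minimal standalone sketch of that lookup, with invented names (block, bb_status, find_dominating_status) rather than GCC types:

#include <vector>

struct bb_status { bool valid = false; bool parm_modified = false; };
struct block { int idom; };  /* index of the immediate dominator, -1 for the entry block */

/* Walk up the dominator tree from BB and return the index of the nearest
   dominating block that already has a valid status, or -1 if none does.  */
static int
find_dominating_status (const std::vector<block> &cfg,
                        const std::vector<bb_status> &status, int bb)
{
  for (int d = cfg[bb].idom; d != -1; d = cfg[d].idom)
    if (status[d].valid)
      return d;
  return -1;
}

The real find_dominating_aa_status further down in this patch does the same walk over the CFG using get_immediate_dominator.
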
 
-/* Intermediate information about a parameter that is only useful during the
-   run of ipa_analyze_node and is not kept afterwards.  */
+/* Information related to a given BB that used only when looking at function
+   body.  */
 
-struct param_analysis_info
+struct ipa_bb_info
 {
-  bool parm_modified, ref_modified, pt_modified;
-  bitmap parm_visited_statements, pt_visited_statements;
+  /* Call graph edges going out of this BB.  */
+  vec<cgraph_edge *> cg_edges;
+  /* Alias analysis statuses of each formal parameter at this bb.  */
+  vec<param_aa_status> param_aa_statuses;
 };
 
+/* Structure with global information that is only used when looking at the
+   function body.  */
+
+struct func_body_info
+{
+  /* The node that is being analyzed.  */
+  cgraph_node *node;
+
+  /* Its info.  */
+  struct ipa_node_params *info;
+
+  /* Information about individual BBs. */
+  vec<ipa_bb_info> bb_infos;
+
+  /* Number of parameters.  */
+  int param_count;
+
+  /* Number of statements already walked when analyzing this function.  */
+  unsigned int aa_walked;
+};
+
+/* Function summary where the parameter infos are actually stored. */
+ipa_node_params_t *ipa_node_params_sum = NULL;
+/* Vector of IPA-CP transformation data for each clone.  */
+vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
 /* Vector where the parameter infos are actually stored. */
-vec<ipa_node_params_t> ipa_node_params_vector;
-/* Vector of known aggregate values in cloned nodes.  */
-vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
-/* Vector where the parameter infos are actually stored. */
-vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
+vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;
 
 /* Holders of ipa cgraph hooks: */
 static struct cgraph_edge_hook_list *edge_removal_hook_holder;
-static struct cgraph_node_hook_list *node_removal_hook_holder;
 static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
-static struct cgraph_2node_hook_list *node_duplication_hook_holder;
 static struct cgraph_node_hook_list *function_insertion_hook_holder;
 
 /* Description of a reference to an IPA constant.  */
@@ -95,7 +161,8 @@ struct ipa_cst_ref_desc
 
 /* Allocation pool for reference descriptions.  */
 
-static alloc_pool ipa_refdesc_pool;
+static pool_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
+  ("IPA-PROP ref descriptions", 32);
 
 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
    with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */
@@ -104,19 +171,17 @@ static bool
 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
 {
   tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
-  struct cl_optimization *os;
 
   if (!fs_opts)
     return false;
-  os = TREE_OPTIMIZATION (fs_opts);
-  return !os->x_optimize || !os->x_flag_ipa_cp;
+  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
 }
 
 /* Return index of the formal whose tree is PTREE in function which corresponds
    to INFO.  */
 
 static int
-ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
+ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
 {
   int i, count;
 
@@ -142,7 +207,7 @@ ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
 
 static void
 ipa_populate_param_decls (struct cgraph_node *node,
-                         vec<ipa_param_descriptor_t> &descriptors)
+                         vec<ipa_param_descriptor> &descriptors)
 {
   tree fndecl;
   tree fnargs;
@@ -156,14 +221,15 @@ ipa_populate_param_decls (struct cgraph_node *node,
   for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
     {
       descriptors[param_num].decl = parm;
-      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
+      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
+                                                            true);
       param_num++;
     }
 }
 
 /* Return how many formal parameters FNDECL has.  */
 
-static inline int
+int
 count_formal_params (tree fndecl)
 {
   tree parm;
@@ -238,15 +304,6 @@ ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
       fprintf (f, "       param %d: ", i);
       if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
-      else if (type == IPA_JF_KNOWN_TYPE)
-       {
-         fprintf (f, "KNOWN TYPE: base  ");
-         print_generic_expr (f, jump_func->value.known_type.base_type, 0);
-         fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
-                  jump_func->value.known_type.offset);
-         print_generic_expr (f, jump_func->value.known_type.component_type, 0);
-         fprintf (f, "\n");
-       }
       else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
@@ -275,21 +332,16 @@ ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
-         if (jump_func->value.pass_through.type_preserved)
-           fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
       else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
-         fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
+         fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
-         print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
-         if (jump_func->value.ancestor.type_preserved)
-           fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
 
@@ -315,6 +367,23 @@ ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
              fprintf (f, "\n");
            }
        }
+
+      struct ipa_polymorphic_call_context *ctx
+       = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
+      if (ctx && !ctx->useless_p ())
+       {
+         fprintf (f, "         Context: ");
+         ctx->dump (dump_file);
+       }
+
+      if (jump_func->alignment.known)
+       {
+         fprintf (f, "         Alignment: %u, misalignment: %u\n",
+                  jump_func->alignment.align,
+                  jump_func->alignment.misalign);
+       }
+      else
+       fprintf (f, "         Unknown alignment\n");
     }
 }
 
@@ -335,8 +404,8 @@ ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
        continue;
 
       fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
-              xstrdup (node->name ()), node->order,
-              xstrdup (cs->callee->name ()),
+              xstrdup_for_dump (node->name ()), node->order,
+              xstrdup_for_dump (cs->callee->name ()),
               cs->callee->order);
       ipa_print_node_jump_functions_for_edge (f, cs);
     }
@@ -355,8 +424,10 @@ ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by_value");
       else
-       fprintf (f, "    indirect %s callsite, calling param %i",
-                ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
+       fprintf (f, "    indirect %s callsite, calling param %i, "
+                "offset " HOST_WIDE_INT_PRINT_DEC,
+                ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
+                ii->offset);
 
       if (cs->call_stmt)
        {
@@ -365,6 +436,8 @@ ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
        }
       else
        fprintf (f, "\n");
+      if (ii->polymorphic)
+       ii->context.dump (f);
       ipa_print_node_jump_functions_for_edge (f, cs);
     }
 }
@@ -383,19 +456,13 @@ ipa_print_all_jump_functions (FILE *f)
     }
 }
 
-/* Set JFUNC to be a known type jump function.  */
+/* Set JFUNC to be a jump function that carries no information (IPA_JF_UNKNOWN).  */
 
 static void
-ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
-                      tree base_type, tree component_type)
+ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
 {
-  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
-             && TYPE_BINFO (component_type));
-  jfunc->type = IPA_JF_KNOWN_TYPE;
-  jfunc->value.known_type.offset = offset,
-  jfunc->value.known_type.base_type = base_type;
-  jfunc->value.known_type.component_type = component_type;
-  gcc_assert (component_type);
+  jfunc->type = IPA_JF_UNKNOWN;
+  jfunc->alignment.known = false;
 }
 
 /* Set JFUNC to be a copy of another jmp (to be used by jump function
@@ -427,11 +494,8 @@ ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
       && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
     {
       struct ipa_cst_ref_desc *rdesc;
-      if (!ipa_refdesc_pool)
-       ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
-                                       sizeof (struct ipa_cst_ref_desc), 32);
 
-      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
+      rdesc = ipa_refdesc_pool.allocate ();
       rdesc->cs = cs;
       rdesc->next_duplicate = NULL;
       rdesc->refcount = 1;
@@ -444,14 +508,13 @@ ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
 /* Set JFUNC to be a simple pass-through jump function.  */
 static void
 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
-                               bool agg_preserved, bool type_preserved)
+                               bool agg_preserved)
 {
   jfunc->type = IPA_JF_PASS_THROUGH;
   jfunc->value.pass_through.operand = NULL_TREE;
   jfunc->value.pass_through.formal_id = formal_id;
   jfunc->value.pass_through.operation = NOP_EXPR;
   jfunc->value.pass_through.agg_preserved = agg_preserved;
-  jfunc->value.pass_through.type_preserved = type_preserved;
 }
 
 /* Set JFUNC to be an arithmetic pass through jump function.  */
@@ -465,42 +528,34 @@ ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
   jfunc->value.pass_through.formal_id = formal_id;
   jfunc->value.pass_through.operation = operation;
   jfunc->value.pass_through.agg_preserved = false;
-  jfunc->value.pass_through.type_preserved = false;
 }
 
 /* Set JFUNC to be an ancestor jump function.  */
 
 static void
 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
-                    tree type, int formal_id, bool agg_preserved,
-                    bool type_preserved)
+                    int formal_id, bool agg_preserved)
 {
   jfunc->type = IPA_JF_ANCESTOR;
   jfunc->value.ancestor.formal_id = formal_id;
   jfunc->value.ancestor.offset = offset;
-  jfunc->value.ancestor.type = type;
   jfunc->value.ancestor.agg_preserved = agg_preserved;
-  jfunc->value.ancestor.type_preserved = type_preserved;
 }
 
-/* Extract the acual BINFO being described by JFUNC which must be a known type
-   jump function.  */
+/* Get IPA BB information about the given BB.  FBI is the context of analysis
+   of this function body.  */
 
-tree
-ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
+static struct ipa_bb_info *
+ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
 {
-  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
-  if (!base_binfo)
-    return NULL_TREE;
-  return get_binfo_at_offset (base_binfo,
-                             jfunc->value.known_type.offset,
-                             jfunc->value.known_type.component_type);
+  gcc_checking_assert (fbi);
+  return &fbi->bb_infos[bb->index];
 }
 
 /* Structure to be passed in between detect_type_change and
    check_stmt_for_type_change.  */
 
-struct type_change_info
+struct prop_type_change_info
 {
   /* Offset into the object where there is the virtual method pointer we are
      looking for.  */
@@ -508,14 +563,8 @@ struct type_change_info
   /* The declaration or SSA_NAME pointer of the base that we are checking for
      type change.  */
   tree object;
-  /* If we actually can tell the type that the object has changed to, it is
-     stored in this field.  Otherwise it remains NULL_TREE.  */
-  tree known_current_type;
   /* Set to true if dynamic type change has been detected.  */
   bool type_maybe_changed;
-  /* Set to true if multiple types have been encountered.  known_current_type
-     must be disregarded in that case.  */
-  bool multiple_types_encountered;
 };
 
 /* Return true if STMT can modify a virtual method table pointer.
@@ -560,6 +609,8 @@ stmt_may_be_vtbl_ptr_store (gimple stmt)
 {
   if (is_gimple_call (stmt))
     return false;
+  if (gimple_clobber_p (stmt))
+    return false;
   else if (is_gimple_assign (stmt))
     {
       tree lhs = gimple_assign_lhs (stmt);
@@ -581,70 +632,19 @@ stmt_may_be_vtbl_ptr_store (gimple stmt)
   return true;
 }
 
-/* If STMT can be proved to be an assignment to the virtual method table
-   pointer of ANALYZED_OBJ and the type associated with the new table
-   identified, return the type.  Otherwise return NULL_TREE.  */
-
-static tree
-extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
-{
-  HOST_WIDE_INT offset, size, max_size;
-  tree lhs, rhs, base;
-
-  if (!gimple_assign_single_p (stmt))
-    return NULL_TREE;
-
-  lhs = gimple_assign_lhs (stmt);
-  rhs = gimple_assign_rhs1 (stmt);
-  if (TREE_CODE (lhs) != COMPONENT_REF
-      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
-      || TREE_CODE (rhs) != ADDR_EXPR)
-    return NULL_TREE;
-  rhs = get_base_address (TREE_OPERAND (rhs, 0));
-  if (!rhs
-      || TREE_CODE (rhs) != VAR_DECL
-      || !DECL_VIRTUAL_P (rhs))
-    return NULL_TREE;
-
-  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
-  if (offset != tci->offset
-      || size != POINTER_SIZE
-      || max_size != POINTER_SIZE)
-    return NULL_TREE;
-  if (TREE_CODE (base) == MEM_REF)
-    {
-      if (TREE_CODE (tci->object) != MEM_REF
-         || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
-         || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
-                                 TREE_OPERAND (base, 1)))
-       return NULL_TREE;
-    }
-  else if (tci->object != base)
-    return NULL_TREE;
-
-  return DECL_CONTEXT (rhs);
-}
-
-/* Callback of walk_aliased_vdefs and a helper function for
-   detect_type_change to check whether a particular statement may modify
-   the virtual table pointer, and if possible also determine the new type of
-   the (sub-)object.  It stores its result into DATA, which points to a
-   type_change_info structure.  */
+/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
+   to check whether a particular statement may modify the virtual table
+   pointer.  It stores its result into DATA, which points to a
+   prop_type_change_info structure.  */
 
 static bool
 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
 {
   gimple stmt = SSA_NAME_DEF_STMT (vdef);
-  struct type_change_info *tci = (struct type_change_info *) data;
+  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
 
   if (stmt_may_be_vtbl_ptr_store (stmt))
     {
-      tree type;
-      type = extr_type_from_vtbl_ptr_store (stmt, tci);
-      if (tci->type_maybe_changed
-         && type != tci->known_current_type)
-       tci->multiple_types_encountered = true;
-      tci->known_current_type = type;
       tci->type_maybe_changed = true;
       return true;
     }
@@ -652,34 +652,94 @@ check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
     return false;
 }
 
+/* See if ARG is a PARM_DECL describing an instance passed by pointer
+   or reference in FUNCTION.  Return true if the dynamic type may change
+   between the beginning of the function and the point where CALL is invoked;
+   return false if it cannot.
+
+   Generally functions are not allowed to change the type of such instances,
+   but they can call destructors.  We assume that methods cannot destroy the
+   THIS pointer.  Also, as a special case, constructors and destructors may
+   change the type of the THIS pointer.  */
 
+static bool
+param_type_may_change_p (tree function, tree arg, gimple call)
+{
+  /* Pure functions cannot make any changes to the dynamic type;
+     that requires writing to memory.  */
+  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
+    return false;
+  /* We need to check whether we are within an inlined constructor
+     or destructor (ideally we would have a way to check that the
+     inlined cdtor is actually working on ARG, but we don't have an
+     easy tie on this, so punt on all non-pure cdtors).
+     We could also record the types of cdtors and, once we know the
+     type of the instance, match them.
+
+     Also, code unification optimizations may merge calls from
+     different blocks, making return values unreliable.  So
+     do nothing during late optimization.  */
+  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
+    return true;
+  if (TREE_CODE (arg) == SSA_NAME
+      && SSA_NAME_IS_DEFAULT_DEF (arg)
+      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
+    {
+      /* Normal (non-THIS) argument.  */
+      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
+          || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
+         /* THIS pointer of a method - here we want to watch constructors
+            and destructors as those definitely may change the dynamic
+            type.  */
+         || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
+             && !DECL_CXX_CONSTRUCTOR_P (function)
+             && !DECL_CXX_DESTRUCTOR_P (function)
+             && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
+       {
+         /* Walk the inline stack and watch out for ctors/dtors.  */
+         for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
+              block = BLOCK_SUPERCONTEXT (block))
+           if (inlined_polymorphic_ctor_dtor_block_p (block, false))
+             return true;
+         return false;
+       }
+    }
+  return true;
+}
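
As a plain C++ illustration (not GCC internals) of why constructors and destructors are the special case above: while Base's constructor or destructor runs, the dynamic type of *this is Base, so a virtual call made from there must not be resolved to the derived override.

#include <cstdio>

struct Base
{
  Base () { g (); }             /* dynamic type of *this is Base here */
  virtual ~Base () { g (); }    /* and here again */
  virtual void f () { std::puts ("Base::f"); }
  void g () { f (); }           /* virtual dispatch on the current dynamic type */
};

struct Derived : Base
{
  void f () override { std::puts ("Derived::f"); }
};

int main ()
{
  Derived d;    /* constructor prints "Base::f" */
  d.g ();       /* prints "Derived::f" */
  return 0;     /* destructor prints "Base::f" again */
}
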
 
 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
    callsite CALL) by looking for assignments to its virtual table pointer.  If
    it is, return true and fill in the jump function JFUNC with relevant type
    information or set it to unknown.  ARG is the object itself (not a pointer
    to it, unless dereferenced).  BASE is the base of the memory access as
-   returned by get_ref_base_and_extent, as is the offset.  */
+   returned by get_ref_base_and_extent, as is the offset.
+
+   This is a helper function for detect_type_change and detect_type_change_ssa
+   that does the heavy work which is usually unnecessary.  */
 
 static bool
-detect_type_change (tree arg, tree base, tree comp_type, gimple call,
-                   struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
+detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
+                                      gcall *call, struct ipa_jump_func *jfunc,
+                                      HOST_WIDE_INT offset)
 {
-  struct type_change_info tci;
+  struct prop_type_change_info tci;
   ao_ref ao;
+  bool entry_reached = false;
 
   gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
+
+  comp_type = TYPE_MAIN_VARIANT (comp_type);
+
   /* Const calls cannot call virtual methods through VMT and so type changes do
      not matter.  */
   if (!flag_devirtualize || !gimple_vuse (call)
       /* Be sure expected_type is polymorphic.  */
       || !comp_type
       || TREE_CODE (comp_type) != RECORD_TYPE
-      || !TYPE_BINFO (comp_type)
-      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
-    return false;
+      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
+      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
+    return true;
 
   ao_ref_init (&ao, arg);
   ao.base = base;
@@ -689,42 +749,60 @@ detect_type_change (tree arg, tree base, tree comp_type, gimple call,
 
   tci.offset = offset;
   tci.object = get_base_address (arg);
-  tci.known_current_type = NULL_TREE;
   tci.type_maybe_changed = false;
-  tci.multiple_types_encountered = false;
 
   walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
-                     &tci, NULL);
+                     &tci, NULL, &entry_reached);
   if (!tci.type_maybe_changed)
     return false;
 
-  if (!tci.known_current_type
-      || tci.multiple_types_encountered
-      || offset != 0)
-    jfunc->type = IPA_JF_UNKNOWN;
-  else
-    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
-
+  ipa_set_jf_unknown (jfunc);
   return true;
 }
 
+/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
+   If it is, return true and fill in the jump function JFUNC with relevant type
+   information or set it to unknown.  ARG is the object itself (not a pointer
+   to it, unless dereferenced).  BASE is the base of the memory access as
+   returned by get_ref_base_and_extent, as is the offset.  */
+
+static bool
+detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
+                   struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
+{
+  if (!flag_devirtualize)
+    return false;
+
+  if (TREE_CODE (base) == MEM_REF
+      && !param_type_may_change_p (current_function_decl,
+                                  TREE_OPERAND (base, 0),
+                                  call))
+    return false;
+  return detect_type_change_from_memory_writes (arg, base, comp_type,
+                                               call, jfunc, offset);
+}
+
 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
    SSA name (its dereference will become the base and the offset is assumed to
    be zero).  */
 
 static bool
 detect_type_change_ssa (tree arg, tree comp_type,
-                       gimple call, struct ipa_jump_func *jfunc)
+                       gcall *call, struct ipa_jump_func *jfunc)
 {
   gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
   if (!flag_devirtualize
       || !POINTER_TYPE_P (TREE_TYPE (arg)))
     return false;
 
+  if (!param_type_may_change_p (current_function_decl, arg, call))
+    return false;
+
   arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));
 
-  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
+  return detect_type_change_from_memory_writes (arg, arg, comp_type,
+                                               call, jfunc, 0);
 }
 
 /* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
@@ -739,34 +817,101 @@ mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
   return true;
 }
 
+/* Return true if we have already walked so many statements in AA that we
+   should really just start giving up.  */
+
+static bool
+aa_overwalked (struct func_body_info *fbi)
+{
+  gcc_checking_assert (fbi);
+  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
+}
+
+/* Find the nearest valid aa status for parameter specified by INDEX that
+   dominates BB.  */
+
+static struct param_aa_status *
+find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
+                          int index)
+{
+  while (true)
+    {
+      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
+      if (!bb)
+       return NULL;
+      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
+      if (!bi->param_aa_statuses.is_empty ()
+         && bi->param_aa_statuses[index].valid)
+       return &bi->param_aa_statuses[index];
+    }
+}
+
+/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
+   structures and/or initialize the result with a dominating description as
+   necessary.  */
+
+static struct param_aa_status *
+parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
+                         int index)
+{
+  gcc_checking_assert (fbi);
+  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
+  if (bi->param_aa_statuses.is_empty ())
+    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
+  struct param_aa_status *paa = &bi->param_aa_statuses[index];
+  if (!paa->valid)
+    {
+      gcc_checking_assert (!paa->parm_modified
+                          && !paa->ref_modified
+                          && !paa->pt_modified);
+      struct param_aa_status *dom_paa;
+      dom_paa = find_dominating_aa_status (fbi, bb, index);
+      if (dom_paa)
+       *paa = *dom_paa;
+      else
+       paa->valid = true;
+    }
+
+  return paa;
+}
+
 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
    a value known not to be modified in this function before reaching the
-   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
-   information about the parameter.  */
+   statement STMT.  FBI holds information about the function we have gathered
+   so far but which does not survive the summary building stage.  */
 
 static bool
-parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
-                              gimple stmt, tree parm_load)
+parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
+                             gimple stmt, tree parm_load)
 {
+  struct param_aa_status *paa;
   bool modified = false;
-  bitmap *visited_stmts;
   ao_ref refd;
 
-  if (parm_ainfo && parm_ainfo->parm_modified)
-    return false;
+  /* FIXME: FBI can be NULL if we are being called from outside
+     ipa_node_analysis or ipcp_transform_function, which currently happens
+     during inlining analysis.  It would be great to extend fbi's lifetime and
+     always have it.  Currently, we are just not afraid of too much walking in
+     that case.  */
+  if (fbi)
+    {
+      if (aa_overwalked (fbi))
+       return false;
+      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
+      if (paa->parm_modified)
+       return false;
+    }
+  else
+    paa = NULL;
 
   gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
   ao_ref_init (&refd, parm_load);
-  /* We can cache visited statements only when parm_ainfo is available and when
-     we are looking at a naked load of the whole parameter.  */
-  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
-    visited_stmts = NULL;
-  else
-    visited_stmts = &parm_ainfo->parm_visited_statements;
-  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
-                     visited_stmts);
-  if (parm_ainfo && modified)
-    parm_ainfo->parm_modified = true;
+  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
+                                  &modified, NULL);
+  if (fbi)
+    fbi->aa_walked += walked;
+  if (paa && modified)
+    paa->parm_modified = true;
   return !modified;
 }
 
@@ -775,8 +920,8 @@ parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
    modified.  Otherwise return -1.  */
 
 static int
-load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
-                           struct param_analysis_info *parms_ainfo,
+load_from_unmodified_param (struct func_body_info *fbi,
+                           vec<ipa_param_descriptor> descriptors,
                            gimple stmt)
 {
   int index;
@@ -791,45 +936,58 @@ load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
 
   index = ipa_get_param_decl_index_1 (descriptors, op1);
   if (index < 0
-      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
-                                       : NULL, stmt, op1))
+      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
     return -1;
 
   return index;
 }
 
-/* Return true if memory reference REF loads data that are known to be
-   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
-   non-NULL, is a pointer to a structure containing temporary information about
-   PARM.  */
+/* Return true if memory reference REF (which must be a load through parameter
+   with INDEX) loads data that are known to be unmodified in this function
+   before reaching statement STMT.  */
 
 static bool
-parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
-                             gimple stmt, tree ref)
+parm_ref_data_preserved_p (struct func_body_info *fbi,
+                          int index, gimple stmt, tree ref)
 {
+  struct param_aa_status *paa;
   bool modified = false;
   ao_ref refd;
 
-  gcc_checking_assert (gimple_vuse (stmt));
-  if (parm_ainfo && parm_ainfo->ref_modified)
-    return false;
+  /* FIXME: FBI can be NULL if we are being called from outside
+     ipa_node_analysis or ipcp_transform_function, which currently happens
+     during inlining analysis.  It would be great to extend fbi's lifetime and
+     always have it.  Currently, we are just not afraid of too much walking in
+     that case.  */
+  if (fbi)
+    {
+      if (aa_overwalked (fbi))
+       return false;
+      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
+      if (paa->ref_modified)
+       return false;
+    }
+  else
+    paa = NULL;
 
+  gcc_checking_assert (gimple_vuse (stmt));
   ao_ref_init (&refd, ref);
-  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
-                     NULL);
-  if (parm_ainfo && modified)
-    parm_ainfo->ref_modified = true;
+  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
+                                  &modified, NULL);
+  if (fbi)
+    fbi->aa_walked += walked;
+  if (paa && modified)
+    paa->ref_modified = true;
   return !modified;
 }
 
-/* Return true if the data pointed to by PARM is known to be unmodified in this
-   function before reaching call statement CALL into which it is passed.
-   PARM_AINFO is a pointer to a structure containing temporary information
-   about PARM.  */
+/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
+   is known to be unmodified in this function before reaching call statement
+   CALL into which it is passed.  FBI describes the function body.  */
 
 static bool
-parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
-                              gimple call, tree parm)
+parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
+                             gimple call, tree parm)
 {
   bool modified = false;
   ao_ref refd;
@@ -838,17 +996,21 @@ parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
      function because it is not goin to use it.  But do not cache the result
      either.  Also, no such calculations for non-pointers.  */
   if (!gimple_vuse (call)
-      || !POINTER_TYPE_P (TREE_TYPE (parm)))
+      || !POINTER_TYPE_P (TREE_TYPE (parm))
+      || aa_overwalked (fbi))
     return false;
 
-  if (parm_ainfo->pt_modified)
+  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
+                                                         index);
+  if (paa->pt_modified)
     return false;
 
   ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
-  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
-                     parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
+  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
+                                  &modified, NULL);
+  fbi->aa_walked += walked;
   if (modified)
-    parm_ainfo->pt_modified = true;
+    paa->pt_modified = true;
   return !modified;
 }
 
@@ -863,10 +1025,11 @@ parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
    reference respectively.  */
 
 static bool
-ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
-                         struct param_analysis_info *parms_ainfo, gimple stmt,
-                         tree op, int *index_p, HOST_WIDE_INT *offset_p,
-                         HOST_WIDE_INT *size_p, bool *by_ref_p)
+ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
+                         vec<ipa_param_descriptor> descriptors,
+                         gimple stmt, tree op, int *index_p,
+                         HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
+                         bool *by_ref_p)
 {
   int index;
   HOST_WIDE_INT size, max_size;
@@ -879,8 +1042,7 @@ ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
     {
       int index = ipa_get_param_decl_index_1 (descriptors, base);
       if (index >= 0
-         && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
-                                          : NULL, stmt, op))
+         && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
@@ -919,12 +1081,11 @@ ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
       */
 
       gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
-      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
+      index = load_from_unmodified_param (fbi, descriptors, def);
     }
 
   if (index >= 0
-      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
-                                   stmt, op))
+      && parm_ref_data_preserved_p (fbi, index, stmt, op))
     {
       *index_p = index;
       *by_ref_p = true;
@@ -943,7 +1104,7 @@ ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
 {
-  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
+  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
 }
 
@@ -1001,10 +1162,10 @@ ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
    only needed for intraprocedural analysis.  */
 
 static void
-compute_complex_assign_jump_func (struct ipa_node_params *info,
-                                 struct param_analysis_info *parms_ainfo,
+compute_complex_assign_jump_func (struct func_body_info *fbi,
+                                 struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
-                                 gimple call, gimple stmt, tree name,
+                                 gcall *call, gimple stmt, tree name,
                                  tree param_type)
 {
   HOST_WIDE_INT offset, size, max_size;
@@ -1018,13 +1179,13 @@ compute_complex_assign_jump_func (struct ipa_node_params *info,
       if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
       else
-       index = load_from_unmodified_param (info->descriptors, parms_ainfo,
+       index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
       tc_ssa = op1;
     }
   else
     {
-      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
+      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
       tc_ssa = gimple_assign_lhs (stmt);
     }
 
@@ -1045,15 +1206,8 @@ compute_complex_assign_jump_func (struct ipa_node_params *info,
        }
       else if (gimple_assign_single_p (stmt))
        {
-         bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
-                                                    call, tc_ssa);
-         bool type_p = false;
-
-         if (param_type && POINTER_TYPE_P (param_type))
-           type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
-                                             call, jfunc);
-         if (type_p || jfunc->type == IPA_JF_UNKNOWN)
-           ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
+         bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
+         ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
        }
       return;
     }
@@ -1069,7 +1223,7 @@ compute_complex_assign_jump_func (struct ipa_node_params *info,
       || max_size == -1
       || max_size != size)
     return;
-  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
+  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
   ssa = TREE_OPERAND (base, 0);
   if (TREE_CODE (ssa) != SSA_NAME
       || !SSA_NAME_IS_DEFAULT_DEF (ssa)
@@ -1079,14 +1233,8 @@ compute_complex_assign_jump_func (struct ipa_node_params *info,
   /* Dynamic types are changed in constructors and destructors.  */
   index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
   if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
-    {
-      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
-                                        call, jfunc, offset);
-      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
-       ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
-                            parm_ref_data_pass_through_p (&parms_ainfo[index],
-                                                          call, ssa), type_p);
-    }
+    ipa_set_ancestor_jf (jfunc, offset,  index,
+                        parm_ref_data_pass_through_p (fbi, index, call, ssa));
 }
 
 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
@@ -1128,7 +1276,7 @@ get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
       || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
     return NULL_TREE;
 
-  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
+  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
   *obj_p = obj;
   return expr;
 }
@@ -1156,10 +1304,10 @@ get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
      return D.1879_6;  */
 
 static void
-compute_complex_ancestor_jump_func (struct ipa_node_params *info,
-                                   struct param_analysis_info *parms_ainfo,
+compute_complex_ancestor_jump_func (struct func_body_info *fbi,
+                                   struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
-                                   gimple call, gimple phi, tree param_type)
+                                   gcall *call, gphi *phi)
 {
   HOST_WIDE_INT offset;
   gimple assign, cond;
@@ -1191,7 +1339,8 @@ compute_complex_ancestor_jump_func (struct ipa_node_params *info,
     return;
   parm = TREE_OPERAND (expr, 0);
   index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
-  gcc_assert (index >= 0);
+  if (index < 0)
+    return;
 
   cond_bb = single_pred (assign_bb);
   cond = last_stmt (cond_bb);
@@ -1210,52 +1359,8 @@ compute_complex_ancestor_jump_func (struct ipa_node_params *info,
        return;
     }
 
-  bool type_p = false;
-  if (param_type && POINTER_TYPE_P (param_type))
-    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
-                                 call, jfunc, offset);
-  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
-    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
-                        parm_ref_data_pass_through_p (&parms_ainfo[index],
-                                                      call, parm), type_p);
-}
-
-/* Given OP which is passed as an actual argument to a called function,
-   determine if it is possible to construct a KNOWN_TYPE jump function for it
-   and if so, create one and store it to JFUNC.
-   EXPECTED_TYPE represents a type the argument should be in  */
-
-static void
-compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
-                             gimple call, tree expected_type)
-{
-  HOST_WIDE_INT offset, size, max_size;
-  tree base;
-
-  if (!flag_devirtualize
-      || TREE_CODE (op) != ADDR_EXPR
-      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
-      /* Be sure expected_type is polymorphic.  */
-      || !expected_type
-      || TREE_CODE (expected_type) != RECORD_TYPE
-      || !TYPE_BINFO (expected_type)
-      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
-    return;
-
-  op = TREE_OPERAND (op, 0);
-  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
-  if (!DECL_P (base)
-      || max_size == -1
-      || max_size != size
-      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
-      || is_global_var (base))
-    return;
-
-  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
-    return;
-
-  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
-                        expected_type);
+  ipa_set_ancestor_jf (jfunc, offset, index,
+                      parm_ref_data_pass_through_p (fbi, index, call, parm));
 }
 
 /* Inspect the given TYPE and return true iff it has the same structure (the
@@ -1325,14 +1430,73 @@ struct ipa_known_agg_contents_list
   struct ipa_known_agg_contents_list *next;
 };
 
+/* Find the proper place in the linked list of ipa_known_agg_contents_list
+   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE.
+   Return NULL if there is a partial overlap; if such an element is already
+   there, set *ALREADY_THERE to true.  */
+
+static struct ipa_known_agg_contents_list **
+get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
+                               HOST_WIDE_INT lhs_offset,
+                               HOST_WIDE_INT lhs_size,
+                               bool *already_there)
+{
+  struct ipa_known_agg_contents_list **p = list;
+  while (*p && (*p)->offset < lhs_offset)
+    {
+      if ((*p)->offset + (*p)->size > lhs_offset)
+       return NULL;
+      p = &(*p)->next;
+    }
+
+  if (*p && (*p)->offset < lhs_offset + lhs_size)
+    {
+      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
+       /* We already know this value is subsequently overwritten with
+          something else.  */
+       *already_there = true;
+      else
+       /* Otherwise this is a partial overlap which we cannot
+          represent.  */
+       return NULL;
+    }
+  return p;
+}
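
The helper returns the address of the link where a new entry belongs, the usual pointer-to-pointer idiom for keeping a singly linked list sorted.  A self-contained sketch of the same idiom; node and find_place are invented names for the illustration:

#include <initializer_list>

struct node { long offset; long size; node *next; };

/* Return the address of the link after all nodes with a smaller offset.  */
static node **
find_place (node **list, long offset)
{
  node **p = list;
  while (*p && (*p)->offset < offset)
    p = &(*p)->next;
  return p;
}

int main ()
{
  node *list = nullptr;
  node a = { 16, 8, nullptr }, b = { 0, 8, nullptr }, c = { 8, 8, nullptr };
  for (node *n : { &a, &b, &c })
    {
      node **p = find_place (&list, n->offset);
      n->next = *p;   /* splice the new node in front of *p */
      *p = n;         /* the list stays sorted by offset: 0, 8, 16 */
    }
  return 0;
}
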
+
+/* Build an aggregate jump function from LIST, assuming there are exactly
+   CONST_COUNT constant entries there and that the offset of the passed
+   argument is ARG_OFFSET, and store it into JFUNC.  */
+
+static void
+build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
+                              int const_count, HOST_WIDE_INT arg_offset,
+                              struct ipa_jump_func *jfunc)
+{
+  vec_alloc (jfunc->agg.items, const_count);
+  while (list)
+    {
+      if (list->constant)
+       {
+         struct ipa_agg_jf_item item;
+         item.offset = list->offset - arg_offset;
+         gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
+         item.value = unshare_expr_without_location (list->constant);
+         jfunc->agg.items->quick_push (item);
+       }
+      list = list->next;
+    }
+}
+
 /* Traverse statements from CALL backwards, scanning whether an aggregate given
    in ARG is filled in with constant values.  ARG can either be an aggregate
-   expression or a pointer to an aggregate.  JFUNC is the jump function into
-   which the constants are subsequently stored.  */
+   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
+   aggregate.  JFUNC is the jump function into which the constants are
+   subsequently stored.  */
 
 static void
-determine_known_aggregate_parts (gimple call, tree arg,
-                                struct ipa_jump_func *jfunc)
+determine_locally_known_aggregate_parts (gcall *call, tree arg,
+                                        tree arg_type,
+                                        struct ipa_jump_func *jfunc)
 {
   struct ipa_known_agg_contents_list *list = NULL;
   int item_count = 0, const_count = 0;
@@ -1346,18 +1510,18 @@ determine_known_aggregate_parts (gimple call, tree arg,
      arg_base and arg_offset based on what is actually passed as an actual
      argument.  */
 
-  if (POINTER_TYPE_P (TREE_TYPE (arg)))
+  if (POINTER_TYPE_P (arg_type))
     {
       by_ref = true;
       if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
-          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)))))
+          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
             return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
-         type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
+         type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
@@ -1374,10 +1538,8 @@ determine_known_aggregate_parts (gimple call, tree arg,
            return;
          if (DECL_P (arg_base))
            {
-             tree size;
              check_ref = false;
-             size = build_int_cst (integer_type_node, arg_size);
-             ao_ref_init_from_ptr_and_size (&r, arg_base, size);
+             ao_ref_init (&r, arg_base);
            }
          else
            return;
@@ -1415,7 +1577,6 @@ determine_known_aggregate_parts (gimple call, tree arg,
       gimple stmt = gsi_stmt (gsi);
       HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
       tree lhs, rhs, lhs_base;
-      bool partial_overlap;
 
       if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
@@ -1424,7 +1585,7 @@ determine_known_aggregate_parts (gimple call, tree arg,
 
       lhs = gimple_assign_lhs (stmt);
       rhs = gimple_assign_rhs1 (stmt);
-      if (!is_gimple_reg_type (rhs)
+      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;
@@ -1432,11 +1593,7 @@ determine_known_aggregate_parts (gimple call, tree arg,
       lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
       if (lhs_max_size == -1
-         || lhs_max_size != lhs_size
-         || (lhs_offset < arg_offset
-             && lhs_offset + lhs_size > arg_offset)
-         || (lhs_offset < arg_offset + arg_size
-             && lhs_offset + lhs_size > arg_offset + arg_size))
+         || lhs_max_size != lhs_size)
        break;
 
       if (check_ref)
@@ -1454,34 +1611,13 @@ determine_known_aggregate_parts (gimple call, tree arg,
            break;
        }
 
-      if (lhs_offset + lhs_size < arg_offset
-         || lhs_offset >= (arg_offset + arg_size))
-       continue;
-
-      partial_overlap = false;
-      p = &list;
-      while (*p && (*p)->offset < lhs_offset)
-       {
-         if ((*p)->offset + (*p)->size > lhs_offset)
-           {
-             partial_overlap = true;
-             break;
-           }
-         p = &(*p)->next;
-       }
-      if (partial_overlap)
+      bool already_there = false;
+      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
+                                         &already_there);
+      if (!p)
        break;
-      if (*p && (*p)->offset < lhs_offset + lhs_size)
-       {
-         if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
-           /* We already know this value is subsequently overwritten with
-              something else.  */
-           continue;
-         else
-           /* Otherwise this is a partial overlap which we cannot
-              represent.  */
-           break;
-       }
+      if (already_there)
+       continue;
 
       rhs = get_ssa_def_if_simple_copy (rhs);
       n = XALLOCA (struct ipa_known_agg_contents_list);
@@ -1510,19 +1646,7 @@ determine_known_aggregate_parts (gimple call, tree arg,
   if (const_count)
     {
       jfunc->agg.by_ref = by_ref;
-      vec_alloc (jfunc->agg.items, const_count);
-      while (list)
-       {
-         if (list->constant)
-           {
-             struct ipa_agg_jf_item item;
-             item.offset = list->offset - arg_offset;
-             gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
-             item.value = unshare_expr_without_location (list->constant);
-             jfunc->agg.items->quick_push (item);
-           }
-         list = list->next;
-       }
+      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
     }
 }
 
@@ -1562,17 +1686,20 @@ ipa_get_callee_param_type (struct cgraph_edge *e, int i)
    to this callsite.  */
 
 static void
-ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
+ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
                                     struct cgraph_edge *cs)
 {
   struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
   struct ipa_edge_args *args = IPA_EDGE_REF (cs);
-  gimple call = cs->call_stmt;
+  gcall *call = cs->call_stmt;
   int n, arg_num = gimple_call_num_args (call);
+  bool useful_context = false;
 
   if (arg_num == 0 || args->jump_functions)
     return;
   vec_safe_grow_cleared (args->jump_functions, arg_num);
+  if (flag_devirtualize)
+    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
 
   if (gimple_call_internal_p (call))
     return;
@@ -1584,6 +1711,36 @@ ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
       struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
       tree arg = gimple_call_arg (call, n);
       tree param_type = ipa_get_callee_param_type (cs, n);
+      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
+       {
+         tree instance;
+         struct ipa_polymorphic_call_context context (cs->caller->decl,
+                                                      arg, cs->call_stmt,
+                                                      &instance);
+         context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
+         *ipa_get_ith_polymorhic_call_context (args, n) = context;
+         if (!context.useless_p ())
+           useful_context = true;
+       }
+
+      if (POINTER_TYPE_P (TREE_TYPE (arg)))
+       {
+         unsigned HOST_WIDE_INT hwi_bitpos;
+         unsigned align;
+
+         if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
+             && align % BITS_PER_UNIT == 0
+             && hwi_bitpos % BITS_PER_UNIT == 0)
+           {
+             jfunc->alignment.known = true;
+             jfunc->alignment.align = align / BITS_PER_UNIT;
+             jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
+           }
+         else
+           gcc_assert (!jfunc->alignment.known);
+       }
+      else
+       gcc_assert (!jfunc->alignment.known);
 
       if (is_gimple_ip_invariant (arg))
        ipa_set_jf_constant (jfunc, arg, cs);
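
A small arithmetic sketch of what the alignment jump function introduced above records, assuming BITS_PER_UNIT is 8 (an illustration, not GCC code): get_pointer_alignment_1 reports an alignment and a bit position, and the jump function stores their byte equivalents, meaning the pointer is congruent to misalign modulo align.

#include <cassert>

int main ()
{
  /* E.g. a pointer known to be 16-byte aligned plus a constant 4-byte
     offset: alignment analysis reports 128 bits with bit position 32.  */
  unsigned align_bits = 128, bitpos = 32;
  unsigned align = align_bits / 8;   /* 16 bytes */
  unsigned misalign = bitpos / 8;    /* 4 bytes, i.e. ptr % 16 == 4 */
  assert (align == 16 && misalign == 4);
  return 0;
}
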
@@ -1596,9 +1753,9 @@ ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
          /* Aggregate passed by value, check for pass-through, otherwise we
             will attempt to fill in aggregate contents later in this
             for cycle.  */
-         if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
+         if (parm_preserved_before_stmt_p (fbi, index, call, arg))
            {
-             ipa_set_jf_simple_pass_through (jfunc, index, false, false);
+             ipa_set_jf_simple_pass_through (jfunc, index, false);
              continue;
            }
        }
@@ -1609,69 +1766,69 @@ ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
              int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
              if (index >= 0)
                {
-                 bool agg_p, type_p;
-                 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
-                                                       call, arg);
-                 if (param_type && POINTER_TYPE_P (param_type))
-                   type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
-                                                     call, jfunc);
-                 else
-                   type_p = false;
-                 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
-                   ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
-                                                   type_p);
+                 bool agg_p;
+                 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
+                 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
                }
            }
          else
            {
              gimple stmt = SSA_NAME_DEF_STMT (arg);
              if (is_gimple_assign (stmt))
-               compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
+               compute_complex_assign_jump_func (fbi, info, jfunc,
                                                  call, stmt, arg, param_type);
              else if (gimple_code (stmt) == GIMPLE_PHI)
-               compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
-                                                   call, stmt, param_type);
+               compute_complex_ancestor_jump_func (fbi, info, jfunc,
+                                                   call,
+                                                   as_a <gphi *> (stmt));
            }
        }
-      else
-       compute_known_type_jump_func (arg, jfunc, call,
-                                     param_type
-                                     && POINTER_TYPE_P (param_type)
-                                     ? TREE_TYPE (param_type)
-                                     : NULL);
+
+      /* If ARG is a pointer, we cannot use its type to determine the type of
+        the aggregate passed (because type conversions are ignored in gimple).
+        Usually we can safely get the type from the function declaration, but
+        in case of K&R prototypes or variadic functions we can try our luck
+        with the type of the pointer passed.
+        TODO: Since we look for actual initialization of the memory object, we
+        might work out the type more precisely from the memory stores we find.  */
+      if (!param_type)
+       param_type = TREE_TYPE (arg);
 
       if ((jfunc->type != IPA_JF_PASS_THROUGH
              || !ipa_get_jf_pass_through_agg_preserved (jfunc))
          && (jfunc->type != IPA_JF_ANCESTOR
              || !ipa_get_jf_ancestor_agg_preserved (jfunc))
          && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
-             || (POINTER_TYPE_P (TREE_TYPE (arg)))))
-       determine_known_aggregate_parts (call, arg, jfunc);
+             || POINTER_TYPE_P (param_type)))
+       determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
     }
+  if (!useful_context)
+    vec_free (args->polymorphic_call_contexts);
 }
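
An illustrative caller (invented names, not part of this patch) showing the
two argument shapes the loop above distinguishes: a scalar argument that is
simply the caller's own formal parameter, and an aggregate passed by value
whose locally known contents can additionally be recorded:

struct S { int a; int b; };

static int use_int (int x) { return x + 1; }
static int use_agg (struct S s) { return s.a + s.b; }

int
caller (int p, struct S s)
{
  int r = use_int (p);   /* P is the caller's own parameter: a simple
                            pass-through jump function.  */
  r += use_agg (s);      /* aggregate passed by value: its locally known
                            parts may be described by the aggregate jump
                            function.  */
  return r;
}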
 
 /* Compute jump functions for all edges - both direct and indirect - outgoing
-   from NODE.  Also count the actual arguments in the process.  */
+   from BB.  */
 
 static void
-ipa_compute_jump_functions (struct cgraph_node *node,
-                           struct param_analysis_info *parms_ainfo)
+ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
 {
+  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
+  int i;
   struct cgraph_edge *cs;
 
-  for (cs = node->callees; cs; cs = cs->next_callee)
+  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
     {
-      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
-                                                                 NULL);
-      /* We do not need to bother analyzing calls to unknown
-        functions unless they may become known during lto/whopr.  */
-      if (!callee->definition && !flag_lto)
-       continue;
-      ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
-    }
+      struct cgraph_node *callee = cs->callee;
 
-  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
-    ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
+      if (callee)
+       {
+         callee->ultimate_alias_target ();
+         /* We do not need to bother analyzing calls to unknown functions
+            unless they may become known during lto/whopr.  */
+         if (!callee->definition && !flag_lto)
+           continue;
+       }
+      ipa_compute_jump_functions_for_edge (fbi, cs);
+    }
 }
 
 /* If STMT looks like a statement loading a value from a member pointer formal
@@ -1743,11 +1900,12 @@ ipa_is_ssa_with_stmt_def (tree t)
    indirect call graph edge.  */
 
 static struct cgraph_edge *
-ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
+ipa_note_param_call (struct cgraph_node *node, int param_index,
+                    gcall *stmt)
 {
   struct cgraph_edge *cs;
 
-  cs = cgraph_edge (node, stmt);
+  cs = node->get_edge (stmt);
   cs->indirect_info->param_index = param_index;
   cs->indirect_info->agg_contents = 0;
   cs->indirect_info->member_ptr = 0;
@@ -1814,39 +1972,30 @@ ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
    passed by value or reference.  */
 
 static void
-ipa_analyze_indirect_call_uses (struct cgraph_node *node,
-                               struct ipa_node_params *info,
-                               struct param_analysis_info *parms_ainfo,
-                               gimple call, tree target)
+ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
+                               tree target)
 {
-  gimple def;
-  tree n1, n2;
-  gimple d1, d2;
-  tree rec, rec2, cond;
-  gimple branch;
-  int index;
-  basic_block bb, virt_bb, join;
+  struct ipa_node_params *info = fbi->info;
   HOST_WIDE_INT offset;
   bool by_ref;
 
   if (SSA_NAME_IS_DEFAULT_DEF (target))
     {
       tree var = SSA_NAME_VAR (target);
-      index = ipa_get_param_decl_index (info, var);
+      int index = ipa_get_param_decl_index (info, var);
       if (index >= 0)
-       ipa_note_param_call (node, index, call);
+       ipa_note_param_call (fbi->node, index, call);
       return;
     }
 
-  def = SSA_NAME_DEF_STMT (target);
+  int index;
+  gimple def = SSA_NAME_DEF_STMT (target);
   if (gimple_assign_single_p (def)
-      && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
+      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
                                   gimple_assign_rhs1 (def), &index, &offset,
                                   NULL, &by_ref))
     {
-      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
-      if (cs->indirect_info->offset != offset)
-       cs->indirect_info->outer_type = NULL;
+      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
       cs->indirect_info->offset = offset;
       cs->indirect_info->agg_contents = 1;
       cs->indirect_info->by_ref = by_ref;
@@ -1863,14 +2012,16 @@ ipa_analyze_indirect_call_uses (struct cgraph_node *node,
 
   /* First, we need to check whether one of these is a load from a member
      pointer that is a parameter to this function. */
-  n1 = PHI_ARG_DEF (def, 0);
-  n2 = PHI_ARG_DEF (def, 1);
+  tree n1 = PHI_ARG_DEF (def, 0);
+  tree n2 = PHI_ARG_DEF (def, 1);
   if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
     return;
-  d1 = SSA_NAME_DEF_STMT (n1);
-  d2 = SSA_NAME_DEF_STMT (n2);
+  gimple d1 = SSA_NAME_DEF_STMT (n1);
+  gimple d2 = SSA_NAME_DEF_STMT (n2);
 
-  join = gimple_bb (def);
+  tree rec;
+  basic_block bb, virt_bb;
+  basic_block join = gimple_bb (def);
   if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
     {
       if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
@@ -1898,7 +2049,7 @@ ipa_analyze_indirect_call_uses (struct cgraph_node *node,
   /* Third, let's see that the branching is done depending on the least
      significant bit of the pfn. */
 
-  branch = last_stmt (bb);
+  gimple branch = last_stmt (bb);
   if (!branch || gimple_code (branch) != GIMPLE_COND)
     return;
 
@@ -1907,7 +2058,7 @@ ipa_analyze_indirect_call_uses (struct cgraph_node *node,
       || !integer_zerop (gimple_cond_rhs (branch)))
     return;
 
-  cond = gimple_cond_lhs (branch);
+  tree cond = gimple_cond_lhs (branch);
   if (!ipa_is_ssa_with_stmt_def (cond))
     return;
 
@@ -1932,6 +2083,7 @@ ipa_analyze_indirect_call_uses (struct cgraph_node *node,
       def = SSA_NAME_DEF_STMT (cond);
     }
 
+  tree rec2;
   rec2 = ipa_get_stmt_member_ptr_load_param (def,
                                             (TARGET_PTRMEMFUNC_VBIT_LOCATION
                                              == ptrmemfunc_vbit_in_delta),
@@ -1941,11 +2093,9 @@ ipa_analyze_indirect_call_uses (struct cgraph_node *node,
 
   index = ipa_get_param_decl_index (info, rec);
   if (index >= 0
-      && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
+      && parm_preserved_before_stmt_p (fbi, index, call, rec))
     {
-      struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
-      if (cs->indirect_info->offset != offset)
-       cs->indirect_info->outer_type = NULL;
+      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
       cs->indirect_info->offset = offset;
       cs->indirect_info->agg_contents = 1;
       cs->indirect_info->member_ptr = 1;
@@ -1956,16 +2106,13 @@ ipa_analyze_indirect_call_uses (struct cgraph_node *node,
 
 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
    object referenced in the expression is a formal parameter of the caller
-   (described by INFO), create a call note for the statement. */
+   FBI->node (described by FBI->info), create a call note for the
+   statement.  */
 
 static void
-ipa_analyze_virtual_call_uses (struct cgraph_node *node,
-                              struct ipa_node_params *info, gimple call,
-                              tree target)
+ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
+                              gcall *call, tree target)
 {
-  struct cgraph_edge *cs;
-  struct cgraph_indirect_call_info *ii;
-  struct ipa_jump_func jfunc;
   tree obj = OBJ_TYPE_REF_OBJECT (target);
   int index;
   HOST_WIDE_INT anc_offset;
@@ -1976,8 +2123,10 @@ ipa_analyze_virtual_call_uses (struct cgraph_node *node,
   if (TREE_CODE (obj) != SSA_NAME)
     return;
 
+  struct ipa_node_params *info = fbi->info;
   if (SSA_NAME_IS_DEFAULT_DEF (obj))
     {
+      struct ipa_jump_func jfunc;
       if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
        return;
 
@@ -1990,6 +2139,7 @@ ipa_analyze_virtual_call_uses (struct cgraph_node *node,
     }
   else
     {
+      struct ipa_jump_func jfunc;
       gimple stmt = SSA_NAME_DEF_STMT (obj);
       tree expr;
 
@@ -2004,8 +2154,8 @@ ipa_analyze_virtual_call_uses (struct cgraph_node *node,
        return;
     }
 
-  cs = ipa_note_param_call (node, index, call);
-  ii = cs->indirect_info;
+  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
+  struct cgraph_indirect_call_info *ii = cs->indirect_info;
   ii->offset = anc_offset;
   ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
   ii->otr_type = obj_type_ref_class (target);
@@ -2017,32 +2167,56 @@ ipa_analyze_virtual_call_uses (struct cgraph_node *node,
    containing intermediate information about each formal parameter.  */
 
 static void
-ipa_analyze_call_uses (struct cgraph_node *node,
-                      struct ipa_node_params *info,
-                      struct param_analysis_info *parms_ainfo, gimple call)
+ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
 {
   tree target = gimple_call_fn (call);
 
-  if (!target)
+  if (!target
+      || (TREE_CODE (target) != SSA_NAME
+          && !virtual_method_call_p (target)))
     return;
+
+  struct cgraph_edge *cs = fbi->node->get_edge (call);
+  /* If we previously turned the call into a direct call, there is
+     no need to analyze.  */
+  if (cs && !cs->indirect_unknown_callee)
+    return;
+
+  if (cs->indirect_info->polymorphic && flag_devirtualize)
+    {
+      tree instance;
+      tree target = gimple_call_fn (call);
+      ipa_polymorphic_call_context context (current_function_decl,
+                                           target, call, &instance);
+
+      gcc_checking_assert (cs->indirect_info->otr_type
+                          == obj_type_ref_class (target));
+      gcc_checking_assert (cs->indirect_info->otr_token
+                          == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
+
+      cs->indirect_info->vptr_changed
+       = !context.get_dynamic_type (instance,
+                                    OBJ_TYPE_REF_OBJECT (target),
+                                    obj_type_ref_class (target), call);
+      cs->indirect_info->context = context;
+    }
+
   if (TREE_CODE (target) == SSA_NAME)
-    ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
+    ipa_analyze_indirect_call_uses (fbi, call, target);
   else if (virtual_method_call_p (target))
-    ipa_analyze_virtual_call_uses (node, info, call, target);
+    ipa_analyze_virtual_call_uses (fbi, call, target);
 }
 
 
 /* Analyze the call statement STMT with respect to formal parameters (described
-   in INFO) of caller given by NODE.  Currently it only checks whether formal
-   parameters are called.  PARMS_AINFO is a pointer to a vector containing
-   intermediate information about each formal parameter.  */
+   in FBI->INFO) of the caller given by FBI->NODE.  Currently it only checks
+   whether formal parameters are called.  */
 
 static void
-ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
-                      struct param_analysis_info *parms_ainfo, gimple stmt)
+ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
 {
   if (is_gimple_call (stmt))
-    ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
+    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
 }
 
 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
@@ -2050,8 +2224,7 @@ ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
    passed in DATA.  */
 
 static bool
-visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
-                            tree op, void *data)
+visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
 {
   struct ipa_node_params *info = (struct ipa_node_params *) data;
 
@@ -2067,37 +2240,43 @@ visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
   return false;
 }
 
-/* Scan the function body of NODE and inspect the uses of formal parameters.
-   Store the findings in various structures of the associated ipa_node_params
-   structure, such as parameter flags, notes etc.  PARMS_AINFO is a pointer to a
-   vector containing intermediate information about each formal parameter.   */
+/* Scan the statements in BB and inspect the uses of formal parameters.  Store
+   the findings in various structures of the associated ipa_node_params
+   structure, such as parameter flags, notes etc.  FBI holds various data about
+   the function being analyzed.  */
 
 static void
-ipa_analyze_params_uses (struct cgraph_node *node,
-                        struct param_analysis_info *parms_ainfo)
+ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
 {
-  tree decl = node->decl;
-  basic_block bb;
-  struct function *func;
   gimple_stmt_iterator gsi;
-  struct ipa_node_params *info = IPA_NODE_REF (node);
-  int i;
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+    {
+      gimple stmt = gsi_stmt (gsi);
 
-  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
-    return;
+      if (is_gimple_debug (stmt))
+       continue;
 
-  info->uses_analysis_done = 1;
-  if (ipa_func_spec_opts_forbid_analysis_p (node))
-    {
-      for (i = 0; i < ipa_get_param_count (info); i++)
-       {
-         ipa_set_param_used (info, i, true);
-         ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
-       }
-      return;
+      ipa_analyze_stmt_uses (fbi, stmt);
+      walk_stmt_load_store_addr_ops (stmt, fbi->info,
+                                    visit_ref_for_mod_analysis,
+                                    visit_ref_for_mod_analysis,
+                                    visit_ref_for_mod_analysis);
     }
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
+                                  visit_ref_for_mod_analysis,
+                                  visit_ref_for_mod_analysis,
+                                  visit_ref_for_mod_analysis);
+}
+
+/* Calculate controlled uses of parameters of NODE.  */
+
+static void
+ipa_analyze_controlled_uses (struct cgraph_node *node)
+{
+  struct ipa_node_params *info = IPA_NODE_REF (node);
 
-  for (i = 0; i < ipa_get_param_count (info); i++)
+  for (int i = 0; i < ipa_get_param_count (info); i++)
     {
       tree parm = ipa_get_param (info, i);
       int controlled_uses = 0;
@@ -2117,8 +2296,11 @@ ipa_analyze_params_uses (struct cgraph_node *node,
              FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
                if (!is_gimple_call (USE_STMT (use_p)))
                  {
-                   controlled_uses = IPA_UNDESCRIBED_USE;
-                   break;
+                   if (!is_gimple_debug (USE_STMT (use_p)))
+                     {
+                       controlled_uses = IPA_UNDESCRIBED_USE;
+                       break;
+                     }
                  }
                else
                  controlled_uses++;
@@ -2130,45 +2312,36 @@ ipa_analyze_params_uses (struct cgraph_node *node,
        controlled_uses = IPA_UNDESCRIBED_USE;
       ipa_set_controlled_uses (info, i, controlled_uses);
     }
+}
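
Roughly what the loop above distinguishes, on an invented example (not part
of this patch): a pointer parameter whose every real use is a call argument
gets a finite controlled-use count, while any other use forces
IPA_UNDESCRIBED_USE:

extern int callee (int *);

int
only_call_uses (int *q)
{
  return callee (q);   /* the only use of Q is as a call argument, so its
                          uses can be counted (controlled).  */
}

int
other_uses (int *q)
{
  int v = *q;          /* a load is a non-call use of Q, so the parameter
                          is marked IPA_UNDESCRIBED_USE.  */
  return callee (q) + v;
}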
 
-  func = DECL_STRUCT_FUNCTION (decl);
-  FOR_EACH_BB_FN (bb, func)
-    {
-      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
-       {
-         gimple stmt = gsi_stmt (gsi);
-
-         if (is_gimple_debug (stmt))
-           continue;
+/* Free stuff in BI.  */
 
-         ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
-         walk_stmt_load_store_addr_ops (stmt, info,
-                                        visit_ref_for_mod_analysis,
-                                        visit_ref_for_mod_analysis,
-                                        visit_ref_for_mod_analysis);
-       }
-      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
-       walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
-                                      visit_ref_for_mod_analysis,
-                                      visit_ref_for_mod_analysis,
-                                      visit_ref_for_mod_analysis);
-    }
+static void
+free_ipa_bb_info (struct ipa_bb_info *bi)
+{
+  bi->cg_edges.release ();
+  bi->param_aa_statuses.release ();
 }
 
-/* Free stuff in PARMS_AINFO, assume there are PARAM_COUNT parameters.  */
+/* Dominator walker driving the analysis.  */
 
-static void
-free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
+class analysis_dom_walker : public dom_walker
 {
-  int i;
+public:
+  analysis_dom_walker (struct func_body_info *fbi)
+    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
 
-  for (i = 0; i < param_count; i++)
-    {
-      if (parms_ainfo[i].parm_visited_statements)
-       BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
-      if (parms_ainfo[i].pt_visited_statements)
-       BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
-    }
+  virtual void before_dom_children (basic_block);
+
+private:
+  struct func_body_info *m_fbi;
+};
+
+void
+analysis_dom_walker::before_dom_children (basic_block bb)
+{
+  ipa_analyze_params_uses_in_bb (m_fbi, bb);
+  ipa_compute_jump_functions_for_bb (m_fbi, bb);
 }
 
 /* Initialize the array describing properties of formal parameters
@@ -2178,81 +2351,61 @@ free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
 void
 ipa_analyze_node (struct cgraph_node *node)
 {
+  struct func_body_info fbi;
   struct ipa_node_params *info;
-  struct param_analysis_info *parms_ainfo;
-  int param_count;
 
   ipa_check_create_node_params ();
   ipa_check_create_edge_args ();
   info = IPA_NODE_REF (node);
-  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
-  ipa_initialize_node_params (node);
 
-  param_count = ipa_get_param_count (info);
-  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
-  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
-
-  ipa_analyze_params_uses (node, parms_ainfo);
-  ipa_compute_jump_functions (node, parms_ainfo);
-
-  free_parms_ainfo (parms_ainfo, param_count);
-  pop_cfun ();
-}
+  if (info->analysis_done)
+    return;
+  info->analysis_done = 1;
 
-/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
-   attempt a type-based devirtualization.  If successful, return the
-   target function declaration, otherwise return NULL.  */
+  if (ipa_func_spec_opts_forbid_analysis_p (node))
+    {
+      for (int i = 0; i < ipa_get_param_count (info); i++)
+       {
+         ipa_set_param_used (info, i, true);
+         ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
+       }
+      return;
+    }
 
-tree
-ipa_intraprocedural_devirtualization (gimple call)
-{
-  tree binfo, token, fndecl;
-  struct ipa_jump_func jfunc;
-  tree otr = gimple_call_fn (call);
-
-  jfunc.type = IPA_JF_UNKNOWN;
-  compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
-                               call, obj_type_ref_class (otr));
-  if (jfunc.type != IPA_JF_KNOWN_TYPE)
-    return NULL_TREE;
-  binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
-  if (!binfo)
-    return NULL_TREE;
-  token = OBJ_TYPE_REF_TOKEN (otr);
-  fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
-                                            binfo);
-#ifdef ENABLE_CHECKING
-  if (fndecl)
-    gcc_assert (possible_polymorphic_call_target_p
-                 (otr, cgraph_get_node (fndecl)));
-#endif
-  return fndecl;
-}
+  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
+  push_cfun (func);
+  calculate_dominance_info (CDI_DOMINATORS);
+  ipa_initialize_node_params (node);
+  ipa_analyze_controlled_uses (node);
 
-/* Update the jump function DST when the call graph edge corresponding to SRC is
-   is being inlined, knowing that DST is of type ancestor and src of known
-   type.  */
+  fbi.node = node;
+  fbi.info = IPA_NODE_REF (node);
+  fbi.bb_infos = vNULL;
+  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
+  fbi.param_count = ipa_get_param_count (info);
+  fbi.aa_walked = 0;
 
-static void
-combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
-                                    struct ipa_jump_func *dst)
-{
-  HOST_WIDE_INT combined_offset;
-  tree combined_type;
+  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
+    {
+      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
+      bi->cg_edges.safe_push (cs);
+    }
 
-  if (!ipa_get_jf_ancestor_type_preserved (dst))
+  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
     {
-      dst->type = IPA_JF_UNKNOWN;
-      return;
+      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
+      bi->cg_edges.safe_push (cs);
     }
 
-  combined_offset = ipa_get_jf_known_type_offset (src)
-    + ipa_get_jf_ancestor_offset (dst);
-  combined_type = ipa_get_jf_ancestor_type (dst);
+  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
-  ipa_set_jf_known_type (dst, combined_offset,
-                        ipa_get_jf_known_type_base_type (src),
-                        combined_type);
+  int i;
+  struct ipa_bb_info *bi;
+  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
+    free_ipa_bb_info (bi);
+  fbi.bb_infos.release ();
+  free_dominance_info (CDI_DOMINATORS);
+  pop_cfun ();
 }
 
 /* Update the jump functions associated with call graph edge E when the call
@@ -2271,23 +2424,44 @@ update_jump_functions_after_inlining (struct cgraph_edge *cs,
   for (i = 0; i < count; i++)
     {
       struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
+      struct ipa_polymorphic_call_context *dst_ctx
+       = ipa_get_ith_polymorhic_call_context (args, i);
 
       if (dst->type == IPA_JF_ANCESTOR)
        {
          struct ipa_jump_func *src;
          int dst_fid = dst->value.ancestor.formal_id;
+         struct ipa_polymorphic_call_context *src_ctx
+           = ipa_get_ith_polymorhic_call_context (top, dst_fid);
 
          /* Variable number of arguments can cause havoc if we try to access
             one that does not exist in the inlined edge.  So make sure we
             don't.  */
          if (dst_fid >= ipa_get_cs_argument_count (top))
            {
-             dst->type = IPA_JF_UNKNOWN;
+             ipa_set_jf_unknown (dst);
              continue;
            }
 
          src = ipa_get_ith_jump_func (top, dst_fid);
 
+         if (src_ctx && !src_ctx->useless_p ())
+           {
+             struct ipa_polymorphic_call_context ctx = *src_ctx;
+
+             /* TODO: Make type preserved safe WRT contexts.  */
+             if (!ipa_get_jf_ancestor_type_preserved (dst))
+               ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
+             ctx.offset_by (dst->value.ancestor.offset);
+             if (!ctx.useless_p ())
+               {
+                 vec_safe_grow_cleared (args->polymorphic_call_contexts,
+                                        count);
+                 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
+               }
+             dst_ctx->combine_with (ctx);
+           }
+
          if (src->agg.items
              && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
            {
@@ -2304,16 +2478,12 @@ update_jump_functions_after_inlining (struct cgraph_edge *cs,
                item->offset -= dst->value.ancestor.offset;
            }
 
-         if (src->type == IPA_JF_KNOWN_TYPE)
-           combine_known_type_and_ancestor_jfs (src, dst);
-         else if (src->type == IPA_JF_PASS_THROUGH
-                  && src->value.pass_through.operation == NOP_EXPR)
+         if (src->type == IPA_JF_PASS_THROUGH
+             && src->value.pass_through.operation == NOP_EXPR)
            {
              dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
              dst->value.ancestor.agg_preserved &=
                src->value.pass_through.agg_preserved;
-             dst->value.ancestor.type_preserved &=
-               src->value.pass_through.type_preserved;
            }
          else if (src->type == IPA_JF_ANCESTOR)
            {
@@ -2321,11 +2491,9 @@ update_jump_functions_after_inlining (struct cgraph_edge *cs,
              dst->value.ancestor.offset += src->value.ancestor.offset;
              dst->value.ancestor.agg_preserved &=
                src->value.ancestor.agg_preserved;
-             dst->value.ancestor.type_preserved &=
-               src->value.ancestor.type_preserved;
            }
          else
-           dst->type = IPA_JF_UNKNOWN;
+           ipa_set_jf_unknown (dst);
        }
       else if (dst->type == IPA_JF_PASS_THROUGH)
        {
@@ -2339,17 +2507,31 @@ update_jump_functions_after_inlining (struct cgraph_edge *cs,
              int dst_fid = dst->value.pass_through.formal_id;
              src = ipa_get_ith_jump_func (top, dst_fid);
              bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
+             struct ipa_polymorphic_call_context *src_ctx
+               = ipa_get_ith_polymorhic_call_context (top, dst_fid);
+
+             if (src_ctx && !src_ctx->useless_p ())
+               {
+                 struct ipa_polymorphic_call_context ctx = *src_ctx;
 
+                 /* TODO: Make type preserved safe WRT contexts.  */
+                 if (!ipa_get_jf_pass_through_type_preserved (dst))
+                   ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
+                 if (!ctx.useless_p ())
+                   {
+                     if (!dst_ctx)
+                       {
+                         vec_safe_grow_cleared (args->polymorphic_call_contexts,
+                                                count);
+                         dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
+                       }
+                     dst_ctx->combine_with (ctx);
+                   }
+               }
              switch (src->type)
                {
                case IPA_JF_UNKNOWN:
-                 dst->type = IPA_JF_UNKNOWN;
-                 break;
-               case IPA_JF_KNOWN_TYPE:
-                 ipa_set_jf_known_type (dst,
-                                        ipa_get_jf_known_type_offset (src),
-                                        ipa_get_jf_known_type_base_type (src),
-                                        ipa_get_jf_known_type_base_type (src));
+                 ipa_set_jf_unknown (dst);
                  break;
                case IPA_JF_CONST:
                  ipa_set_jf_cst_copy (dst, src);
@@ -2363,13 +2545,10 @@ update_jump_functions_after_inlining (struct cgraph_edge *cs,
 
                    if (operation == NOP_EXPR)
                      {
-                       bool agg_p, type_p;
+                       bool agg_p;
                        agg_p = dst_agg_p
                          && ipa_get_jf_pass_through_agg_preserved (src);
-                       type_p = ipa_get_jf_pass_through_type_preserved (src)
-                         && ipa_get_jf_pass_through_type_preserved (dst);
-                       ipa_set_jf_simple_pass_through (dst, formal_id,
-                                                       agg_p, type_p);
+                       ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
                      }
                    else
                      {
@@ -2381,16 +2560,13 @@ update_jump_functions_after_inlining (struct cgraph_edge *cs,
                  }
                case IPA_JF_ANCESTOR:
                  {
-                   bool agg_p, type_p;
+                   bool agg_p;
                    agg_p = dst_agg_p
                      && ipa_get_jf_ancestor_agg_preserved (src);
-                   type_p = ipa_get_jf_ancestor_type_preserved (src)
-                     && ipa_get_jf_pass_through_type_preserved (dst);
                    ipa_set_ancestor_jf (dst,
                                         ipa_get_jf_ancestor_offset (src),
-                                        ipa_get_jf_ancestor_type (src),
                                         ipa_get_jf_ancestor_formal_id (src),
-                                        agg_p, type_p);
+                                        agg_p);
                    break;
                  }
                default:
@@ -2409,16 +2585,18 @@ update_jump_functions_after_inlining (struct cgraph_edge *cs,
                }
            }
          else
-           dst->type = IPA_JF_UNKNOWN;
+           ipa_set_jf_unknown (dst);
        }
     }
 }
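
The composition above can be pictured on a small invented example (not part
of this patch): when middle is inlined into outer, the pass-through jump
function describing the argument of the surviving call to inner is rewritten
in terms of outer's own formal parameter:

static int inner (int v) { return v * 2; }

static int middle (int w)
{
  return inner (w);    /* pass-through of MIDDLE's parameter W.  */
}

int
outer (int p)
{
  return middle (p);   /* after MIDDLE is inlined, the remaining call to
                          INNER is described as a pass-through of OUTER's
                          parameter P.  */
}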
 
-/* If TARGET is an addr_expr of a function declaration, make it the destination
-   of an indirect edge IE and return the edge.  Otherwise, return NULL.  */
+/* If TARGET is an addr_expr of a function declaration, make it the
+   (SPECULATIVE) destination of an indirect edge IE and return the edge.
+   Otherwise, return NULL.  */
 
 struct cgraph_edge *
-ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
+ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
+                               bool speculative)
 {
   struct cgraph_node *callee;
   struct inline_edge_summary *es = inline_edge_summary (ie);
@@ -2431,23 +2609,48 @@ ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
       target = canonicalize_constructor_val (target, NULL);
       if (!target || TREE_CODE (target) != FUNCTION_DECL)
        {
-         if (ie->indirect_info->member_ptr)
-           /* Member pointer call that goes through a VMT lookup.  */
-           return NULL;
+         /* Member pointer call that goes through a VMT lookup.  */
+         if (ie->indirect_info->member_ptr
+             /* Or if target is not an invariant expression and we do not
+                know if it will evaluate to a function at runtime.
+                This can happen when folding through &VAR, where &VAR
+                is IP invariant, but VAR itself is not.
+
+                TODO: Revisit this when GCC 5 is branched.  It seems that
+                the member_ptr check is not needed and that we may try to fold
+                the expression and see if VAR is readonly.  */
+             || !is_gimple_ip_invariant (target))
+           {
+             if (dump_enabled_p ())
+               {
+                 location_t loc = gimple_location_safe (ie->call_stmt);
+                 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
+                                  "discovered direct call non-invariant "
+                                  "%s/%i\n",
+                                  ie->caller->name (), ie->caller->order);
+               }
+             return NULL;
+           }
+
+          if (dump_enabled_p ())
+           {
+             location_t loc = gimple_location_safe (ie->call_stmt);
+             dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
+                              "discovered direct call to non-function in %s/%i, "
+                              "making it __builtin_unreachable\n",
+                              ie->caller->name (), ie->caller->order);
+           }
 
-         if (dump_file)
-           fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
-                               " in %s/%i, making it unreachable.\n",
-                    ie->caller->name (), ie->caller->order);
          target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
-         callee = cgraph_get_create_node (target);
+         callee = cgraph_node::get_create (target);
          unreachable = true;
        }
       else
-       callee = cgraph_get_node (target);
+       callee = cgraph_node::get (target);
     }
   else
-    callee = cgraph_get_node (target);
+    callee = cgraph_node::get (target);
 
   /* Because may-edges are not explicitly represented and vtable may be external,
      we may create the first reference to the object in the unit.  */
@@ -2464,14 +2667,50 @@ ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
          if (dump_file)
            fprintf (dump_file, "ipa-prop: Discovered call to a known target "
                     "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
-                    xstrdup (ie->caller->name ()),
+                    xstrdup_for_dump (ie->caller->name ()),
                     ie->caller->order,
-                    xstrdup (ie->callee->name ()),
+                    xstrdup_for_dump (ie->callee->name ()),
                     ie->callee->order);
          return NULL;
        }
-      callee = cgraph_get_create_node (target);
+      callee = cgraph_node::get_create (target);
+    }
+
+  /* If the edge is already speculative, check whether the new target agrees
+     with the existing speculation and bail out either way.  */
+  if (speculative && ie->speculative)
+    {
+      struct cgraph_edge *e2;
+      struct ipa_ref *ref;
+      ie->speculative_call_info (e2, ie, ref);
+      if (e2->callee->ultimate_alias_target ()
+         != callee->ultimate_alias_target ())
+       {
+         if (dump_file)
+           fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
+                    "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
+                    xstrdup_for_dump (ie->caller->name ()),
+                    ie->caller->order,
+                    xstrdup_for_dump (callee->name ()),
+                    callee->order,
+                    xstrdup_for_dump (e2->callee->name ()),
+                    e2->callee->order);
+       }
+      else
+       {
+         if (dump_file)
+           fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
+                    "(%s/%i -> %s/%i) this agrees with previous speculation.\n",
+                    xstrdup_for_dump (ie->caller->name ()),
+                    ie->caller->order,
+                    xstrdup_for_dump (callee->name ()),
+                    callee->order);
+       }
+      return NULL;
     }
+
+  if (!dbg_cnt (devirt))
+    return NULL;
+
   ipa_check_create_node_params ();
 
   /* We can not make edges to inline clones.  It is a bug that someone removed
@@ -2480,24 +2719,55 @@ ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
 
   if (dump_file && !unreachable)
     {
-      fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
+      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
               "(%s/%i -> %s/%i), for stmt ",
               ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
-              xstrdup (ie->caller->name ()),
+              speculative ? "speculative" : "known",
+              xstrdup_for_dump (ie->caller->name ()),
               ie->caller->order,
-              xstrdup (callee->name ()),
+              xstrdup_for_dump (callee->name ()),
               callee->order);
       if (ie->call_stmt)
        print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
       else
        fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
      }
-  ie = cgraph_make_edge_direct (ie, callee);
-  es = inline_edge_summary (ie);
-  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
-                        - eni_size_weights.call_cost);
-  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
-                        - eni_time_weights.call_cost);
+  if (dump_enabled_p ())
+    {
+      location_t loc = gimple_location_safe (ie->call_stmt);
+
+      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
+                      "converting indirect call in %s to direct call to %s\n",
+                      ie->caller->name (), callee->name ());
+    }
+  if (!speculative)
+    {
+      struct cgraph_edge *orig = ie;
+      ie = ie->make_direct (callee);
+      /* If we resolved a speculative edge, the cost is already up to date
+        for the direct call (adjusted by inline_edge_duplication_hook).  */
+      if (ie == orig)
+       {
+         es = inline_edge_summary (ie);
+         es->call_stmt_size -= (eni_size_weights.indirect_call_cost
+                                - eni_size_weights.call_cost);
+         es->call_stmt_time -= (eni_time_weights.indirect_call_cost
+                                - eni_time_weights.call_cost);
+       }
+    }
+  else
+    {
+      if (!callee->can_be_discarded_p ())
+       {
+         cgraph_node *alias;
+         alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
+         if (alias)
+           callee = alias;
+       }
+      /* make_speculative will update ie's cost to the direct call cost.  */
+      ie = ie->make_speculative
+            (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
+    }
 
   return ie;
 }
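
Conceptually, a speculative edge created above corresponds to guarding the
indirect call with a comparison against the expected target; a hand-written
sketch of that shape (invented names; the real transformation happens on the
call graph and GIMPLE, not in source):

typedef int (*fn_t) (int);

extern int likely_target (int);

int
dispatch (fn_t fp, int x)
{
  if (fp == likely_target)      /* guard for the speculated target.  */
    return likely_target (x);   /* direct call that can be inlined.  */
  else
    return fp (x);              /* the original indirect call is kept.  */
}
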
@@ -2540,16 +2810,16 @@ remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
   origin = rdesc->cs;
   if (!origin)
     return false;
-  to_del = ipa_find_reference (origin->caller, symbol,
-                              origin->call_stmt, origin->lto_stmt_uid);
+  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
+                                          origin->lto_stmt_uid);
   if (!to_del)
     return false;
 
-  ipa_remove_reference (to_del);
+  to_del->remove_reference ();
   if (dump_file)
     fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
-            xstrdup (origin->caller->name ()),
-            origin->caller->order, xstrdup (symbol->name ()));
+            xstrdup_for_dump (origin->caller->name ()),
+            origin->caller->order, xstrdup_for_dump (symbol->name ()));
   return true;
 }
 
@@ -2580,7 +2850,7 @@ cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
       || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
     return NULL;
 
-  return cgraph_get_node (TREE_OPERAND (cst, 0));
+  return cgraph_node::get (TREE_OPERAND (cst, 0));
 }
 
 
@@ -2646,47 +2916,126 @@ try_make_edge_direct_simple_call (struct cgraph_edge *ie,
   return cs;
 }
 
+/* Return the target to be used in cases of impossible devirtualization.  IE
+   and target (the latter can be NULL) are dumped when dumping is enabled.  */
+
+tree
+ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
+{
+  if (dump_file)
+    {
+      if (target)
+       fprintf (dump_file,
+                "Type inconsistent devirtualization: %s/%i->%s\n",
+                ie->caller->name (), ie->caller->order,
+                IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
+      else
+       fprintf (dump_file,
+                "No devirtualization target in %s/%i\n",
+                ie->caller->name (), ie->caller->order);
+    }
+  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
+  cgraph_node::get_create (new_target);
+  return new_target;
+}
+
 /* Try to find a destination for indirect edge IE that corresponds to a virtual
    call based on a formal parameter which is described by jump function JFUNC
    and if it can be determined, make it direct and return the direct edge.
-   Otherwise, return NULL.  NEW_ROOT_INFO is the node info that JFUNC lattices
-   are relative to.  */
+   Otherwise, return NULL.  CTX describes the polymorphic context carried by
+   the parameter on which the call is based.  */
 
 static struct cgraph_edge *
 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
                                   struct ipa_jump_func *jfunc,
-                                  struct ipa_node_params *new_root_info)
+                                  struct ipa_polymorphic_call_context ctx)
 {
-  tree binfo, target;
+  tree target = NULL;
+  bool speculative = false;
 
-  binfo = ipa_value_from_jfunc (new_root_info, jfunc);
-
-  if (!binfo)
+  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
     return NULL;
 
-  if (TREE_CODE (binfo) != TREE_BINFO)
+  gcc_assert (!ie->indirect_info->by_ref);
+
+  /* Try to do lookup via known virtual table pointer value.  */
+  if (!ie->indirect_info->vptr_changed
+      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
     {
-      binfo = gimple_extract_devirt_binfo_from_cst
-                (binfo, ie->indirect_info->otr_type);
-      if (!binfo)
-        return NULL;
+      tree vtable;
+      unsigned HOST_WIDE_INT offset;
+      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
+                                          ie->indirect_info->offset,
+                                          true);
+      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
+       {
+         t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
+                                                     vtable, offset);
+         if (t)
+           {
+             if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
+                  && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
+                 || !possible_polymorphic_call_target_p
+                      (ie, cgraph_node::get (t)))
+               {
+                 /* Do not speculate builtin_unreachable, it is stupid!  */
+                 if (!ie->indirect_info->vptr_changed)
+                   target = ipa_impossible_devirt_target (ie, target);
+               }
+             else
+               {
+                 target = t;
+                 speculative = ie->indirect_info->vptr_changed;
+               }
+           }
+       }
     }
 
-  binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
-                              ie->indirect_info->otr_type);
-  if (binfo)
-    target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
-                                              binfo);
-  else
-    return NULL;
+  ipa_polymorphic_call_context ie_context (ie);
+  vec <cgraph_node *>targets;
+  bool final;
+
+  ctx.offset_by (ie->indirect_info->offset);
+  if (ie->indirect_info->vptr_changed)
+    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
+                                     ie->indirect_info->otr_type);
+  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
+  targets = possible_polymorphic_call_targets
+    (ie->indirect_info->otr_type,
+     ie->indirect_info->otr_token,
+     ctx, &final);
+  if (final && targets.length () <= 1)
+    {
+      speculative = false;
+      if (targets.length () == 1)
+       target = targets[0]->decl;
+      else
+       target = ipa_impossible_devirt_target (ie, NULL_TREE);
+    }
+  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
+          && !ie->speculative && ie->maybe_hot_p ())
+    {
+      cgraph_node *n;
+      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
+                                           ie->indirect_info->otr_token,
+                                           ie->indirect_info->context);
+      if (n)
+       {
+         target = n->decl;
+         speculative = true;
+       }
+    }
 
   if (target)
     {
-#ifdef ENABLE_CHECKING
-      gcc_assert (possible_polymorphic_call_target_p
-        (ie, cgraph_get_node (target)));
-#endif
-      return ipa_make_edge_direct_to_target (ie, target);
+      if (!possible_polymorphic_call_target_p
+         (ie, cgraph_node::get_create (target)))
+       {
+         if (speculative)
+           return NULL;
+         target = ipa_impossible_devirt_target (ie, target);
+       }
+      return ipa_make_edge_direct_to_target (ie, target, speculative);
     }
   else
     return NULL;
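
An invented C++ illustration (not part of this patch) of the situation the
lookup above resolves: once the dynamic type or the vtable pointer value of
the object is known at the call site, the virtual call can become a direct
call:

struct A { virtual int f () { return 1; } };
struct B : A { int f () { return 2; } };

static int
call_f (A *p)
{
  return p->f ();       /* virtual call through the vtable.  */
}

int
user ()
{
  B b;
  return call_f (&b);   /* after inlining/IPA propagation the dynamic type
                           of *P is known to be B, so p->f () can become a
                           direct call to B::f.  */
}
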
@@ -2701,7 +3050,7 @@ try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
 static bool
 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
                                      struct cgraph_node *node,
-                                     vec<cgraph_edge_p> *new_edges)
+                                     vec<cgraph_edge *> *new_edges)
 {
   struct ipa_edge_args *top;
   struct cgraph_edge *ie, *next_ie, *new_direct_edge;
@@ -2719,6 +3068,7 @@ update_indirect_edges_after_inlining (struct cgraph_edge *cs,
       struct cgraph_indirect_call_info *ici = ie->indirect_info;
       struct ipa_jump_func *jfunc;
       int param_index;
+      cgraph_node *spec_target = NULL;
 
       next_ie = ie->next_callee;
 
@@ -2735,20 +3085,34 @@ update_indirect_edges_after_inlining (struct cgraph_edge *cs,
       param_index = ici->param_index;
       jfunc = ipa_get_ith_jump_func (top, param_index);
 
-      if (!flag_indirect_inlining)
+      if (ie->speculative)
+       {
+         struct cgraph_edge *de;
+          struct ipa_ref *ref;
+         ie->speculative_call_info (de, ie, ref);
+         spec_target = de->callee;
+       }
+
+      if (!opt_for_fn (node->decl, flag_indirect_inlining))
        new_direct_edge = NULL;
       else if (ici->polymorphic)
-       new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
-                                                            new_root_info);
+       {
+          ipa_polymorphic_call_context ctx;
+         ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
+         new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
+       }
       else
        new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
                                                            new_root_info);
       /* If speculation was removed, then we need to do nothing.  */
-      if (new_direct_edge && new_direct_edge != ie)
+      if (new_direct_edge && new_direct_edge != ie
+         && new_direct_edge->callee == spec_target)
        {
          new_direct_edge->indirect_inlining_edge = 1;
          top = IPA_EDGE_REF (cs);
          res = true;
+         if (!new_direct_edge->speculative)
+           continue;
        }
       else if (new_direct_edge)
        {
@@ -2764,27 +3128,39 @@ update_indirect_edges_after_inlining (struct cgraph_edge *cs,
              res = true;
            }
          top = IPA_EDGE_REF (cs);
+         /* If a speculative edge was introduced, we still need to update
+            the call info of the indirect edge.  */
+         if (!new_direct_edge->speculative)
+           continue;
        }
-      else if (jfunc->type == IPA_JF_PASS_THROUGH
-              && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
+      if (jfunc->type == IPA_JF_PASS_THROUGH
+          && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
        {
          if (ici->agg_contents
-             && !ipa_get_jf_pass_through_agg_preserved (jfunc))
+             && !ipa_get_jf_pass_through_agg_preserved (jfunc)
+             && !ici->polymorphic)
            ici->param_index = -1;
          else
-           ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
+           {
+             ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
+             if (ici->polymorphic
+                 && !ipa_get_jf_pass_through_type_preserved (jfunc))
+               ici->vptr_changed = true;
+           }
        }
       else if (jfunc->type == IPA_JF_ANCESTOR)
        {
          if (ici->agg_contents
-             && !ipa_get_jf_ancestor_agg_preserved (jfunc))
+             && !ipa_get_jf_ancestor_agg_preserved (jfunc)
+             && !ici->polymorphic)
            ici->param_index = -1;
          else
            {
              ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
-             if (ipa_get_jf_ancestor_offset (jfunc))
-               ici->outer_type = NULL;
              ici->offset += ipa_get_jf_ancestor_offset (jfunc);
+             if (ici->polymorphic
+                 && !ipa_get_jf_ancestor_type_preserved (jfunc))
+               ici->vptr_changed = true;
            }
        }
       else
@@ -2806,7 +3182,7 @@ update_indirect_edges_after_inlining (struct cgraph_edge *cs,
 static bool
 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
                                   struct cgraph_node *node,
-                                  vec<cgraph_edge_p> *new_edges)
+                                  vec<cgraph_edge *> *new_edges)
 {
   struct cgraph_edge *e;
   bool res;
@@ -2870,21 +3246,20 @@ propagate_controlled_uses (struct cgraph_edge *cs)
            {
              struct cgraph_node *n;
              struct ipa_ref *ref;
-             tree t = new_root_info->known_vals[src_idx];
+             tree t = new_root_info->known_csts[src_idx];
 
              if (t && TREE_CODE (t) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
-                 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
-                 && (ref = ipa_find_reference (new_root,
-                                               n, NULL, 0)))
+                 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
+                 && (ref = new_root->find_reference (n, NULL, 0)))
                {
                  if (dump_file)
                    fprintf (dump_file, "ipa-prop: Removing cloning-created "
                             "reference from %s/%i to %s/%i.\n",
-                            xstrdup (new_root->name ()),
+                            xstrdup_for_dump (new_root->name ()),
                             new_root->order,
-                            xstrdup (n->name ()), n->order);
-                 ipa_remove_reference (ref);
+                            xstrdup_for_dump (n->name ()), n->order);
+                 ref->remove_reference ();
                }
            }
        }
@@ -2901,7 +3276,7 @@ propagate_controlled_uses (struct cgraph_edge *cs)
              gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
                                   && TREE_CODE (TREE_OPERAND (cst, 0))
                                   == FUNCTION_DECL);
-             n = cgraph_get_node (TREE_OPERAND (cst, 0));
+             n = cgraph_node::get (TREE_OPERAND (cst, 0));
              if (n)
                {
                  struct cgraph_node *clone;
@@ -2915,19 +3290,18 @@ propagate_controlled_uses (struct cgraph_edge *cs)
                         && IPA_NODE_REF (clone)->ipcp_orig_node)
                    {
                      struct ipa_ref *ref;
-                     ref = ipa_find_reference (clone,
-                                               n, NULL, 0);
+                     ref = clone->find_reference (n, NULL, 0);
                      if (ref)
                        {
                          if (dump_file)
                            fprintf (dump_file, "ipa-prop: Removing "
                                     "cloning-created reference "
                                     "from %s/%i to %s/%i.\n",
-                                    xstrdup (clone->name ()),
+                                    xstrdup_for_dump (clone->name ()),
                                     clone->order,
-                                    xstrdup (n->name ()),
+                                    xstrdup_for_dump (n->name ()),
                                     n->order);
-                         ipa_remove_reference (ref);
+                         ref->remove_reference ();
                        }
                      clone = clone->callers->caller;
                    }
@@ -2963,12 +3337,12 @@ propagate_controlled_uses (struct cgraph_edge *cs)
 
 bool
 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
-                                  vec<cgraph_edge_p> *new_edges)
+                                  vec<cgraph_edge *> *new_edges)
 {
   bool changed;
   /* Do nothing if the preparation phase has not been carried out yet
      (i.e. during early inlining).  */
-  if (!ipa_node_params_vector.exists ())
+  if (!ipa_node_params_sum)
     return false;
   gcc_assert (ipa_edge_args_vector);
 
@@ -3008,15 +3382,21 @@ ipa_free_all_edge_args (void)
 /* Frees all dynamically allocated structures that the param info points
    to.  */
 
-void
-ipa_free_node_params_substructures (struct ipa_node_params *info)
+ipa_node_params::~ipa_node_params ()
 {
-  info->descriptors.release ();
-  free (info->lattices);
+  descriptors.release ();
+  free (lattices);
   /* Lattice values and their sources are deallocated with their allocation
      pool.  */
-  info->known_vals.release ();
-  memset (info, 0, sizeof (*info));
+  known_contexts.release ();
+
+  lattices = NULL;
+  ipcp_orig_node = NULL;
+  analysis_done = 0;
+  node_enqueued = 0;
+  do_clone_for_all_contexts = 0;
+  is_all_contexts_clone = 0;
+  node_dead = 0;
 }
 
 /* Free all ipa_node_params structures.  */
@@ -3024,13 +3404,18 @@ ipa_free_node_params_substructures (struct ipa_node_params *info)
 void
 ipa_free_all_node_params (void)
 {
-  int i;
-  struct ipa_node_params *info;
+  delete ipa_node_params_sum;
+  ipa_node_params_sum = NULL;
+}
 
-  FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
-    ipa_free_node_params_substructures (info);
+/* Grow ipcp_transformations if necessary.  */
 
-  ipa_node_params_vector.release ();
+void
+ipcp_grow_transformations_if_necessary (void)
+{
+  if (vec_safe_length (ipcp_transformations)
+      <= (unsigned) symtab->cgraph_max_uid)
+    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
 }
 
 /* Set the aggregate replacements of NODE to be AGGVALS.  */
@@ -3039,10 +3424,8 @@ void
 ipa_set_node_agg_value_chain (struct cgraph_node *node,
                              struct ipa_agg_replacement_value *aggvals)
 {
-  if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
-    vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
-
-  (*ipa_node_agg_replacements)[node->uid] = aggvals;
+  ipcp_grow_transformations_if_necessary ();
+  (*ipcp_transformations)[node->uid].agg_values = aggvals;
 }
 
 /* Hook that is called by cgraph.c when an edge is removed.  */
@@ -3075,23 +3458,11 @@ ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
   ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
 }
 
-/* Hook that is called by cgraph.c when a node is removed.  */
-
-static void
-ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
-{
-  /* During IPA-CP updating we can be called on not-yet analyze clones.  */
-  if (ipa_node_params_vector.length () > (unsigned)node->uid)
-    ipa_free_node_params_substructures (IPA_NODE_REF (node));
-  if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
-    (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
-}
-
 /* Hook that is called by cgraph.c when an edge is duplicated.  */
 
 static void
 ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
-                          __attribute__((unused)) void *data)
+                          void *)
 {
   struct ipa_edge_args *old_args, *new_args;
   unsigned int i;
@@ -3102,6 +3473,9 @@ ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
   new_args = IPA_EDGE_REF (dst);
 
   new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
+  if (old_args->polymorphic_call_contexts)
+    new_args->polymorphic_call_contexts
+      = vec_safe_copy (old_args->polymorphic_call_contexts);
 
   for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
     {
@@ -3121,14 +3495,12 @@ ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
              struct ipa_ref *ref;
              symtab_node *n = cgraph_node_for_jfunc (src_jf);
              gcc_checking_assert (n);
-             ref = ipa_find_reference (src->caller, n,
-                                       src->call_stmt, src->lto_stmt_uid);
+             ref = src->caller->find_reference (n, src->call_stmt,
+                                                src->lto_stmt_uid);
              gcc_checking_assert (ref);
-             ipa_clone_ref (ref, dst->caller, ref->stmt);
+             dst->caller->clone_reference (ref, ref->stmt);
 
-             gcc_checking_assert (ipa_refdesc_pool);
-             struct ipa_cst_ref_desc *dst_rdesc
-               = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
+             struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
              dst_rdesc->cs = dst;
              dst_rdesc->refcount = src_rdesc->refcount;
              dst_rdesc->next_duplicate = NULL;
@@ -3136,10 +3508,7 @@ ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
            }
          else if (src_rdesc->cs == src)
            {
-             struct ipa_cst_ref_desc *dst_rdesc;
-             gcc_checking_assert (ipa_refdesc_pool);
-             dst_rdesc
-               = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
+             struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
              dst_rdesc->cs = dst;
              dst_rdesc->refcount = src_rdesc->refcount;
              dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
@@ -3170,54 +3539,79 @@ ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
              dst_jf->value.constant.rdesc = dst_rdesc;
            }
        }
+      else if (dst_jf->type == IPA_JF_PASS_THROUGH
+              && src->caller == dst->caller)
+       {
+         struct cgraph_node *inline_root = dst->caller->global.inlined_to
+           ? dst->caller->global.inlined_to : dst->caller;
+         struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
+         int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
+
+         int c = ipa_get_controlled_uses (root_info, idx);
+         if (c != IPA_UNDESCRIBED_USE)
+           {
+             c++;
+             ipa_set_controlled_uses (root_info, idx, c);
+           }
+       }
     }
 }
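The new pass-through branch above bumps the controlled-uses counter of the referenced formal parameter in the inline root whenever an edge is duplicated within the same caller, unless the counter holds the IPA_UNDESCRIBED_USE sentinel. A standalone plain-C sketch of just that accounting step follows; the array, the function name and the -1 sentinel are stand-ins for the GCC data structures.

#include <stdio.h>

#define UNDESCRIBED_USE (-1)	/* stand-in for IPA_UNDESCRIBED_USE */

/* Per-formal-parameter counters of described uses in the inline root.  */
static int controlled_uses[4] = { 2, UNDESCRIBED_USE, 0, 1 };

/* Called for each pass-through jump function on an edge duplicated
   inside the same caller: one more use of FORMAL_ID now exists.  */
static void
account_duplicated_pass_through (int formal_id)
{
  int c = controlled_uses[formal_id];
  if (c != UNDESCRIBED_USE)
    controlled_uses[formal_id] = c + 1;
}

int
main (void)
{
  account_duplicated_pass_through (0);	/* 2 -> 3 */
  account_duplicated_pass_through (1);	/* stays undescribed */
  for (int i = 0; i < 4; i++)
    printf ("param %d: %d\n", i, controlled_uses[i]);
  return 0;
}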
 
-/* Hook that is called by cgraph.c when a node is duplicated.  */
+/* Analyze a function newly added to the callgraph.  */
 
 static void
-ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
-                          ATTRIBUTE_UNUSED void *data)
+ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
 {
-  struct ipa_node_params *old_info, *new_info;
-  struct ipa_agg_replacement_value *old_av, *new_av;
+  if (node->has_gimple_body_p ())
+    ipa_analyze_node (node);
+}
 
-  ipa_check_create_node_params ();
-  old_info = IPA_NODE_REF (src);
-  new_info = IPA_NODE_REF (dst);
+/* Hook that is called by the function summary when a node is duplicated.  */
+
+void
+ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
+                             ipa_node_params *old_info,
+                             ipa_node_params *new_info)
+{
+  ipa_agg_replacement_value *old_av, *new_av;
 
   new_info->descriptors = old_info->descriptors.copy ();
   new_info->lattices = NULL;
   new_info->ipcp_orig_node = old_info->ipcp_orig_node;
 
-  new_info->uses_analysis_done = old_info->uses_analysis_done;
+  new_info->analysis_done = old_info->analysis_done;
   new_info->node_enqueued = old_info->node_enqueued;
 
   old_av = ipa_get_agg_replacements_for_node (src);
-  if (!old_av)
-    return;
-
-  new_av = NULL;
-  while (old_av)
+  if (old_av)
     {
-      struct ipa_agg_replacement_value *v;
+      new_av = NULL;
+      while (old_av)
+       {
+         struct ipa_agg_replacement_value *v;
 
-      v = ggc_alloc_ipa_agg_replacement_value ();
-      memcpy (v, old_av, sizeof (*v));
-      v->next = new_av;
-      new_av = v;
-      old_av = old_av->next;
+         v = ggc_alloc<ipa_agg_replacement_value> ();
+         memcpy (v, old_av, sizeof (*v));
+         v->next = new_av;
+         new_av = v;
+         old_av = old_av->next;
+       }
+      ipa_set_node_agg_value_chain (dst, new_av);
     }
-  ipa_set_node_agg_value_chain (dst, new_av);
-}
-
 
-/* Analyze newly added function into callgraph.  */
+  ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);
 
-static void
-ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
-{
-  ipa_analyze_node (node);
+  if (src_trans && vec_safe_length (src_trans->alignments) > 0)
+    {
+      ipcp_grow_transformations_if_necessary ();
+      src_trans = ipcp_get_transformation_summary (src);
+      const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
+      vec<ipa_alignment, va_gc> *&dst_alignments
+       = ipcp_get_transformation_summary (dst)->alignments;
+      vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
+      for (unsigned i = 0; i < src_alignments->length (); ++i)
+       dst_alignments->quick_push ((*src_alignments)[i]);
+    }
 }
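ipa_node_params_t::duplicate copies the aggregate replacement chain element by element, pushing each copy onto the front of the new chain, so the duplicate comes out in reverse order (the consumers do not depend on ordering, which is why the simple prepend is enough). A self-contained plain-C sketch of that prepend-style list copy follows; struct repl and copy_chain are illustrative names, not GCC types.

#include <stdlib.h>
#include <string.h>
#include <stdio.h>

struct repl { int index; long offset; struct repl *next; };

/* Copy a singly linked chain by memcpy-ing each element and prepending
   it to the new chain; the copy ends up reversed.  */
static struct repl *
copy_chain (const struct repl *old)
{
  struct repl *copy = NULL;
  while (old)
    {
      struct repl *v = malloc (sizeof (*v));
      memcpy (v, old, sizeof (*v));
      v->next = copy;
      copy = v;
      old = old->next;
    }
  return copy;
}

int
main (void)
{
  struct repl c = { 2, 8, NULL }, b = { 1, 4, &c }, a = { 0, 0, &b };
  for (struct repl *v = copy_chain (&a); v;)
    {
      struct repl *next = v->next;
      printf ("index %d offset %ld\n", v->index, v->offset);
      free (v);
      v = next;
    }
  return 0;
}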
 
 /* Register our cgraph hooks if they are not already there.  */
@@ -3225,20 +3619,16 @@ ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
 void
 ipa_register_cgraph_hooks (void)
 {
+  ipa_check_create_node_params ();
+
   if (!edge_removal_hook_holder)
     edge_removal_hook_holder =
-      cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
-  if (!node_removal_hook_holder)
-    node_removal_hook_holder =
-      cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
+      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
   if (!edge_duplication_hook_holder)
     edge_duplication_hook_holder =
-      cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
-  if (!node_duplication_hook_holder)
-    node_duplication_hook_holder =
-      cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
+      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
   function_insertion_hook_holder =
-      cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
+      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
 }
 
 /* Unregister our cgraph hooks if they are not already there.  */
@@ -3246,15 +3636,11 @@ ipa_register_cgraph_hooks (void)
 static void
 ipa_unregister_cgraph_hooks (void)
 {
-  cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
+  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
   edge_removal_hook_holder = NULL;
-  cgraph_remove_node_removal_hook (node_removal_hook_holder);
-  node_removal_hook_holder = NULL;
-  cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
+  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
   edge_duplication_hook_holder = NULL;
-  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
-  node_duplication_hook_holder = NULL;
-  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
+  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
   function_insertion_hook_holder = NULL;
 }
 
@@ -3264,16 +3650,16 @@ ipa_unregister_cgraph_hooks (void)
 void
 ipa_free_all_structures_after_ipa_cp (void)
 {
-  if (!optimize)
+  if (!optimize && !in_lto_p)
     {
       ipa_free_all_edge_args ();
       ipa_free_all_node_params ();
-      free_alloc_pool (ipcp_sources_pool);
-      free_alloc_pool (ipcp_values_pool);
-      free_alloc_pool (ipcp_agg_lattice_pool);
+      ipcp_sources_pool.release ();
+      ipcp_cst_values_pool.release ();
+      ipcp_poly_ctx_values_pool.release ();
+      ipcp_agg_lattice_pool.release ();
       ipa_unregister_cgraph_hooks ();
-      if (ipa_refdesc_pool)
-       free_alloc_pool (ipa_refdesc_pool);
+      ipa_refdesc_pool.release ();
     }
 }
 
@@ -3286,14 +3672,11 @@ ipa_free_all_structures_after_iinln (void)
   ipa_free_all_edge_args ();
   ipa_free_all_node_params ();
   ipa_unregister_cgraph_hooks ();
-  if (ipcp_sources_pool)
-    free_alloc_pool (ipcp_sources_pool);
-  if (ipcp_values_pool)
-    free_alloc_pool (ipcp_values_pool);
-  if (ipcp_agg_lattice_pool)
-    free_alloc_pool (ipcp_agg_lattice_pool);
-  if (ipa_refdesc_pool)
-    free_alloc_pool (ipa_refdesc_pool);
+  ipcp_sources_pool.release ();
+  ipcp_cst_values_pool.release ();
+  ipcp_poly_ctx_values_pool.release ();
+  ipcp_agg_lattice_pool.release ();
+  ipa_refdesc_pool.release ();
 }
 
 /* Print ipa_tree_map data structures of all functions in the
@@ -3315,6 +3698,7 @@ ipa_print_node_params (FILE *f, struct cgraph_node *node)
     {
       int c;
 
+      fprintf (f, "    ");
       ipa_dump_param (f, info, i);
       if (ipa_is_param_used (info, i))
        fprintf (f, " used");
@@ -3443,7 +3827,15 @@ ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
          if (adj->by_ref)
            ptype = build_pointer_type (adj->type);
          else
-           ptype = adj->type;
+           {
+             ptype = adj->type;
+             if (is_gimple_reg_type (ptype))
+               {
+                 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
+                 if (TYPE_ALIGN (ptype) < malign)
+                   ptype = build_aligned_type (ptype, malign);
+               }
+           }
 
          if (care_for_types)
            new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
@@ -3514,23 +3906,9 @@ ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
       DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
     }
 
-  /* This is a new type, not a copy of an old type.  Need to reassociate
-     variants.  We can handle everything except the main variant lazily.  */
-  tree t = TYPE_MAIN_VARIANT (orig_type);
-  if (orig_type != t)
-    {
-      TYPE_MAIN_VARIANT (new_type) = t;
-      TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
-      TYPE_NEXT_VARIANT (t) = new_type;
-    }
-  else
-    {
-      TYPE_MAIN_VARIANT (new_type) = new_type;
-      TYPE_NEXT_VARIANT (new_type) = NULL;
-    }
-
   TREE_TYPE (fndecl) = new_type;
   DECL_VIRTUAL_P (fndecl) = 0;
+  DECL_LANG_SPECIFIC (fndecl) = NULL;
   otypes.release ();
   oparms.release ();
 }
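The new else-branch earlier in ipa_modify_formal_parameters makes sure a by-value scalar parameter type never advertises less alignment than its machine mode guarantees, by wrapping it with build_aligned_type when TYPE_ALIGN falls short of GET_MODE_ALIGNMENT. A tiny standalone sketch of that "take the larger of declared and natural alignment" rule is below; C11 _Alignof is only a stand-in for the mode alignment, and effective_param_align is an illustrative name.

#include <stdio.h>

/* Return the alignment (in bits) the parameter type should carry:
   never less than what the machine mode requires.  */
static unsigned
effective_param_align (unsigned declared_align_bits, unsigned mode_align_bits)
{
  return declared_align_bits < mode_align_bits
	 ? mode_align_bits : declared_align_bits;
}

int
main (void)
{
  /* E.g. a type extracted from a packed struct may declare 8-bit
     alignment even though its mode wants the natural alignment.  */
  unsigned mode_align_bits = (unsigned) _Alignof (long long) * 8;
  printf ("%u\n", effective_param_align (8, mode_align_bits));
  return 0;
}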
@@ -3540,13 +3918,13 @@ ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
    contain the corresponding call graph edge.  */
 
 void
-ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
+ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
                           ipa_parm_adjustment_vec adjustments)
 {
-  struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
+  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
   vec<tree> vargs;
   vec<tree, va_gc> **debug_args = NULL;
-  gimple new_stmt;
+  gcall *new_stmt;
   gimple_stmt_iterator gsi, prev_gsi;
   tree callee_decl;
   int i, len;
@@ -3554,7 +3932,7 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
   len = adjustments.length ();
   vargs.create (len);
   callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
-  ipa_remove_stmt_references (current_node, stmt);
+  current_node->remove_stmt_references (stmt);
 
   gsi = gsi_for_stmt (stmt);
   prev_gsi = gsi;
@@ -3668,26 +4046,39 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
                  if (TYPE_ALIGN (type) > align)
                    align = TYPE_ALIGN (type);
                }
-             misalign += (tree_to_double_int (off)
-                          .sext (TYPE_PRECISION (TREE_TYPE (off))).low
+             misalign += (offset_int::from (off, SIGNED).to_short_addr ()
                           * BITS_PER_UNIT);
              misalign = misalign & (align - 1);
              if (misalign != 0)
                align = (misalign & -misalign);
              if (align < TYPE_ALIGN (type))
                type = build_aligned_type (type, align);
+             base = force_gimple_operand_gsi (&gsi, base,
+                                              true, NULL, true, GSI_SAME_STMT);
              expr = fold_build2_loc (loc, MEM_REF, type, base, off);
+             /* If expr is not a valid gimple call argument, emit
+                a load into a temporary.  */
+             if (is_gimple_reg_type (TREE_TYPE (expr)))
+               {
+                 gimple tem = gimple_build_assign (NULL_TREE, expr);
+                 if (gimple_in_ssa_p (cfun))
+                   {
+                     gimple_set_vuse (tem, gimple_vuse (stmt));
+                     expr = make_ssa_name (TREE_TYPE (expr), tem);
+                   }
+                 else
+                   expr = create_tmp_reg (TREE_TYPE (expr));
+                 gimple_assign_set_lhs (tem, expr);
+                 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
+               }
            }
          else
            {
              expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
              expr = build_fold_addr_expr (expr);
+             expr = force_gimple_operand_gsi (&gsi, expr,
+                                              true, NULL, true, GSI_SAME_STMT);
            }
-
-         expr = force_gimple_operand_gsi (&gsi, expr,
-                                          adj->by_ref
-                                          || is_gimple_reg_type (adj->type),
-                                          NULL, true, GSI_SAME_STMT);
          vargs.quick_push (expr);
        }
       if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
@@ -3743,6 +4134,15 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
     gimple_set_location (new_stmt, gimple_location (stmt));
   gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
   gimple_call_copy_flags (new_stmt, stmt);
+  if (gimple_in_ssa_p (cfun))
+    {
+      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
+      if (gimple_vdef (stmt))
+       {
+         gimple_set_vdef (new_stmt, gimple_vdef (stmt));
+         SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
+       }
+    }
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
@@ -3752,17 +4152,13 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
     }
   gsi_replace (&gsi, new_stmt, true);
   if (cs)
-    cgraph_set_call_stmt (cs, new_stmt);
+    cs->set_call_stmt (new_stmt);
   do
     {
-      ipa_record_stmt_references (current_node, gsi_stmt (gsi));
+      current_node->record_stmt_references (gsi_stmt (gsi));
       gsi_prev (&gsi);
     }
-  while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
-        || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
-
-  update_ssa (TODO_update_ssa);
-  free_dominance_info (CDI_DOMINATORS);
+  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
 }
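When ipa_modify_call_arguments materializes an aggregate piece as a MEM_REF argument, it derives the achievable alignment from the base alignment and the byte offset: the offset (in bits) is masked by (align - 1), and if the resulting misalignment is non-zero the usable alignment drops to its lowest set bit, which is what `misalign & -misalign' computes. A standalone plain-C sketch of that arithmetic follows, assuming a fully aligned base (the real code also folds in any pre-existing misalignment); access_align is an illustrative helper and 8 stands in for BITS_PER_UNIT.

#include <stdio.h>

/* Alignment (in bits) guaranteed for an access at OFFSET_BYTES from a
   base that is aligned to BASE_ALIGN_BITS.  */
static unsigned
access_align (unsigned base_align_bits, unsigned offset_bytes)
{
  unsigned misalign = (offset_bytes * 8) & (base_align_bits - 1);
  if (misalign != 0)
    return misalign & -misalign;	/* lowest set bit */
  return base_align_bits;
}

int
main (void)
{
  printf ("%u\n", access_align (256, 4));	/* 32-bit alignment */
  printf ("%u\n", access_align (256, 6));	/* 16-bit alignment */
  printf ("%u\n", access_align (256, 32));	/* full 256 bits */
  return 0;
}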
 
 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
@@ -3856,7 +4252,7 @@ ipa_get_adjustment_candidate (tree **expr, bool *convert,
 
   if (TREE_CODE (base) == MEM_REF)
     {
-      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
+      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
       base = TREE_OPERAND (base, 0);
     }
 
@@ -4076,11 +4472,6 @@ ipa_write_jump_function (struct output_block *ob,
     {
     case IPA_JF_UNKNOWN:
       break;
-    case IPA_JF_KNOWN_TYPE:
-      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
-      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
-      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
-      break;
     case IPA_JF_CONST:
       gcc_assert (
          EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
@@ -4093,7 +4484,6 @@ ipa_write_jump_function (struct output_block *ob,
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
-         bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
          streamer_write_bitpack (&bp);
        }
       else
@@ -4104,11 +4494,9 @@ ipa_write_jump_function (struct output_block *ob,
       break;
     case IPA_JF_ANCESTOR:
       streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
-      stream_write_tree (ob, jump_func->value.ancestor.type, true);
       streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
       bp = bitpack_create (ob->main_stream);
       bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
-      bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
       streamer_write_bitpack (&bp);
       break;
     }
@@ -4127,6 +4515,15 @@ ipa_write_jump_function (struct output_block *ob,
       streamer_write_uhwi (ob, item->offset);
       stream_write_tree (ob, item->value, true);
     }
+
+  bp = bitpack_create (ob->main_stream);
+  bp_pack_value (&bp, jump_func->alignment.known, 1);
+  streamer_write_bitpack (&bp);
+  if (jump_func->alignment.known)
+    {
+      streamer_write_uhwi (ob, jump_func->alignment.align);
+      streamer_write_uhwi (ob, jump_func->alignment.misalign);
+    }
 }
 
 /* Read in jump function JUMP_FUNC from IB.  */
@@ -4145,17 +4542,8 @@ ipa_read_jump_function (struct lto_input_block *ib,
   switch (jftype)
     {
     case IPA_JF_UNKNOWN:
-      jump_func->type = IPA_JF_UNKNOWN;
+      ipa_set_jf_unknown (jump_func);
       break;
-    case IPA_JF_KNOWN_TYPE:
-      {
-       HOST_WIDE_INT offset = streamer_read_uhwi (ib);
-       tree base_type = stream_read_tree (ib, data_in);
-       tree component_type = stream_read_tree (ib, data_in);
-
-       ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
-       break;
-      }
     case IPA_JF_CONST:
       ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
       break;
@@ -4166,9 +4554,7 @@ ipa_read_jump_function (struct lto_input_block *ib,
          int formal_id =  streamer_read_uhwi (ib);
          struct bitpack_d bp = streamer_read_bitpack (ib);
          bool agg_preserved = bp_unpack_value (&bp, 1);
-         bool type_preserved = bp_unpack_value (&bp, 1);
-         ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
-                                         type_preserved);
+         ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
        }
       else
        {
@@ -4181,14 +4567,10 @@ ipa_read_jump_function (struct lto_input_block *ib,
     case IPA_JF_ANCESTOR:
       {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
-       tree type = stream_read_tree (ib, data_in);
        int formal_id = streamer_read_uhwi (ib);
        struct bitpack_d bp = streamer_read_bitpack (ib);
        bool agg_preserved = bp_unpack_value (&bp, 1);
-       bool type_preserved = bp_unpack_value (&bp, 1);
-
-       ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
-                            type_preserved);
+       ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
        break;
       }
     }
@@ -4207,6 +4589,17 @@ ipa_read_jump_function (struct lto_input_block *ib,
       item.value = stream_read_tree (ib, data_in);
       jump_func->agg.items->quick_push (item);
     }
+
+  struct bitpack_d bp = streamer_read_bitpack (ib);
+  bool alignment_known = bp_unpack_value (&bp, 1);
+  if (alignment_known)
+    {
+      jump_func->alignment.known = true;
+      jump_func->alignment.align = streamer_read_uhwi (ib);
+      jump_func->alignment.misalign = streamer_read_uhwi (ib);
+    }
+  else
+    jump_func->alignment.known = false;
 }
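The writer above now ends every jump function with a one-bit "alignment known" flag and streams the align/misalign pair only when the flag is set; the reader mirrors the same order exactly, or the stream gets out of sync. A self-contained plain-C sketch of that optional-field discipline over a byte buffer is below. It deliberately does not use GCC's streamer or bitpack API; struct alignment, write_alignment and read_alignment are illustrative names.

#include <stdio.h>
#include <string.h>

struct alignment { int known; unsigned align, misalign; };

/* Append the optional alignment record to BUF; return bytes written.  */
static size_t
write_alignment (unsigned char *buf, const struct alignment *a)
{
  size_t n = 0;
  buf[n++] = (unsigned char) (a->known != 0);
  if (a->known)
    {
      memcpy (buf + n, &a->align, sizeof (a->align));
      n += sizeof (a->align);
      memcpy (buf + n, &a->misalign, sizeof (a->misalign));
      n += sizeof (a->misalign);
    }
  return n;
}

/* Exact mirror of write_alignment; return bytes consumed.  */
static size_t
read_alignment (const unsigned char *buf, struct alignment *a)
{
  size_t n = 0;
  a->known = buf[n++];
  if (a->known)
    {
      memcpy (&a->align, buf + n, sizeof (a->align));
      n += sizeof (a->align);
      memcpy (&a->misalign, buf + n, sizeof (a->misalign));
      n += sizeof (a->misalign);
    }
  else
    a->align = a->misalign = 0;
  return n;
}

int
main (void)
{
  unsigned char buf[16];
  struct alignment out = { 1, 64, 16 }, in;
  size_t written = write_alignment (buf, &out);
  size_t consumed = read_alignment (buf, &in);
  printf ("wrote %zu read %zu: known=%d align=%u misalign=%u\n",
          written, consumed, in.known, in.align, in.misalign);
  return 0;
}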
 
 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
@@ -4220,21 +4613,23 @@ ipa_write_indirect_edge_info (struct output_block *ob,
   struct bitpack_d bp;
 
   streamer_write_hwi (ob, ii->param_index);
-  streamer_write_hwi (ob, ii->offset);
   bp = bitpack_create (ob->main_stream);
   bp_pack_value (&bp, ii->polymorphic, 1);
   bp_pack_value (&bp, ii->agg_contents, 1);
   bp_pack_value (&bp, ii->member_ptr, 1);
   bp_pack_value (&bp, ii->by_ref, 1);
-  bp_pack_value (&bp, ii->maybe_in_construction, 1);
-  bp_pack_value (&bp, ii->maybe_derived_type, 1);
+  bp_pack_value (&bp, ii->vptr_changed, 1);
   streamer_write_bitpack (&bp);
+  if (ii->agg_contents || ii->polymorphic)
+    streamer_write_hwi (ob, ii->offset);
+  else
+    gcc_assert (ii->offset == 0);
 
   if (ii->polymorphic)
     {
       streamer_write_hwi (ob, ii->otr_token);
       stream_write_tree (ob, ii->otr_type, true);
-      stream_write_tree (ob, ii->outer_type, true);
+      ii->context.stream_out (ob);
     }
 }
 
@@ -4243,26 +4638,28 @@ ipa_write_indirect_edge_info (struct output_block *ob,
 
 static void
 ipa_read_indirect_edge_info (struct lto_input_block *ib,
-                            struct data_in *data_in ATTRIBUTE_UNUSED,
+                            struct data_in *data_in,
                             struct cgraph_edge *cs)
 {
   struct cgraph_indirect_call_info *ii = cs->indirect_info;
   struct bitpack_d bp;
 
   ii->param_index = (int) streamer_read_hwi (ib);
-  ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
   bp = streamer_read_bitpack (ib);
   ii->polymorphic = bp_unpack_value (&bp, 1);
   ii->agg_contents = bp_unpack_value (&bp, 1);
   ii->member_ptr = bp_unpack_value (&bp, 1);
   ii->by_ref = bp_unpack_value (&bp, 1);
-  ii->maybe_in_construction = bp_unpack_value (&bp, 1);
-  ii->maybe_derived_type = bp_unpack_value (&bp, 1);
+  ii->vptr_changed = bp_unpack_value (&bp, 1);
+  if (ii->agg_contents || ii->polymorphic)
+    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
+  else
+    ii->offset = 0;
   if (ii->polymorphic)
     {
       ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
       ii->otr_type = stream_read_tree (ib, data_in);
-      ii->outer_type = stream_read_tree (ib, data_in);
+      ii->context.stream_in (ib, data_in);
     }
 }
 
@@ -4286,7 +4683,7 @@ ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
   for (j = 0; j < ipa_get_param_count (info); j++)
     streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
   bp = bitpack_create (ob->main_stream);
-  gcc_assert (info->uses_analysis_done
+  gcc_assert (info->analysis_done
              || ipa_get_param_count (info) == 0);
   gcc_assert (!info->node_enqueued);
   gcc_assert (!info->ipcp_orig_node);
@@ -4299,17 +4696,29 @@ ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
     {
       struct ipa_edge_args *args = IPA_EDGE_REF (e);
 
-      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
+      streamer_write_uhwi (ob,
+                          ipa_get_cs_argument_count (args) * 2
+                          + (args->polymorphic_call_contexts != NULL));
       for (j = 0; j < ipa_get_cs_argument_count (args); j++)
-       ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
+       {
+         ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
+         if (args->polymorphic_call_contexts != NULL)
+           ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
+       }
     }
   for (e = node->indirect_calls; e; e = e->next_callee)
     {
       struct ipa_edge_args *args = IPA_EDGE_REF (e);
 
-      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
+      streamer_write_uhwi (ob,
+                          ipa_get_cs_argument_count (args) * 2
+                          + (args->polymorphic_call_contexts != NULL));
       for (j = 0; j < ipa_get_cs_argument_count (args); j++)
-       ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
+       {
+         ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
+         if (args->polymorphic_call_contexts != NULL)
+           ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
+       }
       ipa_write_indirect_edge_info (ob, e);
     }
 }
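Rather than adding a separate field to the LTO stream, ipa_write_node_info encodes two things in one unsigned: the argument count times two, plus one if polymorphic call contexts follow. The reader in the next hunk recovers the flag from the low bit and the count by halving. A minimal standalone sketch of that tagged-count encoding in plain C (encode_count/decode_count are illustrative names):

#include <stdio.h>

static unsigned
encode_count (unsigned count, int have_contexts)
{
  return count * 2 + (have_contexts != 0);
}

static void
decode_count (unsigned encoded, unsigned *count, int *have_contexts)
{
  *have_contexts = encoded & 1;
  *count = encoded / 2;
}

int
main (void)
{
  unsigned count;
  int have_contexts;
  decode_count (encode_count (3, 1), &count, &have_contexts);
  printf ("count=%u contexts=%d\n", count, have_contexts);
  decode_count (encode_count (3, 0), &count, &have_contexts);
  printf ("count=%u contexts=%d\n", count, have_contexts);
  return 0;
}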
@@ -4332,7 +4741,7 @@ ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
     
   bp = streamer_read_bitpack (ib);
   if (ipa_get_param_count (info) != 0)
-    info->uses_analysis_done = true;
+    info->analysis_done = true;
   info->node_enqueued = false;
   for (k = 0; k < ipa_get_param_count (info); k++)
     ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
@@ -4342,26 +4751,42 @@ ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
     {
       struct ipa_edge_args *args = IPA_EDGE_REF (e);
       int count = streamer_read_uhwi (ib);
+      bool contexts_computed = count & 1;
+      count /= 2;
 
       if (!count)
        continue;
       vec_safe_grow_cleared (args->jump_functions, count);
+      if (contexts_computed)
+       vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
 
       for (k = 0; k < ipa_get_cs_argument_count (args); k++)
-       ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
-                               data_in);
+       {
+         ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
+                                 data_in);
+         if (contexts_computed)
+           ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
+       }
     }
   for (e = node->indirect_calls; e; e = e->next_callee)
     {
       struct ipa_edge_args *args = IPA_EDGE_REF (e);
       int count = streamer_read_uhwi (ib);
+      bool contexts_computed = count & 1;
+      count /= 2;
 
       if (count)
        {
          vec_safe_grow_cleared (args->jump_functions, count);
+         if (contexts_computed)
+           vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
           for (k = 0; k < ipa_get_cs_argument_count (args); k++)
-           ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
-                                   data_in);
+           {
+             ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
+                                     data_in);
+             if (contexts_computed)
+               ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
+           }
        }
       ipa_read_indirect_edge_info (ib, data_in, e);
     }
@@ -4378,18 +4803,17 @@ ipa_prop_write_jump_functions (void)
   lto_symtab_encoder_iterator lsei;
   lto_symtab_encoder_t encoder;
 
-
-  if (!ipa_node_params_vector.exists ())
+  if (!ipa_node_params_sum)
     return;
 
   ob = create_output_block (LTO_section_jump_functions);
   encoder = ob->decl_state->symtab_node_encoder;
-  ob->cgraph_node = NULL;
+  ob->symbol = NULL;
   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
        lsei_next_function_in_partition (&lsei))
     {
       node = lsei_cgraph_node (lsei);
-      if (cgraph_function_with_gimple_body_p (node)
+      if (node->has_gimple_body_p ()
          && IPA_NODE_REF (node) != NULL)
        count++;
     }
@@ -4401,7 +4825,7 @@ ipa_prop_write_jump_functions (void)
        lsei_next_function_in_partition (&lsei))
     {
       node = lsei_cgraph_node (lsei);
-      if (cgraph_function_with_gimple_body_p (node)
+      if (node->has_gimple_body_p ()
          && IPA_NODE_REF (node) != NULL)
         ipa_write_node_info (ob, node);
     }
@@ -4422,12 +4846,11 @@ ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
   const int main_offset = cfg_offset + header->cfg_size;
   const int string_offset = main_offset + header->main_size;
   struct data_in *data_in;
-  struct lto_input_block ib_main;
   unsigned int i;
   unsigned int count;
 
-  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
-                       header->main_size);
+  lto_input_block ib_main ((const char *) data + main_offset,
+                          header->main_size, file_data->mode_table);
 
   data_in =
     lto_data_in_create (file_data, (const char *) data + string_offset,
@@ -4442,7 +4865,8 @@ ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
 
       index = streamer_read_uhwi (&ib_main);
       encoder = file_data->symtab_node_encoder;
-      node = cgraph (lto_symtab_encoder_deref (encoder, index));
+      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
+                                                               index));
       gcc_assert (node->definition);
       ipa_read_node_info (&ib_main, node, data_in);
     }
@@ -4486,7 +4910,7 @@ ipa_update_after_lto_read (void)
 }
 
 void
-write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
+write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
 {
   int node_ref;
   unsigned int count = 0;
@@ -4514,14 +4938,38 @@ write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
       bp_pack_value (&bp, av->by_ref, 1);
       streamer_write_bitpack (&bp);
     }
+
+  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
+  if (ts && vec_safe_length (ts->alignments) > 0)
+    {
+      count = ts->alignments->length ();
+
+      streamer_write_uhwi (ob, count);
+      for (unsigned i = 0; i < count; ++i)
+       {
+         ipa_alignment *parm_al = &(*ts->alignments)[i];
+
+         struct bitpack_d bp;
+         bp = bitpack_create (ob->main_stream);
+         bp_pack_value (&bp, parm_al->known, 1);
+         streamer_write_bitpack (&bp);
+         if (parm_al->known)
+           {
+             streamer_write_uhwi (ob, parm_al->align);
+             streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
+                                          parm_al->misalign);
+           }
+       }
+    }
+  else
+    streamer_write_uhwi (ob, 0);
 }
 
 /* Stream in the aggregate value replacement chain for NODE from IB.  */
 
 static void
-read_agg_replacement_chain (struct lto_input_block *ib,
-                           struct cgraph_node *node,
-                           struct data_in *data_in)
+read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
+                              data_in *data_in)
 {
   struct ipa_agg_replacement_value *aggvals = NULL;
   unsigned int count, i;
@@ -4532,7 +4980,7 @@ read_agg_replacement_chain (struct lto_input_block *ib,
       struct ipa_agg_replacement_value *av;
       struct bitpack_d bp;
 
-      av = ggc_alloc_ipa_agg_replacement_value ();
+      av = ggc_alloc<ipa_agg_replacement_value> ();
       av->offset = streamer_read_uhwi (ib);
       av->index = streamer_read_uhwi (ib);
       av->value = stream_read_tree (ib, data_in);
@@ -4542,12 +4990,37 @@ read_agg_replacement_chain (struct lto_input_block *ib,
       aggvals = av;
     }
   ipa_set_node_agg_value_chain (node, aggvals);
+
+  count = streamer_read_uhwi (ib);
+  if (count > 0)
+    {
+      ipcp_grow_transformations_if_necessary ();
+
+      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
+      vec_safe_grow_cleared (ts->alignments, count);
+
+      for (i = 0; i < count; i++)
+       {
+         ipa_alignment *parm_al;
+         parm_al = &(*ts->alignments)[i];
+         struct bitpack_d bp;
+         bp = streamer_read_bitpack (ib);
+         parm_al->known = bp_unpack_value (&bp, 1);
+         if (parm_al->known)
+           {
+             parm_al->align = streamer_read_uhwi (ib);
+             parm_al->misalign
+               = streamer_read_hwi_in_range (ib, "ipa-prop misalign",
+                                             0, parm_al->align);
+           }
+       }
+    }
 }
 
 /* Write all aggregate replacement for nodes in set.  */
 
 void
-ipa_prop_write_all_agg_replacement (void)
+ipcp_write_transformation_summaries (void)
 {
   struct cgraph_node *node;
   struct output_block *ob;
@@ -4555,18 +5028,14 @@ ipa_prop_write_all_agg_replacement (void)
   lto_symtab_encoder_iterator lsei;
   lto_symtab_encoder_t encoder;
 
-  if (!ipa_node_agg_replacements)
-    return;
-
   ob = create_output_block (LTO_section_ipcp_transform);
   encoder = ob->decl_state->symtab_node_encoder;
-  ob->cgraph_node = NULL;
+  ob->symbol = NULL;
   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
        lsei_next_function_in_partition (&lsei))
     {
       node = lsei_cgraph_node (lsei);
-      if (cgraph_function_with_gimple_body_p (node)
-         && ipa_get_agg_replacements_for_node (node) != NULL)
+      if (node->has_gimple_body_p ())
        count++;
     }
 
@@ -4576,9 +5045,8 @@ ipa_prop_write_all_agg_replacement (void)
        lsei_next_function_in_partition (&lsei))
     {
       node = lsei_cgraph_node (lsei);
-      if (cgraph_function_with_gimple_body_p (node)
-         && ipa_get_agg_replacements_for_node (node) != NULL)
-       write_agg_replacement_chain (ob, node);
+      if (node->has_gimple_body_p ())
+       write_ipcp_transformation_info (ob, node);
     }
   streamer_write_char_stream (ob->main_stream, 0);
   produce_asm (ob, NULL);
@@ -4599,12 +5067,11 @@ read_replacements_section (struct lto_file_decl_data *file_data,
   const int main_offset = cfg_offset + header->cfg_size;
   const int string_offset = main_offset + header->main_size;
   struct data_in *data_in;
-  struct lto_input_block ib_main;
   unsigned int i;
   unsigned int count;
 
-  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
-                       header->main_size);
+  lto_input_block ib_main ((const char *) data + main_offset,
+                          header->main_size, file_data->mode_table);
 
   data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
                                header->string_size, vNULL);
@@ -4618,9 +5085,10 @@ read_replacements_section (struct lto_file_decl_data *file_data,
 
       index = streamer_read_uhwi (&ib_main);
       encoder = file_data->symtab_node_encoder;
-      node = cgraph (lto_symtab_encoder_deref (encoder, index));
+      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
+                                                               index));
       gcc_assert (node->definition);
-      read_agg_replacement_chain (&ib_main, node, data_in);
+      read_ipcp_transformation_info (&ib_main, node, data_in);
     }
   lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
@@ -4630,7 +5098,7 @@ read_replacements_section (struct lto_file_decl_data *file_data,
 /* Read IPA-CP aggregate replacements.  */
 
 void
-ipa_prop_read_all_agg_replacement (void)
+ipcp_read_transformation_summaries (void)
 {
   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
   struct lto_file_decl_data *file_data;
@@ -4682,17 +5150,181 @@ adjust_agg_replacement_values (struct cgraph_node *node,
     v->index = adj[v->index];
 }
 
+/* Dominator walker driving the ipcp modification phase.  */
+
+class ipcp_modif_dom_walker : public dom_walker
+{
+public:
+  ipcp_modif_dom_walker (struct func_body_info *fbi,
+                        vec<ipa_param_descriptor> descs,
+                        struct ipa_agg_replacement_value *av,
+                        bool *sc, bool *cc)
+    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
+      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
+
+  virtual void before_dom_children (basic_block);
+
+private:
+  struct func_body_info *m_fbi;
+  vec<ipa_param_descriptor> m_descriptors;
+  struct ipa_agg_replacement_value *m_aggval;
+  bool *m_something_changed, *m_cfg_changed;
+};
+
+void
+ipcp_modif_dom_walker::before_dom_children (basic_block bb)
+{
+  gimple_stmt_iterator gsi;
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+    {
+      struct ipa_agg_replacement_value *v;
+      gimple stmt = gsi_stmt (gsi);
+      tree rhs, val, t;
+      HOST_WIDE_INT offset, size;
+      int index;
+      bool by_ref, vce;
+
+      if (!gimple_assign_load_p (stmt))
+       continue;
+      rhs = gimple_assign_rhs1 (stmt);
+      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
+       continue;
+
+      vce = false;
+      t = rhs;
+      while (handled_component_p (t))
+       {
+         /* V_C_E can do things like convert an array of integers to one
+            bigger integer and similar things we do not handle below.  */
+         if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
+           {
+             vce = true;
+             break;
+           }
+         t = TREE_OPERAND (t, 0);
+       }
+      if (vce)
+       continue;
+
+      if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
+                                    &offset, &size, &by_ref))
+       continue;
+      for (v = m_aggval; v; v = v->next)
+       if (v->index == index
+           && v->offset == offset)
+         break;
+      if (!v
+         || v->by_ref != by_ref
+         || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
+       continue;
+
+      gcc_checking_assert (is_gimple_ip_invariant (v->value));
+      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
+       {
+         if (fold_convertible_p (TREE_TYPE (rhs), v->value))
+           val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
+         else if (TYPE_SIZE (TREE_TYPE (rhs))
+                  == TYPE_SIZE (TREE_TYPE (v->value)))
+           val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
+         else
+           {
+             if (dump_file)
+               {
+                 fprintf (dump_file, "    const ");
+                 print_generic_expr (dump_file, v->value, 0);
+                 fprintf (dump_file, "  can't be converted to type of ");
+                 print_generic_expr (dump_file, rhs, 0);
+                 fprintf (dump_file, "\n");
+               }
+             continue;
+           }
+       }
+      else
+       val = v->value;
+
+      if (dump_file && (dump_flags & TDF_DETAILS))
+       {
+         fprintf (dump_file, "Modifying stmt:\n  ");
+         print_gimple_stmt (dump_file, stmt, 0, 0);
+       }
+      gimple_assign_set_rhs_from_tree (&gsi, val);
+      update_stmt (stmt);
+
+      if (dump_file && (dump_flags & TDF_DETAILS))
+       {
+         fprintf (dump_file, "into:\n  ");
+         print_gimple_stmt (dump_file, stmt, 0, 0);
+         fprintf (dump_file, "\n");
+       }
+
+      *m_something_changed = true;
+      if (maybe_clean_eh_stmt (stmt)
+         && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
+       *m_cfg_changed = true;
+    }
+
+}
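ipcp_modif_dom_walker moves the statement-rewriting loop that used to iterate over all basic blocks into a dom_walker subclass, so blocks are now visited in dominator-tree order and per-block alias-analysis results cached for dominating blocks can be reused. A standalone plain-C sketch of the overall shape, a preorder walk that runs a per-block callback before descending to dominated children, is below; struct block and walk are illustrative stand-ins for GCC's CFG and dom_walker, not its API.

#include <stdio.h>

struct block
{
  int index;
  struct block *dom_children[4];	/* blocks immediately dominated */
  int n_children;
};

/* Call BEFORE_DOM_CHILDREN on BB, then recurse into the blocks it
   dominates, mirroring dom_walker::walk with before_dom_children.  */
static void
walk (struct block *bb, void (*before_dom_children) (struct block *))
{
  before_dom_children (bb);
  for (int i = 0; i < bb->n_children; i++)
    walk (bb->dom_children[i], before_dom_children);
}

static void
visit (struct block *bb)
{
  printf ("visiting bb %d\n", bb->index);
}

int
main (void)
{
  struct block b2 = { 2, { 0 }, 0 }, b3 = { 3, { 0 }, 0 };
  struct block entry = { 0, { &b2, &b3 }, 2 };
  walk (&entry, visit);
  return 0;
}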
+
+/* Update alignment of formal parameters as described in
+   ipcp_transformation_summary.  */
+
+static void
+ipcp_update_alignments (struct cgraph_node *node)
+{
+  tree fndecl = node->decl;
+  tree parm = DECL_ARGUMENTS (fndecl);
+  tree next_parm = parm;
+  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
+  if (!ts || vec_safe_length (ts->alignments) == 0)
+    return;
+  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
+  unsigned count = alignments.length ();
+
+  for (unsigned i = 0; i < count; ++i, parm = next_parm)
+    {
+      if (node->clone.combined_args_to_skip
+         && bitmap_bit_p (node->clone.combined_args_to_skip, i))
+       continue;
+      gcc_checking_assert (parm);
+      next_parm = DECL_CHAIN (parm);
+
+      if (!alignments[i].known || !is_gimple_reg (parm))
+       continue;
+      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
+      if (!ddef)
+       continue;
+
+      if (dump_file)
+       fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
+                "misalignment to %u\n", i, alignments[i].align,
+                alignments[i].misalign);
+
+      struct ptr_info_def *pi = get_ptr_info (ddef);
+      gcc_checking_assert (pi);
+      unsigned old_align;
+      unsigned old_misalign;
+      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
 
-/* Function body transformation phase.  */
+      if (old_known
+         && old_align >= alignments[i].align)
+       {
+         if (dump_file)
+           fprintf (dump_file, "    But the alignment was already %u.\n",
+                    old_align);
+         continue;
+       }
+      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
+    }
+}
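ipcp_update_alignments only ever strengthens what is already known about a parameter's pointer alignment: if get_ptr_info_alignment reports an alignment at least as large as the recorded one, the recorded value is ignored. A small standalone plain-C sketch of that "never lower a known alignment" policy follows; struct ptr_align and maybe_set_alignment are illustrative, not GCC's ptr_info_def API.

#include <stdio.h>

struct ptr_align { int known; unsigned align, misalign; };

/* Apply ALIGN/MISALIGN to PI only if nothing at least as strong is
   already known; return 1 if the info was updated.  */
static int
maybe_set_alignment (struct ptr_align *pi, unsigned align, unsigned misalign)
{
  if (pi->known && pi->align >= align)
    return 0;			/* already at least this strong */
  pi->known = 1;
  pi->align = align;
  pi->misalign = misalign;
  return 1;
}

int
main (void)
{
  struct ptr_align pi = { 1, 16, 0 };
  printf ("%d\n", maybe_set_alignment (&pi, 8, 0));	/* kept: 0 */
  printf ("%d\n", maybe_set_alignment (&pi, 64, 32));	/* upgraded: 1 */
  printf ("align=%u misalign=%u\n", pi.align, pi.misalign);
  return 0;
}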
+
+/* IPCP transformation phase propagating aggregate values into the body.  */
 
 unsigned int
 ipcp_transform_function (struct cgraph_node *node)
 {
-  vec<ipa_param_descriptor_t> descriptors = vNULL;
-  struct param_analysis_info *parms_ainfo;
+  vec<ipa_param_descriptor> descriptors = vNULL;
+  struct func_body_info fbi;
   struct ipa_agg_replacement_value *aggval;
-  gimple_stmt_iterator gsi;
-  basic_block bb;
   int param_count;
   bool cfg_changed = false, something_changed = false;
 
@@ -4703,6 +5335,7 @@ ipcp_transform_function (struct cgraph_node *node)
     fprintf (dump_file, "Modification phase of node %s/%i\n",
             node->name (), node->order);
 
+  ipcp_update_alignments (node);
   aggval = ipa_get_agg_replacements_for_node (node);
   if (!aggval)
       return 0;
@@ -4712,102 +5345,28 @@ ipcp_transform_function (struct cgraph_node *node)
   adjust_agg_replacement_values (node, aggval);
   if (dump_file)
     ipa_dump_agg_replacement_values (dump_file, aggval);
-  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
-  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
-  descriptors.safe_grow_cleared (param_count);
-  ipa_populate_param_decls (node, descriptors);
-
-  FOR_EACH_BB (bb)
-    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
-      {
-       struct ipa_agg_replacement_value *v;
-       gimple stmt = gsi_stmt (gsi);
-       tree rhs, val, t;
-       HOST_WIDE_INT offset, size;
-       int index;
-       bool by_ref, vce;
-
-       if (!gimple_assign_load_p (stmt))
-         continue;
-       rhs = gimple_assign_rhs1 (stmt);
-       if (!is_gimple_reg_type (TREE_TYPE (rhs)))
-         continue;
-
-       vce = false;
-       t = rhs;
-       while (handled_component_p (t))
-         {
-           /* V_C_E can do things like convert an array of integers to one
-               bigger integer and similar things we do not handle below.  */
-            if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
-             {
-               vce = true;
-               break;
-             }
-           t = TREE_OPERAND (t, 0);
-         }
-       if (vce)
-         continue;
 
-       if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
-                                      rhs, &index, &offset, &size, &by_ref))
-         continue;
-       for (v = aggval; v; v = v->next)
-         if (v->index == index
-             && v->offset == offset)
-           break;
-       if (!v
-           || v->by_ref != by_ref
-           || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
-         continue;
+  fbi.node = node;
+  fbi.info = NULL;
+  fbi.bb_infos = vNULL;
+  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
+  fbi.param_count = param_count;
+  fbi.aa_walked = 0;
 
-       gcc_checking_assert (is_gimple_ip_invariant (v->value));
-       if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
-         {
-           if (fold_convertible_p (TREE_TYPE (rhs), v->value))
-             val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
-           else if (TYPE_SIZE (TREE_TYPE (rhs))
-                    == TYPE_SIZE (TREE_TYPE (v->value)))
-             val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
-           else
-             {
-               if (dump_file)
-                 {
-                   fprintf (dump_file, "    const ");
-                   print_generic_expr (dump_file, v->value, 0);
-                   fprintf (dump_file, "  can't be converted to type of ");
-                   print_generic_expr (dump_file, rhs, 0);
-                   fprintf (dump_file, "\n");
-                 }
-               continue;
-             }
-         }
-       else
-         val = v->value;
-
-       if (dump_file && (dump_flags & TDF_DETAILS))
-         {
-           fprintf (dump_file, "Modifying stmt:\n  ");
-           print_gimple_stmt (dump_file, stmt, 0, 0);
-         }
-       gimple_assign_set_rhs_from_tree (&gsi, val);
-       update_stmt (stmt);
-
-       if (dump_file && (dump_flags & TDF_DETAILS))
-         {
-           fprintf (dump_file, "into:\n  ");
-           print_gimple_stmt (dump_file, stmt, 0, 0);
-           fprintf (dump_file, "\n");
-         }
-
-       something_changed = true;
-       if (maybe_clean_eh_stmt (stmt)
-           && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
-         cfg_changed = true;
-      }
+  descriptors.safe_grow_cleared (param_count);
+  ipa_populate_param_decls (node, descriptors);
+  calculate_dominance_info (CDI_DOMINATORS);
+  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
+                        &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
 
-  (*ipa_node_agg_replacements)[node->uid] = NULL;
-  free_parms_ainfo (parms_ainfo, param_count);
+  int i;
+  struct ipa_bb_info *bi;
+  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
+    free_ipa_bb_info (bi);
+  fbi.bb_infos.release ();
+  free_dominance_info (CDI_DOMINATORS);
+  (*ipcp_transformations)[node->uid].agg_values = NULL;
+  (*ipcp_transformations)[node->uid].alignments = NULL;
   descriptors.release ();
 
   if (!something_changed)