ipa-cp.c (ipcp_print_edge_profiles): Test for node->analyzed rather than for DECL_SAV...
[gcc.git] / gcc / tree-inline.c
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index a9ca33b14d4689cbac1e7508ee9847ae61e4e0ef..b5eb033588e429c08d6e7c6322891a5c213ba880 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -715,6 +715,7 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
                    {
                      *tp = build1 (INDIRECT_REF, type, new);
                      TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
+                     TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
                    }
                }
              *walk_subtrees = 0;
@@ -795,7 +796,8 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
    later  */
 
 static basic_block
-copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
+copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
+         gcov_type count_scale)
 {
   block_stmt_iterator bsi, copy_bsi;
   basic_block copy_basic_block;
@@ -949,7 +951,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
                pointer_set_insert (id->statements_to_fold, stmt);
              /* We're duplicating a CALL_EXPR.  Find any corresponding
                 callgraph edges and update or duplicate them.  */
-             if (call && (decl = get_callee_fndecl (call)))
+             if (call)
                {
                  struct cgraph_node *node;
                  struct cgraph_edge *edge;
@@ -960,7 +962,8 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
                      edge = cgraph_edge (id->src_node, orig_stmt);
                      if (edge)
                        cgraph_clone_edge (edge, id->dst_node, stmt,
-                                          REG_BR_PROB_BASE, 1, edge->frequency, true);
+                                          REG_BR_PROB_BASE, 1,
+                                          edge->frequency, true);
                      break;
 
                    case CB_CGE_MOVE_CLONES:
@@ -969,8 +972,8 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
                           node = node->next_clone)
                        {
                          edge = cgraph_edge (node, orig_stmt);
-                         gcc_assert (edge);
-                         cgraph_set_call_stmt (edge, stmt);
+                         if (edge)
+                           cgraph_set_call_stmt (edge, stmt);
                        }
                      /* FALLTHRU */
 
@@ -1108,7 +1111,7 @@ update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
    accordingly.  Edges will be taken care of later.  Assume aux
    pointers to point to the copies of each BB.  */
 static void
-copy_edges_for_bb (basic_block bb, int count_scale, basic_block ret_bb)
+copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
 {
   basic_block new_bb = (basic_block) bb->aux;
   edge_iterator ei;
@@ -1199,7 +1202,7 @@ copy_edges_for_bb (basic_block bb, int count_scale, basic_block ret_bb)
 static void
 copy_phis_for_bb (basic_block bb, copy_body_data *id)
 {
-  basic_block new_bb = bb->aux;
+  basic_block const new_bb = (basic_block) bb->aux;
   edge_iterator ei;
   tree phi;
 
@@ -1217,7 +1220,7 @@ copy_phis_for_bb (basic_block bb, copy_body_data *id)
            = new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
-             edge old_edge = find_edge (new_edge->src->aux, bb);
+             edge const old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
              tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              tree new_arg = arg;
 
@@ -1257,7 +1260,7 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
   struct function *new_cfun
      = (struct function *) ggc_alloc_cleared (sizeof (struct function));
   struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
-  int count_scale, frequency_scale;
+  gcov_type count_scale, frequency_scale;
 
   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
     count_scale = (REG_BR_PROB_BASE * count
@@ -1321,7 +1324,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
   struct function *cfun_to_copy;
   basic_block bb;
   tree new_fndecl = NULL;
-  int count_scale, frequency_scale;
+  gcov_type count_scale, frequency_scale;
   int last;
 
   if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
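An aside, not part of the patch: the int -> gcov_type widenings in the two
hunks above matter because REG_BR_PROB_BASE is 10000 and profile counts are
64-bit, so the intermediate product in the count_scale expression can exceed
INT_MAX.  A minimal standalone sketch, with hypothetical counts:

#include <stdio.h>
#include <stdint.h>

typedef int64_t gcov_type;          /* GCC's profile counters are 64-bit */
#define REG_BR_PROB_BASE 10000

int
main (void)
{
  gcov_type count = 3000000;        /* hypothetical caller count */
  gcov_type entry_count = 5000000;  /* hypothetical callee entry count */

  /* As a 32-bit int, REG_BR_PROB_BASE * count (3e10 here) would overflow;
     as a gcov_type the intermediate product stays exact.  */
  gcov_type count_scale = REG_BR_PROB_BASE * count / entry_count;

  printf ("count_scale = %lld\n", (long long) count_scale);
  return 0;
}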
@@ -1588,8 +1591,9 @@ setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
          || !is_gimple_reg (var))
        {
           tree_stmt_iterator i;
+          struct gimplify_ctx gctx;
 
-         push_gimplify_context ();
+         push_gimplify_context (&gctx);
          gimplify_stmt (&init_stmt);
          if (gimple_in_ssa_p (cfun)
               && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
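The hunk above, and the matching one in optimize_inline_calls further down,
follow the revised push_gimplify_context interface, which takes a
caller-allocated context rather than allocating one internally.  A sketch of
the calling convention as the patch uses it; gimplify_one_stmt is a
hypothetical wrapper and the declarations are GCC-internal, so this is not
standalone:

static void
gimplify_one_stmt (tree *stmt_p)
{
  struct gimplify_ctx gctx;       /* caller-owned, lives on the stack */

  push_gimplify_context (&gctx);  /* install GCTX as the current context */
  gimplify_stmt (stmt_p);
  pop_gimplify_context (NULL);    /* uninstall; no body to scan */
}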
@@ -2171,7 +2175,7 @@ struct eni_data
 static tree
 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
 {
-  struct eni_data *d = data;
+  struct eni_data *const d = (struct eni_data *) data;
   tree x = *tp;
   unsigned cost;
 
@@ -2577,6 +2581,20 @@ add_lexical_block (tree current_block, tree new_block)
   BLOCK_SUPERCONTEXT (new_block) = current_block;
 }
 
+/* Fetch the callee declaration from the call graph edge going from NODE and
+   associated with the call statement STMT.  Return NULL_TREE if not found.  */
+static tree
+get_indirect_callee_fndecl (struct cgraph_node *node, tree stmt)
+{
+  struct cgraph_edge *cs;
+
+  cs = cgraph_edge (node, stmt);
+  if (cs)
+    return cs->callee->decl;
+
+  return NULL_TREE;
+}
+
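A hedged illustration of the lookup get_indirect_callee_fndecl performs:
cgraph_edge walks NODE's outgoing edges for the one whose call statement is
STMT, and it is the edge, not the statement, that records the callee to which
the call graph has resolved an indirect call.  A standalone toy model
(toy_edge and toy_indirect_callee_fndecl are invented names):

#include <stddef.h>

struct toy_edge
{
  void *call_stmt;           /* the call statement the edge hangs off */
  const char *callee_decl;   /* callee the call graph resolved it to */
  struct toy_edge *next;
};

static const char *
toy_indirect_callee_fndecl (const struct toy_edge *edges, void *stmt)
{
  const struct toy_edge *e;

  for (e = edges; e; e = e->next)
    if (e->call_stmt == stmt)
      return e->callee_decl;
  return NULL;               /* mirrors returning NULL_TREE */
}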
 /* If *TP is a CALL_EXPR, replace it with its inline expansion.  */
 
 static bool
@@ -2618,7 +2636,11 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
      If we cannot, then there is no hope of inlining the function.  */
   fn = get_callee_fndecl (t);
   if (!fn)
-    goto egress;
+    {
+      fn = get_indirect_callee_fndecl (id->dst_node, stmt);
+      if (!fn)
+       goto egress;
+    }
 
   /* Turn forward declarations into real ones.  */
   fn = cgraph_node (fn)->decl;
@@ -2669,6 +2691,12 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
      inlining.  */
   if (!cgraph_inline_p (cg_edge, &reason))
     {
+      /* If this call was originally indirect, we do not want to emit any
+        inlining-related warnings or sorry messages, because there is no
+        guarantee that such calls can be inlined.  */
+      if (cg_edge->indirect_call)
+       goto egress;
+
       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
          /* Avoid warnings during early inline pass. */
          && (!flag_unit_at_a_time || cgraph_global_info_ready))
@@ -2984,6 +3012,8 @@ optimize_inline_calls (tree fn)
   tree prev_fn;
   basic_block bb;
   int last = n_basic_blocks;
+  struct gimplify_ctx gctx;
+
   /* There is no point in performing inlining if errors have already
      occurred -- and we might crash if we try to inline invalid
      code.  */
@@ -3010,7 +3040,7 @@ optimize_inline_calls (tree fn)
   id.transform_lang_insert_block = NULL;
   id.statements_to_fold = pointer_set_create ();
 
-  push_gimplify_context ();
+  push_gimplify_context (&gctx);
 
   /* We make no attempts to keep dominance info up-to-date.  */
   free_dominance_info (CDI_DOMINATORS);
@@ -3587,7 +3617,7 @@ tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
   if (tree_map)
     for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
       {
-       replace_info = VARRAY_GENERIC_PTR (tree_map, i);
+       replace_info = (struct ipa_replace_map *) VARRAY_GENERIC_PTR (tree_map, i);
        if (replace_info->replace_p)
          insert_decl_map (&id, replace_info->old_tree,
                           replace_info->new_tree);
@@ -3678,3 +3708,34 @@ build_duplicate_type (tree type)
 
   return type;
 }
+
+/* Return whether it is safe to inline CALLEE into CALLER, given that they
+   may have been compiled with different target or optimization options.  */
+bool
+tree_can_inline_p (tree caller, tree callee)
+{
+  /* Don't inline a function compiled at a lower optimization level than
+     its caller, or with different space constraints (hot/cold functions).  */
+  tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
+  tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
+
+  if (caller_tree != callee_tree)
+    {
+      struct cl_optimization *caller_opt
+       = TREE_OPTIMIZATION ((caller_tree)
+                            ? caller_tree
+                            : optimization_default_node);
+
+      struct cl_optimization *callee_opt
+       = TREE_OPTIMIZATION ((callee_tree)
+                            ? callee_tree
+                            : optimization_default_node);
+
+      if ((caller_opt->optimize > callee_opt->optimize)
+         || (caller_opt->optimize_size != callee_opt->optimize_size))
+       return false;
+    }
+
+  /* Allow the backend to decide if inlining is ok.  */
+  return targetm.target_option.can_inline_p (caller, callee);
+}
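A hedged, simplified model of the policy tree_can_inline_p implements, with
cl_optimization reduced to the two fields read above and the trailing target
hook left out (toy_opts and toy_can_inline_p are invented names):

#include <stdbool.h>

struct toy_opts
{
  int optimize;        /* effective -O level */
  bool optimize_size;  /* -Os in effect?  */
};

/* Refuse to inline a callee compiled at a lower -O level than the caller,
   or with a different size/speed trade-off; otherwise say yes (the real
   function then defers to targetm.target_option.can_inline_p).  */
static bool
toy_can_inline_p (const struct toy_opts *caller,
                  const struct toy_opts *callee)
{
  if (caller->optimize > callee->optimize
      || caller->optimize_size != callee->optimize_size)
    return false;
  return true;
}

Note the asymmetry, in the model as in the patch: a more-optimized callee may
be inlined into a less-optimized caller, but not the other way around.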