Allow constant global VAR_DECLs in constant jump functions
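
With this change, ipa_get_indirect_edge_target_1 also hands the scalar constant known for a parameter to ipa_find_agg_cst_for_param, and it accepts an aggregate value even when the memory is not otherwise guaranteed to be unmodified, provided the value was read out of a constant global's initializer. The kind of situation this targets is an indirect call through a member of a read-only global aggregate, roughly as in the following sketch (a hypothetical illustration, not a test case from the patch; all identifiers are made up):

    /* Hypothetical illustration: 'consume' receives the address of a constant
       global, so the function pointer stored in it can be read from the
       initializer of 'global_ops' and the indirect call resolved to 'impl'
       at IPA time, even without a local proof that the memory is unmodified.  */
    struct ops { int (*hook) (int); };

    static int
    impl (int x)
    {
      return x + 1;
    }

    static const struct ops global_ops = { impl };

    static int __attribute__ ((noinline))
    consume (const struct ops *o, int v)
    {
      return o->hook (v);   /* indirect call through the aggregate */
    }

    int
    entry (int v)
    {
      return consume (&global_ops, v);
    }
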
[gcc.git] / gcc / ipa-cp.c
index d1c6236e289554d3e1aaa3edcf2bc4c520c0fa31..8caa973e46c7fe44a2e99c5d1b9bf618e81a986c 100644
@@ -1,5 +1,5 @@
 /* Interprocedural constant propagation
-   Copyright (C) 2005-2015 Free Software Foundation, Inc.
+   Copyright (C) 2005-2016 Free Software Foundation, Inc.
 
    Contributed by Razya Ladelsky <RAZYA@il.ibm.com> and Martin Jambor
    <mjambor@suse.cz>
@@ -103,23 +103,18 @@ along with GCC; see the file COPYING3.  If not see
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
-#include "alias.h"
+#include "backend.h"
 #include "tree.h"
-#include "options.h"
-#include "fold-const.h"
-#include "gimple-fold.h"
 #include "gimple-expr.h"
-#include "target.h"
-#include "backend.h"
 #include "predict.h"
-#include "hard-reg-set.h"
-#include "cgraph.h"
 #include "alloc-pool.h"
-#include "symbol-summary.h"
-#include "ipa-prop.h"
 #include "tree-pass.h"
-#include "flags.h"
+#include "cgraph.h"
 #include "diagnostic.h"
+#include "fold-const.h"
+#include "gimple-fold.h"
+#include "symbol-summary.h"
+#include "ipa-prop.h"
 #include "tree-pretty-print.h"
 #include "tree-inline.h"
 #include "params.h"
@@ -618,7 +613,7 @@ ipcp_cloning_candidate_p (struct cgraph_node *node)
       return false;
     }
 
-  if (!optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
+  if (node->optimize_for_size_p ())
     {
       if (dump_file)
         fprintf (dump_file, "Not considering %s for cloning; "
@@ -1031,9 +1026,10 @@ ipa_get_jf_pass_through_result (struct ipa_jump_func *jfunc, tree input)
 {
   tree restype, res;
 
-  gcc_checking_assert (is_gimple_ip_invariant (input));
   if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
     return input;
+  if (!is_gimple_ip_invariant (input))
+    return NULL_TREE;
 
   if (TREE_CODE_CLASS (ipa_get_jf_pass_through_operation (jfunc))
       == tcc_comparison)
@@ -1060,7 +1056,7 @@ ipa_get_jf_ancestor_result (struct ipa_jump_func *jfunc, tree input)
     {
       tree t = TREE_OPERAND (input, 0);
       t = build_ref_for_offset (EXPR_LOCATION (t), t,
-                               ipa_get_jf_ancestor_offset (jfunc),
+                               ipa_get_jf_ancestor_offset (jfunc), false,
                                ptr_type_node, NULL, false);
       return build_fold_addr_expr (t);
     }
@@ -1882,6 +1878,18 @@ propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
   return ret;
 }
 
+/* Return true if on the way from CS->caller to the final (non-alias and
+   non-thunk) destination, the call passes through a thunk.  */
+
+static bool
+call_passes_through_thunk_p (cgraph_edge *cs)
+{
+  cgraph_node *alias_or_thunk = cs->callee;
+  while (alias_or_thunk->alias)
+    alias_or_thunk = alias_or_thunk->get_alias_target ();
+  return alias_or_thunk->thunk.thunk_p;
+}
+
 /* Propagate constants from the caller to the callee of CS.  INFO describes the
    caller.  */
 
@@ -1890,7 +1898,7 @@ propagate_constants_accross_call (struct cgraph_edge *cs)
 {
   struct ipa_node_params *callee_info;
   enum availability availability;
-  struct cgraph_node *callee, *alias_or_thunk;
+  cgraph_node *callee;
   struct ipa_edge_args *args;
   bool ret = false;
   int i, args_count, parms_count;
@@ -1928,10 +1936,7 @@ propagate_constants_accross_call (struct cgraph_edge *cs)
   /* If this call goes through a thunk we must not propagate to the first (0th)
      parameter.  However, we might need to uncover a thunk from below a series
      of aliases first.  */
-  alias_or_thunk = cs->callee;
-  while (alias_or_thunk->alias)
-    alias_or_thunk = alias_or_thunk->get_alias_target ();
-  if (alias_or_thunk->thunk.thunk_p)
+  if (call_passes_through_thunk_p (cs))
     {
       ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
                                                               0));
@@ -1995,9 +2000,9 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
 
       if (ie->indirect_info->agg_contents)
        {
-         if (agg_reps)
+         t = NULL;
+         if (agg_reps && ie->indirect_info->guaranteed_unmodified)
            {
-             t = NULL;
              while (agg_reps)
                {
                  if (agg_reps->index == param_index
@@ -2010,15 +2015,22 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
                  agg_reps = agg_reps->next;
                }
            }
-         else if (known_aggs.length () > (unsigned int) param_index)
+         if (!t)
            {
              struct ipa_agg_jump_function *agg;
-             agg = known_aggs[param_index];
-             t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
-                                             ie->indirect_info->by_ref);
+             if (known_aggs.length () > (unsigned int) param_index)
+               agg = known_aggs[param_index];
+             else
+               agg = NULL;
+             bool from_global_constant;
+             t = ipa_find_agg_cst_for_param (agg, known_csts[param_index],
+                                             ie->indirect_info->offset,
+                                             ie->indirect_info->by_ref,
+                                             &from_global_constant);
+             if (!from_global_constant
+                 && !ie->indirect_info->guaranteed_unmodified)
+               t = NULL_TREE;
            }
-         else
-           t = NULL;
        }
       else
        t = known_csts[param_index];
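
The guaranteed_unmodified bit consulted in the hunk above is set up elsewhere (not in this diff); conceptually it records that the memory the call target is loaded from still holds the value it had on entry to the function. Without it, a value recorded at the call boundary must be dropped, because the callee may have overwritten the aggregate before the indirect call, roughly as in this hypothetical case (identifiers invented for illustration):

    /* Hypothetical: the aggregate is writable and is clobbered before the
       indirect call, so the 'impl' value known at the call site of 'consume'
       must not be used for devirtualization.  A constant global is exempt
       from this problem, since its storage cannot legally change.  */
    struct ops { int (*hook) (int); };

    static int impl (int x)  { return x + 1; }
    static int other (int x) { return x - 1; }

    static int __attribute__ ((noinline))
    consume (struct ops *o, int v)
    {
      o->hook = other;      /* the aggregate changes after entry */
      return o->hook (v);   /* calls 'other', not 'impl' */
    }

    int
    entry (int v)
    {
      struct ops local_ops = { impl };
      return consume (&local_ops, v);
    }
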
@@ -2062,7 +2074,8 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
     {
        struct ipa_agg_jump_function *agg;
        agg = known_aggs[param_index];
-       t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
+       t = ipa_find_agg_cst_for_param (agg, known_csts[param_index],
+                                      ie->indirect_info->offset,
                                       true);
     }
 
@@ -2073,15 +2086,22 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
       unsigned HOST_WIDE_INT offset;
       if (vtable_pointer_value_to_vtable (t, &vtable, &offset))
        {
+         bool can_refer;
          target = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
-                                                     vtable, offset);
-         if (target)
+                                                     vtable, offset, &can_refer);
+         if (can_refer)
            {
-             if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
-                  && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
+             if (!target
+                 || (TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
+                     && DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
                  || !possible_polymorphic_call_target_p
                       (ie, cgraph_node::get (target)))
-               target = ipa_impossible_devirt_target (ie, target);
+               {
+                 /* Do not speculate builtin_unreachable, it is stupid!  */
+                 if (ie->indirect_info->vptr_changed)
+                   return NULL;
+                 target = ipa_impossible_devirt_target (ie, target);
+               }
               *speculative = ie->indirect_info->vptr_changed;
              if (!*speculative)
                return target;
@@ -2159,7 +2179,11 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
 
   if (target && !possible_polymorphic_call_target_p (ie,
                                                     cgraph_node::get (target)))
-    target = ipa_impossible_devirt_target (ie, target);
+    {
+      if (*speculative)
+       return NULL;
+      target = ipa_impossible_devirt_target (ie, target);
+    }
 
   return target;
 }
@@ -2272,7 +2296,7 @@ good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
 {
   if (time_benefit == 0
       || !opt_for_fn (node->decl, flag_ipa_cp_clone)
-      || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
+      || node->optimize_for_size_p ())
     return false;
 
   gcc_assert (size_cost > 0);
@@ -2392,12 +2416,14 @@ gather_context_independent_values (struct ipa_node_params *info,
        *removable_params_cost
          += ipa_get_param_move_cost (info, i);
 
+      if (!ipa_is_param_used (info, i))
+       continue;
+
       ipcp_lattice<ipa_polymorphic_call_context> *ctxlat = &plats->ctxlat;
+      /* Do not account known context as reason for cloning.  We can see
+        if it permits devirtualization.  */
       if (ctxlat->is_single_const ())
-       {
-         (*known_contexts)[i] = ctxlat->values->value;
-         ret = true;
-       }
+       (*known_contexts)[i] = ctxlat->values->value;
 
       if (known_aggs)
        {
@@ -2499,7 +2525,10 @@ estimate_local_effects (struct cgraph_node *node)
                                                    &known_contexts, &known_aggs,
                                                    &removable_params_cost);
   known_aggs_ptrs = agg_jmp_p_vec_for_t_vec (known_aggs);
-  if (always_const)
+  int devirt_bonus = devirtualization_time_bonus (node, known_csts,
+                                          known_contexts, known_aggs_ptrs);
+  if (always_const || devirt_bonus
+      || (removable_params_cost && node->local.can_change_signature))
     {
       struct caller_statistics stats;
       inline_hints hints;
@@ -2510,8 +2539,7 @@ estimate_local_effects (struct cgraph_node *node)
                                              false);
       estimate_ipcp_clone_size_and_time (node, known_csts, known_contexts,
                                         known_aggs_ptrs, &size, &time, &hints);
-      time -= devirtualization_time_bonus (node, known_csts, known_contexts,
-                                          known_aggs_ptrs);
+      time -= devirt_bonus;
       time -= hint_time_bonus (hints);
       time -= removable_params_cost;
       size -= stats.n_calls * removable_params_cost;
@@ -2520,8 +2548,7 @@ estimate_local_effects (struct cgraph_node *node)
        fprintf (dump_file, " - context independent values, size: %i, "
                 "time_benefit: %i\n", size, base_time - time);
 
-      if (size <= 0
-         || node->will_be_removed_from_program_if_no_direct_calls_p ())
+      if (size <= 0 || node->local.local)
        {
          info->do_clone_for_all_contexts = true;
          base_time = time;
@@ -2549,6 +2576,10 @@ estimate_local_effects (struct cgraph_node *node)
                     "max_new_size would be reached with %li.\n",
                     size + overall_size);
        }
+      else if (dump_file && (dump_flags & TDF_DETAILS))
+       fprintf (dump_file, "   Not cloning for all contexts because "
+                "!good_cloning_opportunity_p.\n");
     }
 
   for (i = 0; i < count ; i++)
@@ -3498,7 +3529,11 @@ find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
          struct ipa_jump_func *jump_func;
          tree t;
 
-          if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs)))
+          if (i >= ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
+             || (i == 0
+                 && call_passes_through_thunk_p (cs))
+             || (!cs->callee->instrumentation_clone
+                 && cs->callee->function_symbol ()->instrumentation_clone))
             {
               newval = NULL_TREE;
               break;
@@ -4424,7 +4459,7 @@ identify_dead_nodes (struct cgraph_node *node)
 {
   struct cgraph_node *v;
   for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
-    if (v->will_be_removed_from_program_if_no_direct_calls_p ()
+    if (v->local.local
        && !v->call_for_symbol_thunks_and_aliases
             (has_undead_caller_from_outside_scc_p, NULL, true))
       IPA_NODE_REF (v)->node_dead = 1;