/* Convert function calls to rtl insns, for GNU C compiler.
- Copyright (C) 1989-2015 Free Software Foundation, Inc.
+ Copyright (C) 1989-2016 Free Software Foundation, Inc.
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
+#include "target.h"
#include "rtl.h"
-#include "hash-set.h"
-#include "vec.h"
-#include "input.h"
-#include "alias.h"
-#include "symtab.h"
-#include "inchash.h"
#include "tree.h"
+#include "gimple.h"
+#include "predict.h"
+#include "tm_p.h"
+#include "stringpool.h"
+#include "expmed.h"
+#include "optabs.h"
+#include "emit-rtl.h"
+#include "cgraph.h"
+#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
-#include "stringpool.h"
-#include "attribs.h"
-#include "predict.h"
-#include "hashtab.h"
-#include "hard-reg-set.h"
-#include "function.h"
-#include "basic-block.h"
-#include "tree-ssa-alias.h"
#include "internal-fn.h"
-#include "gimple-expr.h"
-#include "is-a.h"
-#include "gimple.h"
-#include "flags.h"
-#include "statistics.h"
-#include "insn-config.h"
-#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
-#include "emit-rtl.h"
-#include "stmt.h"
#include "expr.h"
-#include "insn-codes.h"
-#include "optabs.h"
-#include "libfuncs.h"
-#include "regs.h"
-#include "diagnostic-core.h"
#include "output.h"
-#include "tm_p.h"
-#include "timevar.h"
-#include "sbitmap.h"
-#include "bitmap.h"
#include "langhooks.h"
-#include "target.h"
-#include "hash-map.h"
-#include "plugin-api.h"
-#include "ipa-ref.h"
-#include "cgraph.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
&& targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
? force_not_mem (memory_address (FUNCTION_MODE, funexp))
: memory_address (FUNCTION_MODE, funexp));
- else if (flag_pic && !flag_plt && fndecl_or_type
- && TREE_CODE (fndecl_or_type) == FUNCTION_DECL
- && !targetm.binds_local_p (fndecl_or_type))
- {
- funexp = force_reg (Pmode, funexp);
- }
else if (! sibcallp)
{
if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
- rtx_insn *call_insn;
- rtx call, funmem;
+ rtx call, funmem, pat;
int already_popped = 0;
HOST_WIDE_INT n_popped
= targetm.calls.return_pops_args (fndecl, funtype, stack_size);
else if (fntree)
set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
-#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
- if ((ecf_flags & ECF_SIBCALL)
- && HAVE_sibcall_pop && HAVE_sibcall_value_pop
- && (n_popped > 0 || stack_size == 0))
+ if (ecf_flags & ECF_SIBCALL)
{
- rtx n_pop = GEN_INT (n_popped);
- rtx pat;
-
- /* If this subroutine pops its own args, record that in the call insn
- if possible, for the sake of frame pointer elimination. */
-
if (valreg)
- pat = GEN_SIBCALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
- next_arg_reg, n_pop);
+ pat = targetm.gen_sibcall_value (valreg, funmem,
+ rounded_stack_size_rtx,
+ next_arg_reg, NULL_RTX);
else
- pat = GEN_SIBCALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
- n_pop);
-
- emit_call_insn (pat);
- already_popped = 1;
+ pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
+ next_arg_reg, GEN_INT (struct_value_size));
}
- else
-#endif
-
-#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
/* If the target has "call" or "call_value" insns, then prefer them
if no arguments are actually popped. If the target does not have
"call" or "call_value" insns, then we must use the popping versions
even if the call has no arguments to pop. */
-#if defined (HAVE_call) && defined (HAVE_call_value)
- if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
- && n_popped > 0)
-#else
- if (HAVE_call_pop && HAVE_call_value_pop)
-#endif
+ else if (n_popped > 0
+ || !(valreg
+ ? targetm.have_call_value ()
+ : targetm.have_call ()))
{
rtx n_pop = GEN_INT (n_popped);
- rtx pat;
/* If this subroutine pops its own args, record that in the call insn
if possible, for the sake of frame pointer elimination. */
if (valreg)
- pat = GEN_CALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
- next_arg_reg, n_pop);
+ pat = targetm.gen_call_value_pop (valreg, funmem,
+ rounded_stack_size_rtx,
+ next_arg_reg, n_pop);
else
- pat = GEN_CALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
- n_pop);
+ pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
+ next_arg_reg, n_pop);
- emit_call_insn (pat);
already_popped = 1;
}
else
-#endif
-
-#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
- if ((ecf_flags & ECF_SIBCALL)
- && HAVE_sibcall && HAVE_sibcall_value)
{
if (valreg)
- emit_call_insn (GEN_SIBCALL_VALUE (valreg, funmem,
- rounded_stack_size_rtx,
- next_arg_reg, NULL_RTX));
+ pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
+ next_arg_reg, NULL_RTX);
else
- emit_call_insn (GEN_SIBCALL (funmem, rounded_stack_size_rtx,
- next_arg_reg,
- GEN_INT (struct_value_size)));
+ pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
+ GEN_INT (struct_value_size));
}
- else
-#endif
-
-#if defined (HAVE_call) && defined (HAVE_call_value)
- if (HAVE_call && HAVE_call_value)
- {
- if (valreg)
- emit_call_insn (GEN_CALL_VALUE (valreg, funmem, rounded_stack_size_rtx,
- next_arg_reg, NULL_RTX));
- else
- emit_call_insn (GEN_CALL (funmem, rounded_stack_size_rtx, next_arg_reg,
- GEN_INT (struct_value_size)));
- }
- else
-#endif
- gcc_unreachable ();
+ emit_insn (pat);
/* Find the call we just emitted. */
- call_insn = last_call_insn ();
+ rtx_call_insn *call_insn = last_call_insn ();
/* Some target create a fresh MEM instead of reusing the one provided
above. Set its MEM_EXPR. */
/* We assume that alloca will always be called by name. It
makes no sense to pass it as a pointer-to-function to
anything that does not understand its behavior. */
- if (((IDENTIFIER_LENGTH (name_decl) == 6
- && name[0] == 'a'
- && ! strcmp (name, "alloca"))
- || (IDENTIFIER_LENGTH (name_decl) == 16
- && name[0] == '_'
- && ! strcmp (name, "__builtin_alloca"))))
+ if (IDENTIFIER_LENGTH (name_decl) == 6
+ && name[0] == 'a'
+ && ! strcmp (name, "alloca"))
flags |= ECF_MAY_BE_ALLOCA;
/* Disregard prefix _, __, __x or __builtin_. */
flags |= ECF_NORETURN;
}
+ if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ case BUILT_IN_ALLOCA:
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
+ flags |= ECF_MAY_BE_ALLOCA;
+ break;
+ default:
+ break;
+ }
+
return flags;
}
/* Return true if STMT is an alloca call. */
bool
-gimple_alloca_call_p (const_gimple stmt)
+gimple_alloca_call_p (const gimple *stmt)
{
tree fndecl;
return flags;
}
+/* Return true if TYPE should be passed by invisible reference, i.e. the
+   callee receives the address of a copy instead of the value itself.
+   CA, MODE and NAMED_ARG are forwarded unchanged to the target's
+   pass_by_reference hook for the target-specific decision.  */
+
+bool
+pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
+		   tree type, bool named_arg)
+{
+  if (type)
+    {
+      /* If this type contains non-trivial constructors, then it is
+	 forbidden for the middle-end to create any new copies.
+	 Passing by reference is then mandatory regardless of target.  */
+      if (TREE_ADDRESSABLE (type))
+	return true;
+
+      /* GCC post 3.4 passes *all* variable sized types by reference.  */
+      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+	return true;
+
+      /* If a record type should be passed the same as its first (and only)
+	 member, use the type and mode of that member.  Transparent
+	 aggregates make the decision on the wrapped type instead.  */
+      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
+	{
+	  type = TREE_TYPE (first_field (type));
+	  mode = TYPE_MODE (type);
+	}
+    }
+
+  /* Defer the final answer to the target; ABIs differ on when small
+     aggregates or unnamed (variadic) arguments go by reference.  */
+  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
+					  type, named_arg);
+}
+
+/* Return true if TYPE, which is passed by reference, should be callee
+   copied instead of caller copied.  An addressable (non-trivially
+   copyable) type must never be copied by the middle-end, so the answer
+   is forced to false before consulting the target's callee_copies hook
+   with CA, MODE and NAMED_ARG.  */
+
+bool
+reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
+			 tree type, bool named_arg)
+{
+  if (type && TREE_ADDRESSABLE (type))
+    return false;
+  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
+				      named_arg);
+}
+
+
/* Precompute all register parameters as described by ARGS, storing values
into fields within the ARGS array.
|| (GET_CODE (args[i].value) == SUBREG
&& REG_P (SUBREG_REG (args[i].value)))))
&& args[i].mode != BLKmode
- && set_src_cost (args[i].value, optimize_insn_for_speed_p ())
- > COSTS_N_INSNS (1)
+ && (set_src_cost (args[i].value, args[i].mode,
+ optimize_insn_for_speed_p ())
+ > COSTS_N_INSNS (1))
&& ((*reg_parm_seen
&& targetm.small_register_classes_for_mode_p (args[i].mode))
|| optimize))
args[i].aligned_regs[j] = reg;
word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
- word_mode, word_mode);
+ word_mode, word_mode, false);
/* There is no need to restrict this code to loading items
in TYPE_ALIGN sized hunks. The bitfield instructions can
bytes -= bitsize / BITS_PER_UNIT;
store_bit_field (reg, bitsize, endian_correction, 0, 0,
- word_mode, word);
+ word_mode, word, false);
}
}
}
and may be modified by this routine.
OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
- flags which may may be modified by this routine.
+ flags which may be modified by this routine.
MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
that requires allocation of stack space.
else
copy = assign_temp (type, 1, 0);
- store_expr (args[i].tree_value, copy, 0, false);
+ store_expr (args[i].tree_value, copy, 0, false, false);
/* Just change the const function to pure and then let
the next test clear the pure based on
rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
unsigned int bitsize = size * BITS_PER_UNIT - bitoff;
- rtx x = extract_bit_field (mem, bitsize, bitoff, 1,
- dest, word_mode, word_mode);
+ rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
+ word_mode, word_mode, false);
if (BYTES_BIG_ENDIAN)
x = expand_shift (LSHIFT_EXPR, word_mode, x,
BITS_PER_WORD - bitsize, dest, 1);
try_tail_call = 0;
/* Rest of purposes for tail call optimizations to fail. */
- if (
-#ifdef HAVE_sibcall_epilogue
- !HAVE_sibcall_epilogue
-#else
- 1
-#endif
- || !try_tail_call
+ if (!try_tail_call
+ || !targetm.have_sibcall_epilogue ()
/* Doing sibling call optimization needs some work, since
structure_value_addr can be allocated on the stack.
It does not seem worth the effort since few optimizable
compute_argument_addresses (args, argblock, num_actuals);
+ /* Stack is properly aligned, pops can't safely be deferred during
+ the evaluation of the arguments. */
+ NO_DEFER_POP;
+
+ /* Precompute all register parameters. It isn't safe to compute
+ anything once we have started filling any specific hard regs.
+ TLS symbols sometimes need a call to resolve. Precompute
+ register parameters before any stack pointer manipulation
+ to avoid unaligned stack in the called function. */
+ precompute_register_parameters (num_actuals, args, ®_parm_seen);
+
+ OK_DEFER_POP;
+
/* Perform stack alignment before the first push (the last arg). */
if (argblock == 0
&& adjusted_args_size.constant > reg_parm_stack_space
funexp = rtx_for_function_call (fndecl, addr);
- /* Precompute all register parameters. It isn't safe to compute anything
- once we have started filling any specific hard regs. */
- precompute_register_parameters (num_actuals, args, ®_parm_seen);
-
if (CALL_EXPR_STATIC_CHAIN (exp))
static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
else
if (flag_ipa_ra)
{
- rtx last, datum = orgfun;
+ rtx datum = orgfun;
gcc_assert (GET_CODE (datum) == SYMBOL_REF);
- last = last_call_insn ();
+ rtx_call_insn *last = last_call_insn ();
add_reg_note (last, REG_CALL_DECL, datum);
}
if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
i = INTVAL (XEXP (XEXP (x, 0), 1));
+ /* arg.locate doesn't contain the pretend_args_size offset,
+ it's part of argblock. Ensure we don't count it in I. */
+ if (STACK_GROWS_DOWNWARD)
+ i -= crtl->args.pretend_args_size;
+ else
+ i += crtl->args.pretend_args_size;
+
/* expand_call should ensure this. */
gcc_assert (!arg->locate.offset.var
&& arg->locate.size.var == 0