gcc/calls.c
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989-2021 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #define INCLUDE_STRING
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "stringpool.h"
33 #include "expmed.h"
34 #include "optabs.h"
35 #include "emit-rtl.h"
36 #include "cgraph.h"
37 #include "diagnostic-core.h"
38 #include "fold-const.h"
39 #include "stor-layout.h"
40 #include "varasm.h"
41 #include "internal-fn.h"
42 #include "dojump.h"
43 #include "explow.h"
44 #include "calls.h"
45 #include "expr.h"
46 #include "output.h"
47 #include "langhooks.h"
48 #include "except.h"
49 #include "dbgcnt.h"
50 #include "rtl-iter.h"
51 #include "tree-vrp.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-strlen.h"
54 #include "intl.h"
55 #include "stringpool.h"
56 #include "hash-map.h"
57 #include "hash-traits.h"
58 #include "attribs.h"
59 #include "builtins.h"
60 #include "gimple-fold.h"
61 #include "attr-fnspec.h"
62 #include "value-query.h"
63
64 #include "tree-pretty-print.h"
65
66 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
67 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
68
69 /* Data structure and subroutines used within expand_call. */
70
71 struct arg_data
72 {
73 /* Tree node for this argument. */
74 tree tree_value;
75 /* Mode for value; TYPE_MODE unless promoted. */
76 machine_mode mode;
77 /* Current RTL value for argument, or 0 if it isn't precomputed. */
78 rtx value;
79 /* Initially-computed RTL value for argument; only for const functions. */
80 rtx initial_value;
81 /* Register to pass this argument in, 0 if passed on stack, or a
82 PARALLEL if the arg is to be copied into multiple non-contiguous
83 registers. */
84 rtx reg;
85 /* Register to pass this argument in when generating tail call sequence.
86 This is not the same register as for normal calls on machines with
87 register windows. */
88 rtx tail_call_reg;
89 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
90 form for emit_group_move. */
91 rtx parallel_value;
92 /* If REG was promoted from the actual mode of the argument expression,
93 indicates whether the promotion is sign- or zero-extended. */
94 int unsignedp;
95 /* Number of bytes to put in registers. 0 means put the whole arg
96 in registers. Also 0 if not passed in registers. */
97 int partial;
98 /* Nonzero if argument must be passed on stack.
99 Note that some arguments may be passed on the stack
100 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
101 pass_on_stack identifies arguments that *cannot* go in registers. */
102 int pass_on_stack;
103 /* Some fields packaged up for locate_and_pad_parm. */
104 struct locate_and_pad_arg_data locate;
105 /* Location on the stack at which parameter should be stored. The store
106 has already been done if STACK == VALUE. */
107 rtx stack;
108 /* Location on the stack of the start of this argument slot. This can
109 differ from STACK if this arg pads downward. This location is known
110 to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
111 rtx stack_slot;
112 /* Place that this stack area has been saved, if needed. */
113 rtx save_area;
114 /* If an argument's alignment does not permit direct copying into registers,
115 copy in smaller-sized pieces into pseudos. These are stored in a
116 block pointed to by this field. The next field says how many
117 word-sized pseudos we made. */
118 rtx *aligned_regs;
119 int n_aligned_regs;
120 };
121
122 /* A vector of one char per byte of stack space. A byte is nonzero if
123 the corresponding stack location has been used.
124 This vector is used to prevent a function call within an argument from
125 clobbering any stack already set up. */
126 static char *stack_usage_map;
127
128 /* Size of STACK_USAGE_MAP. */
129 static unsigned int highest_outgoing_arg_in_use;
130
131 /* Assume that any stack location at this byte index is used,
132 without checking the contents of stack_usage_map. */
133 static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;
134
135 /* A bitmap of virtual-incoming stack space. A bit is set if the tail
136 call argument for the corresponding stack location has already been
137 stored into the stack. This bitmap is used to prevent sibling call
138 optimization if the function tries to use its parent's incoming argument
139 slots when they have already been overwritten with tail call arguments. */
140 static sbitmap stored_args_map;
141
142 /* Assume that any virtual-incoming location at this byte index has been
143 stored, without checking the contents of stored_args_map. */
144 static unsigned HOST_WIDE_INT stored_args_watermark;
145
146 /* stack_arg_under_construction is nonzero when an argument may be
147 initialized with a constructor call (including a C function that
148 returns a BLKmode struct) and expand_call must take special action
149 to make sure the object being constructed does not overlap the
150 argument list for the constructor call. */
151 static int stack_arg_under_construction;
152
153 static void precompute_register_parameters (int, struct arg_data *, int *);
154 static int store_one_arg (struct arg_data *, rtx, int, int, int);
155 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
156 static int finalize_must_preallocate (int, int, struct arg_data *,
157 struct args_size *);
158 static void precompute_arguments (int, struct arg_data *);
159 static void compute_argument_addresses (struct arg_data *, rtx, int);
160 static rtx rtx_for_function_call (tree, tree);
161 static void load_register_parameters (struct arg_data *, int, rtx *, int,
162 int, int *);
163 static int special_function_p (const_tree, int);
164 static int check_sibcall_argument_overlap_1 (rtx);
165 static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
166
167 static tree split_complex_types (tree);
168
169 #ifdef REG_PARM_STACK_SPACE
170 static rtx save_fixed_argument_area (int, rtx, int *, int *);
171 static void restore_fixed_argument_area (rtx, rtx, int, int);
172 #endif
173 \f
174 /* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
175 stack region might already be in use. */
176
177 static bool
178 stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
179 unsigned int reg_parm_stack_space)
180 {
181 unsigned HOST_WIDE_INT const_lower, const_upper;
182 const_lower = constant_lower_bound (lower_bound);
183 if (!upper_bound.is_constant (&const_upper))
184 const_upper = HOST_WIDE_INT_M1U;
185
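  /* Bytes at index STACK_USAGE_WATERMARK and above are assumed to be in use
     without consulting stack_usage_map, so a region whose upper bound passes
     the watermark must be treated as possibly used.  */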
186 if (const_upper > stack_usage_watermark)
187 return true;
188
189 /* Don't worry about things in the fixed argument area;
190 it has already been saved. */
191 const_lower = MAX (const_lower, reg_parm_stack_space);
192 const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
193 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
194 if (stack_usage_map[i])
195 return true;
196 return false;
197 }
198
199 /* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
200 stack region are now in use. */
201
202 static void
203 mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
204 {
205 unsigned HOST_WIDE_INT const_lower, const_upper;
206 const_lower = constant_lower_bound (lower_bound);
207 if (upper_bound.is_constant (&const_upper))
208 for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
209 stack_usage_map[i] = 1;
210 else
211 stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
212 }
213
214 /* Force FUNEXP into a form suitable for the address of a CALL,
215 and return that as an rtx. Also load the static chain register
216 if FNDECL is a nested function.
217
218 CALL_FUSAGE points to a variable holding the prospective
219 CALL_INSN_FUNCTION_USAGE information. */
220
221 rtx
222 prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
223 rtx *call_fusage, int reg_parm_seen, int flags)
224 {
225 /* Make a valid memory address and copy constants through pseudo-regs,
226 but not for a constant address if -fno-function-cse. */
227 if (GET_CODE (funexp) != SYMBOL_REF)
228 {
229 /* If it's an indirect call by descriptor, generate code to perform
230 runtime identification of the pointer and load the descriptor. */
231 if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
232 {
233 const int bit_val = targetm.calls.custom_function_descriptors;
234 rtx call_lab = gen_label_rtx ();
235
236 gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
237 fndecl_or_type
238 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
239 fndecl_or_type);
240 DECL_STATIC_CHAIN (fndecl_or_type) = 1;
241 rtx chain = targetm.calls.static_chain (fndecl_or_type, false);
242
243 if (GET_MODE (funexp) != Pmode)
244 funexp = convert_memory_address (Pmode, funexp);
245
246 /* Avoid long live ranges around function calls. */
247 funexp = copy_to_mode_reg (Pmode, funexp);
248
249 if (REG_P (chain))
250 emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));
251
252 /* Emit the runtime identification pattern. */
253 rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
254 emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
255 call_lab);
256
257 /* Statically predict the branch to be very likely taken. */
258 rtx_insn *insn = get_last_insn ();
259 if (JUMP_P (insn))
260 predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);
261
262 /* Load the descriptor. */
263 rtx mem = gen_rtx_MEM (ptr_mode,
264 plus_constant (Pmode, funexp, - bit_val));
265 MEM_NOTRAP_P (mem) = 1;
266 mem = convert_memory_address (Pmode, mem);
267 emit_move_insn (chain, mem);
268
269 mem = gen_rtx_MEM (ptr_mode,
270 plus_constant (Pmode, funexp,
271 POINTER_SIZE / BITS_PER_UNIT
272 - bit_val));
273 MEM_NOTRAP_P (mem) = 1;
274 mem = convert_memory_address (Pmode, mem);
275 emit_move_insn (funexp, mem);
276
277 emit_label (call_lab);
278
279 if (REG_P (chain))
280 {
281 use_reg (call_fusage, chain);
282 STATIC_CHAIN_REG_P (chain) = 1;
283 }
284
285 /* Make sure we're not going to be overwritten below. */
286 gcc_assert (!static_chain_value);
287 }
288
289 /* If we are using registers for parameters, force the
290 function address into a register now. */
291 funexp = ((reg_parm_seen
292 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
293 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
294 : memory_address (FUNCTION_MODE, funexp));
295 }
296 else
297 {
298 /* funexp could be a SYMBOL_REF that represents a function pointer of
299 ptr_mode. In this case, it should be converted into address mode to
300 be a valid address for a memory rtx pattern. See PR 64971. */
301 if (GET_MODE (funexp) != Pmode)
302 funexp = convert_memory_address (Pmode, funexp);
303
304 if (!(flags & ECF_SIBCALL))
305 {
306 if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
307 funexp = force_reg (Pmode, funexp);
308 }
309 }
310
311 if (static_chain_value != 0
312 && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
313 || DECL_STATIC_CHAIN (fndecl_or_type)))
314 {
315 rtx chain;
316
317 chain = targetm.calls.static_chain (fndecl_or_type, false);
318 static_chain_value = convert_memory_address (Pmode, static_chain_value);
319
320 emit_move_insn (chain, static_chain_value);
321 if (REG_P (chain))
322 {
323 use_reg (call_fusage, chain);
324 STATIC_CHAIN_REG_P (chain) = 1;
325 }
326 }
327
328 return funexp;
329 }
330
331 /* Generate instructions to call function FUNEXP,
332 and optionally pop the results.
333 The CALL_INSN is the first insn generated.
334
335 FNDECL is the declaration node of the function. This is given to the
336 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
337 its own args.
338
339 FUNTYPE is the data type of the function. This is given to the hook
340 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
341 own args. We used to allow an identifier for library functions, but
342 that doesn't work when the return type is an aggregate type and the
343 calling convention says that the pointer to this aggregate is to be
344 popped by the callee.
345
346 STACK_SIZE is the number of bytes of arguments on the stack,
347 ROUNDED_STACK_SIZE is that number rounded up to
348 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
349 both to put into the call insn and to generate explicit popping
350 code if necessary.
351
352 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
353 It is zero if this call doesn't want a structure value.
354
355 NEXT_ARG_REG is the rtx that results from executing
356 targetm.calls.function_arg (&args_so_far,
357 function_arg_info::end_marker ());
358 just after all the args have had their registers assigned.
359 This could be whatever you like, but normally it is the first
360 arg-register beyond those used for args in this call,
361 or 0 if all the arg-registers are used in this call.
362 It is passed on to `gen_call' so you can put this info in the call insn.
363
364 VALREG is a hard register in which a value is returned,
365 or 0 if the call does not return a value.
366
367 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
368 the args to this call were processed.
369 We restore `inhibit_defer_pop' to that value.
370
371 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
372 denote registers used by the called function. */
373
374 static void
375 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
376 tree funtype ATTRIBUTE_UNUSED,
377 poly_int64 stack_size ATTRIBUTE_UNUSED,
378 poly_int64 rounded_stack_size,
379 poly_int64 struct_value_size ATTRIBUTE_UNUSED,
380 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
381 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
382 cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
383 {
384 rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
385 rtx call, funmem, pat;
386 int already_popped = 0;
387 poly_int64 n_popped = 0;
388
389 /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
390 patterns exist). Any popping that the callee does on return will
391 be from our caller's frame rather than ours. */
392 if (!(ecf_flags & ECF_SIBCALL))
393 {
394 n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);
395
396 #ifdef CALL_POPS_ARGS
397 n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
398 #endif
399 }
400
401 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no need,
402 and we don't want to load it into a register as an optimization,
403 because prepare_call_address already did it if it should be done. */
404 if (GET_CODE (funexp) != SYMBOL_REF)
405 funexp = memory_address (FUNCTION_MODE, funexp);
406
407 funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
408 if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
409 {
410 tree t = fndecl;
411
412 /* Although a built-in FUNCTION_DECL and its non-__builtin
413 counterpart compare equal and get a shared mem_attrs, they
414 produce different dump output in compare-debug compilations:
415 one compilation may garbage-collect an entry and then add a
416 different (but equivalent) entry, while the other doesn't run
417 the garbage collector at the same spot and therefore keeps
418 sharing the mem_attrs with the equivalent entry. */
419 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
420 {
421 tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
422 if (t2)
423 t = t2;
424 }
425
426 set_mem_expr (funmem, t);
427 }
428 else if (fntree)
429 set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
430
431 if (ecf_flags & ECF_SIBCALL)
432 {
433 if (valreg)
434 pat = targetm.gen_sibcall_value (valreg, funmem,
435 rounded_stack_size_rtx,
436 next_arg_reg, NULL_RTX);
437 else
438 pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
439 next_arg_reg,
440 gen_int_mode (struct_value_size, Pmode));
441 }
442 /* If the target has "call" or "call_value" insns, then prefer them
443 if no arguments are actually popped. If the target does not have
444 "call" or "call_value" insns, then we must use the popping versions
445 even if the call has no arguments to pop. */
446 else if (maybe_ne (n_popped, 0)
447 || !(valreg
448 ? targetm.have_call_value ()
449 : targetm.have_call ()))
450 {
451 rtx n_pop = gen_int_mode (n_popped, Pmode);
452
453 /* If this subroutine pops its own args, record that in the call insn
454 if possible, for the sake of frame pointer elimination. */
455
456 if (valreg)
457 pat = targetm.gen_call_value_pop (valreg, funmem,
458 rounded_stack_size_rtx,
459 next_arg_reg, n_pop);
460 else
461 pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
462 next_arg_reg, n_pop);
463
464 already_popped = 1;
465 }
466 else
467 {
468 if (valreg)
469 pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
470 next_arg_reg, NULL_RTX);
471 else
472 pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
473 gen_int_mode (struct_value_size, Pmode));
474 }
475 emit_insn (pat);
476
477 /* Find the call we just emitted. */
478 rtx_call_insn *call_insn = last_call_insn ();
479
480 /* Some targets create a fresh MEM instead of reusing the one provided
481 above. Set its MEM_EXPR. */
482 call = get_call_rtx_from (call_insn);
483 if (call
484 && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
485 && MEM_EXPR (funmem) != NULL_TREE)
486 set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
487
488 /* Put the register usage information there. */
489 add_function_usage_to (call_insn, call_fusage);
490
491 /* If this is a const call, then set the insn's unchanging bit. */
492 if (ecf_flags & ECF_CONST)
493 RTL_CONST_CALL_P (call_insn) = 1;
494
495 /* If this is a pure call, then set the insn's unchanging bit. */
496 if (ecf_flags & ECF_PURE)
497 RTL_PURE_CALL_P (call_insn) = 1;
498
499 /* If this is a looping const or pure call, then set the insn's looping bit. */
500 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
501 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
502
503 /* Create a nothrow REG_EH_REGION note, if needed. */
504 make_reg_eh_region_note (call_insn, ecf_flags, 0);
505
506 if (ecf_flags & ECF_NORETURN)
507 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
508
509 if (ecf_flags & ECF_RETURNS_TWICE)
510 {
511 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
512 cfun->calls_setjmp = 1;
513 }
514
515 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
516
517 /* Restore this now, so that we do defer pops for this call's args
518 if the context of the call as a whole permits. */
519 inhibit_defer_pop = old_inhibit_defer_pop;
520
521 if (maybe_ne (n_popped, 0))
522 {
523 if (!already_popped)
524 CALL_INSN_FUNCTION_USAGE (call_insn)
525 = gen_rtx_EXPR_LIST (VOIDmode,
526 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
527 CALL_INSN_FUNCTION_USAGE (call_insn));
528 rounded_stack_size -= n_popped;
529 rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
530 stack_pointer_delta -= n_popped;
531
532 add_args_size_note (call_insn, stack_pointer_delta);
533
534 /* If popping is needed, stack realignment must use DRAP. */
535 if (SUPPORTS_STACK_ALIGNMENT)
536 crtl->need_drap = true;
537 }
538 /* For noreturn calls, when not accumulating outgoing args, force a
539 REG_ARGS_SIZE note to prevent crossjumping of calls with different
540 arg sizes. */
541 else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
542 add_args_size_note (call_insn, stack_pointer_delta);
543
544 if (!ACCUMULATE_OUTGOING_ARGS)
545 {
546 /* If returning from the subroutine does not automatically pop the args,
547 we need an instruction to pop them sooner or later.
548 Perhaps do it now; perhaps just record how much space to pop later.
549
550 If returning from the subroutine does pop the args, indicate that the
551 stack pointer will be changed. */
552
553 if (maybe_ne (rounded_stack_size, 0))
554 {
555 if (ecf_flags & ECF_NORETURN)
556 /* Just pretend we did the pop. */
557 stack_pointer_delta -= rounded_stack_size;
558 else if (flag_defer_pop && inhibit_defer_pop == 0
559 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
560 pending_stack_adjust += rounded_stack_size;
561 else
562 adjust_stack (rounded_stack_size_rtx);
563 }
564 }
565 /* When we accumulate outgoing args, we must avoid any stack manipulations.
566 Restore the stack pointer to its original value now. Usually
567 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
568 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
569 popping variants of functions exist as well.
570
571 ??? We may optimize similarly to defer_pop above, but it is
572 probably not worthwhile.
573
574 ??? It will be worthwhile to enable combine_stack_adjustments even for
575 such machines. */
576 else if (maybe_ne (n_popped, 0))
577 anti_adjust_stack (gen_int_mode (n_popped, Pmode));
578 }
579
580 /* Determine if the function identified by FNDECL is one with
581 special properties we wish to know about. Modify FLAGS accordingly.
582
583 For example, if the function might return more than one time (setjmp), then
584 set ECF_RETURNS_TWICE.
585
586 Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
587 space from the stack such as alloca. */
588
589 static int
590 special_function_p (const_tree fndecl, int flags)
591 {
592 tree name_decl = DECL_NAME (fndecl);
593
594 if (maybe_special_function_p (fndecl)
595 && IDENTIFIER_LENGTH (name_decl) <= 11)
596 {
597 const char *name = IDENTIFIER_POINTER (name_decl);
598 const char *tname = name;
599
600 /* We assume that alloca will always be called by name. It
601 makes no sense to pass it as a pointer-to-function to
602 anything that does not understand its behavior. */
603 if (IDENTIFIER_LENGTH (name_decl) == 6
604 && name[0] == 'a'
605 && ! strcmp (name, "alloca"))
606 flags |= ECF_MAY_BE_ALLOCA;
607
608 /* Disregard prefix _ or __. */
609 if (name[0] == '_')
610 {
611 if (name[1] == '_')
612 tname += 2;
613 else
614 tname += 1;
615 }
616
617 /* ECF_RETURNS_TWICE is safe even for -ffreestanding. */
618 if (! strcmp (tname, "setjmp")
619 || ! strcmp (tname, "sigsetjmp")
620 || ! strcmp (name, "savectx")
621 || ! strcmp (name, "vfork")
622 || ! strcmp (name, "getcontext"))
623 flags |= ECF_RETURNS_TWICE;
624 }
625
626 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
627 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
628 flags |= ECF_MAY_BE_ALLOCA;
629
630 return flags;
631 }
632
633 /* Return fnspec for DECL. */
634
635 static attr_fnspec
636 decl_fnspec (tree fndecl)
637 {
638 tree attr;
639 tree type = TREE_TYPE (fndecl);
640 if (type)
641 {
642 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
643 if (attr)
644 {
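	  /* The "fn spec" attribute carries a one-element argument list whose
	     TREE_VALUE is the fnspec string.  */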
645 return TREE_VALUE (TREE_VALUE (attr));
646 }
647 }
648 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
649 return builtin_fnspec (fndecl);
650 return "";
651 }
652
653 /* Similar to special_function_p; return a set of ERF_ flags for the
654 function FNDECL. */
655 static int
656 decl_return_flags (tree fndecl)
657 {
658 attr_fnspec fnspec = decl_fnspec (fndecl);
659
660 unsigned int arg;
661 if (fnspec.returns_arg (&arg))
662 return ERF_RETURNS_ARG | arg;
663
664 if (fnspec.returns_noalias_p ())
665 return ERF_NOALIAS;
666 return 0;
667 }
668
669 /* Return nonzero when FNDECL represents a call to setjmp. */
670
671 int
672 setjmp_call_p (const_tree fndecl)
673 {
674 if (DECL_IS_RETURNS_TWICE (fndecl))
675 return ECF_RETURNS_TWICE;
676 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
677 }
678
679
680 /* Return true if STMT may be an alloca call. */
681
682 bool
683 gimple_maybe_alloca_call_p (const gimple *stmt)
684 {
685 tree fndecl;
686
687 if (!is_gimple_call (stmt))
688 return false;
689
690 fndecl = gimple_call_fndecl (stmt);
691 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
692 return true;
693
694 return false;
695 }
696
697 /* Return true if STMT is a builtin alloca call. */
698
699 bool
700 gimple_alloca_call_p (const gimple *stmt)
701 {
702 tree fndecl;
703
704 if (!is_gimple_call (stmt))
705 return false;
706
707 fndecl = gimple_call_fndecl (stmt);
708 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
709 switch (DECL_FUNCTION_CODE (fndecl))
710 {
711 CASE_BUILT_IN_ALLOCA:
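	/* Only treat the call as a builtin alloca call when it carries its
	   size argument.  */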
712 return gimple_call_num_args (stmt) > 0;
713 default:
714 break;
715 }
716
717 return false;
718 }
719
720 /* Return true when EXP contains a builtin alloca call. */
721
722 bool
723 alloca_call_p (const_tree exp)
724 {
725 tree fndecl;
726 if (TREE_CODE (exp) == CALL_EXPR
727 && (fndecl = get_callee_fndecl (exp))
728 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
729 switch (DECL_FUNCTION_CODE (fndecl))
730 {
731 CASE_BUILT_IN_ALLOCA:
732 return true;
733 default:
734 break;
735 }
736
737 return false;
738 }
739
740 /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
741 function. Return FALSE otherwise. */
742
743 static bool
744 is_tm_builtin (const_tree fndecl)
745 {
746 if (fndecl == NULL)
747 return false;
748
749 if (decl_is_tm_clone (fndecl))
750 return true;
751
752 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
753 {
754 switch (DECL_FUNCTION_CODE (fndecl))
755 {
756 case BUILT_IN_TM_COMMIT:
757 case BUILT_IN_TM_COMMIT_EH:
758 case BUILT_IN_TM_ABORT:
759 case BUILT_IN_TM_IRREVOCABLE:
760 case BUILT_IN_TM_GETTMCLONE_IRR:
761 case BUILT_IN_TM_MEMCPY:
762 case BUILT_IN_TM_MEMMOVE:
763 case BUILT_IN_TM_MEMSET:
764 CASE_BUILT_IN_TM_STORE (1):
765 CASE_BUILT_IN_TM_STORE (2):
766 CASE_BUILT_IN_TM_STORE (4):
767 CASE_BUILT_IN_TM_STORE (8):
768 CASE_BUILT_IN_TM_STORE (FLOAT):
769 CASE_BUILT_IN_TM_STORE (DOUBLE):
770 CASE_BUILT_IN_TM_STORE (LDOUBLE):
771 CASE_BUILT_IN_TM_STORE (M64):
772 CASE_BUILT_IN_TM_STORE (M128):
773 CASE_BUILT_IN_TM_STORE (M256):
774 CASE_BUILT_IN_TM_LOAD (1):
775 CASE_BUILT_IN_TM_LOAD (2):
776 CASE_BUILT_IN_TM_LOAD (4):
777 CASE_BUILT_IN_TM_LOAD (8):
778 CASE_BUILT_IN_TM_LOAD (FLOAT):
779 CASE_BUILT_IN_TM_LOAD (DOUBLE):
780 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
781 CASE_BUILT_IN_TM_LOAD (M64):
782 CASE_BUILT_IN_TM_LOAD (M128):
783 CASE_BUILT_IN_TM_LOAD (M256):
784 case BUILT_IN_TM_LOG:
785 case BUILT_IN_TM_LOG_1:
786 case BUILT_IN_TM_LOG_2:
787 case BUILT_IN_TM_LOG_4:
788 case BUILT_IN_TM_LOG_8:
789 case BUILT_IN_TM_LOG_FLOAT:
790 case BUILT_IN_TM_LOG_DOUBLE:
791 case BUILT_IN_TM_LOG_LDOUBLE:
792 case BUILT_IN_TM_LOG_M64:
793 case BUILT_IN_TM_LOG_M128:
794 case BUILT_IN_TM_LOG_M256:
795 return true;
796 default:
797 break;
798 }
799 }
800 return false;
801 }
802
803 /* Detect flags (function attributes) from the function decl or type node. */
804
805 int
806 flags_from_decl_or_type (const_tree exp)
807 {
808 int flags = 0;
809
810 if (DECL_P (exp))
811 {
812 /* The function exp may have the `malloc' attribute. */
813 if (DECL_IS_MALLOC (exp))
814 flags |= ECF_MALLOC;
815
816 /* The function exp may have the `returns_twice' attribute. */
817 if (DECL_IS_RETURNS_TWICE (exp))
818 flags |= ECF_RETURNS_TWICE;
819
820 /* Process the pure and const attributes. */
821 if (TREE_READONLY (exp))
822 flags |= ECF_CONST;
823 if (DECL_PURE_P (exp))
824 flags |= ECF_PURE;
825 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
826 flags |= ECF_LOOPING_CONST_OR_PURE;
827
828 if (DECL_IS_NOVOPS (exp))
829 flags |= ECF_NOVOPS;
830 if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
831 flags |= ECF_LEAF;
832 if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
833 flags |= ECF_COLD;
834
835 if (TREE_NOTHROW (exp))
836 flags |= ECF_NOTHROW;
837
838 if (flag_tm)
839 {
840 if (is_tm_builtin (exp))
841 flags |= ECF_TM_BUILTIN;
842 else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
843 || lookup_attribute ("transaction_pure",
844 TYPE_ATTRIBUTES (TREE_TYPE (exp))))
845 flags |= ECF_TM_PURE;
846 }
847
848 flags = special_function_p (exp, flags);
849 }
850 else if (TYPE_P (exp))
851 {
852 if (TYPE_READONLY (exp))
853 flags |= ECF_CONST;
854
855 if (flag_tm
856 && ((flags & ECF_CONST) != 0
857 || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
858 flags |= ECF_TM_PURE;
859 }
860 else
861 gcc_unreachable ();
862
863 if (TREE_THIS_VOLATILE (exp))
864 {
865 flags |= ECF_NORETURN;
866 if (flags & (ECF_CONST|ECF_PURE))
867 flags |= ECF_LOOPING_CONST_OR_PURE;
868 }
869
870 return flags;
871 }
872
873 /* Detect flags from a CALL_EXPR. */
874
875 int
876 call_expr_flags (const_tree t)
877 {
878 int flags;
879 tree decl = get_callee_fndecl (t);
880
881 if (decl)
882 flags = flags_from_decl_or_type (decl);
883 else if (CALL_EXPR_FN (t) == NULL_TREE)
884 flags = internal_fn_flags (CALL_EXPR_IFN (t));
885 else
886 {
887 tree type = TREE_TYPE (CALL_EXPR_FN (t));
888 if (type && TREE_CODE (type) == POINTER_TYPE)
889 flags = flags_from_decl_or_type (TREE_TYPE (type));
890 else
891 flags = 0;
892 if (CALL_EXPR_BY_DESCRIPTOR (t))
893 flags |= ECF_BY_DESCRIPTOR;
894 }
895
896 return flags;
897 }
898
899 /* Return true if ARG should be passed by invisible reference. */
900
901 bool
902 pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
903 {
904 if (tree type = arg.type)
905 {
906 /* If this type contains non-trivial constructors, then it is
907 forbidden for the middle-end to create any new copies. */
908 if (TREE_ADDRESSABLE (type))
909 return true;
910
911 /* GCC post 3.4 passes *all* variable sized types by reference. */
912 if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
913 return true;
914
915 /* If a record type should be passed the same as its first (and only)
916 member, use the type and mode of that member. */
917 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
918 {
919 arg.type = TREE_TYPE (first_field (type));
920 arg.mode = TYPE_MODE (arg.type);
921 }
922 }
923
924 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
925 }
926
927 /* Return true if TYPE should be passed by reference when passed to
928 the "..." arguments of a function. */
929
930 bool
931 pass_va_arg_by_reference (tree type)
932 {
933 return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
934 }
935
936 /* Decide whether ARG, which occurs in the state described by CA,
937 should be passed by reference. Return true if so and update
938 ARG accordingly. */
939
940 bool
941 apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
942 {
943 if (pass_by_reference (ca, arg))
944 {
945 arg.type = build_pointer_type (arg.type);
946 arg.mode = TYPE_MODE (arg.type);
947 arg.pass_by_reference = true;
948 return true;
949 }
950 return false;
951 }
952
953 /* Return true if ARG, which is passed by reference, should be callee
954 copied instead of caller copied. */
955
956 bool
957 reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
958 {
959 if (arg.type && TREE_ADDRESSABLE (arg.type))
960 return false;
961 return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
962 }
963
964
965 /* Precompute all register parameters as described by ARGS, storing values
966 into fields within the ARGS array.
967
968 NUM_ACTUALS indicates the total number of elements in the ARGS array.
969
970 Set REG_PARM_SEEN if we encounter a register parameter. */
971
972 static void
973 precompute_register_parameters (int num_actuals, struct arg_data *args,
974 int *reg_parm_seen)
975 {
976 int i;
977
978 *reg_parm_seen = 0;
979
980 for (i = 0; i < num_actuals; i++)
981 if (args[i].reg != 0 && ! args[i].pass_on_stack)
982 {
983 *reg_parm_seen = 1;
984
985 if (args[i].value == 0)
986 {
987 push_temp_slots ();
988 args[i].value = expand_normal (args[i].tree_value);
989 preserve_temp_slots (args[i].value);
990 pop_temp_slots ();
991 }
992
993 /* If we are to promote the function arg to a wider mode,
994 do it now. */
995
996 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
997 args[i].value
998 = convert_modes (args[i].mode,
999 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1000 args[i].value, args[i].unsignedp);
1001
1002 /* If the value is a non-legitimate constant, force it into a
1003 pseudo now. TLS symbols sometimes need a call to resolve. */
1004 if (CONSTANT_P (args[i].value)
1005 && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
1006 args[i].value = force_reg (args[i].mode, args[i].value);
1007
1008 /* If we're going to have to load the value by parts, pull the
1009 parts into pseudos. The part extraction process can involve
1010 non-trivial computation. */
1011 if (GET_CODE (args[i].reg) == PARALLEL)
1012 {
1013 tree type = TREE_TYPE (args[i].tree_value);
1014 args[i].parallel_value
1015 = emit_group_load_into_temps (args[i].reg, args[i].value,
1016 type, int_size_in_bytes (type));
1017 }
1018
1019 /* If the value is expensive, and we are inside an appropriately
1020 short loop, put the value into a pseudo and then put the pseudo
1021 into the hard reg.
1022
1023 For small register classes, also do this if this call uses
1024 register parameters. This is to avoid reload conflicts while
1025 loading the parameter registers. */
1026
1027 else if ((! (REG_P (args[i].value)
1028 || (GET_CODE (args[i].value) == SUBREG
1029 && REG_P (SUBREG_REG (args[i].value)))))
1030 && args[i].mode != BLKmode
1031 && (set_src_cost (args[i].value, args[i].mode,
1032 optimize_insn_for_speed_p ())
1033 > COSTS_N_INSNS (1))
1034 && ((*reg_parm_seen
1035 && targetm.small_register_classes_for_mode_p (args[i].mode))
1036 || optimize))
1037 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1038 }
1039 }
1040
1041 #ifdef REG_PARM_STACK_SPACE
1042
1043 /* The argument list is the property of the called routine and it
1044 may clobber it. If the fixed area has been used for previous
1045 parameters, we must save and restore it. */
1046
1047 static rtx
1048 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
1049 {
1050 unsigned int low;
1051 unsigned int high;
1052
1053 /* Compute the boundary of the area that needs to be saved, if any. */
1054 high = reg_parm_stack_space;
1055 if (ARGS_GROW_DOWNWARD)
1056 high += 1;
1057
1058 if (high > highest_outgoing_arg_in_use)
1059 high = highest_outgoing_arg_in_use;
1060
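  /* Find the lowest byte of the fixed area that is in use; bytes at or above
     STACK_USAGE_WATERMARK are treated as used even if the map says they are
     not.  */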
1061 for (low = 0; low < high; low++)
1062 if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
1063 {
1064 int num_to_save;
1065 machine_mode save_mode;
1066 int delta;
1067 rtx addr;
1068 rtx stack_area;
1069 rtx save_area;
1070
1071 while (stack_usage_map[--high] == 0)
1072 ;
1073
1074 *low_to_save = low;
1075 *high_to_save = high;
1076
1077 num_to_save = high - low + 1;
1078
1079 /* If we don't have the required alignment, we must do this
1080 in BLKmode. */
1081 scalar_int_mode imode;
1082 if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
1083 && (low & (MIN (GET_MODE_SIZE (imode),
1084 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
1085 save_mode = imode;
1086 else
1087 save_mode = BLKmode;
1088
1089 if (ARGS_GROW_DOWNWARD)
1090 delta = -high;
1091 else
1092 delta = low;
1093
1094 addr = plus_constant (Pmode, argblock, delta);
1095 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1096
1097 set_mem_align (stack_area, PARM_BOUNDARY);
1098 if (save_mode == BLKmode)
1099 {
1100 save_area = assign_stack_temp (BLKmode, num_to_save);
1101 emit_block_move (validize_mem (save_area), stack_area,
1102 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
1103 }
1104 else
1105 {
1106 save_area = gen_reg_rtx (save_mode);
1107 emit_move_insn (save_area, stack_area);
1108 }
1109
1110 return save_area;
1111 }
1112
1113 return NULL_RTX;
1114 }
1115
1116 static void
1117 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
1118 {
1119 machine_mode save_mode = GET_MODE (save_area);
1120 int delta;
1121 rtx addr, stack_area;
1122
1123 if (ARGS_GROW_DOWNWARD)
1124 delta = -high_to_save;
1125 else
1126 delta = low_to_save;
1127
1128 addr = plus_constant (Pmode, argblock, delta);
1129 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1130 set_mem_align (stack_area, PARM_BOUNDARY);
1131
1132 if (save_mode != BLKmode)
1133 emit_move_insn (stack_area, save_area);
1134 else
1135 emit_block_move (stack_area, validize_mem (save_area),
1136 GEN_INT (high_to_save - low_to_save + 1),
1137 BLOCK_OP_CALL_PARM);
1138 }
1139 #endif /* REG_PARM_STACK_SPACE */
1140
1141 /* For any elements in ARGS that refer to parameters to be passed in
1142 registers, but not in memory, and whose alignment does not permit a
1143 direct copy into registers, copy the values into a group of pseudos
1144 which we will later copy into the appropriate hard registers.
1145
1146 Pseudos for each unaligned argument will be stored into the array
1147 args[argnum].aligned_regs. The caller is responsible for deallocating
1148 the aligned_regs array if it is nonzero. */
1149
1150 static void
1151 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
1152 {
1153 int i, j;
1154
1155 for (i = 0; i < num_actuals; i++)
1156 if (args[i].reg != 0 && ! args[i].pass_on_stack
1157 && GET_CODE (args[i].reg) != PARALLEL
1158 && args[i].mode == BLKmode
1159 && MEM_P (args[i].value)
1160 && (MEM_ALIGN (args[i].value)
1161 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1162 {
1163 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1164 int endian_correction = 0;
1165
1166 if (args[i].partial)
1167 {
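	    /* PARTIAL is the number of bytes of this argument passed in
	       registers (see struct arg_data); make one word-sized pseudo
	       for each of those words.  */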
1168 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
1169 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
1170 }
1171 else
1172 {
1173 args[i].n_aligned_regs
1174 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1175 }
1176
1177 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
1178
1179 /* Structures smaller than a word are normally aligned to the
1180 least significant byte. On a BYTES_BIG_ENDIAN machine,
1181 this means we must skip the empty high order bytes when
1182 calculating the bit offset. */
1183 if (bytes < UNITS_PER_WORD
1184 #ifdef BLOCK_REG_PADDING
1185 && (BLOCK_REG_PADDING (args[i].mode,
1186 TREE_TYPE (args[i].tree_value), 1)
1187 == PAD_DOWNWARD)
1188 #else
1189 && BYTES_BIG_ENDIAN
1190 #endif
1191 )
1192 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
1193
1194 for (j = 0; j < args[i].n_aligned_regs; j++)
1195 {
1196 rtx reg = gen_reg_rtx (word_mode);
1197 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1198 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1199
1200 args[i].aligned_regs[j] = reg;
1201 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1202 word_mode, word_mode, false, NULL);
1203
1204 /* There is no need to restrict this code to loading items
1205 in TYPE_ALIGN sized hunks. The bitfield instructions can
1206 load up entire word sized registers efficiently.
1207
1208 ??? This may not be needed anymore.
1209 We used to emit a clobber here but that doesn't let later
1210 passes optimize the instructions we emit. By storing 0 into
1211 the register later passes know the first AND to zero out the
1212 bitfield being set in the register is unnecessary. The store
1213 of 0 will be deleted as will at least the first AND. */
1214
1215 emit_move_insn (reg, const0_rtx);
1216
1217 bytes -= bitsize / BITS_PER_UNIT;
1218 store_bit_field (reg, bitsize, endian_correction, 0, 0,
1219 word_mode, word, false);
1220 }
1221 }
1222 }
1223
1224 /* The limit set by -Walloc-size-larger-than=. */
1225 static GTY(()) tree alloc_object_size_limit;
1226
1227 /* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
1228 setting if the option is specified, or to the maximum object size if it
1229 is not. Return the initialized value. */
1230
1231 static tree
1232 alloc_max_size (void)
1233 {
1234 if (alloc_object_size_limit)
1235 return alloc_object_size_limit;
1236
1237 HOST_WIDE_INT limit = warn_alloc_size_limit;
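  /* A limit of HOST_WIDE_INT_MAX means no -Walloc-size-larger-than= limit
     was specified, so fall back to the maximum object size (PTRDIFF_MAX).  */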
1238 if (limit == HOST_WIDE_INT_MAX)
1239 limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
1240
1241 alloc_object_size_limit = build_int_cst (size_type_node, limit);
1242
1243 return alloc_object_size_limit;
1244 }
1245
1246 /* Return true when EXP's range can be determined and set RANGE[] to it
1247 after adjusting it if necessary to make EXP represent a valid size
1248 of an object, or a valid size argument to an allocation function declared
1249 with attribute alloc_size (whose argument may be signed), or to a string
1250 manipulation function like memset.
1251 When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
1252 a size in an anti-range [1, N] where N > PTRDIFF_MAX. A zero range is
1253 a (nearly) invalid argument to allocation functions like malloc but it
1254 is a valid argument to functions like memset.
1255 When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
1256 in a multi-range, otherwise to the smallest valid subrange. */
1257
1258 bool
1259 get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
1260 int flags /* = 0 */)
1261 {
1262 if (!exp)
1263 return false;
1264
1265 if (tree_fits_uhwi_p (exp))
1266 {
1267 /* EXP is a constant. */
1268 range[0] = range[1] = exp;
1269 return true;
1270 }
1271
1272 tree exptype = TREE_TYPE (exp);
1273 bool integral = INTEGRAL_TYPE_P (exptype);
1274
1275 wide_int min, max;
1276 enum value_range_kind range_type;
1277
1278 if (integral)
1279 {
1280 value_range vr;
1281 if (query && query->range_of_expr (vr, exp, stmt))
1282 {
1283 if (vr.undefined_p ())
1284 vr.set_varying (TREE_TYPE (exp));
1285 range_type = vr.kind ();
1286 min = wi::to_wide (vr.min ());
1287 max = wi::to_wide (vr.max ());
1288 }
1289 else
1290 range_type = determine_value_range (exp, &min, &max);
1291 }
1292 else
1293 range_type = VR_VARYING;
1294
1295 if (range_type == VR_VARYING)
1296 {
1297 if (integral)
1298 {
1299 /* Use the full range of the type of the expression when
1300 no value range information is available. */
1301 range[0] = TYPE_MIN_VALUE (exptype);
1302 range[1] = TYPE_MAX_VALUE (exptype);
1303 return true;
1304 }
1305
1306 range[0] = NULL_TREE;
1307 range[1] = NULL_TREE;
1308 return false;
1309 }
1310
1311 unsigned expprec = TYPE_PRECISION (exptype);
1312
1313 bool signed_p = !TYPE_UNSIGNED (exptype);
1314
1315 if (range_type == VR_ANTI_RANGE)
1316 {
1317 if (signed_p)
1318 {
1319 if (wi::les_p (max, 0))
1320 {
1321 /* EXP is not in a strictly negative range. That means
1322 it must be in some (not necessarily strictly) positive
1323 range which includes zero. Since in signed to unsigned
1324 conversions negative values end up converted to large
1325 positive values, and otherwise they are not valid sizes,
1326 the resulting range is in both cases [0, TYPE_MAX]. */
1327 min = wi::zero (expprec);
1328 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1329 }
1330 else if (wi::les_p (min - 1, 0))
1331 {
1332 /* EXP is not in a negative-positive range. That means EXP
1333 is either negative, or greater than max. Since negative
1334 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
1335 min = max + 1;
1336 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1337 }
1338 else
1339 {
1340 max = min - 1;
1341 min = wi::zero (expprec);
1342 }
1343 }
1344 else
1345 {
1346 wide_int maxsize = wi::to_wide (max_object_size ());
1347 min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
1348 max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
1349 if (wi::eq_p (0, min - 1))
1350 {
1351 /* EXP is unsigned and not in the range [1, MAX]. That means
1352 it's either zero or greater than MAX. Even though 0 would
1353 normally be detected by -Walloc-zero, unless ALLOW_ZERO
1354 is set, set the range to [MAX, TYPE_MAX] so that when MAX
1355 is greater than the limit the whole range is diagnosed. */
1356 wide_int maxsize = wi::to_wide (max_object_size ());
1357 if (flags & SR_ALLOW_ZERO)
1358 {
1359 if (wi::leu_p (maxsize, max + 1)
1360 || !(flags & SR_USE_LARGEST))
1361 min = max = wi::zero (expprec);
1362 else
1363 {
1364 min = max + 1;
1365 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1366 }
1367 }
1368 else
1369 {
1370 min = max + 1;
1371 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1372 }
1373 }
1374 else if ((flags & SR_USE_LARGEST)
1375 && wi::ltu_p (max + 1, maxsize))
1376 {
1377 /* When USE_LARGEST is set and the larger of the two subranges
1378 is a valid size, use it... */
1379 min = max + 1;
1380 max = maxsize;
1381 }
1382 else
1383 {
1384 /* ...otherwise use the smaller subrange. */
1385 max = min - 1;
1386 min = wi::zero (expprec);
1387 }
1388 }
1389 }
1390
1391 range[0] = wide_int_to_tree (exptype, min);
1392 range[1] = wide_int_to_tree (exptype, max);
1393
1394 return true;
1395 }
1396
1397 bool
1398 get_size_range (tree exp, tree range[2], int flags /* = 0 */)
1399 {
1400 return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
1401 }
1402
1403 /* Diagnose a call EXP to function FN decorated with attribute alloc_size
1404 whose argument numbers given by IDX with values given by ARGS exceed
1405 the maximum object size or cause an unsigned overflow (wrapping) when
1406 multiplied. FN is null when EXP is a call via a function pointer.
1407 When ARGS[0] is null the function does nothing. ARGS[1] may be null
1408 for functions like malloc, and non-null for those like calloc that
1409 are decorated with a two-argument attribute alloc_size. */
1410
1411 void
1412 maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
1413 {
1414 /* The range each of the (up to) two arguments is known to be in. */
1415 tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
1416
1417 /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2. */
1418 tree maxobjsize = alloc_max_size ();
1419
1420 location_t loc = EXPR_LOCATION (exp);
1421
1422 tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
1423 bool warned = false;
1424
1425 /* Validate each argument individually. */
1426 for (unsigned i = 0; i != 2 && args[i]; ++i)
1427 {
1428 if (TREE_CODE (args[i]) == INTEGER_CST)
1429 {
1430 argrange[i][0] = args[i];
1431 argrange[i][1] = args[i];
1432
1433 if (tree_int_cst_lt (args[i], integer_zero_node))
1434 {
1435 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1436 "%Kargument %i value %qE is negative",
1437 exp, idx[i] + 1, args[i]);
1438 }
1439 else if (integer_zerop (args[i]))
1440 {
1441 /* Avoid issuing -Walloc-zero for allocation functions other
1442 than __builtin_alloca that are declared with attribute
1443 returns_nonnull because there's no portability risk. This
1444 avoids warning for such calls to libiberty's xmalloc and
1445 friends.
1446 Also avoid issuing the warning for calls to function named
1447 "alloca". */
1448 if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
1449 ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
1450 : !lookup_attribute ("returns_nonnull",
1451 TYPE_ATTRIBUTES (fntype)))
1452 warned = warning_at (loc, OPT_Walloc_zero,
1453 "%Kargument %i value is zero",
1454 exp, idx[i] + 1);
1455 }
1456 else if (tree_int_cst_lt (maxobjsize, args[i]))
1457 {
1458 /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
1459 mode and with -fno-exceptions as a way to indicate array
1460 size overflow. There's no good way to detect C++98 here
1461 so avoid diagnosing these calls for all C++ modes. */
1462 if (i == 0
1463 && fn
1464 && !args[1]
1465 && lang_GNU_CXX ()
1466 && DECL_IS_OPERATOR_NEW_P (fn)
1467 && integer_all_onesp (args[i]))
1468 continue;
1469
1470 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1471 "%Kargument %i value %qE exceeds "
1472 "maximum object size %E",
1473 exp, idx[i] + 1, args[i], maxobjsize);
1474 }
1475 }
1476 else if (TREE_CODE (args[i]) == SSA_NAME
1477 && get_size_range (args[i], argrange[i]))
1478 {
1479 /* Verify that the argument's range is not negative (including
1480 upper bound of zero). */
1481 if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
1482 && tree_int_cst_le (argrange[i][1], integer_zero_node))
1483 {
1484 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1485 "%Kargument %i range [%E, %E] is negative",
1486 exp, idx[i] + 1,
1487 argrange[i][0], argrange[i][1]);
1488 }
1489 else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
1490 {
1491 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1492 "%Kargument %i range [%E, %E] exceeds "
1493 "maximum object size %E",
1494 exp, idx[i] + 1,
1495 argrange[i][0], argrange[i][1],
1496 maxobjsize);
1497 }
1498 }
1499 }
1500
1501 if (!argrange[0])
1502 return;
1503
1504 /* For a two-argument alloc_size, validate the product of the two
1505 arguments if both of their values or ranges are known. */
1506 if (!warned && tree_fits_uhwi_p (argrange[0][0])
1507 && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
1508 && !integer_onep (argrange[0][0])
1509 && !integer_onep (argrange[1][0]))
1510 {
1511 /* Check for overflow in the product of a function decorated with
1512 attribute alloc_size (X, Y). */
1513 unsigned szprec = TYPE_PRECISION (size_type_node);
1514 wide_int x = wi::to_wide (argrange[0][0], szprec);
1515 wide_int y = wi::to_wide (argrange[1][0], szprec);
1516
1517 wi::overflow_type vflow;
1518 wide_int prod = wi::umul (x, y, &vflow);
1519
1520 if (vflow)
1521 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1522 "%Kproduct %<%E * %E%> of arguments %i and %i "
1523 "exceeds %<SIZE_MAX%>",
1524 exp, argrange[0][0], argrange[1][0],
1525 idx[0] + 1, idx[1] + 1);
1526 else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
1527 warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1528 "%Kproduct %<%E * %E%> of arguments %i and %i "
1529 "exceeds maximum object size %E",
1530 exp, argrange[0][0], argrange[1][0],
1531 idx[0] + 1, idx[1] + 1,
1532 maxobjsize);
1533
1534 if (warned)
1535 {
1536 /* Print the full range of each of the two arguments to make
1537 it clear when it is, in fact, in a range and not constant. */
1538 if (argrange[0][0] != argrange [0][1])
1539 inform (loc, "argument %i in the range [%E, %E]",
1540 idx[0] + 1, argrange[0][0], argrange[0][1]);
1541 if (argrange[1][0] != argrange [1][1])
1542 inform (loc, "argument %i in the range [%E, %E]",
1543 idx[1] + 1, argrange[1][0], argrange[1][1]);
1544 }
1545 }
1546
1547 if (warned && fn)
1548 {
1549 location_t fnloc = DECL_SOURCE_LOCATION (fn);
1550
1551 if (DECL_IS_UNDECLARED_BUILTIN (fn))
1552 inform (loc,
1553 "in a call to built-in allocation function %qD", fn);
1554 else
1555 inform (fnloc,
1556 "in a call to allocation function %qD declared here", fn);
1557 }
1558 }
1559
1560 /* If EXPR refers to a character array or pointer declared with attribute
1561 nonstring, return a decl for that array or pointer and set *REF to
1562 the referenced enclosing object or pointer. Otherwise return
1563 null. */
1564
1565 tree
1566 get_attr_nonstring_decl (tree expr, tree *ref)
1567 {
1568 tree decl = expr;
1569 tree var = NULL_TREE;
1570 if (TREE_CODE (decl) == SSA_NAME)
1571 {
1572 gimple *def = SSA_NAME_DEF_STMT (decl);
1573
1574 if (is_gimple_assign (def))
1575 {
1576 tree_code code = gimple_assign_rhs_code (def);
1577 if (code == ADDR_EXPR
1578 || code == COMPONENT_REF
1579 || code == VAR_DECL)
1580 decl = gimple_assign_rhs1 (def);
1581 }
1582 else
1583 var = SSA_NAME_VAR (decl);
1584 }
1585
1586 if (TREE_CODE (decl) == ADDR_EXPR)
1587 decl = TREE_OPERAND (decl, 0);
1588
1589 /* To simplify calling code, store the referenced DECL regardless of
1590 the attribute determined below, but avoid storing the SSA_NAME_VAR
1591 obtained above (it's not useful for dataflow purposes). */
1592 if (ref)
1593 *ref = decl;
1594
1595 /* Use the SSA_NAME_VAR that was determined above to see if it's
1596 declared nonstring. Otherwise drill down into the referenced
1597 DECL. */
1598 if (var)
1599 decl = var;
1600 else if (TREE_CODE (decl) == ARRAY_REF)
1601 decl = TREE_OPERAND (decl, 0);
1602 else if (TREE_CODE (decl) == COMPONENT_REF)
1603 decl = TREE_OPERAND (decl, 1);
1604 else if (TREE_CODE (decl) == MEM_REF)
1605 return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
1606
1607 if (DECL_P (decl)
1608 && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
1609 return decl;
1610
1611 return NULL_TREE;
1612 }
1613
1614 /* Warn about passing a non-string array/pointer to a built-in function
1615 that expects a nul-terminated string argument. Returns true if
1616 a warning has been issued. */
1617
1618 bool
1619 maybe_warn_nonstring_arg (tree fndecl, tree exp)
1620 {
1621 if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1622 return false;
1623
1624 if (TREE_NO_WARNING (exp) || !warn_stringop_overread)
1625 return false;
1626
1627 /* Avoid clearly invalid calls (more checking done below). */
1628 unsigned nargs = call_expr_nargs (exp);
1629 if (!nargs)
1630 return false;
1631
1632 /* The bound argument to a bounded string function like strncpy. */
1633 tree bound = NULL_TREE;
1634
1635 /* The longest known or possible string argument to one of the comparison
1636 functions. If the length is less than the bound it is used instead.
1637 Since the length is only used for warning and not for code generation,
1638 disable strict mode in the calls to get_range_strlen below. */
1639 tree maxlen = NULL_TREE;
1640
1641 /* It's safe to call "bounded" string functions with a non-string
1642 argument since the functions provide an explicit bound for this
1643 purpose. The exception is strncat where the bound may refer to
1644 either the destination or the source. */
1645 int fncode = DECL_FUNCTION_CODE (fndecl);
1646 switch (fncode)
1647 {
1648 case BUILT_IN_STRCMP:
1649 case BUILT_IN_STRNCMP:
1650 case BUILT_IN_STRNCASECMP:
1651 {
1652 /* For these, if one argument refers to one or more of a set
1653 of string constants or arrays of known size, determine
1654 the range of their known or possible lengths and use it
1655 conservatively as the bound for the unbounded function,
1656 and to adjust the range of the bound of the bounded ones. */
1657 for (unsigned argno = 0;
1658 argno < MIN (nargs, 2)
1659 && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
1660 {
1661 tree arg = CALL_EXPR_ARG (exp, argno);
1662 if (!get_attr_nonstring_decl (arg))
1663 {
1664 c_strlen_data lendata = { };
1665 /* Set MAXBOUND to an arbitrary non-null non-integer
1666 node as a request to have it set to the length of
1667 the longest string in a PHI. */
1668 lendata.maxbound = arg;
1669 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1670 maxlen = lendata.maxbound;
1671 }
1672 }
1673 }
1674 /* Fall through. */
1675
1676 case BUILT_IN_STRNCAT:
1677 case BUILT_IN_STPNCPY:
1678 case BUILT_IN_STRNCPY:
1679 if (nargs > 2)
1680 bound = CALL_EXPR_ARG (exp, 2);
1681 break;
1682
1683 case BUILT_IN_STRNDUP:
1684 if (nargs > 1)
1685 bound = CALL_EXPR_ARG (exp, 1);
1686 break;
1687
1688 case BUILT_IN_STRNLEN:
1689 {
1690 tree arg = CALL_EXPR_ARG (exp, 0);
1691 if (!get_attr_nonstring_decl (arg))
1692 {
1693 c_strlen_data lendata = { };
1694 /* Set MAXBOUND to an arbitrary non-null non-integer
1695 node as a request to have it set to the length of
1696 the longest string in a PHI. */
1697 lendata.maxbound = arg;
1698 get_range_strlen (arg, &lendata, /* eltsize = */ 1);
1699 maxlen = lendata.maxbound;
1700 }
1701 if (nargs > 1)
1702 bound = CALL_EXPR_ARG (exp, 1);
1703 break;
1704 }
1705
1706 default:
1707 break;
1708 }
1709
1710 /* Determine the range of the bound argument (if specified). */
1711 tree bndrng[2] = { NULL_TREE, NULL_TREE };
1712 if (bound)
1713 {
1714 STRIP_NOPS (bound);
1715 get_size_range (bound, bndrng);
1716 }
1717
1718 location_t loc = EXPR_LOCATION (exp);
1719
1720 if (bndrng[0])
1721 {
1722 /* Diagnose excessive bound prior to the adjustment below and
1723 regardless of attribute nonstring. */
1724 tree maxobjsize = max_object_size ();
1725 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
1726 {
1727 bool warned = false;
1728 if (tree_int_cst_equal (bndrng[0], bndrng[1]))
1729 warned = warning_at (loc, OPT_Wstringop_overread,
1730 "%K%qD specified bound %E "
1731 "exceeds maximum object size %E",
1732 exp, fndecl, bndrng[0], maxobjsize);
1733 else
1734 warned = warning_at (loc, OPT_Wstringop_overread,
1735 "%K%qD specified bound [%E, %E] "
1736 "exceeds maximum object size %E",
1737 exp, fndecl, bndrng[0], bndrng[1],
1738 maxobjsize);
1739 if (warned)
1740 TREE_NO_WARNING (exp) = true;
1741
1742 return warned;
1743 }
1744 }
1745
1746 if (maxlen && !integer_all_onesp (maxlen))
1747 {
1748 /* Add one for the nul. */
1749 maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
1750 size_one_node);
1751
1752 if (!bndrng[0])
1753 {
1754 /* Conservatively use the upper bound of the lengths for
1755 both the lower and the upper bound of the operation. */
1756 bndrng[0] = maxlen;
1757 bndrng[1] = maxlen;
1758 bound = void_type_node;
1759 }
1760 else if (maxlen)
1761 {
1762 /* Replace the bound on the operation with the upper bound
1763 of the length of the string if the latter is smaller. */
1764 if (tree_int_cst_lt (maxlen, bndrng[0]))
1765 bndrng[0] = maxlen;
1766 else if (tree_int_cst_lt (maxlen, bndrng[1]))
1767 bndrng[1] = maxlen;
1768 }
1769 }
1770
1771 bool any_arg_warned = false;
1772 /* Iterate over the built-in function's formal arguments and check
1773 each const char* against the actual argument. If the actual
1774 argument is declared with attribute nonstring, issue a warning unless
1775 the argument's maximum length is bounded. */
1776 function_args_iterator it;
1777 function_args_iter_init (&it, TREE_TYPE (fndecl));
1778
1779 for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1780 {
1781 /* Avoid iterating past the declared arguments in a call
1782 to a function declared without a prototype. */
1783 if (argno >= nargs)
1784 break;
1785
1786 tree argtype = function_args_iter_cond (&it);
1787 if (!argtype)
1788 break;
1789
1790 if (TREE_CODE (argtype) != POINTER_TYPE)
1791 continue;
1792
1793 argtype = TREE_TYPE (argtype);
1794
1795 if (TREE_CODE (argtype) != INTEGER_TYPE
1796 || !TYPE_READONLY (argtype))
1797 continue;
1798
1799 argtype = TYPE_MAIN_VARIANT (argtype);
1800 if (argtype != char_type_node)
1801 continue;
1802
1803 tree callarg = CALL_EXPR_ARG (exp, argno);
1804 if (TREE_CODE (callarg) == ADDR_EXPR)
1805 callarg = TREE_OPERAND (callarg, 0);
1806
1807 /* See if the destination is declared with attribute "nonstring". */
1808 tree decl = get_attr_nonstring_decl (callarg);
1809 if (!decl)
1810 continue;
1811
1812 /* The maximum number of array elements accessed. */
1813 offset_int wibnd = 0;
1814
1815 if (argno && fncode == BUILT_IN_STRNCAT)
1816 {
1817 /* See if the bound in strncat is derived from the length
1818 (i.e. the strlen) of the destination (as it's expected to be).
1819 If so, reset BOUND and FNCODE to trigger a warning. */
1820 tree dstarg = CALL_EXPR_ARG (exp, 0);
1821 if (is_strlen_related_p (dstarg, bound))
1822 {
1823 /* The bound applies to the destination, not to the source,
1824 so reset these to trigger a warning without mentioning
1825 the bound. */
1826 bound = NULL;
1827 fncode = 0;
1828 }
1829 else if (bndrng[1])
1830 /* Use the upper bound of the range for strncat. */
1831 wibnd = wi::to_offset (bndrng[1]);
1832 }
1833 else if (bndrng[0])
1834 /* Use the lower bound of the range for functions other than
1835 strncat. */
1836 wibnd = wi::to_offset (bndrng[0]);
1837
1838 /* Determine the size of the argument array if it is one. */
1839 offset_int asize = wibnd;
1840 bool known_size = false;
1841 tree type = TREE_TYPE (decl);
1842
1843 /* Determine the array size. For arrays of unknown bound and
1844 pointers reset BOUND to trigger the appropriate warning. */
1845 if (TREE_CODE (type) == ARRAY_TYPE)
1846 {
1847 if (tree arrbnd = TYPE_DOMAIN (type))
1848 {
1849 if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1850 {
1851 asize = wi::to_offset (arrbnd) + 1;
1852 known_size = true;
1853 }
1854 }
1855 else if (bound == void_type_node)
1856 bound = NULL_TREE;
1857 }
1858 else if (bound == void_type_node)
1859 bound = NULL_TREE;
1860
1861 /* In a call to strncat with a bound in a range whose lower but
1862 not upper bound is less than the array size, reset ASIZE to
1863 the upper bound of the range and clear BOUND and FNCODE to
1864 trigger the appropriate warning below. */
1865 if (fncode == BUILT_IN_STRNCAT
1866 && bndrng[0] != bndrng[1]
1867 && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
1868 && (!known_size
1869 || wi::ltu_p (asize, wibnd)))
1870 {
1871 asize = wibnd;
1872 bound = NULL_TREE;
1873 fncode = 0;
1874 }
1875
1876 bool warned = false;
1877
1878 auto_diagnostic_group d;
1879 if (wi::ltu_p (asize, wibnd))
1880 {
1881 if (bndrng[0] == bndrng[1])
1882 warned = warning_at (loc, OPT_Wstringop_overread,
1883 "%qD argument %i declared attribute "
1884 "%<nonstring%> is smaller than the specified "
1885 "bound %wu",
1886 fndecl, argno + 1, wibnd.to_uhwi ());
1887 else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
1888 warned = warning_at (loc, OPT_Wstringop_overread,
1889 "%qD argument %i declared attribute "
1890 "%<nonstring%> is smaller than "
1891 "the specified bound [%E, %E]",
1892 fndecl, argno + 1, bndrng[0], bndrng[1]);
1893 else
1894 warned = warning_at (loc, OPT_Wstringop_overread,
1895 "%qD argument %i declared attribute "
1896 "%<nonstring%> may be smaller than "
1897 "the specified bound [%E, %E]",
1898 fndecl, argno + 1, bndrng[0], bndrng[1]);
1899 }
1900 else if (fncode == BUILT_IN_STRNCAT)
1901 ; /* Avoid warning for calls to strncat() when the bound
1902 is equal to the size of the non-string argument. */
1903 else if (!bound)
1904 warned = warning_at (loc, OPT_Wstringop_overread,
1905 "%qD argument %i declared attribute %<nonstring%>",
1906 fndecl, argno + 1);
1907
1908 if (warned)
1909 {
1910 inform (DECL_SOURCE_LOCATION (decl),
1911 "argument %qD declared here", decl);
1912 any_arg_warned = true;
1913 }
1914 }
1915
1916 if (any_arg_warned)
1917 TREE_NO_WARNING (exp) = true;
1918
1919 return any_arg_warned;
1920 }
1921
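/* An illustrative sketch (example user code, not part of this file) of
   the kind of call the checks above are meant to diagnose; the names
   are made up for the example:

     char d[3] __attribute__ ((nonstring)) = { 'a', 'b', 'c' };

     int f (void)
     {
       return __builtin_strlen (d);   // -Wstringop-overread: D is declared
                                      // nonstring and need not be
                                      // nul-terminated
     }

   A bounded call such as __builtin_strnlen (d, sizeof d) is not diagnosed
   because the explicit bound keeps the access within the array.  */
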
1922 /* Issue an error if CALL_EXPR was flagged as requiring
1923 tail-call optimization. */
1924
1925 void
1926 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1927 {
1928 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1929 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1930 return;
1931
1932 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1933 }
1934
1935 /* Returns the type of the argument ARGNO to function with type FNTYPE
1936 or null when the type cannot be determined or no such argument exists. */
1937
1938 static tree
1939 fntype_argno_type (tree fntype, unsigned argno)
1940 {
1941 if (!prototype_p (fntype))
1942 return NULL_TREE;
1943
1944 tree argtype;
1945 function_args_iterator it;
1946 FOREACH_FUNCTION_ARGS (fntype, argtype, it)
1947 if (argno-- == 0)
1948 return argtype;
1949
1950 return NULL_TREE;
1951 }
1952
1953 /* Helper to append the "human readable" attribute access specification
1954 described by ACCESS to the array ATTRSTR with size STRSIZE. Used in
1955 diagnostics. */
1956
1957 static inline void
1958 append_attrname (const std::pair<int, attr_access> &access,
1959 char *attrstr, size_t strsize)
1960 {
1961 if (access.second.internal_p)
1962 return;
1963
1964 tree str = access.second.to_external_string ();
1965 gcc_assert (strsize >= (size_t) TREE_STRING_LENGTH (str));
1966 strcpy (attrstr, TREE_STRING_POINTER (str));
1967 }
1968
1969 /* Iterate over attribute access read-only, read-write, and write-only
1970 arguments and diagnose past-the-end accesses and related problems
1971 in the function call EXP. */
1972
1973 static void
1974 maybe_warn_rdwr_sizes (rdwr_map *rwm, tree fndecl, tree fntype, tree exp)
1975 {
1976 auto_diagnostic_group adg;
1977
1978 /* Set if a warning has been issued for any argument (used to decide
1979 whether to emit an informational note at the end). */
1980 bool any_warned = false;
1981
1982 /* A string describing the attributes that the warnings issued by this
1983 function apply to. Used to print one informational note per function
1984 call, rather than one per warning. That reduces clutter. */
1985 char attrstr[80];
1986 attrstr[0] = 0;
1987
1988 for (rdwr_map::iterator it = rwm->begin (); it != rwm->end (); ++it)
1989 {
1990 std::pair<int, attr_access> access = *it;
1991
1992 /* Get the function call arguments corresponding to the attribute's
1993 positional arguments. When both arguments have been specified
1994 there will be two entries in *RWM, one for each. They are
1995 cross-referenced by their respective argument numbers in
1996 ACCESS.PTRARG and ACCESS.SIZARG. */
1997 const int ptridx = access.second.ptrarg;
1998 const int sizidx = access.second.sizarg;
1999
2000 gcc_assert (ptridx != -1);
2001 gcc_assert (access.first == ptridx || access.first == sizidx);
2002
2003 /* The pointer is set to null for the entry corresponding to
2004 the size argument. Skip it. It's handled when the entry
2005 corresponding to the pointer argument comes up. */
2006 if (!access.second.ptr)
2007 continue;
2008
2009 tree ptrtype = fntype_argno_type (fntype, ptridx);
2010 tree argtype = TREE_TYPE (ptrtype);
2011
2012 /* The size of the access by the call. */
2013 tree access_size;
2014 if (sizidx == -1)
2015 {
2016 /* If only the pointer attribute operand was specified and
2017 not the size, set ACCESS_SIZE to the greater of MINSIZE or
2018 the size of one element of the pointed-to type to detect
2019 smaller objects (null pointers are diagnosed in this case
2020 only if the pointer is also declared with attribute nonnull). */
2021 if (access.second.minsize
2022 && access.second.minsize != HOST_WIDE_INT_M1U)
2023 access_size = build_int_cstu (sizetype, access.second.minsize);
2024 else
2025 access_size = size_one_node;
2026 }
2027 else
2028 access_size = rwm->get (sizidx)->size;
2029
2030 /* Format the value or range to avoid an explosion of messages. */
2031 char sizstr[80];
2032 tree sizrng[2] = { size_zero_node, build_all_ones_cst (sizetype) };
2033 if (get_size_range (access_size, sizrng, true))
2034 {
2035 const char *s0 = print_generic_expr_to_str (sizrng[0]);
2036 if (tree_int_cst_equal (sizrng[0], sizrng[1]))
2037 {
2038 gcc_checking_assert (strlen (s0) < sizeof sizstr);
2039 strcpy (sizstr, s0);
2040 }
2041 else
2042 {
2043 const char *s1 = print_generic_expr_to_str (sizrng[1]);
2044 gcc_checking_assert (strlen (s0) + strlen (s1)
2045 < sizeof sizstr - 4);
2046 sprintf (sizstr, "[%s, %s]", s0, s1);
2047 }
2048 }
2049 else
2050 *sizstr = '\0';
2051
2052 /* Set if a warning has been issued for the current argument. */
2053 bool arg_warned = false;
2054 location_t loc = EXPR_LOCATION (exp);
2055 tree ptr = access.second.ptr;
2056 if (*sizstr
2057 && tree_int_cst_sgn (sizrng[0]) < 0
2058 && tree_int_cst_sgn (sizrng[1]) < 0)
2059 {
2060 /* Warn about negative sizes. */
2061 if (access.second.internal_p)
2062 {
2063 const std::string argtypestr
2064 = access.second.array_as_string (ptrtype);
2065
2066 arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
2067 "%Kbound argument %i value %s is "
2068 "negative for a variable length array "
2069 "argument %i of type %s",
2070 exp, sizidx + 1, sizstr,
2071 ptridx + 1, argtypestr.c_str ());
2072 }
2073 else
2074 arg_warned = warning_at (loc, OPT_Wstringop_overflow_,
2075 "%Kargument %i value %s is negative",
2076 exp, sizidx + 1, sizstr);
2077
2078 if (arg_warned)
2079 {
2080 append_attrname (access, attrstr, sizeof attrstr);
2081 /* Remember a warning has been issued and avoid warning
2082 again below for the same attribute. */
2083 any_warned = true;
2084 continue;
2085 }
2086 }
2087
2088 if (tree_int_cst_sgn (sizrng[0]) >= 0)
2089 {
2090 if (COMPLETE_TYPE_P (argtype))
2091 {
2092 /* Multiply ACCESS_SIZE by the size of the type the pointer
2093 argument points to. If it's incomplete the size is used
2094 as is. */
2095 if (tree argsize = TYPE_SIZE_UNIT (argtype))
2096 if (TREE_CODE (argsize) == INTEGER_CST)
2097 {
2098 const int prec = TYPE_PRECISION (sizetype);
2099 wide_int minsize = wi::to_wide (sizrng[0], prec);
2100 minsize *= wi::to_wide (argsize, prec);
2101 access_size = wide_int_to_tree (sizetype, minsize);
2102 }
2103 }
2104 }
2105 else
2106 access_size = NULL_TREE;
2107
2108 if (integer_zerop (ptr))
2109 {
2110 if (sizidx >= 0 && tree_int_cst_sgn (sizrng[0]) > 0)
2111 {
2112 /* Warn about null pointers with positive sizes. This is
2113 different from also declaring the pointer argument with
2114 attribute nonnull when the function accepts null pointers
2115 only when the corresponding size is zero. */
2116 if (access.second.internal_p)
2117 {
2118 const std::string argtypestr
2119 = access.second.array_as_string (ptrtype);
2120
2121 arg_warned = warning_at (loc, OPT_Wnonnull,
2122 "%Kargument %i of variable length "
2123 "array %s is null but "
2124 "the corresponding bound argument "
2125 "%i value is %s",
2126 exp, sizidx + 1, argtypestr.c_str (),
2127 ptridx + 1, sizstr);
2128 }
2129 else
2130 arg_warned = warning_at (loc, OPT_Wnonnull,
2131 "%Kargument %i is null but "
2132 "the corresponding size argument "
2133 "%i value is %s",
2134 exp, ptridx + 1, sizidx + 1,
2135 sizstr);
2136 }
2137 else if (access_size && access.second.static_p)
2138 {
2139 /* Warn about null pointers for [static N] array arguments
2140 but do not warn for ordinary (i.e., nonstatic) arrays. */
2141 arg_warned = warning_at (loc, OPT_Wnonnull,
2142 "%Kargument %i to %<%T[static %E]%> "
2143 "is null where non-null expected",
2144 exp, ptridx + 1, argtype,
2145 access_size);
2146 }
2147
2148 if (arg_warned)
2149 {
2150 append_attrname (access, attrstr, sizeof attrstr);
2151 /* Remember a warning has been issued and avoid warning
2152 again below for the same attribute. */
2153 any_warned = true;
2154 continue;
2155 }
2156 }
2157
2158 access_data data (ptr, access.second.mode, NULL_TREE, false,
2159 NULL_TREE, false);
2160 access_ref* const pobj = (access.second.mode == access_write_only
2161 ? &data.dst : &data.src);
2162 tree objsize = compute_objsize (ptr, 1, pobj);
2163
2164 /* The size of the destination or source object. */
2165 tree dstsize = NULL_TREE, srcsize = NULL_TREE;
2166 if (access.second.mode == access_read_only
2167 || access.second.mode == access_none)
2168 {
2169 /* For a read-only argument there is no destination. For
2170 no access, set the source as well and differentiate via
2171 the access flag below. */
2172 srcsize = objsize;
2173 if (access.second.mode == access_read_only
2174 || access.second.mode == access_none)
2175 {
2176 /* For a read-only attribute there is no destination so
2177 clear OBJSIZE. This emits "reading N bytes" kind of
2178 diagnostics instead of the "writing N bytes" kind,
2179 unless MODE is none. */
2180 objsize = NULL_TREE;
2181 }
2182 }
2183 else
2184 dstsize = objsize;
2185
2186 /* Clear the no-warning bit in case it was set by check_access
2187 in a prior iteration so that accesses via different arguments
2188 are diagnosed. */
2189 TREE_NO_WARNING (exp) = false;
2190 access_mode mode = data.mode;
2191 if (mode == access_deferred)
2192 mode = TYPE_READONLY (argtype) ? access_read_only : access_read_write;
2193 check_access (exp, access_size, /*maxread=*/ NULL_TREE, srcsize,
2194 dstsize, mode, &data);
2195
2196 if (TREE_NO_WARNING (exp))
2197 {
2198 any_warned = true;
2199
2200 if (access.second.internal_p)
2201 inform (loc, "referencing argument %u of type %qT",
2202 ptridx + 1, ptrtype);
2203 else
2204 /* If check_access issued a warning above, append the relevant
2205 attribute to the string. */
2206 append_attrname (access, attrstr, sizeof attrstr);
2207 }
2208 }
2209
2210 if (*attrstr)
2211 {
2212 if (fndecl)
2213 inform (DECL_SOURCE_LOCATION (fndecl),
2214 "in a call to function %qD declared with attribute %qs",
2215 fndecl, attrstr);
2216 else
2217 inform (EXPR_LOCATION (exp),
2218 "in a call with type %qT and attribute %qs",
2219 fntype, attrstr);
2220 }
2221 else if (any_warned)
2222 {
2223 if (fndecl)
2224 inform (DECL_SOURCE_LOCATION (fndecl),
2225 "in a call to function %qD", fndecl);
2226 else
2227 inform (EXPR_LOCATION (exp),
2228 "in a call with type %qT", fntype);
2229 }
2230
2231 /* Set the bit in case it was cleared and not set above. */
2232 TREE_NO_WARNING (exp) = true;
2233 }
2234
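/* An illustrative sketch (hypothetical user code, not part of this file)
   of declarations and calls that the checks in maybe_warn_rdwr_sizes are
   meant to diagnose; the names are made up for the example:

     __attribute__ ((access (write_only, 1, 2)))
     void fill (char *buf, int n);

     void g (void)
     {
       char a[4];
       fill (a, 8);            // -Wstringop-overflow: writing 8 bytes
                               // into a 4-byte object
       fill ((char *) 0, 1);   // -Wnonnull: null BUF with a nonzero N
     }

   With only the pointer operand specified, e.g.
   __attribute__ ((access (read_only, 1))), the access size defaults to
   a single element of the pointed-to type (or to the attribute's implied
   minimum number of elements).  */
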
2235 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
2236 CALL_EXPR EXP.
2237
2238 NUM_ACTUALS is the total number of parameters.
2239
2240 N_NAMED_ARGS is the total number of named arguments.
2241
2242 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
2243 value, or null.
2244
2245 FNDECL is the tree node for the target of this call (if known).
2246
2247 ARGS_SO_FAR holds state needed by the target to know where to place
2248 the next argument.
2249
2250 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
2251 for arguments which are passed in registers.
2252
2253 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
2254 and may be modified by this routine.
2255
2256 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
2257 flags which may be modified by this routine.
2258
2259 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
2260 that requires allocation of stack space.
2261
2262 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
2263 the thunked-to function. */
2264
2265 static void
2266 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
2267 struct arg_data *args,
2268 struct args_size *args_size,
2269 int n_named_args ATTRIBUTE_UNUSED,
2270 tree exp, tree struct_value_addr_value,
2271 tree fndecl, tree fntype,
2272 cumulative_args_t args_so_far,
2273 int reg_parm_stack_space,
2274 rtx *old_stack_level,
2275 poly_int64_pod *old_pending_adj,
2276 int *must_preallocate, int *ecf_flags,
2277 bool *may_tailcall, bool call_from_thunk_p)
2278 {
2279 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
2280 location_t loc = EXPR_LOCATION (exp);
2281
2282 /* Count arg position in order args appear. */
2283 int argpos;
2284
2285 int i;
2286
2287 args_size->constant = 0;
2288 args_size->var = 0;
2289
2290 bitmap_obstack_initialize (NULL);
2291
2292 /* In this loop, we consider args in the order they are written.
2293 We fill up ARGS from the back. */
2294
2295 i = num_actuals - 1;
2296 {
2297 int j = i;
2298 call_expr_arg_iterator iter;
2299 tree arg;
2300 bitmap slots = NULL;
2301
2302 if (struct_value_addr_value)
2303 {
2304 args[j].tree_value = struct_value_addr_value;
2305 j--;
2306 }
2307 argpos = 0;
2308 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2309 {
2310 tree argtype = TREE_TYPE (arg);
2311
2312 if (targetm.calls.split_complex_arg
2313 && argtype
2314 && TREE_CODE (argtype) == COMPLEX_TYPE
2315 && targetm.calls.split_complex_arg (argtype))
2316 {
2317 tree subtype = TREE_TYPE (argtype);
2318 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
2319 j--;
2320 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
2321 }
2322 else
2323 args[j].tree_value = arg;
2324 j--;
2325 argpos++;
2326 }
2327
2328 if (slots)
2329 BITMAP_FREE (slots);
2330 }
2331
2332 bitmap_obstack_release (NULL);
2333
2334 tree fntypeattrs = TYPE_ATTRIBUTES (fntype);
2335 /* Extract attribute alloc_size from the type of the called expression
2336 (which could be a function or a function pointer) and if set, store
2337 the indices of the corresponding arguments in ALLOC_IDX, and then
2338 the actual argument(s) at those indices in ALLOC_ARGS. */
2339 int alloc_idx[2] = { -1, -1 };
2340 if (tree alloc_size = lookup_attribute ("alloc_size", fntypeattrs))
2341 {
2342 tree args = TREE_VALUE (alloc_size);
2343 alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
2344 if (TREE_CHAIN (args))
2345 alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
2346 }
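/* For example (a hypothetical declaration, shown only to illustrate the
   data collected above):

     void *my_alloc (int n, int sz) __attribute__ ((alloc_size (1, 2)));

   yields ALLOC_IDX == { 0, 1 }; the argument loop below then records the
   actual arguments passed for N and SZ in ALLOC_ARGS so that
   maybe_warn_alloc_args_overflow can check them.  */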
2347
2348 /* Array for up to the two attribute alloc_size arguments. */
2349 tree alloc_args[] = { NULL_TREE, NULL_TREE };
2350
2351 /* Map of attribute access specifications for function arguments. */
2352 rdwr_map rdwr_idx;
2353 init_attr_rdwr_indices (&rdwr_idx, fntypeattrs);
2354
2355 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
2356 for (argpos = 0; argpos < num_actuals; i--, argpos++)
2357 {
2358 tree type = TREE_TYPE (args[i].tree_value);
2359 int unsignedp;
2360
2361 /* Replace erroneous argument with constant zero. */
2362 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
2363 args[i].tree_value = integer_zero_node, type = integer_type_node;
2364
2365 /* If TYPE is a transparent union or record, pass things the way
2366 we would pass the first field of the union or record. We have
2367 already verified that the modes are the same. */
2368 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
2369 type = TREE_TYPE (first_field (type));
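/* For instance, given the (illustrative, not from this file) declaration

     typedef union { int *ip; const int *cip; }
       __attribute__ ((transparent_union)) int_ptr;

   an INT_PTR argument is passed exactly as its first member, i.e. as
   an int *.  */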
2370
2371 /* Decide where to pass this arg.
2372
2373 args[i].reg is nonzero if all or part is passed in registers.
2374
2375 args[i].partial is nonzero if part but not all is passed in registers,
2376 and the exact value says how many bytes are passed in registers.
2377
2378 args[i].pass_on_stack is nonzero if the argument must at least be
2379 computed on the stack. It may then be loaded back into registers
2380 if args[i].reg is nonzero.
2381
2382 These decisions are driven by the FUNCTION_... macros and must agree
2383 with those made by function.c. */
2384
2385 /* See if this argument should be passed by invisible reference. */
2386 function_arg_info arg (type, argpos < n_named_args);
2387 if (pass_by_reference (args_so_far_pnt, arg))
2388 {
2389 bool callee_copies;
2390 tree base = NULL_TREE;
2391
2392 callee_copies = reference_callee_copied (args_so_far_pnt, arg);
2393
2394 /* If we're compiling a thunk, pass through invisible references
2395 instead of making a copy. */
2396 if (call_from_thunk_p
2397 || (callee_copies
2398 && !TREE_ADDRESSABLE (type)
2399 && (base = get_base_address (args[i].tree_value))
2400 && TREE_CODE (base) != SSA_NAME
2401 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
2402 {
2403 /* We may have turned the parameter value into an SSA name.
2404 Go back to the original parameter so we can take the
2405 address. */
2406 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
2407 {
2408 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
2409 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
2410 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
2411 }
2412 /* Argument setup code may have copied the value to a register. We
2413 revert that optimization now because the tail call code must
2414 use the original location. */
2415 if (TREE_CODE (args[i].tree_value) == PARM_DECL
2416 && !MEM_P (DECL_RTL (args[i].tree_value))
2417 && DECL_INCOMING_RTL (args[i].tree_value)
2418 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
2419 set_decl_rtl (args[i].tree_value,
2420 DECL_INCOMING_RTL (args[i].tree_value));
2421
2422 mark_addressable (args[i].tree_value);
2423
2424 /* We can't use sibcalls if a callee-copied argument is
2425 stored in the current function's frame. */
2426 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
2427 {
2428 *may_tailcall = false;
2429 maybe_complain_about_tail_call (exp,
2430 "a callee-copied argument is"
2431 " stored in the current"
2432 " function's frame");
2433 }
2434
2435 args[i].tree_value = build_fold_addr_expr_loc (loc,
2436 args[i].tree_value);
2437 type = TREE_TYPE (args[i].tree_value);
2438
2439 if (*ecf_flags & ECF_CONST)
2440 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
2441 }
2442 else
2443 {
2444 /* We make a copy of the object and pass the address to the
2445 function being called. */
2446 rtx copy;
2447
2448 if (!COMPLETE_TYPE_P (type)
2449 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2450 || (flag_stack_check == GENERIC_STACK_CHECK
2451 && compare_tree_int (TYPE_SIZE_UNIT (type),
2452 STACK_CHECK_MAX_VAR_SIZE) > 0))
2453 {
2454 /* This is a variable-sized object. Make space on the stack
2455 for it. */
2456 rtx size_rtx = expr_size (args[i].tree_value);
2457
2458 if (*old_stack_level == 0)
2459 {
2460 emit_stack_save (SAVE_BLOCK, old_stack_level);
2461 *old_pending_adj = pending_stack_adjust;
2462 pending_stack_adjust = 0;
2463 }
2464
2465 /* We can pass TRUE as the last argument because we just
2466 saved the stack pointer and will restore it right after
2467 the call. */
2468 copy = allocate_dynamic_stack_space (size_rtx,
2469 TYPE_ALIGN (type),
2470 TYPE_ALIGN (type),
2471 max_int_size_in_bytes
2472 (type),
2473 true);
2474 copy = gen_rtx_MEM (BLKmode, copy);
2475 set_mem_attributes (copy, type, 1);
2476 }
2477 else
2478 copy = assign_temp (type, 1, 0);
2479
2480 store_expr (args[i].tree_value, copy, 0, false, false);
2481
2482 /* Just change the const function to pure and then let
2483 the next test clear the pure based on
2484 callee_copies. */
2485 if (*ecf_flags & ECF_CONST)
2486 {
2487 *ecf_flags &= ~ECF_CONST;
2488 *ecf_flags |= ECF_PURE;
2489 }
2490
2491 if (!callee_copies && *ecf_flags & ECF_PURE)
2492 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2493
2494 args[i].tree_value
2495 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
2496 type = TREE_TYPE (args[i].tree_value);
2497 *may_tailcall = false;
2498 maybe_complain_about_tail_call (exp,
2499 "argument must be passed"
2500 " by copying");
2501 }
2502 arg.pass_by_reference = true;
2503 }
2504
2505 unsignedp = TYPE_UNSIGNED (type);
2506 arg.type = type;
2507 arg.mode
2508 = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2509 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2510
2511 args[i].unsignedp = unsignedp;
2512 args[i].mode = arg.mode;
2513
2514 targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2515
2516 args[i].reg = targetm.calls.function_arg (args_so_far, arg);
2517
2518 if (args[i].reg && CONST_INT_P (args[i].reg))
2519 args[i].reg = NULL;
2520
2521 /* If this is a sibling call and the machine has register windows, the
2522 register window has to be unwound before calling the routine, so
2523 arguments have to go into the incoming registers. */
2524 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2525 args[i].tail_call_reg
2526 = targetm.calls.function_incoming_arg (args_so_far, arg);
2527 else
2528 args[i].tail_call_reg = args[i].reg;
2529
2530 if (args[i].reg)
2531 args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);
2532
2533 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);
2534
2535 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2536 it means that we are to pass this arg in the register(s) designated
2537 by the PARALLEL, but also to pass it in the stack. */
2538 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2539 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2540 args[i].pass_on_stack = 1;
2541
2542 /* If this is an addressable type, we must preallocate the stack
2543 since we must evaluate the object into its final location.
2544
2545 If this is to be passed in both registers and the stack, it is simpler
2546 to preallocate. */
2547 if (TREE_ADDRESSABLE (type)
2548 || (args[i].pass_on_stack && args[i].reg != 0))
2549 *must_preallocate = 1;
2550
2551 /* Compute the stack-size of this argument. */
2552 if (args[i].reg == 0 || args[i].partial != 0
2553 || reg_parm_stack_space > 0
2554 || args[i].pass_on_stack)
2555 locate_and_pad_parm (arg.mode, type,
2556 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2557 1,
2558 #else
2559 args[i].reg != 0,
2560 #endif
2561 reg_parm_stack_space,
2562 args[i].pass_on_stack ? 0 : args[i].partial,
2563 fndecl, args_size, &args[i].locate);
2564 #ifdef BLOCK_REG_PADDING
2565 else
2566 /* The argument is passed entirely in registers. See at which
2567 end it should be padded. */
2568 args[i].locate.where_pad =
2569 BLOCK_REG_PADDING (arg.mode, type,
2570 int_size_in_bytes (type) <= UNITS_PER_WORD);
2571 #endif
2572
2573 /* Update ARGS_SIZE, the total stack space for args so far. */
2574
2575 args_size->constant += args[i].locate.size.constant;
2576 if (args[i].locate.size.var)
2577 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2578
2579 /* Increment ARGS_SO_FAR, which has info about which arg-registers
2580 have been used, etc. */
2581
2582 /* ??? Traditionally we've passed TYPE_MODE here, instead of the
2583 promoted_mode used for function_arg above. However, the
2584 corresponding handling of incoming arguments in function.c
2585 does pass the promoted mode. */
2586 arg.mode = TYPE_MODE (type);
2587 targetm.calls.function_arg_advance (args_so_far, arg);
2588
2589 /* Store argument values for functions decorated with attribute
2590 alloc_size. */
2591 if (argpos == alloc_idx[0])
2592 alloc_args[0] = args[i].tree_value;
2593 else if (argpos == alloc_idx[1])
2594 alloc_args[1] = args[i].tree_value;
2595
2596 /* Save the actual argument that corresponds to the access attribute
2597 operand for later processing. */
2598 if (attr_access *access = rdwr_idx.get (argpos))
2599 {
2600 if (POINTER_TYPE_P (type))
2601 {
2602 access->ptr = args[i].tree_value;
2603 /* A nonnull ACCESS->SIZE contains VLA bounds. */
2604 }
2605 else
2606 {
2607 access->size = args[i].tree_value;
2608 gcc_assert (access->ptr == NULL_TREE);
2609 }
2610 }
2611 }
2612
2613 if (alloc_args[0])
2614 {
2615 /* Check the arguments of functions decorated with attribute
2616 alloc_size. */
2617 maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
2618 }
2619
2620 /* Detect passing non-string arguments to functions expecting
2621 nul-terminated strings. */
2622 maybe_warn_nonstring_arg (fndecl, exp);
2623
2624 /* Check attribute access arguments. */
2625 maybe_warn_rdwr_sizes (&rdwr_idx, fndecl, fntype, exp);
2626
2627 /* Check calls to operator new for mismatched forms and attempts
2628 to deallocate unallocated objects. */
2629 maybe_emit_free_warning (exp);
2630 }
2631
2632 /* Update ARGS_SIZE to contain the total size for the argument block.
2633 Return the original constant component of the argument block's size.
2634
2635 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2636 for arguments passed in registers. */
2637
2638 static poly_int64
2639 compute_argument_block_size (int reg_parm_stack_space,
2640 struct args_size *args_size,
2641 tree fndecl ATTRIBUTE_UNUSED,
2642 tree fntype ATTRIBUTE_UNUSED,
2643 int preferred_stack_boundary ATTRIBUTE_UNUSED)
2644 {
2645 poly_int64 unadjusted_args_size = args_size->constant;
2646
2647 /* For accumulate outgoing args mode we don't need to align, since the frame
2648 will be already aligned. Align to STACK_BOUNDARY in order to prevent
2649 backends from generating misaligned frame sizes. */
2650 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2651 preferred_stack_boundary = STACK_BOUNDARY;
2652
2653 /* Compute the actual size of the argument block required. The variable
2654 and constant sizes must be combined, the size may have to be rounded,
2655 and there may be a minimum required size. */
2656
2657 if (args_size->var)
2658 {
2659 args_size->var = ARGS_SIZE_TREE (*args_size);
2660 args_size->constant = 0;
2661
2662 preferred_stack_boundary /= BITS_PER_UNIT;
2663 if (preferred_stack_boundary > 1)
2664 {
2665 /* We don't handle this case yet. To handle it correctly we have
2666 to add the delta, round and subtract the delta.
2667 Currently no machine description requires this support. */
2668 gcc_assert (multiple_p (stack_pointer_delta,
2669 preferred_stack_boundary));
2670 args_size->var = round_up (args_size->var, preferred_stack_boundary);
2671 }
2672
2673 if (reg_parm_stack_space > 0)
2674 {
2675 args_size->var
2676 = size_binop (MAX_EXPR, args_size->var,
2677 ssize_int (reg_parm_stack_space));
2678
2679 /* The area corresponding to register parameters is not to count in
2680 the size of the block we need. So make the adjustment. */
2681 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2682 args_size->var
2683 = size_binop (MINUS_EXPR, args_size->var,
2684 ssize_int (reg_parm_stack_space));
2685 }
2686 }
2687 else
2688 {
2689 preferred_stack_boundary /= BITS_PER_UNIT;
2690 if (preferred_stack_boundary < 1)
2691 preferred_stack_boundary = 1;
2692 args_size->constant = (aligned_upper_bound (args_size->constant
2693 + stack_pointer_delta,
2694 preferred_stack_boundary)
2695 - stack_pointer_delta);
2696
2697 args_size->constant = upper_bound (args_size->constant,
2698 reg_parm_stack_space);
2699
2700 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2701 args_size->constant -= reg_parm_stack_space;
2702 }
2703 return unadjusted_args_size;
2704 }
2705
2706 /* Precompute parameters as needed for a function call.
2707
2708 FLAGS is mask of ECF_* constants.
2709
2710 NUM_ACTUALS is the number of arguments.
2711
2712 ARGS is an array containing information for each argument; this
2713 routine fills in the INITIAL_VALUE and VALUE fields for each
2714 precomputed argument. */
2715
2716 static void
2717 precompute_arguments (int num_actuals, struct arg_data *args)
2718 {
2719 int i;
2720
2721 /* If this is a libcall, then precompute all arguments so that we do not
2722 get extraneous instructions emitted as part of the libcall sequence. */
2723
2724 /* If we preallocated the stack space, and some arguments must be passed
2725 on the stack, then we must precompute any parameter which contains a
2726 function call which will store arguments on the stack.
2727 Otherwise, evaluating the parameter may clobber previous parameters
2728 which have already been stored into the stack. (We have code to avoid
2729 such a case by saving the outgoing stack arguments, but it results in
2730 worse code.) */
2731 if (!ACCUMULATE_OUTGOING_ARGS)
2732 return;
2733
2734 for (i = 0; i < num_actuals; i++)
2735 {
2736 tree type;
2737 machine_mode mode;
2738
2739 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2740 continue;
2741
2742 /* If this is an addressable type, we cannot pre-evaluate it. */
2743 type = TREE_TYPE (args[i].tree_value);
2744 gcc_assert (!TREE_ADDRESSABLE (type));
2745
2746 args[i].initial_value = args[i].value
2747 = expand_normal (args[i].tree_value);
2748
2749 mode = TYPE_MODE (type);
2750 if (mode != args[i].mode)
2751 {
2752 int unsignedp = args[i].unsignedp;
2753 args[i].value
2754 = convert_modes (args[i].mode, mode,
2755 args[i].value, args[i].unsignedp);
2756
2757 /* CSE will replace this only if it contains args[i].value
2758 pseudo, so convert it down to the declared mode using
2759 a SUBREG. */
2760 if (REG_P (args[i].value)
2761 && GET_MODE_CLASS (args[i].mode) == MODE_INT
2762 && promote_mode (type, mode, &unsignedp) != args[i].mode)
2763 {
2764 args[i].initial_value
2765 = gen_lowpart_SUBREG (mode, args[i].value);
2766 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2767 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
2768 }
2769 }
2770 }
2771 }
2772
2773 /* Given the current state of MUST_PREALLOCATE and information about
2774 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2775 compute and return the final value for MUST_PREALLOCATE. */
2776
2777 static int
2778 finalize_must_preallocate (int must_preallocate, int num_actuals,
2779 struct arg_data *args, struct args_size *args_size)
2780 {
2781 /* See if we have or want to preallocate stack space.
2782
2783 If we would have to push a partially-in-regs parm
2784 before other stack parms, preallocate stack space instead.
2785
2786 If the size of some parm is not a multiple of the required stack
2787 alignment, we must preallocate.
2788
2789 If the total size of arguments that would otherwise create a copy in
2790 a temporary (such as a CALL) is more than half the total argument list
2791 size, preallocation is faster.
2792
2793 Another reason to preallocate is if we have a machine (like the m88k)
2794 where stack alignment is required to be maintained between every
2795 pair of insns, not just when the call is made. However, we assume here
2796 that such machines either do not have push insns (and hence preallocation
2797 would occur anyway) or the problem is taken care of with
2798 PUSH_ROUNDING. */
2799
2800 if (! must_preallocate)
2801 {
2802 int partial_seen = 0;
2803 poly_int64 copy_to_evaluate_size = 0;
2804 int i;
2805
2806 for (i = 0; i < num_actuals && ! must_preallocate; i++)
2807 {
2808 if (args[i].partial > 0 && ! args[i].pass_on_stack)
2809 partial_seen = 1;
2810 else if (partial_seen && args[i].reg == 0)
2811 must_preallocate = 1;
2812
2813 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2814 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2815 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2816 || TREE_CODE (args[i].tree_value) == COND_EXPR
2817 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2818 copy_to_evaluate_size
2819 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2820 }
2821
2822 if (maybe_ne (args_size->constant, 0)
2823 && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2824 must_preallocate = 1;
2825 }
2826 return must_preallocate;
2827 }
2828
2829 /* If we preallocated stack space, compute the address of each argument
2830 and store it into the ARGS array.
2831
2832 We need not ensure it is a valid memory address here; it will be
2833 validized when it is used.
2834
2835 ARGBLOCK is an rtx for the address of the outgoing arguments. */
2836
2837 static void
2838 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2839 {
2840 if (argblock)
2841 {
2842 rtx arg_reg = argblock;
2843 int i;
2844 poly_int64 arg_offset = 0;
2845
2846 if (GET_CODE (argblock) == PLUS)
2847 {
2848 arg_reg = XEXP (argblock, 0);
2849 arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2850 }
2851
2852 for (i = 0; i < num_actuals; i++)
2853 {
2854 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2855 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2856 rtx addr;
2857 unsigned int align, boundary;
2858 poly_uint64 units_on_stack = 0;
2859 machine_mode partial_mode = VOIDmode;
2860
2861 /* Skip this parm if it will not be passed on the stack. */
2862 if (! args[i].pass_on_stack
2863 && args[i].reg != 0
2864 && args[i].partial == 0)
2865 continue;
2866
2867 if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2868 continue;
2869
2870 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2871 addr = plus_constant (Pmode, addr, arg_offset);
2872
2873 if (args[i].partial != 0)
2874 {
2875 /* Only part of the parameter is being passed on the stack.
2876 Generate a simple memory reference of the correct size. */
2877 units_on_stack = args[i].locate.size.constant;
2878 poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
2879 partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2880 args[i].stack = gen_rtx_MEM (partial_mode, addr);
2881 set_mem_size (args[i].stack, units_on_stack);
2882 }
2883 else
2884 {
2885 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2886 set_mem_attributes (args[i].stack,
2887 TREE_TYPE (args[i].tree_value), 1);
2888 }
2889 align = BITS_PER_UNIT;
2890 boundary = args[i].locate.boundary;
2891 poly_int64 offset_val;
2892 if (args[i].locate.where_pad != PAD_DOWNWARD)
2893 align = boundary;
2894 else if (poly_int_rtx_p (offset, &offset_val))
2895 {
2896 align = least_bit_hwi (boundary);
2897 unsigned int offset_align
2898 = known_alignment (offset_val) * BITS_PER_UNIT;
2899 if (offset_align != 0)
2900 align = MIN (align, offset_align);
2901 }
2902 set_mem_align (args[i].stack, align);
2903
2904 addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2905 addr = plus_constant (Pmode, addr, arg_offset);
2906
2907 if (args[i].partial != 0)
2908 {
2909 /* Only part of the parameter is being passed on the stack.
2910 Generate a simple memory reference of the correct size.
2911 */
2912 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2913 set_mem_size (args[i].stack_slot, units_on_stack);
2914 }
2915 else
2916 {
2917 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2918 set_mem_attributes (args[i].stack_slot,
2919 TREE_TYPE (args[i].tree_value), 1);
2920 }
2921 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2922
2923 /* Function incoming arguments may overlap with sibling call
2924 outgoing arguments and we cannot allow reordering of reads
2925 from function arguments with stores to outgoing arguments
2926 of sibling calls. */
2927 set_mem_alias_set (args[i].stack, 0);
2928 set_mem_alias_set (args[i].stack_slot, 0);
2929 }
2930 }
2931 }
2932
2933 /* Given FNDECL and ADDR, return an rtx suitable for use as a target address
2934 in a call instruction.
2935
2936 FNDECL is the tree node for the target function. For an indirect call
2937 FNDECL will be NULL_TREE.
2938
2939 ADDR is the operand 0 of CALL_EXPR for this call. */
2940
2941 static rtx
2942 rtx_for_function_call (tree fndecl, tree addr)
2943 {
2944 rtx funexp;
2945
2946 /* Get the function to call, in the form of RTL. */
2947 if (fndecl)
2948 {
2949 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2950 TREE_USED (fndecl) = 1;
2951
2952 /* Get a SYMBOL_REF rtx for the function address. */
2953 funexp = XEXP (DECL_RTL (fndecl), 0);
2954 }
2955 else
2956 /* Generate an rtx (probably a pseudo-register) for the address. */
2957 {
2958 push_temp_slots ();
2959 funexp = expand_normal (addr);
2960 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
2961 }
2962 return funexp;
2963 }
2964
2965 /* Return the static chain for this function, if any. */
2966
2967 rtx
2968 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2969 {
2970 if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2971 return NULL;
2972
2973 return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2974 }
2975
2976 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
2977 static struct
2978 {
2979 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2980 or NULL_RTX if none has been scanned yet. */
2981 rtx_insn *scan_start;
2982 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2983 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
2984 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2985 with fixed offset, or PC if this is with variable or unknown offset. */
2986 vec<rtx> cache;
2987 } internal_arg_pointer_exp_state;
2988
2989 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2990
2991 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
2992 the tail call sequence, starting with first insn that hasn't been
2993 scanned yet, and note for each pseudo on the LHS whether it is based
2994 on crtl->args.internal_arg_pointer or not, and what offset from that
2995 pointer it has. */
2996
2997 static void
2998 internal_arg_pointer_based_exp_scan (void)
2999 {
3000 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
3001
3002 if (scan_start == NULL_RTX)
3003 insn = get_insns ();
3004 else
3005 insn = NEXT_INSN (scan_start);
3006
3007 while (insn)
3008 {
3009 rtx set = single_set (insn);
3010 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
3011 {
3012 rtx val = NULL_RTX;
3013 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
3014 /* Punt on pseudos set multiple times. */
3015 if (idx < internal_arg_pointer_exp_state.cache.length ()
3016 && (internal_arg_pointer_exp_state.cache[idx]
3017 != NULL_RTX))
3018 val = pc_rtx;
3019 else
3020 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
3021 if (val != NULL_RTX)
3022 {
3023 if (idx >= internal_arg_pointer_exp_state.cache.length ())
3024 internal_arg_pointer_exp_state.cache
3025 .safe_grow_cleared (idx + 1, true);
3026 internal_arg_pointer_exp_state.cache[idx] = val;
3027 }
3028 }
3029 if (NEXT_INSN (insn) == NULL_RTX)
3030 scan_start = insn;
3031 insn = NEXT_INSN (insn);
3032 }
3033
3034 internal_arg_pointer_exp_state.scan_start = scan_start;
3035 }
3036
3037 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
3038 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
3039 it with fixed offset, or PC if this is with variable or unknown offset.
3040 TOPLEVEL is true if the function is invoked at the topmost level. */
3041
3042 static rtx
3043 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
3044 {
3045 if (CONSTANT_P (rtl))
3046 return NULL_RTX;
3047
3048 if (rtl == crtl->args.internal_arg_pointer)
3049 return const0_rtx;
3050
3051 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
3052 return NULL_RTX;
3053
3054 poly_int64 offset;
3055 if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
3056 {
3057 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
3058 if (val == NULL_RTX || val == pc_rtx)
3059 return val;
3060 return plus_constant (Pmode, val, offset);
3061 }
3062
3063 /* When called at the topmost level, scan pseudo assignments in between the
3064 last scanned instruction in the tail call sequence and the latest insn
3065 in that sequence. */
3066 if (toplevel)
3067 internal_arg_pointer_based_exp_scan ();
3068
3069 if (REG_P (rtl))
3070 {
3071 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
3072 if (idx < internal_arg_pointer_exp_state.cache.length ())
3073 return internal_arg_pointer_exp_state.cache[idx];
3074
3075 return NULL_RTX;
3076 }
3077
3078 subrtx_iterator::array_type array;
3079 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
3080 {
3081 const_rtx x = *iter;
3082 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
3083 return pc_rtx;
3084 if (MEM_P (x))
3085 iter.skip_subrtxes ();
3086 }
3087
3088 return NULL_RTX;
3089 }
3090
3091 /* Return true if SIZE bytes starting from address ADDR might overlap an
3092 already-clobbered argument area. This function is used to determine
3093 if we should give up a sibcall. */
3094
3095 static bool
3096 mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
3097 {
3098 poly_int64 i;
3099 unsigned HOST_WIDE_INT start, end;
3100 rtx val;
3101
3102 if (bitmap_empty_p (stored_args_map)
3103 && stored_args_watermark == HOST_WIDE_INT_M1U)
3104 return false;
3105 val = internal_arg_pointer_based_exp (addr, true);
3106 if (val == NULL_RTX)
3107 return false;
3108 else if (!poly_int_rtx_p (val, &i))
3109 return true;
3110
3111 if (known_eq (size, 0U))
3112 return false;
3113
3114 if (STACK_GROWS_DOWNWARD)
3115 i -= crtl->args.pretend_args_size;
3116 else
3117 i += crtl->args.pretend_args_size;
3118
3119 if (ARGS_GROW_DOWNWARD)
3120 i = -i - size;
3121
3122 /* We can ignore any references to the function's pretend args,
3123 which at this point would manifest as negative values of I. */
3124 if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
3125 return false;
3126
3127 start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
3128 if (!(i + size).is_constant (&end))
3129 end = HOST_WIDE_INT_M1U;
3130
3131 if (end > stored_args_watermark)
3132 return true;
3133
3134 end = MIN (end, SBITMAP_SIZE (stored_args_map));
3135 for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
3136 if (bitmap_bit_p (stored_args_map, k))
3137 return true;
3138
3139 return false;
3140 }
3141
3142 /* Do the register loads required for any wholly-register parms or any
3143 parms which are passed both on the stack and in a register. Their
3144 expressions were already evaluated.
3145
3146 Mark all register-parms as living through the call, putting these USE
3147 insns in the CALL_INSN_FUNCTION_USAGE field.
3148
3149 When IS_SIBCALL, perform the check_sibcall_argument_overlap
3150 checking, setting *SIBCALL_FAILURE if appropriate. */
3151
3152 static void
3153 load_register_parameters (struct arg_data *args, int num_actuals,
3154 rtx *call_fusage, int flags, int is_sibcall,
3155 int *sibcall_failure)
3156 {
3157 int i, j;
3158
3159 for (i = 0; i < num_actuals; i++)
3160 {
3161 rtx reg = ((flags & ECF_SIBCALL)
3162 ? args[i].tail_call_reg : args[i].reg);
3163 if (reg)
3164 {
3165 int partial = args[i].partial;
3166 int nregs;
3167 poly_int64 size = 0;
3168 HOST_WIDE_INT const_size = 0;
3169 rtx_insn *before_arg = get_last_insn ();
3170 tree type = TREE_TYPE (args[i].tree_value);
3171 if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
3172 type = TREE_TYPE (first_field (type));
3173 /* Set non-negative if we must move a word at a time, even if
3174 just one word (e.g., partial == 4 && mode == DFmode). Set
3175 to -1 if we just use a normal move insn. This value can be
3176 zero if the argument is a zero size structure. */
3177 nregs = -1;
3178 if (GET_CODE (reg) == PARALLEL)
3179 ;
3180 else if (partial)
3181 {
3182 gcc_assert (partial % UNITS_PER_WORD == 0);
3183 nregs = partial / UNITS_PER_WORD;
3184 }
3185 else if (TYPE_MODE (type) == BLKmode)
3186 {
3187 /* Variable-sized parameters should be described by a
3188 PARALLEL instead. */
3189 const_size = int_size_in_bytes (type);
3190 gcc_assert (const_size >= 0);
3191 nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3192 size = const_size;
3193 }
3194 else
3195 size = GET_MODE_SIZE (args[i].mode);
3196
3197 /* Handle calls that pass values in multiple non-contiguous
3198 locations. The Irix 6 ABI has examples of this. */
3199
3200 if (GET_CODE (reg) == PARALLEL)
3201 emit_group_move (reg, args[i].parallel_value);
3202
3203 /* If simple case, just do move. If normal partial, store_one_arg
3204 has already loaded the register for us. In all other cases,
3205 load the register(s) from memory. */
3206
3207 else if (nregs == -1)
3208 {
3209 emit_move_insn (reg, args[i].value);
3210 #ifdef BLOCK_REG_PADDING
3211 /* Handle case where we have a value that needs shifting
3212 up to the MSB, e.g. a QImode value and we're padding
3213 upward on a BYTES_BIG_ENDIAN machine. */
3214 if (args[i].locate.where_pad
3215 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
3216 {
3217 gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
3218 if (maybe_lt (size, UNITS_PER_WORD))
3219 {
3220 rtx x;
3221 poly_int64 shift
3222 = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3223
3224 /* Assigning REG here rather than a temp makes
3225 CALL_FUSAGE report the whole reg as used.
3226 Strictly speaking, the call only uses SIZE
3227 bytes at the msb end, but it doesn't seem worth
3228 generating rtl to say that. */
3229 reg = gen_rtx_REG (word_mode, REGNO (reg));
3230 x = expand_shift (LSHIFT_EXPR, word_mode,
3231 reg, shift, reg, 1);
3232 if (x != reg)
3233 emit_move_insn (reg, x);
3234 }
3235 }
3236 #endif
3237 }
3238
3239 /* If we have pre-computed the values to put in the registers in
3240 the case of non-aligned structures, copy them in now. */
3241
3242 else if (args[i].n_aligned_regs != 0)
3243 for (j = 0; j < args[i].n_aligned_regs; j++)
3244 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
3245 args[i].aligned_regs[j]);
3246
3247 else if (partial == 0 || args[i].pass_on_stack)
3248 {
3249 /* SIZE and CONST_SIZE are 0 for partial arguments and
3250 the size of a BLKmode type otherwise. */
3251 gcc_checking_assert (known_eq (size, const_size));
3252 rtx mem = validize_mem (copy_rtx (args[i].value));
3253
3254 /* Check for overlap with already clobbered argument area,
3255 providing that this has non-zero size. */
3256 if (is_sibcall
3257 && const_size != 0
3258 && (mem_might_overlap_already_clobbered_arg_p
3259 (XEXP (args[i].value, 0), const_size)))
3260 *sibcall_failure = 1;
3261
3262 if (const_size % UNITS_PER_WORD == 0
3263 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
3264 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
3265 else
3266 {
3267 if (nregs > 1)
3268 move_block_to_reg (REGNO (reg), mem, nregs - 1,
3269 args[i].mode);
3270 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
3271 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
3272 unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
3273 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
3274 word_mode, word_mode, false,
3275 NULL);
3276 if (BYTES_BIG_ENDIAN)
3277 x = expand_shift (LSHIFT_EXPR, word_mode, x,
3278 BITS_PER_WORD - bitsize, dest, 1);
3279 if (x != dest)
3280 emit_move_insn (dest, x);
3281 }
3282
3283 /* Handle a BLKmode that needs shifting. */
3284 if (nregs == 1 && const_size < UNITS_PER_WORD
3285 #ifdef BLOCK_REG_PADDING
3286 && args[i].locate.where_pad == PAD_DOWNWARD
3287 #else
3288 && BYTES_BIG_ENDIAN
3289 #endif
3290 )
3291 {
3292 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
3293 int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
3294 enum tree_code dir = (BYTES_BIG_ENDIAN
3295 ? RSHIFT_EXPR : LSHIFT_EXPR);
3296 rtx x;
3297
3298 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
3299 if (x != dest)
3300 emit_move_insn (dest, x);
3301 }
3302 }
3303
3304 /* When a parameter is a block, and perhaps in other cases, it is
3305 possible that it did a load from an argument slot that was
3306 already clobbered. */
3307 if (is_sibcall
3308 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
3309 *sibcall_failure = 1;
3310
3311 /* Handle calls that pass values in multiple non-contiguous
3312 locations. The Irix 6 ABI has examples of this. */
3313 if (GET_CODE (reg) == PARALLEL)
3314 use_group_regs (call_fusage, reg);
3315 else if (nregs == -1)
3316 use_reg_mode (call_fusage, reg, TYPE_MODE (type));
3317 else if (nregs > 0)
3318 use_regs (call_fusage, REGNO (reg), nregs);
3319 }
3320 }
3321 }
3322
3323 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
3324 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
3325 bytes, then we would need to push some additional bytes to pad the
3326 arguments. So, we try to compute an adjustment to the stack pointer for an
3327 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
3328 bytes. Then, when the arguments are pushed the stack will be perfectly
3329 aligned.
3330
3331 Return true if this optimization is possible, storing the adjustment
3332 in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
3333 bytes that should be popped after the call. */
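/* Worked example with illustrative numbers: with a preferred unit
   boundary of 16, STACK_POINTER_DELTA == 0, UNADJUSTED_ARGS_SIZE == 12
   and PENDING_STACK_ADJUST == 20, the code below computes
   UNADJUSTED_ALIGNMENT = (0 + 12) % 16 - 20 % 16 = 8 and
   ADJUSTMENT = 20 - (16 - 8) = 12.  Popping 12 bytes now and then pushing
   the 12 bytes of arguments changes the stack pointer by a multiple of 16,
   so it stays 16-byte aligned, and ARGS_SIZE->CONSTANT becomes
   20 - 12 + 12 = 20 bytes to pop after the call.  */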
3334
3335 static bool
3336 combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
3337 poly_int64 unadjusted_args_size,
3338 struct args_size *args_size,
3339 unsigned int preferred_unit_stack_boundary)
3340 {
3341 /* The number of bytes to pop so that the stack will be
3342 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
3343 poly_int64 adjustment;
3344 /* The alignment of the stack after the arguments are pushed, if we
3345 just pushed the arguments without adjusting the stack here. */
3346 unsigned HOST_WIDE_INT unadjusted_alignment;
3347
3348 if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
3349 preferred_unit_stack_boundary,
3350 &unadjusted_alignment))
3351 return false;
3352
3353 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
3354 as possible -- leaving just enough left to cancel out the
3355 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
3356 PENDING_STACK_ADJUST is non-negative, and congruent to
3357 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
3358
3359 /* Begin by trying to pop all the bytes. */
3360 unsigned HOST_WIDE_INT tmp_misalignment;
3361 if (!known_misalignment (pending_stack_adjust,
3362 preferred_unit_stack_boundary,
3363 &tmp_misalignment))
3364 return false;
3365 unadjusted_alignment -= tmp_misalignment;
3366 adjustment = pending_stack_adjust;
3367 /* Push enough additional bytes that the stack will be aligned
3368 after the arguments are pushed. */
3369 if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
3370 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
3371
3372 /* We need to know whether the adjusted argument size
3373 (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
3374 or a deallocation. */
3375 if (!ordered_p (adjustment, unadjusted_args_size))
3376 return false;
3377
3378 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
3379 bytes after the call. The right number is the entire
3380 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
3381 by the arguments in the first place. */
3382 args_size->constant
3383 = pending_stack_adjust - adjustment + unadjusted_args_size;
3384
3385 *adjustment_out = adjustment;
3386 return true;
3387 }
3388
3389 /* Scan expression X to see whether it dereferences any argument slots
3390 already clobbered by tail-call arguments (as noted in the stored_args_map
3391 bitmap).
3392 Return nonzero if X dereferences such an argument slot,
3393 zero otherwise. */
3394
3395 static int
3396 check_sibcall_argument_overlap_1 (rtx x)
3397 {
3398 RTX_CODE code;
3399 int i, j;
3400 const char *fmt;
3401
3402 if (x == NULL_RTX)
3403 return 0;
3404
3405 code = GET_CODE (x);
3406
3407 /* We need not check the operands of the CALL expression itself. */
3408 if (code == CALL)
3409 return 0;
3410
3411 if (code == MEM)
3412 return (mem_might_overlap_already_clobbered_arg_p
3413 (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
3414
3415 /* Scan all subexpressions. */
3416 fmt = GET_RTX_FORMAT (code);
3417 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3418 {
3419 if (*fmt == 'e')
3420 {
3421 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
3422 return 1;
3423 }
3424 else if (*fmt == 'E')
3425 {
3426 for (j = 0; j < XVECLEN (x, i); j++)
3427 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
3428 return 1;
3429 }
3430 }
3431 return 0;
3432 }
3433
3434 /* Scan the sequence after INSN to check whether it dereferences any argument
3435    slots that we have already clobbered with tail call arguments (as noted in
3436    the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is nonzero, add the
3437    stack slots for ARG to the stored_args_map bitmap afterwards (when ARG is a
3438    register, MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the sequence
3439    after INSN dereferences such argument slots, zero otherwise. */
3440
3441 static int
3442 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3443 int mark_stored_args_map)
3444 {
3445 poly_uint64 low, high;
3446 unsigned HOST_WIDE_INT const_low, const_high;
3447
3448 if (insn == NULL_RTX)
3449 insn = get_insns ();
3450 else
3451 insn = NEXT_INSN (insn);
3452
3453 for (; insn; insn = NEXT_INSN (insn))
3454 if (INSN_P (insn)
3455 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
3456 break;
3457
3458 if (mark_stored_args_map)
3459 {
3460 if (ARGS_GROW_DOWNWARD)
3461 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3462 else
3463 low = arg->locate.slot_offset.constant;
3464 high = low + arg->locate.size.constant;
3465
3466 const_low = constant_lower_bound (low);
3467 if (high.is_constant (&const_high))
3468 for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3469 bitmap_set_bit (stored_args_map, i);
3470 else
3471 stored_args_watermark = MIN (stored_args_watermark, const_low);
3472 }
3473 return insn != NULL_RTX;
3474 }
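/* An illustrative sketch of how the bitmap is used (the offsets are made up):
   if ARGS_GROW_DOWNWARD is false and ARG occupies the incoming slot at
   offset 16 with a size of 8 bytes, the bits for bytes 16..23 are set in
   stored_args_map.  If a later argument's RTL contains a MEM whose address
   might alias those bytes, check_sibcall_argument_overlap_1 returns nonzero
   for it and the caller records a sibcall failure, falling back to a normal
   call.  */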
3475
3476 /* Given that a function returns a value of mode MODE at the most
3477 significant end of hard register VALUE, shift VALUE left or right
3478 as specified by LEFT_P. Return true if some action was needed. */
3479
3480 bool
3481 shift_return_value (machine_mode mode, bool left_p, rtx value)
3482 {
3483 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3484 machine_mode value_mode = GET_MODE (value);
3485 poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3486
3487 if (known_eq (shift, 0))
3488 return false;
3489
3490 /* Use ashr rather than lshr for right shifts. This is for the benefit
3491 of the MIPS port, which requires SImode values to be sign-extended
3492 when stored in 64-bit registers. */
3493 if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3494 value, gen_int_shift_amount (value_mode, shift),
3495 value, 1, OPTAB_WIDEN))
3496 gcc_unreachable ();
3497 return true;
3498 }
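/* For example (illustrative only, not a statement about any particular ABI):
   if a target returns an HImode value in the most significant end of a
   64-bit DImode register, the shift amount is 64 - 16 == 48, and for
   LEFT_P == false the value is arithmetically shifted right by 48 bits so
   that it ends up in the least significant end of VALUE.  */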
3499
3500 /* If X is a likely-spilled register value, copy it to a pseudo
3501 register and return that register. Return X otherwise. */
3502
3503 static rtx
3504 avoid_likely_spilled_reg (rtx x)
3505 {
3506 rtx new_rtx;
3507
3508 if (REG_P (x)
3509 && HARD_REGISTER_P (x)
3510 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3511 {
3512 /* Make sure that we generate a REG rather than a CONCAT.
3513 Moves into CONCATs can need nontrivial instructions,
3514 and the whole point of this function is to avoid
3515 using the hard register directly in such a situation. */
3516 generating_concat_p = 0;
3517 new_rtx = gen_reg_rtx (GET_MODE (x));
3518 generating_concat_p = 1;
3519 emit_move_insn (new_rtx, x);
3520 return new_rtx;
3521 }
3522 return x;
3523 }
3524
3525 /* Helper function for expand_call.
3526    Return false if EXP is not implementable as a sibling call. */
3527
3528 static bool
3529 can_implement_as_sibling_call_p (tree exp,
3530 rtx structure_value_addr,
3531 tree funtype,
3532 tree fndecl,
3533 int flags,
3534 tree addr,
3535 const args_size &args_size)
3536 {
3537 if (!targetm.have_sibcall_epilogue ())
3538 {
3539 maybe_complain_about_tail_call
3540 (exp,
3541 "machine description does not have"
3542 " a sibcall_epilogue instruction pattern");
3543 return false;
3544 }
3545
3546 /* Doing sibling call optimization needs some work, since
3547 structure_value_addr can be allocated on the stack.
3548 It does not seem worth the effort since few optimizable
3549 sibling calls will return a structure. */
3550 if (structure_value_addr != NULL_RTX)
3551 {
3552 maybe_complain_about_tail_call (exp, "callee returns a structure");
3553 return false;
3554 }
3555
3556 /* Check whether the target is able to optimize the call
3557 into a sibcall. */
3558 if (!targetm.function_ok_for_sibcall (fndecl, exp))
3559 {
3560 maybe_complain_about_tail_call (exp,
3561 "target is not able to optimize the"
3562 " call into a sibling call");
3563 return false;
3564 }
3565
3566 /* Functions that do not return exactly once may not be sibcall
3567 optimized. */
3568 if (flags & ECF_RETURNS_TWICE)
3569 {
3570 maybe_complain_about_tail_call (exp, "callee returns twice");
3571 return false;
3572 }
3573 if (flags & ECF_NORETURN)
3574 {
3575 maybe_complain_about_tail_call (exp, "callee does not return");
3576 return false;
3577 }
3578
3579 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3580 {
3581 maybe_complain_about_tail_call (exp, "volatile function type");
3582 return false;
3583 }
3584
3585 /* If the called function is nested in the current one, it might access
3586 some of the caller's arguments, but could clobber them beforehand if
3587 the argument areas are shared. */
3588 if (fndecl && decl_function_context (fndecl) == current_function_decl)
3589 {
3590 maybe_complain_about_tail_call (exp, "nested function");
3591 return false;
3592 }
3593
3594 /* If this function requires more stack slots than the current
3595 function, we cannot change it into a sibling call.
3596 crtl->args.pretend_args_size is not part of the
3597 stack allocated by our caller. */
3598 if (maybe_gt (args_size.constant,
3599 crtl->args.size - crtl->args.pretend_args_size))
3600 {
3601 maybe_complain_about_tail_call (exp,
3602 "callee required more stack slots"
3603 " than the caller");
3604 return false;
3605 }
3606
3607 /* If the callee pops its own arguments, then it must pop exactly
3608 the same number of arguments as the current function. */
3609 if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3610 args_size.constant),
3611 targetm.calls.return_pops_args (current_function_decl,
3612 TREE_TYPE
3613 (current_function_decl),
3614 crtl->args.size)))
3615 {
3616 maybe_complain_about_tail_call (exp,
3617 "inconsistent number of"
3618 " popped arguments");
3619 return false;
3620 }
3621
3622 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3623 {
3624 maybe_complain_about_tail_call (exp, "frontend does not support"
3625 " sibling call");
3626 return false;
3627 }
3628
3629 /* All checks passed. */
3630 return true;
3631 }
3632
3633 /* Update the stack alignment when the parameter is passed on the stack,
3634    since the outgoing parameter requires extra alignment on the calling
3635    function side. */
3636
3637 static void
3638 update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
3639 {
3640 if (crtl->stack_alignment_needed < locate->boundary)
3641 crtl->stack_alignment_needed = locate->boundary;
3642 if (crtl->preferred_stack_boundary < locate->boundary)
3643 crtl->preferred_stack_boundary = locate->boundary;
3644 }
3645
3646 /* Generate all the code for a CALL_EXPR exp
3647 and return an rtx for its value.
3648 Store the value in TARGET (specified as an rtx) if convenient.
3649 If the value is stored in TARGET then TARGET is returned.
3650 If IGNORE is nonzero, then we ignore the value of the function call. */
3651
3652 rtx
3653 expand_call (tree exp, rtx target, int ignore)
3654 {
3655 /* Nonzero if we are currently expanding a call. */
3656 static int currently_expanding_call = 0;
3657
3658 /* RTX for the function to be called. */
3659 rtx funexp;
3660 /* Sequence of insns to perform a normal "call". */
3661 rtx_insn *normal_call_insns = NULL;
3662 /* Sequence of insns to perform a tail "call". */
3663 rtx_insn *tail_call_insns = NULL;
3664 /* Data type of the function. */
3665 tree funtype;
3666 tree type_arg_types;
3667 tree rettype;
3668 /* Declaration of the function being called,
3669 or 0 if the function is computed (not known by name). */
3670 tree fndecl = 0;
3671 /* The type of the function being called. */
3672 tree fntype;
3673 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3674 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3675 int pass;
3676
3677 /* Register in which non-BLKmode value will be returned,
3678 or 0 if no value or if value is BLKmode. */
3679 rtx valreg;
3680 /* Address where we should return a BLKmode value;
3681 0 if value not BLKmode. */
3682 rtx structure_value_addr = 0;
3683 /* Nonzero if that address is being passed by treating it as
3684 an extra, implicit first parameter. Otherwise,
3685 it is passed by being copied directly into struct_value_rtx. */
3686 int structure_value_addr_parm = 0;
3687 /* Holds the value of implicit argument for the struct value. */
3688 tree structure_value_addr_value = NULL_TREE;
3689 /* Size of aggregate value wanted, or zero if none wanted
3690 or if we are using the non-reentrant PCC calling convention
3691 or expecting the value in registers. */
3692 poly_int64 struct_value_size = 0;
3693 /* Nonzero if called function returns an aggregate in memory PCC style,
3694 by returning the address of where to find it. */
3695 int pcc_struct_value = 0;
3696 rtx struct_value = 0;
3697
3698 /* Number of actual parameters in this call, including struct value addr. */
3699 int num_actuals;
3700 /* Number of named args. Args after this are anonymous ones
3701 and they must all go on the stack. */
3702 int n_named_args;
3703 /* Number of complex actual arguments that need to be split. */
3704 int num_complex_actuals = 0;
3705
3706 /* Vector of information about each argument.
3707 Arguments are numbered in the order they will be pushed,
3708 not the order they are written. */
3709 struct arg_data *args;
3710
3711 /* Total size in bytes of all the stack-parms scanned so far. */
3712 struct args_size args_size;
3713 struct args_size adjusted_args_size;
3714 /* Size of arguments before any adjustments (such as rounding). */
3715 poly_int64 unadjusted_args_size;
3716 /* Data on reg parms scanned so far. */
3717 CUMULATIVE_ARGS args_so_far_v;
3718 cumulative_args_t args_so_far;
3719 /* Nonzero if a reg parm has been scanned. */
3720 int reg_parm_seen;
3721 /* Nonzero if this is an indirect function call. */
3722
3723 /* Nonzero if we must avoid push-insns in the args for this call.
3724 If stack space is allocated for register parameters, but not by the
3725 caller, then it is preallocated in the fixed part of the stack frame.
3726 So the entire argument block must then be preallocated (i.e., we
3727 ignore PUSH_ROUNDING in that case). */
3728
3729 int must_preallocate = !PUSH_ARGS;
3730
3731 /* Size of the stack reserved for parameter registers. */
3732 int reg_parm_stack_space = 0;
3733
3734 /* Address of space preallocated for stack parms
3735 (on machines that lack push insns), or 0 if space not preallocated. */
3736 rtx argblock = 0;
3737
3738 /* Mask of ECF_ and ERF_ flags. */
3739 int flags = 0;
3740 int return_flags = 0;
3741 #ifdef REG_PARM_STACK_SPACE
3742 /* Define the boundary of the register parm stack space that needs to be
3743 saved, if any. */
3744 int low_to_save, high_to_save;
3745 rtx save_area = 0; /* Place that it is saved */
3746 #endif
3747
3748 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3749 char *initial_stack_usage_map = stack_usage_map;
3750 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
3751 char *stack_usage_map_buf = NULL;
3752
3753 poly_int64 old_stack_allocated;
3754
3755 /* State variables to track stack modifications. */
3756 rtx old_stack_level = 0;
3757 int old_stack_arg_under_construction = 0;
3758 poly_int64 old_pending_adj = 0;
3759 int old_inhibit_defer_pop = inhibit_defer_pop;
3760
3761 /* Some stack pointer alterations we make are performed via
3762 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3763 which we then also need to save/restore along the way. */
3764 poly_int64 old_stack_pointer_delta = 0;
3765
3766 rtx call_fusage;
3767 tree addr = CALL_EXPR_FN (exp);
3768 int i;
3769 /* The alignment of the stack, in bits. */
3770 unsigned HOST_WIDE_INT preferred_stack_boundary;
3771 /* The alignment of the stack, in bytes. */
3772 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3773 /* The static chain value to use for this call. */
3774 rtx static_chain_value;
3775 /* See if this is "nothrow" function call. */
3776 if (TREE_NOTHROW (exp))
3777 flags |= ECF_NOTHROW;
3778
3779 /* See if we can find a DECL-node for the actual function, and get the
3780 function attributes (flags) from the function decl or type node. */
3781 fndecl = get_callee_fndecl (exp);
3782 if (fndecl)
3783 {
3784 fntype = TREE_TYPE (fndecl);
3785 flags |= flags_from_decl_or_type (fndecl);
3786 return_flags |= decl_return_flags (fndecl);
3787 }
3788 else
3789 {
3790 fntype = TREE_TYPE (TREE_TYPE (addr));
3791 flags |= flags_from_decl_or_type (fntype);
3792 if (CALL_EXPR_BY_DESCRIPTOR (exp))
3793 flags |= ECF_BY_DESCRIPTOR;
3794 }
3795 rettype = TREE_TYPE (exp);
3796
3797 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3798
3799 /* Warn if this value is an aggregate type,
3800 regardless of which calling convention we are using for it. */
3801 if (AGGREGATE_TYPE_P (rettype))
3802 warning (OPT_Waggregate_return, "function call has aggregate value");
3803
3804 /* If the result of a non-looping pure or const function call is
3805 ignored (or void), and none of its arguments are volatile, we can
3806 avoid expanding the call and just evaluate the arguments for
3807 side-effects. */
3808 if ((flags & (ECF_CONST | ECF_PURE))
3809 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3810 && (ignore || target == const0_rtx
3811 || TYPE_MODE (rettype) == VOIDmode))
3812 {
3813 bool volatilep = false;
3814 tree arg;
3815 call_expr_arg_iterator iter;
3816
3817 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3818 if (TREE_THIS_VOLATILE (arg))
3819 {
3820 volatilep = true;
3821 break;
3822 }
3823
3824 if (! volatilep)
3825 {
3826 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3827 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3828 return const0_rtx;
3829 }
3830 }
3831
3832 #ifdef REG_PARM_STACK_SPACE
3833 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3834 #endif
3835
3836 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3837 && reg_parm_stack_space > 0 && PUSH_ARGS)
3838 must_preallocate = 1;
3839
3840 /* Set up a place to return a structure. */
3841
3842 /* Cater to broken compilers. */
3843 if (aggregate_value_p (exp, fntype))
3844 {
3845 /* This call returns a big structure. */
3846 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3847
3848 #ifdef PCC_STATIC_STRUCT_RETURN
3849 {
3850 pcc_struct_value = 1;
3851 }
3852 #else /* not PCC_STATIC_STRUCT_RETURN */
3853 {
3854 if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3855 struct_value_size = -1;
3856
3857 /* Even if it is semantically safe to use the target as the return
3858 slot, it may not be sufficiently aligned for the return type. */
3859 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3860 && target
3861 && MEM_P (target)
3862 /* If rettype is addressable, we may not create a temporary.
3863 If target is properly aligned at runtime and the compiler
3864 just doesn't know about it, it will work fine, otherwise it
3865 will be UB. */
3866 && (TREE_ADDRESSABLE (rettype)
3867 || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3868 && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3869 MEM_ALIGN (target)))))
3870 structure_value_addr = XEXP (target, 0);
3871 else
3872 {
3873 /* For variable-sized objects, we must be called with a target
3874 specified. If we were to allocate space on the stack here,
3875 we would have no way of knowing when to free it. */
3876 rtx d = assign_temp (rettype, 1, 1);
3877 structure_value_addr = XEXP (d, 0);
3878 target = 0;
3879 }
3880 }
3881 #endif /* not PCC_STATIC_STRUCT_RETURN */
3882 }
3883
3884 /* Figure out the amount to which the stack should be aligned. */
3885 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3886 if (fndecl)
3887 {
3888 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3889 /* Without automatic stack alignment, we can't increase preferred
3890 stack boundary. With automatic stack alignment, it is
3891 unnecessary since unless we can guarantee that all callers will
3892 align the outgoing stack properly, callee has to align its
3893 stack anyway. */
3894 if (i
3895 && i->preferred_incoming_stack_boundary
3896 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3897 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3898 }
3899
3900 /* Operand 0 is a pointer-to-function; get the type of the function. */
3901 funtype = TREE_TYPE (addr);
3902 gcc_assert (POINTER_TYPE_P (funtype));
3903 funtype = TREE_TYPE (funtype);
3904
3905 /* Count whether there are actual complex arguments that need to be split
3906 into their real and imaginary parts. Munge the type_arg_types
3907 appropriately here as well. */
3908 if (targetm.calls.split_complex_arg)
3909 {
3910 call_expr_arg_iterator iter;
3911 tree arg;
3912 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3913 {
3914 tree type = TREE_TYPE (arg);
3915 if (type && TREE_CODE (type) == COMPLEX_TYPE
3916 && targetm.calls.split_complex_arg (type))
3917 num_complex_actuals++;
3918 }
3919 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3920 }
3921 else
3922 type_arg_types = TYPE_ARG_TYPES (funtype);
3923
3924 if (flags & ECF_MAY_BE_ALLOCA)
3925 cfun->calls_alloca = 1;
3926
3927 /* If struct_value_rtx is 0, it means pass the address
3928 as if it were an extra parameter. Put the argument expression
3929 in structure_value_addr_value. */
3930 if (structure_value_addr && struct_value == 0)
3931 {
3932 /* If structure_value_addr is a REG other than
3933    virtual_outgoing_args_rtx, we can always use it.  If it
3934 is not a REG, we must always copy it into a register.
3935 If it is virtual_outgoing_args_rtx, we must copy it to another
3936 register in some cases. */
3937 rtx temp = (!REG_P (structure_value_addr)
3938 || (ACCUMULATE_OUTGOING_ARGS
3939 && stack_arg_under_construction
3940 && structure_value_addr == virtual_outgoing_args_rtx)
3941 ? copy_addr_to_reg (convert_memory_address
3942 (Pmode, structure_value_addr))
3943 : structure_value_addr);
3944
3945 structure_value_addr_value =
3946 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3947 structure_value_addr_parm = 1;
3948 }
3949
3950 /* Count the arguments and set NUM_ACTUALS. */
3951 num_actuals =
3952 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3953
3954 /* Compute number of named args.
3955 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
3956
3957 if (type_arg_types != 0)
3958 n_named_args
3959 = (list_length (type_arg_types)
3960 /* Count the struct value address, if it is passed as a parm. */
3961 + structure_value_addr_parm);
3962 else
3963 /* If we know nothing, treat all args as named. */
3964 n_named_args = num_actuals;
3965
3966 /* Start updating where the next arg would go.
3967
3968 On some machines (such as the PA) indirect calls have a different
3969 calling convention than normal calls. The fourth argument in
3970 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3971 or not. */
3972 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3973 args_so_far = pack_cumulative_args (&args_so_far_v);
3974
3975 /* Now possibly adjust the number of named args.
3976 Normally, don't include the last named arg if anonymous args follow.
3977 We do include the last named arg if
3978 targetm.calls.strict_argument_naming() returns nonzero.
3979 (If no anonymous args follow, the result of list_length is actually
3980 one too large. This is harmless.)
3981
3982 If targetm.calls.pretend_outgoing_varargs_named() returns
3983 nonzero, and targetm.calls.strict_argument_naming() returns zero,
3984 this machine will be able to place unnamed args that were passed
3985 in registers into the stack. So treat all args as named. This
3986    allows the insns emitted for a specific argument list to be
3987 independent of the function declaration.
3988
3989 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3990 we do not have any reliable way to pass unnamed args in
3991 registers, so we must force them into memory. */
3992
3993 if (type_arg_types != 0
3994 && targetm.calls.strict_argument_naming (args_so_far))
3995 ;
3996 else if (type_arg_types != 0
3997 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3998 /* Don't include the last named arg. */
3999 --n_named_args;
4000 else
4001 /* Treat all args as named. */
4002 n_named_args = num_actuals;
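  /* As an illustration (assuming the usual TYPE_ARG_TYPES representation,
     where a stdarg prototype's type list does not end in void): for

	 int f (int a, ...);

     called with four actual arguments, the raw count above is 1.  A target
     with strict argument naming keeps that 1; a target that cannot pretend
     outgoing varargs are named drops it to 0; otherwise all four actuals
     are treated as named.  */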
4003
4004 /* Make a vector to hold all the information about each arg. */
4005 args = XCNEWVEC (struct arg_data, num_actuals);
4006
4007 /* Build up entries in the ARGS array, compute the size of the
4008 arguments into ARGS_SIZE, etc. */
4009 initialize_argument_information (num_actuals, args, &args_size,
4010 n_named_args, exp,
4011 structure_value_addr_value, fndecl, fntype,
4012 args_so_far, reg_parm_stack_space,
4013 &old_stack_level, &old_pending_adj,
4014 &must_preallocate, &flags,
4015 &try_tail_call, CALL_FROM_THUNK_P (exp));
4016
4017 if (args_size.var)
4018 must_preallocate = 1;
4019
4020 /* Now make final decision about preallocating stack space. */
4021 must_preallocate = finalize_must_preallocate (must_preallocate,
4022 num_actuals, args,
4023 &args_size);
4024
4025 /* If the structure value address will reference the stack pointer, we
4026 must stabilize it. We don't need to do this if we know that we are
4027 not going to adjust the stack pointer in processing this call. */
4028
4029 if (structure_value_addr
4030 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
4031 || reg_mentioned_p (virtual_outgoing_args_rtx,
4032 structure_value_addr))
4033 && (args_size.var
4034 || (!ACCUMULATE_OUTGOING_ARGS
4035 && maybe_ne (args_size.constant, 0))))
4036 structure_value_addr = copy_to_reg (structure_value_addr);
4037
4038 /* Tail calls can make things harder to debug, and we've traditionally
4039 pushed these optimizations into -O2. Don't try if we're already
4040 expanding a call, as that means we're an argument. Don't try if
4041 there's cleanups, as we know there's code to follow the call. */
4042 if (currently_expanding_call++ != 0
4043 || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
4044 || args_size.var
4045 || dbg_cnt (tail_call) == false)
4046 try_tail_call = 0;
4047
4048   /* Work around buggy C/C++ wrappers around Fortran routines with
4049 character(len=constant) arguments if the hidden string length arguments
4050 are passed on the stack; if the callers forget to pass those arguments,
4051 attempting to tail call in such routines leads to stack corruption.
4052 Avoid tail calls in functions where at least one such hidden string
4053 length argument is passed (partially or fully) on the stack in the
4054 caller and the callee needs to pass any arguments on the stack.
4055 See PR90329. */
4056 if (try_tail_call && maybe_ne (args_size.constant, 0))
4057 for (tree arg = DECL_ARGUMENTS (current_function_decl);
4058 arg; arg = DECL_CHAIN (arg))
4059 if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
4060 {
4061 subrtx_iterator::array_type array;
4062 FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
4063 if (MEM_P (*iter))
4064 {
4065 try_tail_call = 0;
4066 break;
4067 }
4068 }
4069
4070 /* If the user has marked the function as requiring tail-call
4071 optimization, attempt it. */
4072 if (must_tail_call)
4073 try_tail_call = 1;
4074
4075   /* Check the remaining reasons why tail call optimization might fail. */
4076 if (try_tail_call)
4077 try_tail_call = can_implement_as_sibling_call_p (exp,
4078 structure_value_addr,
4079 funtype,
4080 fndecl,
4081 flags, addr, args_size);
4082
4083 /* Check if caller and callee disagree in promotion of function
4084 return value. */
4085 if (try_tail_call)
4086 {
4087 machine_mode caller_mode, caller_promoted_mode;
4088 machine_mode callee_mode, callee_promoted_mode;
4089 int caller_unsignedp, callee_unsignedp;
4090 tree caller_res = DECL_RESULT (current_function_decl);
4091
4092 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
4093 caller_mode = DECL_MODE (caller_res);
4094 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
4095 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
4096 caller_promoted_mode
4097 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
4098 &caller_unsignedp,
4099 TREE_TYPE (current_function_decl), 1);
4100 callee_promoted_mode
4101 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
4102 &callee_unsignedp,
4103 funtype, 1);
4104 if (caller_mode != VOIDmode
4105 && (caller_promoted_mode != callee_promoted_mode
4106 || ((caller_mode != caller_promoted_mode
4107 || callee_mode != callee_promoted_mode)
4108 && (caller_unsignedp != callee_unsignedp
4109 || partial_subreg_p (caller_mode, callee_mode)))))
4110 {
4111 try_tail_call = 0;
4112 maybe_complain_about_tail_call (exp,
4113 "caller and callee disagree in"
4114 " promotion of function"
4115 " return value");
4116 }
4117 }
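  /* A hypothetical example of the mismatch being checked for: if the caller
     is declared to return unsigned short while the callee returns signed
     short, and the target promotes both to SImode, the promoted modes agree
     but the extensions differ (zero- vs. sign-extension).  A sibcall would
     hand our own caller a value extended the wrong way, so the tail call is
     abandoned here.  */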
4118
4119 /* Ensure current function's preferred stack boundary is at least
4120 what we need. Stack alignment may also increase preferred stack
4121 boundary. */
4122 for (i = 0; i < num_actuals; i++)
4123 if (reg_parm_stack_space > 0
4124 || args[i].reg == 0
4125 || args[i].partial != 0
4126 || args[i].pass_on_stack)
4127 update_stack_alignment_for_call (&args[i].locate);
4128 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
4129 crtl->preferred_stack_boundary = preferred_stack_boundary;
4130 else
4131 preferred_stack_boundary = crtl->preferred_stack_boundary;
4132
4133 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
4134
4135 if (flag_callgraph_info)
4136 record_final_call (fndecl, EXPR_LOCATION (exp));
4137
4138 /* We want to make two insn chains; one for a sibling call, the other
4139 for a normal call. We will select one of the two chains after
4140 initial RTL generation is complete. */
4141 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
4142 {
4143 int sibcall_failure = 0;
4144 /* We want to emit any pending stack adjustments before the tail
4145 recursion "call". That way we know any adjustment after the tail
4146 recursion call can be ignored if we indeed use the tail
4147 call expansion. */
4148 saved_pending_stack_adjust save;
4149 rtx_insn *insns, *before_call, *after_args;
4150 rtx next_arg_reg;
4151
4152 if (pass == 0)
4153 {
4154 /* State variables we need to save and restore between
4155 iterations. */
4156 save_pending_stack_adjust (&save);
4157 }
4158 if (pass)
4159 flags &= ~ECF_SIBCALL;
4160 else
4161 flags |= ECF_SIBCALL;
4162
4163 /* Other state variables that we must reinitialize each time
4164 through the loop (that are not initialized by the loop itself). */
4165 argblock = 0;
4166 call_fusage = 0;
4167
4168 /* Start a new sequence for the normal call case.
4169
4170 From this point on, if the sibling call fails, we want to set
4171 sibcall_failure instead of continuing the loop. */
4172 start_sequence ();
4173
4174 /* Don't let pending stack adjusts add up to too much.
4175 Also, do all pending adjustments now if there is any chance
4176 this might be a call to alloca or if we are expanding a sibling
4177 call sequence.
4178 Also do the adjustments before a throwing call, otherwise
4179 exception handling can fail; PR 19225. */
4180 if (maybe_ge (pending_stack_adjust, 32)
4181 || (maybe_ne (pending_stack_adjust, 0)
4182 && (flags & ECF_MAY_BE_ALLOCA))
4183 || (maybe_ne (pending_stack_adjust, 0)
4184 && flag_exceptions && !(flags & ECF_NOTHROW))
4185 || pass == 0)
4186 do_pending_stack_adjust ();
4187
4188 /* Precompute any arguments as needed. */
4189 if (pass)
4190 precompute_arguments (num_actuals, args);
4191
4192 /* Now we are about to start emitting insns that can be deleted
4193 if a libcall is deleted. */
4194 if (pass && (flags & ECF_MALLOC))
4195 start_sequence ();
4196
4197 if (pass == 0
4198 && crtl->stack_protect_guard
4199 && targetm.stack_protect_runtime_enabled_p ())
4200 stack_protect_epilogue ();
4201
4202 adjusted_args_size = args_size;
4203 /* Compute the actual size of the argument block required. The variable
4204 and constant sizes must be combined, the size may have to be rounded,
4205 and there may be a minimum required size. When generating a sibcall
4206 pattern, do not round up, since we'll be re-using whatever space our
4207 caller provided. */
4208 unadjusted_args_size
4209 = compute_argument_block_size (reg_parm_stack_space,
4210 &adjusted_args_size,
4211 fndecl, fntype,
4212 (pass == 0 ? 0
4213 : preferred_stack_boundary));
4214
4215 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4216
4217 /* The argument block when performing a sibling call is the
4218 incoming argument block. */
4219 if (pass == 0)
4220 {
4221 argblock = crtl->args.internal_arg_pointer;
4222 if (STACK_GROWS_DOWNWARD)
4223 argblock
4224 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
4225 else
4226 argblock
4227 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
4228
4229 HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
4230 stored_args_map = sbitmap_alloc (map_size);
4231 bitmap_clear (stored_args_map);
4232 stored_args_watermark = HOST_WIDE_INT_M1U;
4233 }
4234
4235 /* If we have no actual push instructions, or shouldn't use them,
4236 make space for all args right now. */
4237 else if (adjusted_args_size.var != 0)
4238 {
4239 if (old_stack_level == 0)
4240 {
4241 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4242 old_stack_pointer_delta = stack_pointer_delta;
4243 old_pending_adj = pending_stack_adjust;
4244 pending_stack_adjust = 0;
4245 /* stack_arg_under_construction says whether a stack arg is
4246 being constructed at the old stack level. Pushing the stack
4247 gets a clean outgoing argument block. */
4248 old_stack_arg_under_construction = stack_arg_under_construction;
4249 stack_arg_under_construction = 0;
4250 }
4251 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
4252 if (flag_stack_usage_info)
4253 current_function_has_unbounded_dynamic_stack_size = 1;
4254 }
4255 else
4256 {
4257 /* Note that we must go through the motions of allocating an argument
4258 block even if the size is zero because we may be storing args
4259 in the area reserved for register arguments, which may be part of
4260 the stack frame. */
4261
4262 poly_int64 needed = adjusted_args_size.constant;
4263
4264 /* Store the maximum argument space used. It will be pushed by
4265 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
4266 checking). */
4267
4268 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4269 needed);
4270
4271 if (must_preallocate)
4272 {
4273 if (ACCUMULATE_OUTGOING_ARGS)
4274 {
4275 /* Since the stack pointer will never be pushed, it is
4276 possible for the evaluation of a parm to clobber
4277 something we have already written to the stack.
4278 Since most function calls on RISC machines do not use
4279 the stack, this is uncommon, but must work correctly.
4280
4281 Therefore, we save any area of the stack that was already
4282 written and that we are using. Here we set up to do this
4283 by making a new stack usage map from the old one. The
4284 actual save will be done by store_one_arg.
4285
4286 Another approach might be to try to reorder the argument
4287 evaluations to avoid this conflicting stack usage. */
4288
4289 /* Since we will be writing into the entire argument area,
4290 the map must be allocated for its entire size, not just
4291 the part that is the responsibility of the caller. */
4292 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4293 needed += reg_parm_stack_space;
4294
4295 poly_int64 limit = needed;
4296 if (ARGS_GROW_DOWNWARD)
4297 limit += 1;
4298
4299 /* For polynomial sizes, this is the maximum possible
4300 size needed for arguments with a constant size
4301 and offset. */
4302 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
4303 highest_outgoing_arg_in_use
4304 = MAX (initial_highest_arg_in_use, const_limit);
4305
4306 free (stack_usage_map_buf);
4307 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4308 stack_usage_map = stack_usage_map_buf;
4309
4310 if (initial_highest_arg_in_use)
4311 memcpy (stack_usage_map, initial_stack_usage_map,
4312 initial_highest_arg_in_use);
4313
4314 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
4315 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4316 (highest_outgoing_arg_in_use
4317 - initial_highest_arg_in_use));
4318 needed = 0;
4319
4320 /* The address of the outgoing argument list must not be
4321 copied to a register here, because argblock would be left
4322 pointing to the wrong place after the call to
4323 allocate_dynamic_stack_space below. */
4324
4325 argblock = virtual_outgoing_args_rtx;
4326 }
4327 else
4328 {
4329 /* Try to reuse some or all of the pending_stack_adjust
4330 to get this space. */
4331 if (inhibit_defer_pop == 0
4332 && (combine_pending_stack_adjustment_and_call
4333 (&needed,
4334 unadjusted_args_size,
4335 &adjusted_args_size,
4336 preferred_unit_stack_boundary)))
4337 {
4338 /* combine_pending_stack_adjustment_and_call computes
4339 an adjustment before the arguments are allocated.
4340 Account for them and see whether or not the stack
4341 needs to go up or down. */
4342 needed = unadjusted_args_size - needed;
4343
4344 /* Checked by
4345 combine_pending_stack_adjustment_and_call. */
4346 gcc_checking_assert (ordered_p (needed, 0));
4347 if (maybe_lt (needed, 0))
4348 {
4349 /* We're releasing stack space. */
4350 /* ??? We can avoid any adjustment at all if we're
4351 already aligned. FIXME. */
4352 pending_stack_adjust = -needed;
4353 do_pending_stack_adjust ();
4354 needed = 0;
4355 }
4356 else
4357 /* We need to allocate space. We'll do that in
4358 push_block below. */
4359 pending_stack_adjust = 0;
4360 }
4361
4362 /* Special case this because overhead of `push_block' in
4363 this case is non-trivial. */
4364 if (known_eq (needed, 0))
4365 argblock = virtual_outgoing_args_rtx;
4366 else
4367 {
4368 rtx needed_rtx = gen_int_mode (needed, Pmode);
4369 argblock = push_block (needed_rtx, 0, 0);
4370 if (ARGS_GROW_DOWNWARD)
4371 argblock = plus_constant (Pmode, argblock, needed);
4372 }
4373
4374 /* We only really need to call `copy_to_reg' in the case
4375 where push insns are going to be used to pass ARGBLOCK
4376 to a function call in ARGS. In that case, the stack
4377 pointer changes value from the allocation point to the
4378 call point, and hence the value of
4379 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
4380 as well always do it. */
4381 argblock = copy_to_reg (argblock);
4382 }
4383 }
4384 }
4385
4386 if (ACCUMULATE_OUTGOING_ARGS)
4387 {
4388 /* The save/restore code in store_one_arg handles all
4389 cases except one: a constructor call (including a C
4390 function returning a BLKmode struct) to initialize
4391 an argument. */
4392 if (stack_arg_under_construction)
4393 {
4394 rtx push_size
4395 = (gen_int_mode
4396 (adjusted_args_size.constant
4397 + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
4398 : TREE_TYPE (fndecl))
4399 ? 0 : reg_parm_stack_space), Pmode));
4400 if (old_stack_level == 0)
4401 {
4402 emit_stack_save (SAVE_BLOCK, &old_stack_level);
4403 old_stack_pointer_delta = stack_pointer_delta;
4404 old_pending_adj = pending_stack_adjust;
4405 pending_stack_adjust = 0;
4406 /* stack_arg_under_construction says whether a stack
4407 arg is being constructed at the old stack level.
4408 Pushing the stack gets a clean outgoing argument
4409 block. */
4410 old_stack_arg_under_construction
4411 = stack_arg_under_construction;
4412 stack_arg_under_construction = 0;
4413 /* Make a new map for the new argument list. */
4414 free (stack_usage_map_buf);
4415 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
4416 stack_usage_map = stack_usage_map_buf;
4417 highest_outgoing_arg_in_use = 0;
4418 stack_usage_watermark = HOST_WIDE_INT_M1U;
4419 }
4420 /* We can pass TRUE as the 4th argument because we just
4421 saved the stack pointer and will restore it right after
4422 the call. */
4423 allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
4424 -1, true);
4425 }
4426
4427 /* If argument evaluation might modify the stack pointer,
4428 copy the address of the argument list to a register. */
4429 for (i = 0; i < num_actuals; i++)
4430 if (args[i].pass_on_stack)
4431 {
4432 argblock = copy_addr_to_reg (argblock);
4433 break;
4434 }
4435 }
4436
4437 compute_argument_addresses (args, argblock, num_actuals);
4438
4439 /* Stack is properly aligned, pops can't safely be deferred during
4440 the evaluation of the arguments. */
4441 NO_DEFER_POP;
4442
4443 /* Precompute all register parameters. It isn't safe to compute
4444 anything once we have started filling any specific hard regs.
4445 TLS symbols sometimes need a call to resolve. Precompute
4446 register parameters before any stack pointer manipulation
4447 to avoid unaligned stack in the called function. */
4448 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
4449
4450 OK_DEFER_POP;
4451
4452 /* Perform stack alignment before the first push (the last arg). */
4453 if (argblock == 0
4454 && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
4455 && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
4456 {
4457 /* When the stack adjustment is pending, we get better code
4458 by combining the adjustments. */
4459 if (maybe_ne (pending_stack_adjust, 0)
4460 && ! inhibit_defer_pop
4461 && (combine_pending_stack_adjustment_and_call
4462 (&pending_stack_adjust,
4463 unadjusted_args_size,
4464 &adjusted_args_size,
4465 preferred_unit_stack_boundary)))
4466 do_pending_stack_adjust ();
4467 else if (argblock == 0)
4468 anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4469 - unadjusted_args_size,
4470 Pmode));
4471 }
4472 /* Now that the stack is properly aligned, pops can't safely
4473 be deferred during the evaluation of the arguments. */
4474 NO_DEFER_POP;
4475
4476 /* Record the maximum pushed stack space size. We need to delay
4477 doing it this far to take into account the optimization done
4478 by combine_pending_stack_adjustment_and_call. */
4479 if (flag_stack_usage_info
4480 && !ACCUMULATE_OUTGOING_ARGS
4481 && pass
4482 && adjusted_args_size.var == 0)
4483 {
4484 poly_int64 pushed = (adjusted_args_size.constant
4485 + pending_stack_adjust);
4486 current_function_pushed_stack_size
4487 = upper_bound (current_function_pushed_stack_size, pushed);
4488 }
4489
4490 funexp = rtx_for_function_call (fndecl, addr);
4491
4492 if (CALL_EXPR_STATIC_CHAIN (exp))
4493 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4494 else
4495 static_chain_value = 0;
4496
4497 #ifdef REG_PARM_STACK_SPACE
4498 /* Save the fixed argument area if it's part of the caller's frame and
4499 is clobbered by argument setup for this call. */
4500 if (ACCUMULATE_OUTGOING_ARGS && pass)
4501 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4502 &low_to_save, &high_to_save);
4503 #endif
4504
4505 /* Now store (and compute if necessary) all non-register parms.
4506 These come before register parms, since they can require block-moves,
4507 which could clobber the registers used for register parms.
4508 Parms which have partial registers are not stored here,
4509 but we do preallocate space here if they want that. */
4510
4511 for (i = 0; i < num_actuals; i++)
4512 {
4513 if (args[i].reg == 0 || args[i].pass_on_stack)
4514 {
4515 rtx_insn *before_arg = get_last_insn ();
4516
4517 /* We don't allow passing huge (> 2^30 B) arguments
4518 by value. It would cause an overflow later on. */
4519 if (constant_lower_bound (adjusted_args_size.constant)
4520 >= (1 << (HOST_BITS_PER_INT - 2)))
4521 {
4522 sorry ("passing too large argument on stack");
4523 continue;
4524 }
4525
4526 if (store_one_arg (&args[i], argblock, flags,
4527 adjusted_args_size.var != 0,
4528 reg_parm_stack_space)
4529 || (pass == 0
4530 && check_sibcall_argument_overlap (before_arg,
4531 &args[i], 1)))
4532 sibcall_failure = 1;
4533 }
4534
4535 if (args[i].stack)
4536 call_fusage
4537 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4538 gen_rtx_USE (VOIDmode, args[i].stack),
4539 call_fusage);
4540 }
4541
4542 /* If we have a parm that is passed in registers but not in memory
4543 and whose alignment does not permit a direct copy into registers,
4544 make a group of pseudos that correspond to each register that we
4545 will later fill. */
4546 if (STRICT_ALIGNMENT)
4547 store_unaligned_arguments_into_pseudos (args, num_actuals);
4548
4549 /* Now store any partially-in-registers parm.
4550 This is the last place a block-move can happen. */
4551 if (reg_parm_seen)
4552 for (i = 0; i < num_actuals; i++)
4553 if (args[i].partial != 0 && ! args[i].pass_on_stack)
4554 {
4555 rtx_insn *before_arg = get_last_insn ();
4556
4557 /* On targets with weird calling conventions (e.g. PA) it's
4558 hard to ensure that all cases of argument overlap between
4559 stack and registers work. Play it safe and bail out. */
4560 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4561 {
4562 sibcall_failure = 1;
4563 break;
4564 }
4565
4566 if (store_one_arg (&args[i], argblock, flags,
4567 adjusted_args_size.var != 0,
4568 reg_parm_stack_space)
4569 || (pass == 0
4570 && check_sibcall_argument_overlap (before_arg,
4571 &args[i], 1)))
4572 sibcall_failure = 1;
4573 }
4574
4575 bool any_regs = false;
4576 for (i = 0; i < num_actuals; i++)
4577 if (args[i].reg != NULL_RTX)
4578 {
4579 any_regs = true;
4580 targetm.calls.call_args (args[i].reg, funtype);
4581 }
4582 if (!any_regs)
4583 targetm.calls.call_args (pc_rtx, funtype);
4584
4585 /* Figure out the register where the value, if any, will come back. */
4586 valreg = 0;
4587 if (TYPE_MODE (rettype) != VOIDmode
4588 && ! structure_value_addr)
4589 {
4590 if (pcc_struct_value)
4591 valreg = hard_function_value (build_pointer_type (rettype),
4592 fndecl, NULL, (pass == 0));
4593 else
4594 valreg = hard_function_value (rettype, fndecl, fntype,
4595 (pass == 0));
4596
4597 /* If VALREG is a PARALLEL whose first member has a zero
4598 offset, use that. This is for targets such as m68k that
4599 return the same value in multiple places. */
4600 if (GET_CODE (valreg) == PARALLEL)
4601 {
4602 rtx elem = XVECEXP (valreg, 0, 0);
4603 rtx where = XEXP (elem, 0);
4604 rtx offset = XEXP (elem, 1);
4605 if (offset == const0_rtx
4606 && GET_MODE (where) == GET_MODE (valreg))
4607 valreg = where;
4608 }
4609 }
4610
4611 /* If register arguments require space on the stack and stack space
4612 was not preallocated, allocate stack space here for arguments
4613 passed in registers. */
4614 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
4615 && !ACCUMULATE_OUTGOING_ARGS
4616 && must_preallocate == 0 && reg_parm_stack_space > 0)
4617 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
4618
4619 /* Pass the function the address in which to return a
4620 structure value. */
4621 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4622 {
4623 structure_value_addr
4624 = convert_memory_address (Pmode, structure_value_addr);
4625 emit_move_insn (struct_value,
4626 force_reg (Pmode,
4627 force_operand (structure_value_addr,
4628 NULL_RTX)));
4629
4630 if (REG_P (struct_value))
4631 use_reg (&call_fusage, struct_value);
4632 }
4633
4634 after_args = get_last_insn ();
4635 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4636 static_chain_value, &call_fusage,
4637 reg_parm_seen, flags);
4638
4639 load_register_parameters (args, num_actuals, &call_fusage, flags,
4640 pass == 0, &sibcall_failure);
4641
4642 /* Save a pointer to the last insn before the call, so that we can
4643 later safely search backwards to find the CALL_INSN. */
4644 before_call = get_last_insn ();
4645
4646 /* Set up next argument register. For sibling calls on machines
4647 with register windows this should be the incoming register. */
4648 if (pass == 0)
4649 next_arg_reg = targetm.calls.function_incoming_arg
4650 (args_so_far, function_arg_info::end_marker ());
4651 else
4652 next_arg_reg = targetm.calls.function_arg
4653 (args_so_far, function_arg_info::end_marker ());
4654
4655 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4656 {
4657 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
4658 arg_nr = num_actuals - arg_nr - 1;
4659 if (arg_nr >= 0
4660 && arg_nr < num_actuals
4661 && args[arg_nr].reg
4662 && valreg
4663 && REG_P (valreg)
4664 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4665 call_fusage
4666 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
4667 gen_rtx_SET (valreg, args[arg_nr].reg),
4668 call_fusage);
4669 }
4670 /* All arguments and registers used for the call must be set up by
4671 now! */
4672
4673 /* Stack must be properly aligned now. */
4674 gcc_assert (!pass
4675 || multiple_p (stack_pointer_delta,
4676 preferred_unit_stack_boundary));
4677
4678 /* Generate the actual call instruction. */
4679 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4680 adjusted_args_size.constant, struct_value_size,
4681 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4682 flags, args_so_far);
4683
4684 if (flag_ipa_ra)
4685 {
4686 rtx_call_insn *last;
4687 rtx datum = NULL_RTX;
4688 if (fndecl != NULL_TREE)
4689 {
4690 datum = XEXP (DECL_RTL (fndecl), 0);
4691 gcc_assert (datum != NULL_RTX
4692 && GET_CODE (datum) == SYMBOL_REF);
4693 }
4694 last = last_call_insn ();
4695 add_reg_note (last, REG_CALL_DECL, datum);
4696 }
4697
4698 /* If the call setup or the call itself overlaps with anything
4699 of the argument setup we probably clobbered our call address.
4700 In that case we can't do sibcalls. */
4701 if (pass == 0
4702 && check_sibcall_argument_overlap (after_args, 0, 0))
4703 sibcall_failure = 1;
4704
4705 /* If a non-BLKmode value is returned at the most significant end
4706 of a register, shift the register right by the appropriate amount
4707 and update VALREG accordingly. BLKmode values are handled by the
4708 group load/store machinery below. */
4709 if (!structure_value_addr
4710 && !pcc_struct_value
4711 && TYPE_MODE (rettype) != VOIDmode
4712 && TYPE_MODE (rettype) != BLKmode
4713 && REG_P (valreg)
4714 && targetm.calls.return_in_msb (rettype))
4715 {
4716 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4717 sibcall_failure = 1;
4718 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
4719 }
4720
4721 if (pass && (flags & ECF_MALLOC))
4722 {
4723 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4724 rtx_insn *last, *insns;
4725
4726 /* The return value from a malloc-like function is a pointer. */
4727 if (TREE_CODE (rettype) == POINTER_TYPE)
4728 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
4729
4730 emit_move_insn (temp, valreg);
4731
4732 /* The return value from a malloc-like function cannot alias
4733 anything else. */
4734 last = get_last_insn ();
4735 add_reg_note (last, REG_NOALIAS, temp);
4736
4737 /* Write out the sequence. */
4738 insns = get_insns ();
4739 end_sequence ();
4740 emit_insn (insns);
4741 valreg = temp;
4742 }
4743
4744 /* For calls to `setjmp', etc., inform
4745 function.c:setjmp_warnings that it should complain if
4746 nonvolatile values are live. For functions that cannot
4747 return, inform flow that control does not fall through. */
4748
4749 if ((flags & ECF_NORETURN) || pass == 0)
4750 {
4751 /* The barrier must be emitted
4752 immediately after the CALL_INSN. Some ports emit more
4753 than just a CALL_INSN above, so we must search for it here. */
4754
4755 rtx_insn *last = get_last_insn ();
4756 while (!CALL_P (last))
4757 {
4758 last = PREV_INSN (last);
4759 /* There was no CALL_INSN? */
4760 gcc_assert (last != before_call);
4761 }
4762
4763 emit_barrier_after (last);
4764
4765 /* Stack adjustments after a noreturn call are dead code.
4766 However when NO_DEFER_POP is in effect, we must preserve
4767 stack_pointer_delta. */
4768 if (inhibit_defer_pop == 0)
4769 {
4770 stack_pointer_delta = old_stack_allocated;
4771 pending_stack_adjust = 0;
4772 }
4773 }
4774
4775 /* If value type not void, return an rtx for the value. */
4776
4777 if (TYPE_MODE (rettype) == VOIDmode
4778 || ignore)
4779 target = const0_rtx;
4780 else if (structure_value_addr)
4781 {
4782 if (target == 0 || !MEM_P (target))
4783 {
4784 target
4785 = gen_rtx_MEM (TYPE_MODE (rettype),
4786 memory_address (TYPE_MODE (rettype),
4787 structure_value_addr));
4788 set_mem_attributes (target, rettype, 1);
4789 }
4790 }
4791 else if (pcc_struct_value)
4792 {
4793 /* This is the special C++ case where we need to
4794 know what the true target was. We take care to
4795 never use this value more than once in one expression. */
4796 target = gen_rtx_MEM (TYPE_MODE (rettype),
4797 copy_to_reg (valreg));
4798 set_mem_attributes (target, rettype, 1);
4799 }
4800 /* Handle calls that return values in multiple non-contiguous locations.
4801 The Irix 6 ABI has examples of this. */
4802 else if (GET_CODE (valreg) == PARALLEL)
4803 {
4804 if (target == 0)
4805 target = emit_group_move_into_temps (valreg);
4806 else if (rtx_equal_p (target, valreg))
4807 ;
4808 else if (GET_CODE (target) == PARALLEL)
4809 	    /* Handle the result of an emit_group_move_into_temps
4810 call in the previous pass. */
4811 emit_group_move (target, valreg);
4812 else
4813 emit_group_store (target, valreg, rettype,
4814 int_size_in_bytes (rettype));
4815 }
4816 else if (target
4817 && GET_MODE (target) == TYPE_MODE (rettype)
4818 && GET_MODE (target) == GET_MODE (valreg))
4819 {
4820 bool may_overlap = false;
4821
4822 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4823 reg to a plain register. */
4824 if (!REG_P (target) || HARD_REGISTER_P (target))
4825 valreg = avoid_likely_spilled_reg (valreg);
4826
4827 /* If TARGET is a MEM in the argument area, and we have
4828 saved part of the argument area, then we can't store
4829 directly into TARGET as it may get overwritten when we
4830 restore the argument save area below. Don't work too
4831 hard though and simply force TARGET to a register if it
4832 is a MEM; the optimizer is quite likely to sort it out. */
4833 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4834 for (i = 0; i < num_actuals; i++)
4835 if (args[i].save_area)
4836 {
4837 may_overlap = true;
4838 break;
4839 }
4840
4841 if (may_overlap)
4842 target = copy_to_reg (valreg);
4843 else
4844 {
4845 /* TARGET and VALREG cannot be equal at this point
4846 because the latter would not have
4847 REG_FUNCTION_VALUE_P true, while the former would if
4848 it were referring to the same register.
4849
4850 If they refer to the same register, this move will be
4851 a no-op, except when function inlining is being
4852 done. */
4853 emit_move_insn (target, valreg);
4854
4855 /* If we are setting a MEM, this code must be executed.
4856 Since it is emitted after the call insn, sibcall
4857 optimization cannot be performed in that case. */
4858 if (MEM_P (target))
4859 sibcall_failure = 1;
4860 }
4861 }
4862 else
4863 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4864
4865 /* If we promoted this return value, make the proper SUBREG.
4866 TARGET might be const0_rtx here, so be careful. */
4867 if (REG_P (target)
4868 && TYPE_MODE (rettype) != BLKmode
4869 && GET_MODE (target) != TYPE_MODE (rettype))
4870 {
4871 tree type = rettype;
4872 int unsignedp = TYPE_UNSIGNED (type);
4873 machine_mode pmode;
4874
4875 /* Ensure we promote as expected, and get the new unsignedness. */
4876 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4877 funtype, 1);
4878 gcc_assert (GET_MODE (target) == pmode);
4879
4880 poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4881 GET_MODE (target));
4882 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4883 SUBREG_PROMOTED_VAR_P (target) = 1;
4884 SUBREG_PROMOTED_SET (target, unsignedp);
4885 }
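      /* For instance (a sketch only; offsets depend on endianness): if
	 RETTYPE is a 16-bit integer but the ABI returns it promoted to
	 SImode, TARGET is currently the SImode register, and the code above
	 rewrites it as (subreg:HI (reg:SI ...) 0) on a little-endian target,
	 with SUBREG_PROMOTED_VAR_P recording the known extension so that
	 later code can avoid re-extending the value.  */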
4886
4887 /* If size of args is variable or this was a constructor call for a stack
4888 argument, restore saved stack-pointer value. */
4889
4890 if (old_stack_level)
4891 {
4892 rtx_insn *prev = get_last_insn ();
4893
4894 emit_stack_restore (SAVE_BLOCK, old_stack_level);
4895 stack_pointer_delta = old_stack_pointer_delta;
4896
4897 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4898
4899 pending_stack_adjust = old_pending_adj;
4900 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4901 stack_arg_under_construction = old_stack_arg_under_construction;
4902 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4903 stack_usage_map = initial_stack_usage_map;
4904 stack_usage_watermark = initial_stack_usage_watermark;
4905 sibcall_failure = 1;
4906 }
4907 else if (ACCUMULATE_OUTGOING_ARGS && pass)
4908 {
4909 #ifdef REG_PARM_STACK_SPACE
4910 if (save_area)
4911 restore_fixed_argument_area (save_area, argblock,
4912 high_to_save, low_to_save);
4913 #endif
4914
4915 /* If we saved any argument areas, restore them. */
4916 for (i = 0; i < num_actuals; i++)
4917 if (args[i].save_area)
4918 {
4919 machine_mode save_mode = GET_MODE (args[i].save_area);
4920 rtx stack_area
4921 = gen_rtx_MEM (save_mode,
4922 memory_address (save_mode,
4923 XEXP (args[i].stack_slot, 0)));
4924
4925 if (save_mode != BLKmode)
4926 emit_move_insn (stack_area, args[i].save_area);
4927 else
4928 emit_block_move (stack_area, args[i].save_area,
4929 (gen_int_mode
4930 (args[i].locate.size.constant, Pmode)),
4931 BLOCK_OP_CALL_PARM);
4932 }
4933
4934 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4935 stack_usage_map = initial_stack_usage_map;
4936 stack_usage_watermark = initial_stack_usage_watermark;
4937 }
4938
4939 /* If this was alloca, record the new stack level. */
4940 if (flags & ECF_MAY_BE_ALLOCA)
4941 record_new_stack_level ();
4942
4943 /* Free up storage we no longer need. */
4944 for (i = 0; i < num_actuals; ++i)
4945 free (args[i].aligned_regs);
4946
4947 targetm.calls.end_call_args ();
4948
4949 insns = get_insns ();
4950 end_sequence ();
4951
4952 if (pass == 0)
4953 {
4954 tail_call_insns = insns;
4955
4956 /* Restore the pending stack adjustment now that we have
4957 finished generating the sibling call sequence. */
4958
4959 restore_pending_stack_adjust (&save);
4960
4961 /* Prepare arg structure for next iteration. */
4962 for (i = 0; i < num_actuals; i++)
4963 {
4964 args[i].value = 0;
4965 args[i].aligned_regs = 0;
4966 args[i].stack = 0;
4967 }
4968
4969 sbitmap_free (stored_args_map);
4970 internal_arg_pointer_exp_state.scan_start = NULL;
4971 internal_arg_pointer_exp_state.cache.release ();
4972 }
4973 else
4974 {
4975 normal_call_insns = insns;
4976
4977 /* Verify that we've deallocated all the stack we used. */
4978 gcc_assert ((flags & ECF_NORETURN)
4979 || known_eq (old_stack_allocated,
4980 stack_pointer_delta
4981 - pending_stack_adjust));
4982 }
4983
4984 /* If something prevents making this a sibling call,
4985 zero out the sequence. */
4986 if (sibcall_failure)
4987 tail_call_insns = NULL;
4988 else
4989 break;
4990 }
4991
4992 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4993 arguments too, as argument area is now clobbered by the call. */
4994 if (tail_call_insns)
4995 {
4996 emit_insn (tail_call_insns);
4997 crtl->tail_call_emit = true;
4998 }
4999 else
5000 {
5001 emit_insn (normal_call_insns);
5002 if (try_tail_call)
5003 /* Ideally we'd emit a message for all of the ways that it could
5004 have failed. */
5005 maybe_complain_about_tail_call (exp, "tail call production failed");
5006 }
5007
5008 currently_expanding_call--;
5009
5010 free (stack_usage_map_buf);
5011 free (args);
5012 return target;
5013 }
5014
5015 /* A sibling call sequence invalidates any REG_EQUIV notes made for
5016 this function's incoming arguments.
5017
5018 At the start of RTL generation we know the only REG_EQUIV notes
5019 in the rtl chain are those for incoming arguments, so we can look
5020 for REG_EQUIV notes between the start of the function and the
5021 NOTE_INSN_FUNCTION_BEG.
5022
5023 This is (slight) overkill. We could keep track of the highest
5024 argument we clobber and be more selective in removing notes, but it
5025 does not seem to be worth the effort. */
5026
5027 void
5028 fixup_tail_calls (void)
5029 {
5030 rtx_insn *insn;
5031
5032 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5033 {
5034 rtx note;
5035
5036 /* There are never REG_EQUIV notes for the incoming arguments
5037 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
5038 if (NOTE_P (insn)
5039 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
5040 break;
5041
5042 note = find_reg_note (insn, REG_EQUIV, 0);
5043 if (note)
5044 remove_note (insn, note);
5045 note = find_reg_note (insn, REG_EQUIV, 0);
5046 gcc_assert (!note);
5047 }
5048 }
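
/* For illustration (not part of the fixup itself): an incoming argument
   pseudo typically carries a note along the lines of

       (expr_list:REG_EQUIV (mem/c:SI (plus:SI (reg/f:SI argp)
                                               (const_int 4))) ...)

   recording that the pseudo is equivalent to its incoming stack slot.  Once
   a sibling call has stored its own outgoing arguments into that area,
   substituting the memory for the pseudo would read the callee's data,
   which is why the notes are removed wholesale above.  */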
5049
5050 /* Traverse a list of TYPES and expand all complex types into their
5051 components. */
5052 static tree
5053 split_complex_types (tree types)
5054 {
5055 tree p;
5056
5057 /* Before allocating memory, check for the common case of no complex. */
5058 for (p = types; p; p = TREE_CHAIN (p))
5059 {
5060 tree type = TREE_VALUE (p);
5061 if (TREE_CODE (type) == COMPLEX_TYPE
5062 && targetm.calls.split_complex_arg (type))
5063 goto found;
5064 }
5065 return types;
5066
5067 found:
5068 types = copy_list (types);
5069
5070 for (p = types; p; p = TREE_CHAIN (p))
5071 {
5072 tree complex_type = TREE_VALUE (p);
5073
5074 if (TREE_CODE (complex_type) == COMPLEX_TYPE
5075 && targetm.calls.split_complex_arg (complex_type))
5076 {
5077 tree next, imag;
5078
5079 /* Rewrite complex type with component type. */
5080 TREE_VALUE (p) = TREE_TYPE (complex_type);
5081 next = TREE_CHAIN (p);
5082
5083 /* Add another component type for the imaginary part. */
5084 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
5085 TREE_CHAIN (p) = imag;
5086 TREE_CHAIN (imag) = next;
5087
5088 /* Skip the newly created node. */
5089 p = TREE_CHAIN (p);
5090 }
5091 }
5092
5093 return types;
5094 }
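
/* For example (illustrative only): given the TYPE_ARG_TYPES chain for
   void f (_Complex double, int), i.e. the list (complex double, int),
   a target whose split_complex_arg hook returns true for complex double
   gets back the list (double, double, int): the complex node is rewritten
   to its component type and a second node is spliced in for the imaginary
   part.  */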
5095 \f
5096 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
5097 for a value of mode OUTMODE,
5098 with NARGS different arguments, passed as ARGS.
5099 Store the return value if RETVAL is nonzero: store it in VALUE if
5100 VALUE is nonnull, otherwise pick a convenient location. In either
5101 case return the location of the stored value.
5102
5103 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
5104 `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
5105 other types of library calls. */
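
/* A hypothetical caller elsewhere in the compiler would normally reach the
   function below through the emit_library_call_value wrappers declared in
   rtl.h, e.g. (operands made up for illustration):

       rtx res = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
                                          SImode, op0, SImode, op1, SImode);

   where each argument rtx is followed by its mode; the wrapper packs those
   pairs into the rtx_mode_t array passed here as ARGS.  */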
5106
5107 rtx
5108 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
5109 enum libcall_type fn_type,
5110 machine_mode outmode, int nargs, rtx_mode_t *args)
5111 {
5112 /* Total size in bytes of all the stack-parms scanned so far. */
5113 struct args_size args_size;
5114 /* Size of arguments before any adjustments (such as rounding). */
5115 struct args_size original_args_size;
5116 int argnum;
5117 rtx fun;
5118   /* TODO: choose the correct decl type of orgfun.  Sadly this information
5119      isn't present here, so we default to the native calling ABI.  */
5120 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
5121 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
5122 int count;
5123 rtx argblock = 0;
5124 CUMULATIVE_ARGS args_so_far_v;
5125 cumulative_args_t args_so_far;
5126 struct arg
5127 {
5128 rtx value;
5129 machine_mode mode;
5130 rtx reg;
5131 int partial;
5132 struct locate_and_pad_arg_data locate;
5133 rtx save_area;
5134 };
5135 struct arg *argvec;
5136 int old_inhibit_defer_pop = inhibit_defer_pop;
5137 rtx call_fusage = 0;
5138 rtx mem_value = 0;
5139 rtx valreg;
5140 int pcc_struct_value = 0;
5141 poly_int64 struct_value_size = 0;
5142 int flags;
5143 int reg_parm_stack_space = 0;
5144 poly_int64 needed;
5145 rtx_insn *before_call;
5146 bool have_push_fusage;
5147 tree tfom; /* type_for_mode (outmode, 0) */
5148
5149 #ifdef REG_PARM_STACK_SPACE
5150 /* Define the boundary of the register parm stack space that needs to be
5151    saved, if any.  */
5152 int low_to_save = 0, high_to_save = 0;
5153 rtx save_area = 0; /* Place that it is saved. */
5154 #endif
5155
5156 /* Size of the stack reserved for parameter registers. */
5157 unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
5158 char *initial_stack_usage_map = stack_usage_map;
5159 unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
5160 char *stack_usage_map_buf = NULL;
5161
5162 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
5163
5164 #ifdef REG_PARM_STACK_SPACE
5165 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
5166 #endif
5167
5168 /* By default, library functions cannot throw. */
5169 flags = ECF_NOTHROW;
5170
5171 switch (fn_type)
5172 {
5173 case LCT_NORMAL:
5174 break;
5175 case LCT_CONST:
5176 flags |= ECF_CONST;
5177 break;
5178 case LCT_PURE:
5179 flags |= ECF_PURE;
5180 break;
5181 case LCT_NORETURN:
5182 flags |= ECF_NORETURN;
5183 break;
5184 case LCT_THROW:
5185 flags &= ~ECF_NOTHROW;
5186 break;
5187 case LCT_RETURNS_TWICE:
5188 flags = ECF_RETURNS_TWICE;
5189 break;
5190 }
5191 fun = orgfun;
5192
5193 /* Ensure current function's preferred stack boundary is at least
5194 what we need. */
5195 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
5196 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5197
5198 /* If this kind of value comes back in memory,
5199 decide where in memory it should come back. */
5200 if (outmode != VOIDmode)
5201 {
5202 tfom = lang_hooks.types.type_for_mode (outmode, 0);
5203 if (aggregate_value_p (tfom, 0))
5204 {
5205 #ifdef PCC_STATIC_STRUCT_RETURN
5206 rtx pointer_reg
5207 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
5208 mem_value = gen_rtx_MEM (outmode, pointer_reg);
5209 pcc_struct_value = 1;
5210 if (value == 0)
5211 value = gen_reg_rtx (outmode);
5212 #else /* not PCC_STATIC_STRUCT_RETURN */
5213 struct_value_size = GET_MODE_SIZE (outmode);
5214 if (value != 0 && MEM_P (value))
5215 mem_value = value;
5216 else
5217 mem_value = assign_temp (tfom, 1, 1);
5218 #endif
5219 /* This call returns a big structure. */
5220 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
5221 }
5222 }
5223 else
5224 tfom = void_type_node;
5225
5226 /* ??? Unfinished: must pass the memory address as an argument. */
5227
5228 /* Copy all the libcall-arguments out of the varargs data
5229 and into a vector ARGVEC.
5230
5231 Compute how to pass each argument. We only support a very small subset
5232 of the full argument passing conventions to limit complexity here since
5233 library functions shouldn't have many args. */
5234
5235 argvec = XALLOCAVEC (struct arg, nargs + 1);
5236 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
5237
5238 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
5239 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
5240 #else
5241 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
5242 #endif
5243 args_so_far = pack_cumulative_args (&args_so_far_v);
5244
5245 args_size.constant = 0;
5246 args_size.var = 0;
5247
5248 count = 0;
5249
5250 push_temp_slots ();
5251
5252 /* If there's a structure value address to be passed,
5253 either pass it in the special place, or pass it as an extra argument. */
5254 if (mem_value && struct_value == 0 && ! pcc_struct_value)
5255 {
5256 rtx addr = XEXP (mem_value, 0);
5257
5258 nargs++;
5259
5260 /* Make sure it is a reasonable operand for a move or push insn. */
5261 if (!REG_P (addr) && !MEM_P (addr)
5262 && !(CONSTANT_P (addr)
5263 && targetm.legitimate_constant_p (Pmode, addr)))
5264 addr = force_operand (addr, NULL_RTX);
5265
5266 argvec[count].value = addr;
5267 argvec[count].mode = Pmode;
5268 argvec[count].partial = 0;
5269
5270 function_arg_info ptr_arg (Pmode, /*named=*/true);
5271 argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
5272 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
5273
5274 locate_and_pad_parm (Pmode, NULL_TREE,
5275 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5276 1,
5277 #else
5278 argvec[count].reg != 0,
5279 #endif
5280 reg_parm_stack_space, 0,
5281 NULL_TREE, &args_size, &argvec[count].locate);
5282
5283 if (argvec[count].reg == 0 || argvec[count].partial != 0
5284 || reg_parm_stack_space > 0)
5285 args_size.constant += argvec[count].locate.size.constant;
5286
5287 targetm.calls.function_arg_advance (args_so_far, ptr_arg);
5288
5289 count++;
5290 }
5291
5292 for (unsigned int i = 0; count < nargs; i++, count++)
5293 {
5294 rtx val = args[i].first;
5295 function_arg_info arg (args[i].second, /*named=*/true);
5296 int unsigned_p = 0;
5297
5298 /* We cannot convert the arg value to the mode the library wants here;
5299 must do it earlier where we know the signedness of the arg. */
5300 gcc_assert (arg.mode != BLKmode
5301 && (GET_MODE (val) == arg.mode
5302 || GET_MODE (val) == VOIDmode));
5303
5304 /* Make sure it is a reasonable operand for a move or push insn. */
5305 if (!REG_P (val) && !MEM_P (val)
5306 && !(CONSTANT_P (val)
5307 && targetm.legitimate_constant_p (arg.mode, val)))
5308 val = force_operand (val, NULL_RTX);
5309
5310 if (pass_by_reference (&args_so_far_v, arg))
5311 {
5312 rtx slot;
5313 int must_copy = !reference_callee_copied (&args_so_far_v, arg);
5314
5315 /* If this was a CONST function, it is now PURE since it now
5316 reads memory. */
5317 if (flags & ECF_CONST)
5318 {
5319 flags &= ~ECF_CONST;
5320 flags |= ECF_PURE;
5321 }
5322
5323 if (MEM_P (val) && !must_copy)
5324 {
5325 tree val_expr = MEM_EXPR (val);
5326 if (val_expr)
5327 mark_addressable (val_expr);
5328 slot = val;
5329 }
5330 else
5331 {
5332 slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
5333 1, 1);
5334 emit_move_insn (slot, val);
5335 }
5336
5337 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5338 gen_rtx_USE (VOIDmode, slot),
5339 call_fusage);
5340 if (must_copy)
5341 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
5342 gen_rtx_CLOBBER (VOIDmode,
5343 slot),
5344 call_fusage);
5345
5346 arg.mode = Pmode;
5347 arg.pass_by_reference = true;
5348 val = force_operand (XEXP (slot, 0), NULL_RTX);
5349 }
5350
5351 arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
5352 NULL_TREE, 0);
5353 argvec[count].mode = arg.mode;
5354 argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
5355 unsigned_p);
5356 argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
5357
5358 argvec[count].partial
5359 = targetm.calls.arg_partial_bytes (args_so_far, arg);
5360
5361 if (argvec[count].reg == 0
5362 || argvec[count].partial != 0
5363 || reg_parm_stack_space > 0)
5364 {
5365 locate_and_pad_parm (arg.mode, NULL_TREE,
5366 #ifdef STACK_PARMS_IN_REG_PARM_AREA
5367 1,
5368 #else
5369 argvec[count].reg != 0,
5370 #endif
5371 reg_parm_stack_space, argvec[count].partial,
5372 NULL_TREE, &args_size, &argvec[count].locate);
5373 args_size.constant += argvec[count].locate.size.constant;
5374 gcc_assert (!argvec[count].locate.size.var);
5375 }
5376 #ifdef BLOCK_REG_PADDING
5377 else
5378 /* The argument is passed entirely in registers. See at which
5379 end it should be padded. */
5380 argvec[count].locate.where_pad =
5381 BLOCK_REG_PADDING (arg.mode, NULL_TREE,
5382 known_le (GET_MODE_SIZE (arg.mode),
5383 UNITS_PER_WORD));
5384 #endif
5385
5386 targetm.calls.function_arg_advance (args_so_far, arg);
5387 }
5388
5389 for (int i = 0; i < nargs; i++)
5390 if (reg_parm_stack_space > 0
5391 || argvec[i].reg == 0
5392 || argvec[i].partial != 0)
5393 update_stack_alignment_for_call (&argvec[i].locate);
5394
5395 /* If this machine requires an external definition for library
5396 functions, write one out. */
5397 assemble_external_libcall (fun);
5398
5399 original_args_size = args_size;
5400 args_size.constant = (aligned_upper_bound (args_size.constant
5401 + stack_pointer_delta,
5402 STACK_BYTES)
5403 - stack_pointer_delta);
5404
5405 args_size.constant = upper_bound (args_size.constant,
5406 reg_parm_stack_space);
5407
5408 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5409 args_size.constant -= reg_parm_stack_space;
5410
5411 crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
5412 args_size.constant);
5413
5414 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
5415 {
5416 poly_int64 pushed = args_size.constant + pending_stack_adjust;
5417 current_function_pushed_stack_size
5418 = upper_bound (current_function_pushed_stack_size, pushed);
5419 }
5420
5421 if (ACCUMULATE_OUTGOING_ARGS)
5422 {
5423 /* Since the stack pointer will never be pushed, it is possible for
5424 the evaluation of a parm to clobber something we have already
5425 written to the stack. Since most function calls on RISC machines
5426 do not use the stack, this is uncommon, but must work correctly.
5427
5428 Therefore, we save any area of the stack that was already written
5429 and that we are using. Here we set up to do this by making a new
5430 stack usage map from the old one.
5431
5432 Another approach might be to try to reorder the argument
5433 evaluations to avoid this conflicting stack usage. */
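
      /* Concretely (added illustration): stack_usage_map is a byte map
	 indexed by offset into the outgoing argument area, where a nonzero
	 byte means that slot already holds live argument data.  Before each
	 store below, stack_region_maybe_used_p is consulted, the old contents
	 are copied to a save area if needed, and mark_stack_region_used then
	 records the newly written bytes.  */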
5434
5435 needed = args_size.constant;
5436
5437 /* Since we will be writing into the entire argument area, the
5438 map must be allocated for its entire size, not just the part that
5439 is the responsibility of the caller. */
5440 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5441 needed += reg_parm_stack_space;
5442
5443 poly_int64 limit = needed;
5444 if (ARGS_GROW_DOWNWARD)
5445 limit += 1;
5446
5447 /* For polynomial sizes, this is the maximum possible size needed
5448 for arguments with a constant size and offset. */
5449 HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5450 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5451 const_limit);
5452
5453 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
5454 stack_usage_map = stack_usage_map_buf;
5455
5456 if (initial_highest_arg_in_use)
5457 memcpy (stack_usage_map, initial_stack_usage_map,
5458 initial_highest_arg_in_use);
5459
5460 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
5461 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
5462 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5463 needed = 0;
5464
5465 /* We must be careful to use virtual regs before they're instantiated,
5466 and real regs afterwards. Loop optimization, for example, can create
5467 new libcalls after we've instantiated the virtual regs, and if we
5468 use virtuals anyway, they won't match the rtl patterns. */
5469
5470 if (virtuals_instantiated)
5471 argblock = plus_constant (Pmode, stack_pointer_rtx,
5472 STACK_POINTER_OFFSET);
5473 else
5474 argblock = virtual_outgoing_args_rtx;
5475 }
5476 else
5477 {
5478 if (!PUSH_ARGS)
5479 argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
5480 }
5481
5482   /* We push args individually in reverse order, performing stack alignment
5483      before the first push (the last arg).  */
5484 if (argblock == 0)
5485 anti_adjust_stack (gen_int_mode (args_size.constant
5486 - original_args_size.constant,
5487 Pmode));
5488
5489 argnum = nargs - 1;
5490
5491 #ifdef REG_PARM_STACK_SPACE
5492 if (ACCUMULATE_OUTGOING_ARGS)
5493 {
5494 /* The argument list is the property of the called routine and it
5495 may clobber it. If the fixed area has been used for previous
5496 parameters, we must save and restore it. */
5497 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5498 &low_to_save, &high_to_save);
5499 }
5500 #endif
5501
5502 /* When expanding a normal call, args are stored in push order,
5503 which is the reverse of what we have here. */
5504 bool any_regs = false;
5505 for (int i = nargs; i-- > 0; )
5506 if (argvec[i].reg != NULL_RTX)
5507 {
5508 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5509 any_regs = true;
5510 }
5511 if (!any_regs)
5512 targetm.calls.call_args (pc_rtx, NULL_TREE);
5513
5514 /* Push the args that need to be pushed. */
5515
5516 have_push_fusage = false;
5517
5518 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5519 are to be pushed. */
5520 for (count = 0; count < nargs; count++, argnum--)
5521 {
5522 machine_mode mode = argvec[argnum].mode;
5523 rtx val = argvec[argnum].value;
5524 rtx reg = argvec[argnum].reg;
5525 int partial = argvec[argnum].partial;
5526 unsigned int parm_align = argvec[argnum].locate.boundary;
5527 poly_int64 lower_bound = 0, upper_bound = 0;
5528
5529 if (! (reg != 0 && partial == 0))
5530 {
5531 rtx use;
5532
5533 if (ACCUMULATE_OUTGOING_ARGS)
5534 {
5535 /* If this is being stored into a pre-allocated, fixed-size,
5536 stack area, save any previous data at that location. */
5537
5538 if (ARGS_GROW_DOWNWARD)
5539 {
5540 /* stack_slot is negative, but we want to index stack_usage_map
5541 with positive values. */
5542 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5543 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5544 }
5545 else
5546 {
5547 lower_bound = argvec[argnum].locate.slot_offset.constant;
5548 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5549 }
5550
5551 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5552 reg_parm_stack_space))
5553 {
5554 /* We need to make a save area. */
5555 poly_uint64 size
5556 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
5557 machine_mode save_mode
5558 = int_mode_for_size (size, 1).else_blk ();
5559 rtx adr
5560 = plus_constant (Pmode, argblock,
5561 argvec[argnum].locate.offset.constant);
5562 rtx stack_area
5563 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
5564
5565 if (save_mode == BLKmode)
5566 {
5567 argvec[argnum].save_area
5568 = assign_stack_temp (BLKmode,
5569 argvec[argnum].locate.size.constant
5570 );
5571
5572 emit_block_move (validize_mem
5573 (copy_rtx (argvec[argnum].save_area)),
5574 stack_area,
5575 (gen_int_mode
5576 (argvec[argnum].locate.size.constant,
5577 Pmode)),
5578 BLOCK_OP_CALL_PARM);
5579 }
5580 else
5581 {
5582 argvec[argnum].save_area = gen_reg_rtx (save_mode);
5583
5584 emit_move_insn (argvec[argnum].save_area, stack_area);
5585 }
5586 }
5587 }
5588
5589 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
5590 partial, reg, 0, argblock,
5591 (gen_int_mode
5592 (argvec[argnum].locate.offset.constant, Pmode)),
5593 reg_parm_stack_space,
5594 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
5595
5596 /* Now mark the segment we just used. */
5597 if (ACCUMULATE_OUTGOING_ARGS)
5598 mark_stack_region_used (lower_bound, upper_bound);
5599
5600 NO_DEFER_POP;
5601
5602 /* Indicate argument access so that alias.c knows that these
5603 values are live. */
5604 if (argblock)
5605 use = plus_constant (Pmode, argblock,
5606 argvec[argnum].locate.offset.constant);
5607 else if (have_push_fusage)
5608 continue;
5609 else
5610 {
5611 /* When arguments are pushed, trying to tell alias.c where
5612 exactly this argument is won't work, because the
5613 auto-increment causes confusion. So we merely indicate
5614 that we access something with a known mode somewhere on
5615 the stack. */
5616 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5617 gen_rtx_SCRATCH (Pmode));
5618 have_push_fusage = true;
5619 }
5620 use = gen_rtx_MEM (argvec[argnum].mode, use);
5621 use = gen_rtx_USE (VOIDmode, use);
5622 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
5623 }
5624 }
5625
5626 argnum = nargs - 1;
5627
5628 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
5629
5630 /* Now load any reg parms into their regs. */
5631
5632 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5633 are to be pushed. */
5634 for (count = 0; count < nargs; count++, argnum--)
5635 {
5636 machine_mode mode = argvec[argnum].mode;
5637 rtx val = argvec[argnum].value;
5638 rtx reg = argvec[argnum].reg;
5639 int partial = argvec[argnum].partial;
5640
5641 /* Handle calls that pass values in multiple non-contiguous
5642 locations. The PA64 has examples of this for library calls. */
5643 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5644 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
5645 else if (reg != 0 && partial == 0)
5646 {
5647 emit_move_insn (reg, val);
5648 #ifdef BLOCK_REG_PADDING
5649 poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
5650
5651 /* Copied from load_register_parameters. */
5652
5653	  /* Handle the case where we have a value that needs shifting
5654	     up to the msb, e.g. a QImode value when we're padding
5655	     upward on a BYTES_BIG_ENDIAN machine.  */
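	  /* For example (illustrative numbers): with UNITS_PER_WORD == 8 and
	     a one-byte QImode value, SHIFT below is
	     (8 - 1) * BITS_PER_UNIT == 56, so the byte ends up in the most
	     significant byte of the word register, where an upward-padding
	     big-endian ABI expects it.  */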
5656 if (known_lt (size, UNITS_PER_WORD)
5657 && (argvec[argnum].locate.where_pad
5658 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5659 {
5660 rtx x;
5661 poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
5662
5663 /* Assigning REG here rather than a temp makes CALL_FUSAGE
5664 report the whole reg as used. Strictly speaking, the
5665 call only uses SIZE bytes at the msb end, but it doesn't
5666 seem worth generating rtl to say that. */
5667 reg = gen_rtx_REG (word_mode, REGNO (reg));
5668 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5669 if (x != reg)
5670 emit_move_insn (reg, x);
5671 }
5672 #endif
5673 }
5674
5675 NO_DEFER_POP;
5676 }
5677
5678 /* Any regs containing parms remain in use through the call. */
5679 for (count = 0; count < nargs; count++)
5680 {
5681 rtx reg = argvec[count].reg;
5682 if (reg != 0 && GET_CODE (reg) == PARALLEL)
5683 use_group_regs (&call_fusage, reg);
5684 else if (reg != 0)
5685 {
5686 int partial = argvec[count].partial;
5687 if (partial)
5688 {
5689 int nregs;
5690 gcc_assert (partial % UNITS_PER_WORD == 0);
5691 nregs = partial / UNITS_PER_WORD;
5692 use_regs (&call_fusage, REGNO (reg), nregs);
5693 }
5694 else
5695 use_reg (&call_fusage, reg);
5696 }
5697 }
5698
5699 /* Pass the function the address in which to return a structure value. */
5700 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
5701 {
5702 emit_move_insn (struct_value,
5703 force_reg (Pmode,
5704 force_operand (XEXP (mem_value, 0),
5705 NULL_RTX)));
5706 if (REG_P (struct_value))
5707 use_reg (&call_fusage, struct_value);
5708 }
5709
5710 /* Don't allow popping to be deferred, since then
5711 cse'ing of library calls could delete a call and leave the pop. */
5712 NO_DEFER_POP;
5713 valreg = (mem_value == 0 && outmode != VOIDmode
5714 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5715
5716 /* Stack must be properly aligned now. */
5717 gcc_assert (multiple_p (stack_pointer_delta,
5718 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
5719
5720 before_call = get_last_insn ();
5721
5722 if (flag_callgraph_info)
5723 record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
5724
5725 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5726 will set inhibit_defer_pop to that value. */
5727 /* The return type is needed to decide how many bytes the function pops.
5728 Signedness plays no role in that, so for simplicity, we pretend it's
5729 always signed. We also assume that the list of arguments passed has
5730 no impact, so we pretend it is unknown. */
5731
5732 emit_call_1 (fun, NULL,
5733 get_identifier (XSTR (orgfun, 0)),
5734 build_function_type (tfom, NULL_TREE),
5735 original_args_size.constant, args_size.constant,
5736 struct_value_size,
5737 targetm.calls.function_arg (args_so_far,
5738 function_arg_info::end_marker ()),
5739 valreg,
5740 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
5741
5742 if (flag_ipa_ra)
5743 {
5744 rtx datum = orgfun;
5745 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
5746 rtx_call_insn *last = last_call_insn ();
5747 add_reg_note (last, REG_CALL_DECL, datum);
5748 }
5749
5750 /* Right-shift returned value if necessary. */
5751 if (!pcc_struct_value
5752 && TYPE_MODE (tfom) != BLKmode
5753 && targetm.calls.return_in_msb (tfom))
5754 {
5755 shift_return_value (TYPE_MODE (tfom), false, valreg);
5756 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5757 }
5758
5759 targetm.calls.end_call_args ();
5760
5761 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5762 that it should complain if nonvolatile values are live. For
5763 functions that cannot return, inform flow that control does not
5764 fall through. */
5765 if (flags & ECF_NORETURN)
5766 {
5767 /* The barrier note must be emitted
5768 immediately after the CALL_INSN. Some ports emit more than
5769 just a CALL_INSN above, so we must search for it here. */
5770 rtx_insn *last = get_last_insn ();
5771 while (!CALL_P (last))
5772 {
5773 last = PREV_INSN (last);
5774 /* There was no CALL_INSN? */
5775 gcc_assert (last != before_call);
5776 }
5777
5778 emit_barrier_after (last);
5779 }
5780
5781 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5782 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
5783 if (flags & ECF_NOTHROW)
5784 {
5785 rtx_insn *last = get_last_insn ();
5786 while (!CALL_P (last))
5787 {
5788 last = PREV_INSN (last);
5789 /* There was no CALL_INSN? */
5790 gcc_assert (last != before_call);
5791 }
5792
5793 make_reg_eh_region_note_nothrow_nononlocal (last);
5794 }
5795
5796 /* Now restore inhibit_defer_pop to its actual original value. */
5797 OK_DEFER_POP;
5798
5799 pop_temp_slots ();
5800
5801 /* Copy the value to the right place. */
5802 if (outmode != VOIDmode && retval)
5803 {
5804 if (mem_value)
5805 {
5806 if (value == 0)
5807 value = mem_value;
5808 if (value != mem_value)
5809 emit_move_insn (value, mem_value);
5810 }
5811 else if (GET_CODE (valreg) == PARALLEL)
5812 {
5813 if (value == 0)
5814 value = gen_reg_rtx (outmode);
5815 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5816 }
5817 else
5818 {
5819 /* Convert to the proper mode if a promotion has been active. */
5820 if (GET_MODE (valreg) != outmode)
5821 {
5822 int unsignedp = TYPE_UNSIGNED (tfom);
5823
5824 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5825 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5826 == GET_MODE (valreg));
5827 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5828 }
5829
5830 if (value != 0)
5831 emit_move_insn (value, valreg);
5832 else
5833 value = valreg;
5834 }
5835 }
5836
5837 if (ACCUMULATE_OUTGOING_ARGS)
5838 {
5839 #ifdef REG_PARM_STACK_SPACE
5840 if (save_area)
5841 restore_fixed_argument_area (save_area, argblock,
5842 high_to_save, low_to_save);
5843 #endif
5844
5845 /* If we saved any argument areas, restore them. */
5846 for (count = 0; count < nargs; count++)
5847 if (argvec[count].save_area)
5848 {
5849 machine_mode save_mode = GET_MODE (argvec[count].save_area);
5850 rtx adr = plus_constant (Pmode, argblock,
5851 argvec[count].locate.offset.constant);
5852 rtx stack_area = gen_rtx_MEM (save_mode,
5853 memory_address (save_mode, adr));
5854
5855 if (save_mode == BLKmode)
5856 emit_block_move (stack_area,
5857 validize_mem
5858 (copy_rtx (argvec[count].save_area)),
5859 (gen_int_mode
5860 (argvec[count].locate.size.constant, Pmode)),
5861 BLOCK_OP_CALL_PARM);
5862 else
5863 emit_move_insn (stack_area, argvec[count].save_area);
5864 }
5865
5866 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5867 stack_usage_map = initial_stack_usage_map;
5868 stack_usage_watermark = initial_stack_usage_watermark;
5869 }
5870
5871 free (stack_usage_map_buf);
5872
5873 return value;
5874
5875 }
5876 \f
5877
5878 /* Store a single argument for a function call
5879 into the register or memory area where it must be passed.
5880 *ARG describes the argument value and where to pass it.
5881
5882 ARGBLOCK is the address of the stack-block for all the arguments,
5883 or 0 on a machine where arguments are pushed individually.
5884
5885 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
5886 so must be careful about how the stack is used.
5887
5888    VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5889    argument stack.  When ACCUMULATE_OUTGOING_ARGS, this indicates that we
5890    need not worry about saving and restoring the stack.
5891
5892 FNDECL is the declaration of the function we are calling.
5893
5894 Return nonzero if this arg should cause sibcall failure,
5895 zero otherwise. */
5896
5897 static int
5898 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5899 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5900 {
5901 tree pval = arg->tree_value;
5902 rtx reg = 0;
5903 int partial = 0;
5904 poly_int64 used = 0;
5905 poly_int64 lower_bound = 0, upper_bound = 0;
5906 int sibcall_failure = 0;
5907
5908 if (TREE_CODE (pval) == ERROR_MARK)
5909 return 1;
5910
5911 /* Push a new temporary level for any temporaries we make for
5912 this argument. */
5913 push_temp_slots ();
5914
5915 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
5916 {
5917 /* If this is being stored into a pre-allocated, fixed-size, stack area,
5918 save any previous data at that location. */
5919 if (argblock && ! variable_size && arg->stack)
5920 {
5921 if (ARGS_GROW_DOWNWARD)
5922 {
5923 /* stack_slot is negative, but we want to index stack_usage_map
5924 with positive values. */
5925 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5926 {
5927 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5928 upper_bound = -rtx_to_poly_int64 (offset) + 1;
5929 }
5930 else
5931 upper_bound = 0;
5932
5933 lower_bound = upper_bound - arg->locate.size.constant;
5934 }
5935 else
5936 {
5937 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5938 {
5939 rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5940 lower_bound = rtx_to_poly_int64 (offset);
5941 }
5942 else
5943 lower_bound = 0;
5944
5945 upper_bound = lower_bound + arg->locate.size.constant;
5946 }
5947
5948 if (stack_region_maybe_used_p (lower_bound, upper_bound,
5949 reg_parm_stack_space))
5950 {
5951 /* We need to make a save area. */
5952 poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
5953 machine_mode save_mode
5954 = int_mode_for_size (size, 1).else_blk ();
5955 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5956 rtx stack_area = gen_rtx_MEM (save_mode, adr);
5957
5958 if (save_mode == BLKmode)
5959 {
5960 arg->save_area
5961 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5962 preserve_temp_slots (arg->save_area);
5963 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5964 stack_area,
5965 (gen_int_mode
5966 (arg->locate.size.constant, Pmode)),
5967 BLOCK_OP_CALL_PARM);
5968 }
5969 else
5970 {
5971 arg->save_area = gen_reg_rtx (save_mode);
5972 emit_move_insn (arg->save_area, stack_area);
5973 }
5974 }
5975 }
5976 }
5977
5978 /* If this isn't going to be placed on both the stack and in registers,
5979 set up the register and number of words. */
5980 if (! arg->pass_on_stack)
5981 {
5982 if (flags & ECF_SIBCALL)
5983 reg = arg->tail_call_reg;
5984 else
5985 reg = arg->reg;
5986 partial = arg->partial;
5987 }
5988
5989 /* Being passed entirely in a register. We shouldn't be called in
5990 this case. */
5991 gcc_assert (reg == 0 || partial != 0);
5992
5993 /* If this arg needs special alignment, don't load the registers
5994 here. */
5995 if (arg->n_aligned_regs != 0)
5996 reg = 0;
5997
5998 /* If this is being passed partially in a register, we can't evaluate
5999 it directly into its stack slot. Otherwise, we can. */
6000 if (arg->value == 0)
6001 {
6002 /* stack_arg_under_construction is nonzero if a function argument is
6003 being evaluated directly into the outgoing argument list and
6004 expand_call must take special action to preserve the argument list
6005 if it is called recursively.
6006
6007 For scalar function arguments stack_usage_map is sufficient to
6008 determine which stack slots must be saved and restored. Scalar
6009 arguments in general have pass_on_stack == 0.
6010
6011 If this argument is initialized by a function which takes the
6012 address of the argument (a C++ constructor or a C function
6013 returning a BLKmode structure), then stack_usage_map is
6014 insufficient and expand_call must push the stack around the
6015 function call. Such arguments have pass_on_stack == 1.
6016
6017 Note that it is always safe to set stack_arg_under_construction,
6018 but this generates suboptimal code if set when not needed. */
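
      /* As a hypothetical illustration of the second case: for a call such
	 as use (make_s ()), where make_s returns a BLKmode structure, the
	 value of make_s () may be expanded directly into use's outgoing
	 argument slot, so expand_call must protect that slot around nested
	 calls instead of relying on stack_usage_map alone.  */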
6019
6020 if (arg->pass_on_stack)
6021 stack_arg_under_construction++;
6022
6023 arg->value = expand_expr (pval,
6024 (partial
6025 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
6026 ? NULL_RTX : arg->stack,
6027 VOIDmode, EXPAND_STACK_PARM);
6028
6029       /* If we are promoting the object (or the mode fails to agree for any
6030	  other reason), convert the value to the argument's mode.  */
6031
6032 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
6033 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
6034 arg->value, arg->unsignedp);
6035
6036 if (arg->pass_on_stack)
6037 stack_arg_under_construction--;
6038 }
6039
6040 /* Check for overlap with already clobbered argument area. */
6041 if ((flags & ECF_SIBCALL)
6042 && MEM_P (arg->value)
6043 && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
6044 arg->locate.size.constant))
6045 sibcall_failure = 1;
6046
6047 /* Don't allow anything left on stack from computation
6048 of argument to alloca. */
6049 if (flags & ECF_MAY_BE_ALLOCA)
6050 do_pending_stack_adjust ();
6051
6052 if (arg->value == arg->stack)
6053 /* If the value is already in the stack slot, we are done. */
6054 ;
6055 else if (arg->mode != BLKmode)
6056 {
6057 unsigned int parm_align;
6058
6059 /* Argument is a scalar, not entirely passed in registers.
6060 (If part is passed in registers, arg->partial says how much
6061 and emit_push_insn will take care of putting it there.)
6062
6063 Push it, and if its size is less than the
6064 amount of space allocated to it,
6065 also bump stack pointer by the additional space.
6066 Note that in C the default argument promotions
6067 will prevent such mismatches. */
6068
6069 poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
6070 ? 0 : GET_MODE_SIZE (arg->mode));
6071
6072 /* Compute how much space the push instruction will push.
6073 On many machines, pushing a byte will advance the stack
6074 pointer by a halfword. */
6075 #ifdef PUSH_ROUNDING
6076 size = PUSH_ROUNDING (size);
6077 #endif
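
      /* E.g. (illustrative): on a target whose PUSH_ROUNDING rounds to
	 2-byte units, pushing a 1-byte argument advances the stack pointer
	 by 2 bytes, so SIZE becomes 2 here.  */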
6078 used = size;
6079
6080 /* Compute how much space the argument should get:
6081 round up to a multiple of the alignment for arguments. */
6082 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6083 != PAD_NONE)
6084 /* At the moment we don't (need to) support ABIs for which the
6085 padding isn't known at compile time. In principle it should
6086 be easy to add though. */
6087 used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
6088
6089 /* Compute the alignment of the pushed argument. */
6090 parm_align = arg->locate.boundary;
6091 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6092 == PAD_DOWNWARD)
6093 {
6094 poly_int64 pad = used - size;
6095 unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
6096 if (pad_align != 0)
6097 parm_align = MIN (parm_align, pad_align);
6098 }
6099
6100 /* This isn't already where we want it on the stack, so put it there.
6101 This can either be done with push or copy insns. */
6102 if (maybe_ne (used, 0)
6103 && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
6104 NULL_RTX, parm_align, partial, reg, used - size,
6105 argblock, ARGS_SIZE_RTX (arg->locate.offset),
6106 reg_parm_stack_space,
6107 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
6108 sibcall_failure = 1;
6109
6110 /* Unless this is a partially-in-register argument, the argument is now
6111 in the stack. */
6112 if (partial == 0)
6113 arg->value = arg->stack;
6114 }
6115 else
6116 {
6117 /* BLKmode, at least partly to be pushed. */
6118
6119 unsigned int parm_align;
6120 poly_int64 excess;
6121 rtx size_rtx;
6122
6123 /* Pushing a nonscalar.
6124 If part is passed in registers, PARTIAL says how much
6125 and emit_push_insn will take care of putting it there. */
6126
6127 /* Round its size up to a multiple
6128 of the allocation unit for arguments. */
6129
6130 if (arg->locate.size.var != 0)
6131 {
6132 excess = 0;
6133 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
6134 }
6135 else
6136 {
6137 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
6138 for BLKmode is careful to avoid it. */
6139 excess = (arg->locate.size.constant
6140 - arg_int_size_in_bytes (TREE_TYPE (pval))
6141 + partial);
6142 size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
6143 NULL_RTX, TYPE_MODE (sizetype),
6144 EXPAND_NORMAL);
6145 }
6146
6147 parm_align = arg->locate.boundary;
6148
6149 /* When an argument is padded down, the block is aligned to
6150 PARM_BOUNDARY, but the actual argument isn't. */
6151 if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
6152 == PAD_DOWNWARD)
6153 {
6154 if (arg->locate.size.var)
6155 parm_align = BITS_PER_UNIT;
6156 else
6157 {
6158 unsigned int excess_align
6159 = known_alignment (excess) * BITS_PER_UNIT;
6160 if (excess_align != 0)
6161 parm_align = MIN (parm_align, excess_align);
6162 }
6163 }
6164
6165 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
6166 {
6167 /* emit_push_insn might not work properly if arg->value and
6168 argblock + arg->locate.offset areas overlap. */
6169 rtx x = arg->value;
6170 poly_int64 i = 0;
6171
6172 if (strip_offset (XEXP (x, 0), &i)
6173 == crtl->args.internal_arg_pointer)
6174 {
6175 /* arg.locate doesn't contain the pretend_args_size offset,
6176 it's part of argblock. Ensure we don't count it in I. */
6177 if (STACK_GROWS_DOWNWARD)
6178 i -= crtl->args.pretend_args_size;
6179 else
6180 i += crtl->args.pretend_args_size;
6181
6182 /* expand_call should ensure this. */
6183 gcc_assert (!arg->locate.offset.var
6184 && arg->locate.size.var == 0);
6185 poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
6186
6187 if (known_eq (arg->locate.offset.constant, i))
6188 {
6189 /* Even though they appear to be at the same location,
6190 if part of the outgoing argument is in registers,
6191 they aren't really at the same location. Check for
6192 this by making sure that the incoming size is the
6193 same as the outgoing size. */
6194 if (maybe_ne (arg->locate.size.constant, size_val))
6195 sibcall_failure = 1;
6196 }
6197 else if (maybe_in_range_p (arg->locate.offset.constant,
6198 i, size_val))
6199 sibcall_failure = 1;
6200 /* Use arg->locate.size.constant instead of size_rtx
6201 because we only care about the part of the argument
6202 on the stack. */
6203 else if (maybe_in_range_p (i, arg->locate.offset.constant,
6204 arg->locate.size.constant))
6205 sibcall_failure = 1;
6206 }
6207 }
6208
6209 if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
6210 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
6211 parm_align, partial, reg, excess, argblock,
6212 ARGS_SIZE_RTX (arg->locate.offset),
6213 reg_parm_stack_space,
6214 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
6215
6216 /* Unless this is a partially-in-register argument, the argument is now
6217 in the stack.
6218
6219 ??? Unlike the case above, in which we want the actual
6220 address of the data, so that we can load it directly into a
6221 register, here we want the address of the stack slot, so that
6222 it's properly aligned for word-by-word copying or something
6223 like that. It's not clear that this is always correct. */
6224 if (partial == 0)
6225 arg->value = arg->stack_slot;
6226 }
6227
6228 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
6229 {
6230 tree type = TREE_TYPE (arg->tree_value);
6231 arg->parallel_value
6232 = emit_group_load_into_temps (arg->reg, arg->value, type,
6233 int_size_in_bytes (type));
6234 }
6235
6236 /* Mark all slots this store used. */
6237 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
6238 && argblock && ! variable_size && arg->stack)
6239 mark_stack_region_used (lower_bound, upper_bound);
6240
6241 /* Once we have pushed something, pops can't safely
6242 be deferred during the rest of the arguments. */
6243 NO_DEFER_POP;
6244
6245 /* Free any temporary slots made in processing this argument. */
6246 pop_temp_slots ();
6247
6248 return sibcall_failure;
6249 }
6250
6251 /* Nonzero if we do not know how to pass ARG solely in registers. */
6252
6253 bool
6254 must_pass_in_stack_var_size (const function_arg_info &arg)
6255 {
6256 if (!arg.type)
6257 return false;
6258
6259 /* If the type has variable size... */
6260 if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
6261 return true;
6262
6263 /* If the type is marked as addressable (it is required
6264 to be constructed into the stack)... */
6265 if (TREE_ADDRESSABLE (arg.type))
6266 return true;
6267
6268 return false;
6269 }
6270
6271 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
6272 takes trailing padding of a structure into account. */
6273 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
6274
6275 bool
6276 must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
6277 {
6278 if (!arg.type)
6279 return false;
6280
6281 /* If the type has variable size... */
6282 if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
6283 return true;
6284
6285 /* If the type is marked as addressable (it is required
6286 to be constructed into the stack)... */
6287 if (TREE_ADDRESSABLE (arg.type))
6288 return true;
6289
6290 if (TYPE_EMPTY_P (arg.type))
6291 return false;
6292
6293   /* If the padding and mode of the type are such that a copy into
6294      a register would put it into the wrong part of the register.  */
6295 if (arg.mode == BLKmode
6296 && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
6297 && (targetm.calls.function_arg_padding (arg.mode, arg.type)
6298 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
6299 return true;
6300
6301 return false;
6302 }
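
/* For instance (illustrative only): on a big-endian target whose ABI pads
   arguments upward, a 6-byte BLKmode structure with a 4-byte PARM_BOUNDARY
   leaves a 2-byte remainder, so copying it into registers would put the data
   in the wrong part of the register; the function above therefore says to
   pass it on the stack.  */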
6303
6304 /* Return true if TYPE must be passed on the stack when passed to
6305 the "..." arguments of a function. */
6306
6307 bool
6308 must_pass_va_arg_in_stack (tree type)
6309 {
6310 function_arg_info arg (type, /*named=*/false);
6311 return targetm.calls.must_pass_in_stack (arg);
6312 }
6313
6314 /* Return true if FIELD is the C++17 empty base field that should
6315 be ignored for ABI calling convention decisions in order to
6316 maintain ABI compatibility between C++14 and earlier, which doesn't
6317 add this FIELD to classes with empty bases, and C++17 and later
6318 which does. */
6319
6320 bool
6321 cxx17_empty_base_field_p (const_tree field)
6322 {
6323 return (DECL_FIELD_ABI_IGNORED (field)
6324 && DECL_ARTIFICIAL (field)
6325 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
6326 && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
6327 }
6328
6329 /* Tell the garbage collector about GTY markers in this source file. */
6330 #include "gt-calls.h"