Move MEMMODEL_* from coretypes.h to memmodel.h
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "stringpool.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "emit-rtl.h"
35 #include "cgraph.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "varasm.h"
40 #include "internal-fn.h"
41 #include "dojump.h"
42 #include "explow.h"
43 #include "calls.h"
44 #include "expr.h"
45 #include "output.h"
46 #include "langhooks.h"
47 #include "except.h"
48 #include "dbgcnt.h"
49 #include "rtl-iter.h"
50 #include "tree-chkp.h"
51 #include "rtl-chkp.h"
52
53
54 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
55 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
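/* For example, a preferred stack boundary of 128 bits corresponds to
   STACK_BYTES == 16 on a target with 8-bit units.  */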
56
57 /* Data structure and subroutines used within expand_call. */
58
59 struct arg_data
60 {
61 /* Tree node for this argument. */
62 tree tree_value;
63 /* Mode for value; TYPE_MODE unless promoted. */
64 machine_mode mode;
65 /* Current RTL value for argument, or 0 if it isn't precomputed. */
66 rtx value;
 67   /* Initially-computed RTL value for argument; only for const functions. */
68 rtx initial_value;
 69   /* Register to pass this argument in, 0 if passed on stack, or a
70 PARALLEL if the arg is to be copied into multiple non-contiguous
71 registers. */
72 rtx reg;
73 /* Register to pass this argument in when generating tail call sequence.
74 This is not the same register as for normal calls on machines with
75 register windows. */
76 rtx tail_call_reg;
77 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
78 form for emit_group_move. */
79 rtx parallel_value;
 80   /* If value is passed in neither reg nor stack, this field holds the number
81 of a special slot to be used. */
82 rtx special_slot;
 83   /* For pointer bounds, holds the index of the parm that the bounds are
 84      bound to.  -1 if there is no such pointer. */
85 int pointer_arg;
 86   /* If pointer_arg refers to a structure, then pointer_offset holds the offset
87 of a pointer in this structure. */
88 int pointer_offset;
89 /* If REG was promoted from the actual mode of the argument expression,
90 indicates whether the promotion is sign- or zero-extended. */
91 int unsignedp;
92 /* Number of bytes to put in registers. 0 means put the whole arg
93 in registers. Also 0 if not passed in registers. */
94 int partial;
95 /* Nonzero if argument must be passed on stack.
96 Note that some arguments may be passed on the stack
97 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
98 pass_on_stack identifies arguments that *cannot* go in registers. */
99 int pass_on_stack;
100 /* Some fields packaged up for locate_and_pad_parm. */
101 struct locate_and_pad_arg_data locate;
102 /* Location on the stack at which parameter should be stored. The store
103 has already been done if STACK == VALUE. */
104 rtx stack;
105 /* Location on the stack of the start of this argument slot. This can
106 differ from STACK if this arg pads downward. This location is known
107 to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
108 rtx stack_slot;
109 /* Place that this stack area has been saved, if needed. */
110 rtx save_area;
111 /* If an argument's alignment does not permit direct copying into registers,
112 copy in smaller-sized pieces into pseudos. These are stored in a
113 block pointed to by this field. The next field says how many
114 word-sized pseudos we made. */
115 rtx *aligned_regs;
116 int n_aligned_regs;
117 };
118
 119 /* A vector of one char per byte of stack space.  A byte is nonzero if
120 the corresponding stack location has been used.
121 This vector is used to prevent a function call within an argument from
122 clobbering any stack already set up. */
123 static char *stack_usage_map;
124
125 /* Size of STACK_USAGE_MAP. */
126 static int highest_outgoing_arg_in_use;
127
128 /* A bitmap of virtual-incoming stack space. Bit is set if the corresponding
 129    stack location's tail call argument has already been stored into the stack.
 130    This bitmap is used to prevent sibling call optimization if the function
 131    tries to use the parent's incoming argument slots when they have already
 132    been overwritten with tail call arguments.
133 static sbitmap stored_args_map;
134
135 /* stack_arg_under_construction is nonzero when an argument may be
136 initialized with a constructor call (including a C function that
137 returns a BLKmode struct) and expand_call must take special action
138 to make sure the object being constructed does not overlap the
139 argument list for the constructor call. */
140 static int stack_arg_under_construction;
141
142 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
143 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
144 cumulative_args_t);
145 static void precompute_register_parameters (int, struct arg_data *, int *);
146 static void store_bounds (struct arg_data *, struct arg_data *);
147 static int store_one_arg (struct arg_data *, rtx, int, int, int);
148 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
149 static int finalize_must_preallocate (int, int, struct arg_data *,
150 struct args_size *);
151 static void precompute_arguments (int, struct arg_data *);
152 static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
153 static void initialize_argument_information (int, struct arg_data *,
154 struct args_size *, int,
155 tree, tree,
156 tree, tree, cumulative_args_t, int,
157 rtx *, int *, int *, int *,
158 bool *, bool);
159 static void compute_argument_addresses (struct arg_data *, rtx, int);
160 static rtx rtx_for_function_call (tree, tree);
161 static void load_register_parameters (struct arg_data *, int, rtx *, int,
162 int, int *);
163 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
164 machine_mode, int, va_list);
165 static int special_function_p (const_tree, int);
166 static int check_sibcall_argument_overlap_1 (rtx);
167 static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
168
169 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
170 unsigned int);
171 static tree split_complex_types (tree);
172
173 #ifdef REG_PARM_STACK_SPACE
174 static rtx save_fixed_argument_area (int, rtx, int *, int *);
175 static void restore_fixed_argument_area (rtx, rtx, int, int);
176 #endif
177 \f
178 /* Force FUNEXP into a form suitable for the address of a CALL,
179 and return that as an rtx. Also load the static chain register
180 if FNDECL is a nested function.
181
182 CALL_FUSAGE points to a variable holding the prospective
183 CALL_INSN_FUNCTION_USAGE information. */
184
185 rtx
186 prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
187 rtx *call_fusage, int reg_parm_seen, int sibcallp)
188 {
189 /* Make a valid memory address and copy constants through pseudo-regs,
190 but not for a constant address if -fno-function-cse. */
191 if (GET_CODE (funexp) != SYMBOL_REF)
192 /* If we are using registers for parameters, force the
193 function address into a register now. */
194 funexp = ((reg_parm_seen
195 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
196 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
197 : memory_address (FUNCTION_MODE, funexp));
198 else
199 {
 200       /* funexp could be a SYMBOL_REF that represents a function pointer which is
 201 	 of ptr_mode.  In this case, it should be converted into address mode
 202 	 to be a valid address for a memory rtx pattern.  See PR 64971. */
203 if (GET_MODE (funexp) != Pmode)
204 funexp = convert_memory_address (Pmode, funexp);
205
206 if (! sibcallp)
207 {
208 if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
209 funexp = force_reg (Pmode, funexp);
210 }
211 }
212
213 if (static_chain_value != 0
214 && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
215 || DECL_STATIC_CHAIN (fndecl_or_type)))
216 {
217 rtx chain;
218
219 chain = targetm.calls.static_chain (fndecl_or_type, false);
220 static_chain_value = convert_memory_address (Pmode, static_chain_value);
221
222 emit_move_insn (chain, static_chain_value);
223 if (REG_P (chain))
224 use_reg (call_fusage, chain);
225 }
226
227 return funexp;
228 }
229
230 /* Generate instructions to call function FUNEXP,
231 and optionally pop the results.
232 The CALL_INSN is the first insn generated.
233
234 FNDECL is the declaration node of the function. This is given to the
235 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
236 its own args.
237
238 FUNTYPE is the data type of the function. This is given to the hook
239 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
240 own args. We used to allow an identifier for library functions, but
241 that doesn't work when the return type is an aggregate type and the
242 calling convention says that the pointer to this aggregate is to be
243 popped by the callee.
244
245 STACK_SIZE is the number of bytes of arguments on the stack,
246 ROUNDED_STACK_SIZE is that number rounded up to
247 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
248 both to put into the call insn and to generate explicit popping
249 code if necessary.
250
251 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
252 It is zero if this call doesn't want a structure value.
253
254 NEXT_ARG_REG is the rtx that results from executing
255 targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
256 just after all the args have had their registers assigned.
257 This could be whatever you like, but normally it is the first
258 arg-register beyond those used for args in this call,
259 or 0 if all the arg-registers are used in this call.
260 It is passed on to `gen_call' so you can put this info in the call insn.
261
262 VALREG is a hard register in which a value is returned,
263 or 0 if the call does not return a value.
264
265 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
266 the args to this call were processed.
267 We restore `inhibit_defer_pop' to that value.
268
269 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
270 denote registers used by the called function. */
271
272 static void
273 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
274 tree funtype ATTRIBUTE_UNUSED,
275 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
276 HOST_WIDE_INT rounded_stack_size,
277 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
278 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
279 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
280 cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
281 {
282 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
283 rtx call, funmem, pat;
284 int already_popped = 0;
285 HOST_WIDE_INT n_popped = 0;
286
287 /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
288 patterns exist). Any popping that the callee does on return will
289 be from our caller's frame rather than ours. */
290 if (!(ecf_flags & ECF_SIBCALL))
291 {
292 n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);
293
294 #ifdef CALL_POPS_ARGS
295 n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
296 #endif
297 }
298
 299   /* Ensure the address is valid.  A SYMBOL_REF is already valid, so nothing
 300      needs to be done, and we don't want to load it into a register as an
 301      optimization, because prepare_call_address already did it if needed. */
302 if (GET_CODE (funexp) != SYMBOL_REF)
303 funexp = memory_address (FUNCTION_MODE, funexp);
304
305 funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
306 if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
307 {
308 tree t = fndecl;
309
 310       /* Although a built-in FUNCTION_DECL and its non-__builtin
 311 	 counterpart compare equal and get a shared mem_attrs, they
 312 	 produce different dump output in compare-debug compilations
 313 	 if an entry gets garbage collected in one compilation and a
 314 	 different (but equivalent) entry is then added, while the other
 315 	 compilation doesn't run the garbage collector at the same spot
 316 	 and then shares the mem_attr with the equivalent entry. */
317 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
318 {
319 tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
320 if (t2)
321 t = t2;
322 }
323
324 set_mem_expr (funmem, t);
325 }
326 else if (fntree)
327 set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
328
329 if (ecf_flags & ECF_SIBCALL)
330 {
331 if (valreg)
332 pat = targetm.gen_sibcall_value (valreg, funmem,
333 rounded_stack_size_rtx,
334 next_arg_reg, NULL_RTX);
335 else
336 pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
337 next_arg_reg, GEN_INT (struct_value_size));
338 }
339 /* If the target has "call" or "call_value" insns, then prefer them
340 if no arguments are actually popped. If the target does not have
341 "call" or "call_value" insns, then we must use the popping versions
342 even if the call has no arguments to pop. */
343 else if (n_popped > 0
344 || !(valreg
345 ? targetm.have_call_value ()
346 : targetm.have_call ()))
347 {
348 rtx n_pop = GEN_INT (n_popped);
349
350 /* If this subroutine pops its own args, record that in the call insn
351 if possible, for the sake of frame pointer elimination. */
352
353 if (valreg)
354 pat = targetm.gen_call_value_pop (valreg, funmem,
355 rounded_stack_size_rtx,
356 next_arg_reg, n_pop);
357 else
358 pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
359 next_arg_reg, n_pop);
360
361 already_popped = 1;
362 }
363 else
364 {
365 if (valreg)
366 pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
367 next_arg_reg, NULL_RTX);
368 else
369 pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
370 GEN_INT (struct_value_size));
371 }
372 emit_insn (pat);
373
374 /* Find the call we just emitted. */
375 rtx_call_insn *call_insn = last_call_insn ();
376
 377   /* Some targets create a fresh MEM instead of reusing the one provided
378 above. Set its MEM_EXPR. */
379 call = get_call_rtx_from (call_insn);
380 if (call
381 && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
382 && MEM_EXPR (funmem) != NULL_TREE)
383 set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
384
385 /* Mark instrumented calls. */
386 if (call && fntree)
387 CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);
388
389 /* Put the register usage information there. */
390 add_function_usage_to (call_insn, call_fusage);
391
392 /* If this is a const call, then set the insn's unchanging bit. */
393 if (ecf_flags & ECF_CONST)
394 RTL_CONST_CALL_P (call_insn) = 1;
395
396 /* If this is a pure call, then set the insn's unchanging bit. */
397 if (ecf_flags & ECF_PURE)
398 RTL_PURE_CALL_P (call_insn) = 1;
399
 400   /* If this is a looping const or pure call, then set the insn's corresponding bit. */
401 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
402 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
403
404 /* Create a nothrow REG_EH_REGION note, if needed. */
405 make_reg_eh_region_note (call_insn, ecf_flags, 0);
406
407 if (ecf_flags & ECF_NORETURN)
408 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
409
410 if (ecf_flags & ECF_RETURNS_TWICE)
411 {
412 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
413 cfun->calls_setjmp = 1;
414 }
415
416 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
417
418 /* Restore this now, so that we do defer pops for this call's args
419 if the context of the call as a whole permits. */
420 inhibit_defer_pop = old_inhibit_defer_pop;
421
422 if (n_popped > 0)
423 {
424 if (!already_popped)
425 CALL_INSN_FUNCTION_USAGE (call_insn)
426 = gen_rtx_EXPR_LIST (VOIDmode,
427 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
428 CALL_INSN_FUNCTION_USAGE (call_insn));
429 rounded_stack_size -= n_popped;
430 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
431 stack_pointer_delta -= n_popped;
432
433 add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
434
 435       /* If popping is needed, stack realignment must use DRAP.  */
436 if (SUPPORTS_STACK_ALIGNMENT)
437 crtl->need_drap = true;
438 }
 439   /* For noreturn calls when not accumulating outgoing args, force a
 440      REG_ARGS_SIZE note to prevent crossjumping of calls with different
 441      arg sizes.  */
442 else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
443 add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
444
445 if (!ACCUMULATE_OUTGOING_ARGS)
446 {
447 /* If returning from the subroutine does not automatically pop the args,
448 we need an instruction to pop them sooner or later.
449 Perhaps do it now; perhaps just record how much space to pop later.
450
451 If returning from the subroutine does pop the args, indicate that the
452 stack pointer will be changed. */
453
454 if (rounded_stack_size != 0)
455 {
456 if (ecf_flags & ECF_NORETURN)
457 /* Just pretend we did the pop. */
458 stack_pointer_delta -= rounded_stack_size;
459 else if (flag_defer_pop && inhibit_defer_pop == 0
460 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
461 pending_stack_adjust += rounded_stack_size;
462 else
463 adjust_stack (rounded_stack_size_rtx);
464 }
465 }
466 /* When we accumulate outgoing args, we must avoid any stack manipulations.
467 Restore the stack pointer to its original value now. Usually
468 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
469 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
470 popping variants of functions exist as well.
471
 472      ??? We may optimize similarly to defer_pop above, but it is
473 probably not worthwhile.
474
475 ??? It will be worthwhile to enable combine_stack_adjustments even for
476 such machines. */
477 else if (n_popped)
478 anti_adjust_stack (GEN_INT (n_popped));
479 }
480
481 /* Determine if the function identified by FNDECL is one with
482 special properties we wish to know about. Modify FLAGS accordingly.
483
484 For example, if the function might return more than one time (setjmp), then
485 set ECF_RETURNS_TWICE.
486
487 Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
488 space from the stack such as alloca. */
489
490 static int
491 special_function_p (const_tree fndecl, int flags)
492 {
493 tree name_decl = DECL_NAME (fndecl);
494
495 /* For instrumentation clones we want to derive flags
496 from the original name. */
497 if (cgraph_node::get (fndecl)
498 && cgraph_node::get (fndecl)->instrumentation_clone)
499 name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);
500
501 if (fndecl && name_decl
502 && IDENTIFIER_LENGTH (name_decl) <= 11
503 /* Exclude functions not at the file scope, or not `extern',
504 since they are not the magic functions we would otherwise
505 think they are.
506 FIXME: this should be handled with attributes, not with this
507 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
508 because you can declare fork() inside a function if you
509 wish. */
510 && (DECL_CONTEXT (fndecl) == NULL_TREE
511 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
512 && TREE_PUBLIC (fndecl))
513 {
514 const char *name = IDENTIFIER_POINTER (name_decl);
515 const char *tname = name;
516
517 /* We assume that alloca will always be called by name. It
518 makes no sense to pass it as a pointer-to-function to
519 anything that does not understand its behavior. */
520 if (IDENTIFIER_LENGTH (name_decl) == 6
521 && name[0] == 'a'
522 && ! strcmp (name, "alloca"))
523 flags |= ECF_MAY_BE_ALLOCA;
524
525 /* Disregard prefix _ or __. */
526 if (name[0] == '_')
527 {
528 if (name[1] == '_')
529 tname += 2;
530 else
531 tname += 1;
532 }
533
534 /* ECF_RETURNS_TWICE is safe even for -ffreestanding. */
535 if (! strcmp (tname, "setjmp")
536 || ! strcmp (tname, "sigsetjmp")
537 || ! strcmp (name, "savectx")
538 || ! strcmp (name, "vfork")
539 || ! strcmp (name, "getcontext"))
540 flags |= ECF_RETURNS_TWICE;
541 }
542
543 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
544 switch (DECL_FUNCTION_CODE (fndecl))
545 {
546 case BUILT_IN_ALLOCA:
547 case BUILT_IN_ALLOCA_WITH_ALIGN:
548 flags |= ECF_MAY_BE_ALLOCA;
549 break;
550 default:
551 break;
552 }
553
554 return flags;
555 }
556
557 /* Similar to special_function_p; return a set of ERF_ flags for the
558 function FNDECL. */
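/* Only the first character of the "fn spec" attribute string is examined
   here: '1'..'4' mean the function returns the corresponding argument
   (ERF_RETURNS_ARG), 'm' means the result does not alias anything
   (ERF_NOALIAS), and anything else yields no flags.  */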
559 static int
560 decl_return_flags (tree fndecl)
561 {
562 tree attr;
563 tree type = TREE_TYPE (fndecl);
564 if (!type)
565 return 0;
566
567 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
568 if (!attr)
569 return 0;
570
571 attr = TREE_VALUE (TREE_VALUE (attr));
572 if (!attr || TREE_STRING_LENGTH (attr) < 1)
573 return 0;
574
575 switch (TREE_STRING_POINTER (attr)[0])
576 {
577 case '1':
578 case '2':
579 case '3':
580 case '4':
581 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
582
583 case 'm':
584 return ERF_NOALIAS;
585
586 case '.':
587 default:
588 return 0;
589 }
590 }
591
592 /* Return nonzero when FNDECL represents a call to setjmp. */
593
594 int
595 setjmp_call_p (const_tree fndecl)
596 {
597 if (DECL_IS_RETURNS_TWICE (fndecl))
598 return ECF_RETURNS_TWICE;
599 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
600 }
601
602
603 /* Return true if STMT may be an alloca call. */
604
605 bool
606 gimple_maybe_alloca_call_p (const gimple *stmt)
607 {
608 tree fndecl;
609
610 if (!is_gimple_call (stmt))
611 return false;
612
613 fndecl = gimple_call_fndecl (stmt);
614 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
615 return true;
616
617 return false;
618 }
619
620 /* Return true if STMT is a builtin alloca call. */
621
622 bool
623 gimple_alloca_call_p (const gimple *stmt)
624 {
625 tree fndecl;
626
627 if (!is_gimple_call (stmt))
628 return false;
629
630 fndecl = gimple_call_fndecl (stmt);
631 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
632 switch (DECL_FUNCTION_CODE (fndecl))
633 {
634 case BUILT_IN_ALLOCA:
635 case BUILT_IN_ALLOCA_WITH_ALIGN:
636 return true;
637 default:
638 break;
639 }
640
641 return false;
642 }
643
 644 /* Return true when EXP is a builtin alloca call. */
645
646 bool
647 alloca_call_p (const_tree exp)
648 {
649 tree fndecl;
650 if (TREE_CODE (exp) == CALL_EXPR
651 && (fndecl = get_callee_fndecl (exp))
652 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
653 switch (DECL_FUNCTION_CODE (fndecl))
654 {
655 case BUILT_IN_ALLOCA:
656 case BUILT_IN_ALLOCA_WITH_ALIGN:
657 return true;
658 default:
659 break;
660 }
661
662 return false;
663 }
664
665 /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
666 function. Return FALSE otherwise. */
667
668 static bool
669 is_tm_builtin (const_tree fndecl)
670 {
671 if (fndecl == NULL)
672 return false;
673
674 if (decl_is_tm_clone (fndecl))
675 return true;
676
677 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
678 {
679 switch (DECL_FUNCTION_CODE (fndecl))
680 {
681 case BUILT_IN_TM_COMMIT:
682 case BUILT_IN_TM_COMMIT_EH:
683 case BUILT_IN_TM_ABORT:
684 case BUILT_IN_TM_IRREVOCABLE:
685 case BUILT_IN_TM_GETTMCLONE_IRR:
686 case BUILT_IN_TM_MEMCPY:
687 case BUILT_IN_TM_MEMMOVE:
688 case BUILT_IN_TM_MEMSET:
689 CASE_BUILT_IN_TM_STORE (1):
690 CASE_BUILT_IN_TM_STORE (2):
691 CASE_BUILT_IN_TM_STORE (4):
692 CASE_BUILT_IN_TM_STORE (8):
693 CASE_BUILT_IN_TM_STORE (FLOAT):
694 CASE_BUILT_IN_TM_STORE (DOUBLE):
695 CASE_BUILT_IN_TM_STORE (LDOUBLE):
696 CASE_BUILT_IN_TM_STORE (M64):
697 CASE_BUILT_IN_TM_STORE (M128):
698 CASE_BUILT_IN_TM_STORE (M256):
699 CASE_BUILT_IN_TM_LOAD (1):
700 CASE_BUILT_IN_TM_LOAD (2):
701 CASE_BUILT_IN_TM_LOAD (4):
702 CASE_BUILT_IN_TM_LOAD (8):
703 CASE_BUILT_IN_TM_LOAD (FLOAT):
704 CASE_BUILT_IN_TM_LOAD (DOUBLE):
705 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
706 CASE_BUILT_IN_TM_LOAD (M64):
707 CASE_BUILT_IN_TM_LOAD (M128):
708 CASE_BUILT_IN_TM_LOAD (M256):
709 case BUILT_IN_TM_LOG:
710 case BUILT_IN_TM_LOG_1:
711 case BUILT_IN_TM_LOG_2:
712 case BUILT_IN_TM_LOG_4:
713 case BUILT_IN_TM_LOG_8:
714 case BUILT_IN_TM_LOG_FLOAT:
715 case BUILT_IN_TM_LOG_DOUBLE:
716 case BUILT_IN_TM_LOG_LDOUBLE:
717 case BUILT_IN_TM_LOG_M64:
718 case BUILT_IN_TM_LOG_M128:
719 case BUILT_IN_TM_LOG_M256:
720 return true;
721 default:
722 break;
723 }
724 }
725 return false;
726 }
727
728 /* Detect flags (function attributes) from the function decl or type node. */
729
730 int
731 flags_from_decl_or_type (const_tree exp)
732 {
733 int flags = 0;
734
735 if (DECL_P (exp))
736 {
737 /* The function exp may have the `malloc' attribute. */
738 if (DECL_IS_MALLOC (exp))
739 flags |= ECF_MALLOC;
740
741 /* The function exp may have the `returns_twice' attribute. */
742 if (DECL_IS_RETURNS_TWICE (exp))
743 flags |= ECF_RETURNS_TWICE;
744
745 /* Process the pure and const attributes. */
746 if (TREE_READONLY (exp))
747 flags |= ECF_CONST;
748 if (DECL_PURE_P (exp))
749 flags |= ECF_PURE;
750 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
751 flags |= ECF_LOOPING_CONST_OR_PURE;
752
753 if (DECL_IS_NOVOPS (exp))
754 flags |= ECF_NOVOPS;
755 if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
756 flags |= ECF_LEAF;
757
758 if (TREE_NOTHROW (exp))
759 flags |= ECF_NOTHROW;
760
761 if (flag_tm)
762 {
763 if (is_tm_builtin (exp))
764 flags |= ECF_TM_BUILTIN;
765 else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
766 || lookup_attribute ("transaction_pure",
767 TYPE_ATTRIBUTES (TREE_TYPE (exp))))
768 flags |= ECF_TM_PURE;
769 }
770
771 flags = special_function_p (exp, flags);
772 }
773 else if (TYPE_P (exp))
774 {
775 if (TYPE_READONLY (exp))
776 flags |= ECF_CONST;
777
778 if (flag_tm
779 && ((flags & ECF_CONST) != 0
780 || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
781 flags |= ECF_TM_PURE;
782 }
783 else
784 gcc_unreachable ();
785
786 if (TREE_THIS_VOLATILE (exp))
787 {
788 flags |= ECF_NORETURN;
789 if (flags & (ECF_CONST|ECF_PURE))
790 flags |= ECF_LOOPING_CONST_OR_PURE;
791 }
792
793 return flags;
794 }
795
796 /* Detect flags from a CALL_EXPR. */
797
798 int
799 call_expr_flags (const_tree t)
800 {
801 int flags;
802 tree decl = get_callee_fndecl (t);
803
804 if (decl)
805 flags = flags_from_decl_or_type (decl);
806 else if (CALL_EXPR_FN (t) == NULL_TREE)
807 flags = internal_fn_flags (CALL_EXPR_IFN (t));
808 else
809 {
810 t = TREE_TYPE (CALL_EXPR_FN (t));
811 if (t && TREE_CODE (t) == POINTER_TYPE)
812 flags = flags_from_decl_or_type (TREE_TYPE (t));
813 else
814 flags = 0;
815 }
816
817 return flags;
818 }
819
820 /* Return true if TYPE should be passed by invisible reference. */
821
822 bool
823 pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
824 tree type, bool named_arg)
825 {
826 if (type)
827 {
828 /* If this type contains non-trivial constructors, then it is
829 forbidden for the middle-end to create any new copies. */
830 if (TREE_ADDRESSABLE (type))
831 return true;
832
833 /* GCC post 3.4 passes *all* variable sized types by reference. */
834 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
835 return true;
836
837 /* If a record type should be passed the same as its first (and only)
838 member, use the type and mode of that member. */
839 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
840 {
841 type = TREE_TYPE (first_field (type));
842 mode = TYPE_MODE (type);
843 }
844 }
845
846 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
847 type, named_arg);
848 }
849
850 /* Return true if TYPE, which is passed by reference, should be callee
851 copied instead of caller copied. */
852
853 bool
854 reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
855 tree type, bool named_arg)
856 {
857 if (type && TREE_ADDRESSABLE (type))
858 return false;
859 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
860 named_arg);
861 }
862
863
864 /* Precompute all register parameters as described by ARGS, storing values
865 into fields within the ARGS array.
866
 867    NUM_ACTUALS indicates the total number of elements in the ARGS array.
868
869 Set REG_PARM_SEEN if we encounter a register parameter. */
870
871 static void
872 precompute_register_parameters (int num_actuals, struct arg_data *args,
873 int *reg_parm_seen)
874 {
875 int i;
876
877 *reg_parm_seen = 0;
878
879 for (i = 0; i < num_actuals; i++)
880 if (args[i].reg != 0 && ! args[i].pass_on_stack)
881 {
882 *reg_parm_seen = 1;
883
884 if (args[i].value == 0)
885 {
886 push_temp_slots ();
887 args[i].value = expand_normal (args[i].tree_value);
888 preserve_temp_slots (args[i].value);
889 pop_temp_slots ();
890 }
891
892 /* If we are to promote the function arg to a wider mode,
893 do it now. */
894
895 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
896 args[i].value
897 = convert_modes (args[i].mode,
898 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
899 args[i].value, args[i].unsignedp);
900
901 /* If the value is a non-legitimate constant, force it into a
902 pseudo now. TLS symbols sometimes need a call to resolve. */
903 if (CONSTANT_P (args[i].value)
904 && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
905 args[i].value = force_reg (args[i].mode, args[i].value);
906
907 /* If we're going to have to load the value by parts, pull the
908 parts into pseudos. The part extraction process can involve
909 non-trivial computation. */
910 if (GET_CODE (args[i].reg) == PARALLEL)
911 {
912 tree type = TREE_TYPE (args[i].tree_value);
913 args[i].parallel_value
914 = emit_group_load_into_temps (args[i].reg, args[i].value,
915 type, int_size_in_bytes (type));
916 }
917
918 /* If the value is expensive, and we are inside an appropriately
919 short loop, put the value into a pseudo and then put the pseudo
920 into the hard reg.
921
922 For small register classes, also do this if this call uses
923 register parameters. This is to avoid reload conflicts while
 924 	 loading the parameter registers.  */
925
926 else if ((! (REG_P (args[i].value)
927 || (GET_CODE (args[i].value) == SUBREG
928 && REG_P (SUBREG_REG (args[i].value)))))
929 && args[i].mode != BLKmode
930 && (set_src_cost (args[i].value, args[i].mode,
931 optimize_insn_for_speed_p ())
932 > COSTS_N_INSNS (1))
933 && ((*reg_parm_seen
934 && targetm.small_register_classes_for_mode_p (args[i].mode))
935 || optimize))
936 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
937 }
938 }
939
940 #ifdef REG_PARM_STACK_SPACE
941
942 /* The argument list is the property of the called routine and it
943 may clobber it. If the fixed area has been used for previous
944 parameters, we must save and restore it. */
945
946 static rtx
947 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
948 {
949 int low;
950 int high;
951
952 /* Compute the boundary of the area that needs to be saved, if any. */
953 high = reg_parm_stack_space;
954 if (ARGS_GROW_DOWNWARD)
955 high += 1;
956
957 if (high > highest_outgoing_arg_in_use)
958 high = highest_outgoing_arg_in_use;
959
960 for (low = 0; low < high; low++)
961 if (stack_usage_map[low] != 0)
962 {
963 int num_to_save;
964 machine_mode save_mode;
965 int delta;
966 rtx addr;
967 rtx stack_area;
968 rtx save_area;
969
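      /* Trim HIGH down to the highest stack slot actually in use.  */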
970 while (stack_usage_map[--high] == 0)
971 ;
972
973 *low_to_save = low;
974 *high_to_save = high;
975
976 num_to_save = high - low + 1;
977 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
978
 979 	/* If we don't have the required alignment, we must do this
 980 	   in BLKmode.  */
981 if ((low & (MIN (GET_MODE_SIZE (save_mode),
982 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
983 save_mode = BLKmode;
984
985 if (ARGS_GROW_DOWNWARD)
986 delta = -high;
987 else
988 delta = low;
989
990 addr = plus_constant (Pmode, argblock, delta);
991 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
992
993 set_mem_align (stack_area, PARM_BOUNDARY);
994 if (save_mode == BLKmode)
995 {
996 save_area = assign_stack_temp (BLKmode, num_to_save);
997 emit_block_move (validize_mem (save_area), stack_area,
998 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
999 }
1000 else
1001 {
1002 save_area = gen_reg_rtx (save_mode);
1003 emit_move_insn (save_area, stack_area);
1004 }
1005
1006 return save_area;
1007 }
1008
1009 return NULL_RTX;
1010 }
1011
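/* Restore the fixed argument area saved by save_fixed_argument_area:
   copy SAVE_AREA back to the stack region of ARGBLOCK delimited by
   LOW_TO_SAVE and HIGH_TO_SAVE.  */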
1012 static void
1013 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
1014 {
1015 machine_mode save_mode = GET_MODE (save_area);
1016 int delta;
1017 rtx addr, stack_area;
1018
1019 if (ARGS_GROW_DOWNWARD)
1020 delta = -high_to_save;
1021 else
1022 delta = low_to_save;
1023
1024 addr = plus_constant (Pmode, argblock, delta);
1025 stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1026 set_mem_align (stack_area, PARM_BOUNDARY);
1027
1028 if (save_mode != BLKmode)
1029 emit_move_insn (stack_area, save_area);
1030 else
1031 emit_block_move (stack_area, validize_mem (save_area),
1032 GEN_INT (high_to_save - low_to_save + 1),
1033 BLOCK_OP_CALL_PARM);
1034 }
1035 #endif /* REG_PARM_STACK_SPACE */
1036
1037 /* If any elements in ARGS refer to parameters that are to be passed in
1038 registers, but not in memory, and whose alignment does not permit a
 1039    direct copy into registers, copy the values into a group of pseudos
1040 which we will later copy into the appropriate hard registers.
1041
1042 Pseudos for each unaligned argument will be stored into the array
1043 args[argnum].aligned_regs. The caller is responsible for deallocating
1044 the aligned_regs array if it is nonzero. */
1045
1046 static void
1047 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
1048 {
1049 int i, j;
1050
1051 for (i = 0; i < num_actuals; i++)
1052 if (args[i].reg != 0 && ! args[i].pass_on_stack
1053 && GET_CODE (args[i].reg) != PARALLEL
1054 && args[i].mode == BLKmode
1055 && MEM_P (args[i].value)
1056 && (MEM_ALIGN (args[i].value)
1057 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1058 {
1059 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1060 int endian_correction = 0;
1061
1062 if (args[i].partial)
1063 {
1064 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
1065 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
1066 }
1067 else
1068 {
1069 args[i].n_aligned_regs
1070 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1071 }
1072
1073 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
1074
1075 /* Structures smaller than a word are normally aligned to the
1076 least significant byte. On a BYTES_BIG_ENDIAN machine,
1077 this means we must skip the empty high order bytes when
1078 calculating the bit offset. */
1079 if (bytes < UNITS_PER_WORD
1080 #ifdef BLOCK_REG_PADDING
1081 && (BLOCK_REG_PADDING (args[i].mode,
1082 TREE_TYPE (args[i].tree_value), 1)
1083 == downward)
1084 #else
1085 && BYTES_BIG_ENDIAN
1086 #endif
1087 )
1088 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
1089
1090 for (j = 0; j < args[i].n_aligned_regs; j++)
1091 {
1092 rtx reg = gen_reg_rtx (word_mode);
1093 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1094 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1095
1096 args[i].aligned_regs[j] = reg;
1097 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1098 word_mode, word_mode, false);
1099
1100 /* There is no need to restrict this code to loading items
1101 in TYPE_ALIGN sized hunks. The bitfield instructions can
1102 load up entire word sized registers efficiently.
1103
1104 ??? This may not be needed anymore.
 1105 	     We used to emit a clobber here, but that doesn't let later
 1106 	     passes optimize the instructions we emit.  By storing 0 into
 1107 	     the register, later passes know that the first AND to zero out the
 1108 	     bitfield being set in the register is unnecessary.  The store
1109 of 0 will be deleted as will at least the first AND. */
1110
1111 emit_move_insn (reg, const0_rtx);
1112
1113 bytes -= bitsize / BITS_PER_UNIT;
1114 store_bit_field (reg, bitsize, endian_correction, 0, 0,
1115 word_mode, word, false);
1116 }
1117 }
1118 }
1119
1120 /* Issue an error if CALL_EXPR was flagged as requiring
 1121    tail-call optimization.  */
1122
1123 static void
1124 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1125 {
1126 gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1127 if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1128 return;
1129
1130 error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1131 }
1132
1133 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1134 CALL_EXPR EXP.
1135
1136 NUM_ACTUALS is the total number of parameters.
1137
1138 N_NAMED_ARGS is the total number of named arguments.
1139
1140 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1141 value, or null.
1142
1143 FNDECL is the tree code for the target of this call (if known)
1144
1145 ARGS_SO_FAR holds state needed by the target to know where to place
1146 the next argument.
1147
1148 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1149 for arguments which are passed in registers.
1150
 1151    OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1152 and may be modified by this routine.
1153
1154 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1155 flags which may be modified by this routine.
1156
 1157    MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference argument
1158 that requires allocation of stack space.
1159
1160 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1161 the thunked-to function. */
1162
1163 static void
1164 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1165 struct arg_data *args,
1166 struct args_size *args_size,
1167 int n_named_args ATTRIBUTE_UNUSED,
1168 tree exp, tree struct_value_addr_value,
1169 tree fndecl, tree fntype,
1170 cumulative_args_t args_so_far,
1171 int reg_parm_stack_space,
1172 rtx *old_stack_level, int *old_pending_adj,
1173 int *must_preallocate, int *ecf_flags,
1174 bool *may_tailcall, bool call_from_thunk_p)
1175 {
1176 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
1177 location_t loc = EXPR_LOCATION (exp);
1178
1179 /* Count arg position in order args appear. */
1180 int argpos;
1181
1182 int i;
1183
1184 args_size->constant = 0;
1185 args_size->var = 0;
1186
1187 bitmap_obstack_initialize (NULL);
1188
1189 /* In this loop, we consider args in the order they are written.
1190 We fill up ARGS from the back. */
1191
1192 i = num_actuals - 1;
1193 {
1194 int j = i, ptr_arg = -1;
1195 call_expr_arg_iterator iter;
1196 tree arg;
1197 bitmap slots = NULL;
1198
1199 if (struct_value_addr_value)
1200 {
1201 args[j].tree_value = struct_value_addr_value;
1202 j--;
1203
 1204 	/* If we pass a structure address then we need to
 1205 	   create bounds for it.  Since the bounds creation is
 1206 	   a call statement, we expand it right here to avoid
 1207 	   having to fix all other places where it may be expanded.  */
1208 if (CALL_WITH_BOUNDS_P (exp))
1209 {
1210 args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
1211 args[j].tree_value
1212 = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
1213 expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
1214 EXPAND_NORMAL, 0, false);
1215 args[j].pointer_arg = j + 1;
1216 j--;
1217 }
1218 }
1219 argpos = 0;
1220 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1221 {
1222 tree argtype = TREE_TYPE (arg);
1223
 1224 	/* Remember the last param with a pointer and associate it
 1225 	   with the following pointer bounds.  */
1226 if (CALL_WITH_BOUNDS_P (exp)
1227 && chkp_type_has_pointer (argtype))
1228 {
1229 if (slots)
1230 BITMAP_FREE (slots);
1231 ptr_arg = j;
1232 if (!BOUNDED_TYPE_P (argtype))
1233 {
1234 slots = BITMAP_ALLOC (NULL);
1235 chkp_find_bound_slots (argtype, slots);
1236 }
1237 }
1238 else if (CALL_WITH_BOUNDS_P (exp)
1239 && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
1240 argpos < n_named_args))
1241 {
1242 if (slots)
1243 BITMAP_FREE (slots);
1244 ptr_arg = j;
1245 }
1246 else if (POINTER_BOUNDS_TYPE_P (argtype))
1247 {
1248 /* We expect bounds in instrumented calls only.
 1249 	       Otherwise it is a sign we lost the flag due to some optimization
1250 and may emit call args incorrectly. */
1251 gcc_assert (CALL_WITH_BOUNDS_P (exp));
1252
1253 /* For structures look for the next available pointer. */
1254 if (ptr_arg != -1 && slots)
1255 {
1256 unsigned bnd_no = bitmap_first_set_bit (slots);
1257 args[j].pointer_offset =
1258 bnd_no * POINTER_SIZE / BITS_PER_UNIT;
1259
1260 bitmap_clear_bit (slots, bnd_no);
1261
1262 /* Check we have no more pointers in the structure. */
1263 if (bitmap_empty_p (slots))
1264 BITMAP_FREE (slots);
1265 }
1266 args[j].pointer_arg = ptr_arg;
1267
1268 /* Check we covered all pointers in the previous
 1269 	       non-bounds arg.  */
1270 if (!slots)
1271 ptr_arg = -1;
1272 }
1273 else
1274 ptr_arg = -1;
1275
1276 if (targetm.calls.split_complex_arg
1277 && argtype
1278 && TREE_CODE (argtype) == COMPLEX_TYPE
1279 && targetm.calls.split_complex_arg (argtype))
1280 {
1281 tree subtype = TREE_TYPE (argtype);
1282 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1283 j--;
1284 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1285 }
1286 else
1287 args[j].tree_value = arg;
1288 j--;
1289 argpos++;
1290 }
1291
1292 if (slots)
1293 BITMAP_FREE (slots);
1294 }
1295
1296 bitmap_obstack_release (NULL);
1297
1298 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1299 for (argpos = 0; argpos < num_actuals; i--, argpos++)
1300 {
1301 tree type = TREE_TYPE (args[i].tree_value);
1302 int unsignedp;
1303 machine_mode mode;
1304
1305 /* Replace erroneous argument with constant zero. */
1306 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1307 args[i].tree_value = integer_zero_node, type = integer_type_node;
1308
1309 /* If TYPE is a transparent union or record, pass things the way
1310 we would pass the first field of the union or record. We have
1311 already verified that the modes are the same. */
1312 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1313 && TYPE_TRANSPARENT_AGGR (type))
1314 type = TREE_TYPE (first_field (type));
1315
1316 /* Decide where to pass this arg.
1317
1318 args[i].reg is nonzero if all or part is passed in registers.
1319
1320 args[i].partial is nonzero if part but not all is passed in registers,
1321 and the exact value says how many bytes are passed in registers.
1322
1323 args[i].pass_on_stack is nonzero if the argument must at least be
1324 computed on the stack. It may then be loaded back into registers
1325 if args[i].reg is nonzero.
1326
1327 These decisions are driven by the FUNCTION_... macros and must agree
1328 with those made by function.c. */
1329
1330 /* See if this argument should be passed by invisible reference. */
1331 if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
1332 type, argpos < n_named_args))
1333 {
1334 bool callee_copies;
1335 tree base = NULL_TREE;
1336
1337 callee_copies
1338 = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
1339 type, argpos < n_named_args);
1340
1341 /* If we're compiling a thunk, pass through invisible references
1342 instead of making a copy. */
1343 if (call_from_thunk_p
1344 || (callee_copies
1345 && !TREE_ADDRESSABLE (type)
1346 && (base = get_base_address (args[i].tree_value))
1347 && TREE_CODE (base) != SSA_NAME
1348 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1349 {
1350 /* We may have turned the parameter value into an SSA name.
1351 Go back to the original parameter so we can take the
1352 address. */
1353 if (TREE_CODE (args[i].tree_value) == SSA_NAME)
1354 {
1355 gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
1356 args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
1357 gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
1358 }
 1359 	      /* Argument setup code may have copied the value to a register.  We
1360 revert that optimization now because the tail call code must
1361 use the original location. */
1362 if (TREE_CODE (args[i].tree_value) == PARM_DECL
1363 && !MEM_P (DECL_RTL (args[i].tree_value))
1364 && DECL_INCOMING_RTL (args[i].tree_value)
1365 && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
1366 set_decl_rtl (args[i].tree_value,
1367 DECL_INCOMING_RTL (args[i].tree_value));
1368
1369 mark_addressable (args[i].tree_value);
1370
1371 /* We can't use sibcalls if a callee-copied argument is
1372 stored in the current function's frame. */
1373 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1374 {
1375 *may_tailcall = false;
1376 maybe_complain_about_tail_call (exp,
1377 "a callee-copied argument is"
1378 " stored in the current "
1379 " function's frame");
1380 }
1381
1382 args[i].tree_value = build_fold_addr_expr_loc (loc,
1383 args[i].tree_value);
1384 type = TREE_TYPE (args[i].tree_value);
1385
1386 if (*ecf_flags & ECF_CONST)
1387 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1388 }
1389 else
1390 {
1391 /* We make a copy of the object and pass the address to the
1392 function being called. */
1393 rtx copy;
1394
1395 if (!COMPLETE_TYPE_P (type)
1396 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1397 || (flag_stack_check == GENERIC_STACK_CHECK
1398 && compare_tree_int (TYPE_SIZE_UNIT (type),
1399 STACK_CHECK_MAX_VAR_SIZE) > 0))
1400 {
1401 /* This is a variable-sized object. Make space on the stack
1402 for it. */
1403 rtx size_rtx = expr_size (args[i].tree_value);
1404
1405 if (*old_stack_level == 0)
1406 {
1407 emit_stack_save (SAVE_BLOCK, old_stack_level);
1408 *old_pending_adj = pending_stack_adjust;
1409 pending_stack_adjust = 0;
1410 }
1411
1412 /* We can pass TRUE as the 4th argument because we just
1413 saved the stack pointer and will restore it right after
1414 the call. */
1415 copy = allocate_dynamic_stack_space (size_rtx,
1416 TYPE_ALIGN (type),
1417 TYPE_ALIGN (type),
1418 true);
1419 copy = gen_rtx_MEM (BLKmode, copy);
1420 set_mem_attributes (copy, type, 1);
1421 }
1422 else
1423 copy = assign_temp (type, 1, 0);
1424
1425 store_expr (args[i].tree_value, copy, 0, false, false);
1426
1427 /* Just change the const function to pure and then let
1428 the next test clear the pure based on
1429 callee_copies. */
1430 if (*ecf_flags & ECF_CONST)
1431 {
1432 *ecf_flags &= ~ECF_CONST;
1433 *ecf_flags |= ECF_PURE;
1434 }
1435
1436 if (!callee_copies && *ecf_flags & ECF_PURE)
1437 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
1438
1439 args[i].tree_value
1440 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
1441 type = TREE_TYPE (args[i].tree_value);
1442 *may_tailcall = false;
1443 maybe_complain_about_tail_call (exp,
1444 "argument must be passed"
1445 " by copying");
1446 }
1447 }
1448
1449 unsignedp = TYPE_UNSIGNED (type);
1450 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1451 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
1452
1453 args[i].unsignedp = unsignedp;
1454 args[i].mode = mode;
1455
1456 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
1457 argpos < n_named_args);
1458
1459 if (args[i].reg && CONST_INT_P (args[i].reg))
1460 {
1461 args[i].special_slot = args[i].reg;
1462 args[i].reg = NULL;
1463 }
1464
1465 /* If this is a sibling call and the machine has register windows, the
 1466 	 register window has to be unwound before calling the routine, so
1467 arguments have to go into the incoming registers. */
1468 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
1469 args[i].tail_call_reg
1470 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
1471 argpos < n_named_args);
1472 else
1473 args[i].tail_call_reg = args[i].reg;
1474
1475 if (args[i].reg)
1476 args[i].partial
1477 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1478 argpos < n_named_args);
1479
1480 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1481
1482 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1483 it means that we are to pass this arg in the register(s) designated
 1484 	 by the PARALLEL, but also to pass it on the stack.  */
1485 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1486 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1487 args[i].pass_on_stack = 1;
1488
1489 /* If this is an addressable type, we must preallocate the stack
1490 since we must evaluate the object into its final location.
1491
1492 If this is to be passed in both registers and the stack, it is simpler
1493 to preallocate. */
1494 if (TREE_ADDRESSABLE (type)
1495 || (args[i].pass_on_stack && args[i].reg != 0))
1496 *must_preallocate = 1;
1497
1498 /* No stack allocation and padding for bounds. */
1499 if (POINTER_BOUNDS_P (args[i].tree_value))
1500 ;
1501 /* Compute the stack-size of this argument. */
1502 else if (args[i].reg == 0 || args[i].partial != 0
1503 || reg_parm_stack_space > 0
1504 || args[i].pass_on_stack)
1505 locate_and_pad_parm (mode, type,
1506 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1507 1,
1508 #else
1509 args[i].reg != 0,
1510 #endif
1511 reg_parm_stack_space,
1512 args[i].pass_on_stack ? 0 : args[i].partial,
1513 fndecl, args_size, &args[i].locate);
1514 #ifdef BLOCK_REG_PADDING
1515 else
1516 /* The argument is passed entirely in registers. See at which
1517 end it should be padded. */
1518 args[i].locate.where_pad =
1519 BLOCK_REG_PADDING (mode, type,
1520 int_size_in_bytes (type) <= UNITS_PER_WORD);
1521 #endif
1522
1523 /* Update ARGS_SIZE, the total stack space for args so far. */
1524
1525 args_size->constant += args[i].locate.size.constant;
1526 if (args[i].locate.size.var)
1527 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1528
1529 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1530 have been used, etc. */
1531
1532 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
1533 type, argpos < n_named_args);
1534 }
1535 }
1536
1537 /* Update ARGS_SIZE to contain the total size for the argument block.
1538 Return the original constant component of the argument block's size.
1539
1540 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1541 for arguments passed in registers. */
1542
1543 static int
1544 compute_argument_block_size (int reg_parm_stack_space,
1545 struct args_size *args_size,
1546 tree fndecl ATTRIBUTE_UNUSED,
1547 tree fntype ATTRIBUTE_UNUSED,
1548 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1549 {
1550 int unadjusted_args_size = args_size->constant;
1551
1552 /* For accumulate outgoing args mode we don't need to align, since the frame
 1553      will already be aligned.  Align to STACK_BOUNDARY in order to prevent
1554 backends from generating misaligned frame sizes. */
1555 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1556 preferred_stack_boundary = STACK_BOUNDARY;
1557
1558 /* Compute the actual size of the argument block required. The variable
1559 and constant sizes must be combined, the size may have to be rounded,
1560 and there may be a minimum required size. */
1561
1562 if (args_size->var)
1563 {
1564 args_size->var = ARGS_SIZE_TREE (*args_size);
1565 args_size->constant = 0;
1566
1567 preferred_stack_boundary /= BITS_PER_UNIT;
1568 if (preferred_stack_boundary > 1)
1569 {
1570 /* We don't handle this case yet. To handle it correctly we have
1571 to add the delta, round and subtract the delta.
1572 Currently no machine description requires this support. */
1573 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1574 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1575 }
1576
1577 if (reg_parm_stack_space > 0)
1578 {
1579 args_size->var
1580 = size_binop (MAX_EXPR, args_size->var,
1581 ssize_int (reg_parm_stack_space));
1582
1583 /* The area corresponding to register parameters is not to count in
1584 the size of the block we need. So make the adjustment. */
1585 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1586 args_size->var
1587 = size_binop (MINUS_EXPR, args_size->var,
1588 ssize_int (reg_parm_stack_space));
1589 }
1590 }
1591 else
1592 {
1593 preferred_stack_boundary /= BITS_PER_UNIT;
1594 if (preferred_stack_boundary < 1)
1595 preferred_stack_boundary = 1;
1596 args_size->constant = (((args_size->constant
1597 + stack_pointer_delta
1598 + preferred_stack_boundary - 1)
1599 / preferred_stack_boundary
1600 * preferred_stack_boundary)
1601 - stack_pointer_delta);
1602
1603 args_size->constant = MAX (args_size->constant,
1604 reg_parm_stack_space);
1605
1606 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1607 args_size->constant -= reg_parm_stack_space;
1608 }
1609 return unadjusted_args_size;
1610 }
1611
1612 /* Precompute parameters as needed for a function call.
1613
1614 FLAGS is mask of ECF_* constants.
1615
1616 NUM_ACTUALS is the number of arguments.
1617
1618 ARGS is an array containing information for each argument; this
1619 routine fills in the INITIAL_VALUE and VALUE fields for each
1620 precomputed argument. */
1621
1622 static void
1623 precompute_arguments (int num_actuals, struct arg_data *args)
1624 {
1625 int i;
1626
1627 /* If this is a libcall, then precompute all arguments so that we do not
1628 get extraneous instructions emitted as part of the libcall sequence. */
1629
1630 /* If we preallocated the stack space, and some arguments must be passed
1631 on the stack, then we must precompute any parameter which contains a
1632 function call which will store arguments on the stack.
1633 Otherwise, evaluating the parameter may clobber previous parameters
1634 which have already been stored into the stack. (we have code to avoid
1635 such case by saving the outgoing stack arguments, but it results in
1636 worse code) */
1637 if (!ACCUMULATE_OUTGOING_ARGS)
1638 return;
1639
1640 for (i = 0; i < num_actuals; i++)
1641 {
1642 tree type;
1643 machine_mode mode;
1644
1645 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
1646 continue;
1647
1648 /* If this is an addressable type, we cannot pre-evaluate it. */
1649 type = TREE_TYPE (args[i].tree_value);
1650 gcc_assert (!TREE_ADDRESSABLE (type));
1651
1652 args[i].initial_value = args[i].value
1653 = expand_normal (args[i].tree_value);
1654
1655 mode = TYPE_MODE (type);
1656 if (mode != args[i].mode)
1657 {
1658 int unsignedp = args[i].unsignedp;
1659 args[i].value
1660 = convert_modes (args[i].mode, mode,
1661 args[i].value, args[i].unsignedp);
1662
1663 /* CSE will replace this only if it contains args[i].value
1664 pseudo, so convert it down to the declared mode using
1665 a SUBREG. */
1666 if (REG_P (args[i].value)
1667 && GET_MODE_CLASS (args[i].mode) == MODE_INT
1668 && promote_mode (type, mode, &unsignedp) != args[i].mode)
1669 {
1670 args[i].initial_value
1671 = gen_lowpart_SUBREG (mode, args[i].value);
1672 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1673 SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
1674 }
1675 }
1676 }
1677 }
1678
1679 /* Given the current state of MUST_PREALLOCATE and information about
1680 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1681 compute and return the final value for MUST_PREALLOCATE. */
1682
1683 static int
1684 finalize_must_preallocate (int must_preallocate, int num_actuals,
1685 struct arg_data *args, struct args_size *args_size)
1686 {
1687 /* See if we have or want to preallocate stack space.
1688
1689 If we would have to push a partially-in-regs parm
1690 before other stack parms, preallocate stack space instead.
1691
1692 If the size of some parm is not a multiple of the required stack
1693 alignment, we must preallocate.
1694
1695 If the total size of arguments that would otherwise create a copy in
1696 a temporary (such as a CALL) is more than half the total argument list
1697 size, preallocation is faster.
1698
1699 Another reason to preallocate is if we have a machine (like the m88k)
1700 where stack alignment is required to be maintained between every
1701 pair of insns, not just when the call is made. However, we assume here
1702 that such machines either do not have push insns (and hence preallocation
1703 would occur anyway) or the problem is taken care of with
1704 PUSH_ROUNDING. */
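  /* Worked example (illustrative figures only): if the argument list needs
     32 bytes of stack space and BLKmode temporaries such as CALL_EXPR or
     TARGET_EXPR arguments account for 16 of them, then
     copy_to_evaluate_size * 2 >= args_size->constant holds below and we
     decide to preallocate.  */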
1705
1706 if (! must_preallocate)
1707 {
1708 int partial_seen = 0;
1709 int copy_to_evaluate_size = 0;
1710 int i;
1711
1712 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1713 {
1714 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1715 partial_seen = 1;
1716 else if (partial_seen && args[i].reg == 0)
1717 must_preallocate = 1;
1718 	  /* We preallocate in case there are bounds passed
1719 	     in the bounds table, so that we have a precomputed
1720 	     address for bounds association.  */
1721 else if (POINTER_BOUNDS_P (args[i].tree_value)
1722 && !args[i].reg)
1723 must_preallocate = 1;
1724
1725 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1726 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1727 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1728 || TREE_CODE (args[i].tree_value) == COND_EXPR
1729 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1730 copy_to_evaluate_size
1731 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1732 }
1733
1734 if (copy_to_evaluate_size * 2 >= args_size->constant
1735 && args_size->constant > 0)
1736 must_preallocate = 1;
1737 }
1738 return must_preallocate;
1739 }
1740
1741 /* If we preallocated stack space, compute the address of each argument
1742 and store it into the ARGS array.
1743
1744 We need not ensure it is a valid memory address here; it will be
1745 validized when it is used.
1746
1747 ARGBLOCK is an rtx for the address of the outgoing arguments. */
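/* For illustration only (assumed shapes, not guaranteed by every target):
   ARGBLOCK is typically virtual_outgoing_args_rtx, a pseudo holding a copy
   of it, or, for a sibcall, something of the form
   (plus (internal_arg_pointer) (const_int N)); the code below splits off
   any constant part so that each argument address can be formed with
   plus_constant.  */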
1748
1749 static void
1750 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1751 {
1752 if (argblock)
1753 {
1754 rtx arg_reg = argblock;
1755 int i, arg_offset = 0;
1756
1757 if (GET_CODE (argblock) == PLUS)
1758 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1759
1760 for (i = 0; i < num_actuals; i++)
1761 {
1762 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1763 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1764 rtx addr;
1765 unsigned int align, boundary;
1766 unsigned int units_on_stack = 0;
1767 machine_mode partial_mode = VOIDmode;
1768
1769 /* Skip this parm if it will not be passed on the stack. */
1770 if (! args[i].pass_on_stack
1771 && args[i].reg != 0
1772 && args[i].partial == 0)
1773 continue;
1774
1775 /* Pointer Bounds are never passed on the stack. */
1776 if (POINTER_BOUNDS_P (args[i].tree_value))
1777 continue;
1778
1779 if (CONST_INT_P (offset))
1780 addr = plus_constant (Pmode, arg_reg, INTVAL (offset));
1781 else
1782 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1783
1784 addr = plus_constant (Pmode, addr, arg_offset);
1785
1786 if (args[i].partial != 0)
1787 {
1788 /* Only part of the parameter is being passed on the stack.
1789 Generate a simple memory reference of the correct size. */
1790 units_on_stack = args[i].locate.size.constant;
1791 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1792 MODE_INT, 1);
1793 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1794 set_mem_size (args[i].stack, units_on_stack);
1795 }
1796 else
1797 {
1798 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1799 set_mem_attributes (args[i].stack,
1800 TREE_TYPE (args[i].tree_value), 1);
1801 }
1802 align = BITS_PER_UNIT;
1803 boundary = args[i].locate.boundary;
1804 if (args[i].locate.where_pad != downward)
1805 align = boundary;
1806 else if (CONST_INT_P (offset))
1807 {
1808 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1809 align = least_bit_hwi (align);
1810 }
1811 set_mem_align (args[i].stack, align);
1812
1813 if (CONST_INT_P (slot_offset))
1814 addr = plus_constant (Pmode, arg_reg, INTVAL (slot_offset));
1815 else
1816 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1817
1818 addr = plus_constant (Pmode, addr, arg_offset);
1819
1820 if (args[i].partial != 0)
1821 {
1822 /* Only part of the parameter is being passed on the stack.
1823 Generate a simple memory reference of the correct size.
1824 */
1825 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1826 set_mem_size (args[i].stack_slot, units_on_stack);
1827 }
1828 else
1829 {
1830 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1831 set_mem_attributes (args[i].stack_slot,
1832 TREE_TYPE (args[i].tree_value), 1);
1833 }
1834 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1835
1836 /* Function incoming arguments may overlap with sibling call
1837 outgoing arguments and we cannot allow reordering of reads
1838 from function arguments with stores to outgoing arguments
1839 of sibling calls. */
1840 set_mem_alias_set (args[i].stack, 0);
1841 set_mem_alias_set (args[i].stack_slot, 0);
1842 }
1843 }
1844 }
1845
1846 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1847 in a call instruction.
1848
1849 FNDECL is the tree node for the target function. For an indirect call
1850 FNDECL will be NULL_TREE.
1851
1852 ADDR is the operand 0 of CALL_EXPR for this call. */
1853
1854 static rtx
1855 rtx_for_function_call (tree fndecl, tree addr)
1856 {
1857 rtx funexp;
1858
1859 /* Get the function to call, in the form of RTL. */
1860 if (fndecl)
1861 {
1862 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1863 TREE_USED (fndecl) = 1;
1864
1865 /* Get a SYMBOL_REF rtx for the function address. */
1866 funexp = XEXP (DECL_RTL (fndecl), 0);
1867 }
1868 else
1869 /* Generate an rtx (probably a pseudo-register) for the address. */
1870 {
1871 push_temp_slots ();
1872 funexp = expand_normal (addr);
1873 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1874 }
1875 return funexp;
1876 }
1877
1878 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
1879 static struct
1880 {
1881 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
1882 or NULL_RTX if none has been scanned yet. */
1883 rtx_insn *scan_start;
1884 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
1885 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
1886 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
1887 with fixed offset, or PC if this is with variable or unknown offset. */
1888 vec<rtx> cache;
1889 } internal_arg_pointer_exp_state;
1890
1891 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
1892
1893 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
1894 	 the tail call sequence, starting with the first insn that hasn't been
1895 	 scanned yet, and note for each pseudo on the LHS whether it is based
1896 	 on crtl->args.internal_arg_pointer or not, and what offset from that
1897 	 pointer it has.  */
1898
1899 static void
1900 internal_arg_pointer_based_exp_scan (void)
1901 {
1902 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
1903
1904 if (scan_start == NULL_RTX)
1905 insn = get_insns ();
1906 else
1907 insn = NEXT_INSN (scan_start);
1908
1909 while (insn)
1910 {
1911 rtx set = single_set (insn);
1912 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
1913 {
1914 rtx val = NULL_RTX;
1915 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
1916 /* Punt on pseudos set multiple times. */
1917 if (idx < internal_arg_pointer_exp_state.cache.length ()
1918 && (internal_arg_pointer_exp_state.cache[idx]
1919 != NULL_RTX))
1920 val = pc_rtx;
1921 else
1922 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
1923 if (val != NULL_RTX)
1924 {
1925 if (idx >= internal_arg_pointer_exp_state.cache.length ())
1926 internal_arg_pointer_exp_state.cache
1927 .safe_grow_cleared (idx + 1);
1928 internal_arg_pointer_exp_state.cache[idx] = val;
1929 }
1930 }
1931 if (NEXT_INSN (insn) == NULL_RTX)
1932 scan_start = insn;
1933 insn = NEXT_INSN (insn);
1934 }
1935
1936 internal_arg_pointer_exp_state.scan_start = scan_start;
1937 }
1938
1939 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
1940 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
1941 it with fixed offset, or PC if this is with variable or unknown offset.
1942 TOPLEVEL is true if the function is invoked at the topmost level. */
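/* Small illustrative example (hypothetical RTL, not from the sources): if
   pseudo 100 was set from (plus (internal_arg_pointer) (const_int 8)), then
   for (plus (reg 100) (const_int 4)) this returns (const_int 12); an
   expression built only from unrelated registers returns NULL_RTX, and one
   whose relationship to the argument pointer cannot be determined returns
   pc_rtx.  */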
1943
1944 static rtx
1945 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
1946 {
1947 if (CONSTANT_P (rtl))
1948 return NULL_RTX;
1949
1950 if (rtl == crtl->args.internal_arg_pointer)
1951 return const0_rtx;
1952
1953 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
1954 return NULL_RTX;
1955
1956 if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
1957 {
1958 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
1959 if (val == NULL_RTX || val == pc_rtx)
1960 return val;
1961 return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1)));
1962 }
1963
1964 /* When called at the topmost level, scan pseudo assignments in between the
1965 last scanned instruction in the tail call sequence and the latest insn
1966 in that sequence. */
1967 if (toplevel)
1968 internal_arg_pointer_based_exp_scan ();
1969
1970 if (REG_P (rtl))
1971 {
1972 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
1973 if (idx < internal_arg_pointer_exp_state.cache.length ())
1974 return internal_arg_pointer_exp_state.cache[idx];
1975
1976 return NULL_RTX;
1977 }
1978
1979 subrtx_iterator::array_type array;
1980 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
1981 {
1982 const_rtx x = *iter;
1983 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
1984 return pc_rtx;
1985 if (MEM_P (x))
1986 iter.skip_subrtxes ();
1987 }
1988
1989 return NULL_RTX;
1990 }
1991
1992 /* Return true if and only if SIZE storage units (usually bytes)
1993 starting from address ADDR overlap with already clobbered argument
1994 area. This function is used to determine if we should give up a
1995 sibcall. */
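/* Illustrative sketch (assumed target: STACK_GROWS_DOWNWARD,
   !ARGS_GROW_DOWNWARD, no pretend args): for an address equal to
   internal_arg_pointer + 16 and SIZE == 8, the function reports an overlap
   iff any of bits 16..23 is set in stored_args_map.  */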
1996
1997 static bool
1998 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1999 {
2000 HOST_WIDE_INT i;
2001 rtx val;
2002
2003 if (bitmap_empty_p (stored_args_map))
2004 return false;
2005 val = internal_arg_pointer_based_exp (addr, true);
2006 if (val == NULL_RTX)
2007 return false;
2008 else if (val == pc_rtx)
2009 return true;
2010 else
2011 i = INTVAL (val);
2012
2013 if (STACK_GROWS_DOWNWARD)
2014 i -= crtl->args.pretend_args_size;
2015 else
2016 i += crtl->args.pretend_args_size;
2017
2018
2019 if (ARGS_GROW_DOWNWARD)
2020 i = -i - size;
2021
2022 if (size > 0)
2023 {
2024 unsigned HOST_WIDE_INT k;
2025
2026 for (k = 0; k < size; k++)
2027 if (i + k < SBITMAP_SIZE (stored_args_map)
2028 && bitmap_bit_p (stored_args_map, i + k))
2029 return true;
2030 }
2031
2032 return false;
2033 }
2034
2035 /* Do the register loads required for any wholly-register parms or any
2036 parms which are passed both on the stack and in a register. Their
2037 expressions were already evaluated.
2038
2039 Mark all register-parms as living through the call, putting these USE
2040 insns in the CALL_INSN_FUNCTION_USAGE field.
2041
2042 When IS_SIBCALL, perform the check_sibcall_argument_overlap
2043 checking, setting *SIBCALL_FAILURE if appropriate. */
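/* Hypothetical example for the partial case (no particular target implied):
   an ABI with four word-sized argument registers might pass a three-word
   argument with one word in the last free register and the rest on the
   stack.  args[i].partial would then be UNITS_PER_WORD; store_one_arg has
   already loaded that register, and the code below merely records
   partial / UNITS_PER_WORD registers as used in CALL_INSN_FUNCTION_USAGE.  */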
2044
2045 static void
2046 load_register_parameters (struct arg_data *args, int num_actuals,
2047 rtx *call_fusage, int flags, int is_sibcall,
2048 int *sibcall_failure)
2049 {
2050 int i, j;
2051
2052 for (i = 0; i < num_actuals; i++)
2053 {
2054 rtx reg = ((flags & ECF_SIBCALL)
2055 ? args[i].tail_call_reg : args[i].reg);
2056 if (reg)
2057 {
2058 int partial = args[i].partial;
2059 int nregs;
2060 int size = 0;
2061 rtx_insn *before_arg = get_last_insn ();
2062 /* Set non-negative if we must move a word at a time, even if
2063 	     just one word (e.g., partial == 4 && mode == DFmode).  Set
2064 to -1 if we just use a normal move insn. This value can be
2065 zero if the argument is a zero size structure. */
2066 nregs = -1;
2067 if (GET_CODE (reg) == PARALLEL)
2068 ;
2069 else if (partial)
2070 {
2071 gcc_assert (partial % UNITS_PER_WORD == 0);
2072 nregs = partial / UNITS_PER_WORD;
2073 }
2074 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2075 {
2076 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2077 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2078 }
2079 else
2080 size = GET_MODE_SIZE (args[i].mode);
2081
2082 /* Handle calls that pass values in multiple non-contiguous
2083 locations. The Irix 6 ABI has examples of this. */
2084
2085 if (GET_CODE (reg) == PARALLEL)
2086 emit_group_move (reg, args[i].parallel_value);
2087
2088 /* If simple case, just do move. If normal partial, store_one_arg
2089 has already loaded the register for us. In all other cases,
2090 load the register(s) from memory. */
2091
2092 else if (nregs == -1)
2093 {
2094 emit_move_insn (reg, args[i].value);
2095 #ifdef BLOCK_REG_PADDING
2096 /* Handle case where we have a value that needs shifting
2097 		 up to the msb, e.g. a QImode value and we're padding
2098 upward on a BYTES_BIG_ENDIAN machine. */
2099 if (size < UNITS_PER_WORD
2100 && (args[i].locate.where_pad
2101 == (BYTES_BIG_ENDIAN ? upward : downward)))
2102 {
2103 rtx x;
2104 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2105
2106 /* Assigning REG here rather than a temp makes CALL_FUSAGE
2107 report the whole reg as used. Strictly speaking, the
2108 call only uses SIZE bytes at the msb end, but it doesn't
2109 seem worth generating rtl to say that. */
2110 reg = gen_rtx_REG (word_mode, REGNO (reg));
2111 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
2112 if (x != reg)
2113 emit_move_insn (reg, x);
2114 }
2115 #endif
2116 }
2117
2118 /* If we have pre-computed the values to put in the registers in
2119 the case of non-aligned structures, copy them in now. */
2120
2121 else if (args[i].n_aligned_regs != 0)
2122 for (j = 0; j < args[i].n_aligned_regs; j++)
2123 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2124 args[i].aligned_regs[j]);
2125
2126 else if (partial == 0 || args[i].pass_on_stack)
2127 {
2128 rtx mem = validize_mem (copy_rtx (args[i].value));
2129
2130 /* Check for overlap with already clobbered argument area,
2131 providing that this has non-zero size. */
2132 if (is_sibcall
2133 && size != 0
2134 && (mem_overlaps_already_clobbered_arg_p
2135 (XEXP (args[i].value, 0), size)))
2136 *sibcall_failure = 1;
2137
2138 if (size % UNITS_PER_WORD == 0
2139 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2140 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2141 else
2142 {
2143 if (nregs > 1)
2144 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2145 args[i].mode);
2146 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2147 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
2148 unsigned int bitsize = size * BITS_PER_UNIT - bitoff;
2149 rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
2150 word_mode, word_mode, false);
2151 if (BYTES_BIG_ENDIAN)
2152 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2153 BITS_PER_WORD - bitsize, dest, 1);
2154 if (x != dest)
2155 emit_move_insn (dest, x);
2156 }
2157
2158 /* Handle a BLKmode that needs shifting. */
2159 if (nregs == 1 && size < UNITS_PER_WORD
2160 #ifdef BLOCK_REG_PADDING
2161 && args[i].locate.where_pad == downward
2162 #else
2163 && BYTES_BIG_ENDIAN
2164 #endif
2165 )
2166 {
2167 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
2168 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2169 enum tree_code dir = (BYTES_BIG_ENDIAN
2170 ? RSHIFT_EXPR : LSHIFT_EXPR);
2171 rtx x;
2172
2173 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2174 if (x != dest)
2175 emit_move_insn (dest, x);
2176 }
2177 }
2178
2179 /* When a parameter is a block, and perhaps in other cases, it is
2180 possible that it did a load from an argument slot that was
2181 already clobbered. */
2182 if (is_sibcall
2183 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2184 *sibcall_failure = 1;
2185
2186 /* Handle calls that pass values in multiple non-contiguous
2187 locations. The Irix 6 ABI has examples of this. */
2188 if (GET_CODE (reg) == PARALLEL)
2189 use_group_regs (call_fusage, reg);
2190 else if (nregs == -1)
2191 use_reg_mode (call_fusage, reg,
2192 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
2193 else if (nregs > 0)
2194 use_regs (call_fusage, REGNO (reg), nregs);
2195 }
2196 }
2197 }
2198
2199 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2200 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2201 bytes, then we would need to push some additional bytes to pad the
2202 arguments. So, we compute an adjust to the stack pointer for an
2203 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2204 bytes. Then, when the arguments are pushed the stack will be perfectly
2205 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
2206 be popped after the call. Returns the adjustment. */
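/* Worked example (illustrative numbers only): with
   PREFERRED_UNIT_STACK_BOUNDARY == 16, stack_pointer_delta == 0,
   UNADJUSTED_ARGS_SIZE == 4 and pending_stack_adjust == 32, the function
   returns an adjustment of 32 - (16 - 4) = 20; popping those 20 bytes and
   then pushing the 4 bytes of arguments leaves the stack on a 16-byte
   boundary, and ARGS_SIZE->CONSTANT becomes 32 - 20 + 4 = 16, the amount
   to pop after the call.  */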
2207
2208 static int
2209 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
2210 struct args_size *args_size,
2211 unsigned int preferred_unit_stack_boundary)
2212 {
2213 /* The number of bytes to pop so that the stack will be
2214 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2215 HOST_WIDE_INT adjustment;
2216 /* The alignment of the stack after the arguments are pushed, if we
2217 	     just pushed the arguments without adjusting the stack here.  */
2218 unsigned HOST_WIDE_INT unadjusted_alignment;
2219
2220 unadjusted_alignment
2221 = ((stack_pointer_delta + unadjusted_args_size)
2222 % preferred_unit_stack_boundary);
2223
2224 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2225 as possible -- leaving just enough left to cancel out the
2226 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2227 PENDING_STACK_ADJUST is non-negative, and congruent to
2228 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2229
2230 /* Begin by trying to pop all the bytes. */
2231 unadjusted_alignment
2232 = (unadjusted_alignment
2233 - (pending_stack_adjust % preferred_unit_stack_boundary));
2234 adjustment = pending_stack_adjust;
2235 /* Push enough additional bytes that the stack will be aligned
2236 after the arguments are pushed. */
2237 if (preferred_unit_stack_boundary > 1)
2238 {
2239 if (unadjusted_alignment > 0)
2240 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2241 else
2242 adjustment += unadjusted_alignment;
2243 }
2244
2245 	  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2246 bytes after the call. The right number is the entire
2247 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2248 by the arguments in the first place. */
2249 args_size->constant
2250 = pending_stack_adjust - adjustment + unadjusted_args_size;
2251
2252 return adjustment;
2253 }
2254
2255 /* Scan expression X to see whether it dereferences any argument slots
2256 	 that we have already clobbered with tail call arguments (as noted in the
2257 	 stored_args_map bitmap).
2258 	 Return nonzero if X dereferences such an argument slot,
2259 	 zero otherwise.  */
2260
2261 static int
2262 check_sibcall_argument_overlap_1 (rtx x)
2263 {
2264 RTX_CODE code;
2265 int i, j;
2266 const char *fmt;
2267
2268 if (x == NULL_RTX)
2269 return 0;
2270
2271 code = GET_CODE (x);
2272
2273 /* We need not check the operands of the CALL expression itself. */
2274 if (code == CALL)
2275 return 0;
2276
2277 if (code == MEM)
2278 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
2279 GET_MODE_SIZE (GET_MODE (x)));
2280
2281 /* Scan all subexpressions. */
2282 fmt = GET_RTX_FORMAT (code);
2283 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2284 {
2285 if (*fmt == 'e')
2286 {
2287 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2288 return 1;
2289 }
2290 else if (*fmt == 'E')
2291 {
2292 for (j = 0; j < XVECLEN (x, i); j++)
2293 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2294 return 1;
2295 }
2296 }
2297 return 0;
2298 }
2299
2300 /* Scan the sequence after INSN to see whether it dereferences any argument
2301 	 slots that we have already clobbered with tail call arguments (as noted in
2302 	 the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, add stack slots for
2303 	 ARG to the stored_args_map bitmap afterwards (when ARG is a register,
2304 	 MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the sequence after
2305 	 INSN dereferences such argument slots, zero otherwise.  */
2306
2307 static int
2308 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
2309 int mark_stored_args_map)
2310 {
2311 int low, high;
2312
2313 if (insn == NULL_RTX)
2314 insn = get_insns ();
2315 else
2316 insn = NEXT_INSN (insn);
2317
2318 for (; insn; insn = NEXT_INSN (insn))
2319 if (INSN_P (insn)
2320 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2321 break;
2322
2323 if (mark_stored_args_map)
2324 {
2325 if (ARGS_GROW_DOWNWARD)
2326 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2327 else
2328 low = arg->locate.slot_offset.constant;
2329
2330 for (high = low + arg->locate.size.constant; low < high; low++)
2331 bitmap_set_bit (stored_args_map, low);
2332 }
2333 return insn != NULL_RTX;
2334 }
2335
2336 /* Given that a function returns a value of mode MODE at the most
2337 significant end of hard register VALUE, shift VALUE left or right
2338 as specified by LEFT_P. Return true if some action was needed. */
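/* Illustrative example (assumed 64-bit hard register, not target-specific):
   a QImode value returned at the most significant end of a DImode register
   needs a shift of 64 - 8 = 56 bits; with LEFT_P false an arithmetic right
   shift is used, which also preserves MIPS-style sign-extension of SImode
   values held in 64-bit registers.  */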
2339
2340 bool
2341 shift_return_value (machine_mode mode, bool left_p, rtx value)
2342 {
2343 HOST_WIDE_INT shift;
2344
2345 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2346 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
2347 if (shift == 0)
2348 return false;
2349
2350 /* Use ashr rather than lshr for right shifts. This is for the benefit
2351 of the MIPS port, which requires SImode values to be sign-extended
2352 when stored in 64-bit registers. */
2353 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
2354 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
2355 gcc_unreachable ();
2356 return true;
2357 }
2358
2359 /* If X is a likely-spilled register value, copy it to a pseudo
2360 register and return that register. Return X otherwise. */
2361
2362 static rtx
2363 avoid_likely_spilled_reg (rtx x)
2364 {
2365 rtx new_rtx;
2366
2367 if (REG_P (x)
2368 && HARD_REGISTER_P (x)
2369 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
2370 {
2371 /* Make sure that we generate a REG rather than a CONCAT.
2372 Moves into CONCATs can need nontrivial instructions,
2373 and the whole point of this function is to avoid
2374 using the hard register directly in such a situation. */
2375 generating_concat_p = 0;
2376 new_rtx = gen_reg_rtx (GET_MODE (x));
2377 generating_concat_p = 1;
2378 emit_move_insn (new_rtx, x);
2379 return new_rtx;
2380 }
2381 return x;
2382 }
2383
2384 /* Helper function for expand_call.
2385 	 Return false if EXP is not implementable as a sibling call.  */
2386
2387 static bool
2388 can_implement_as_sibling_call_p (tree exp,
2389 rtx structure_value_addr,
2390 tree funtype,
2391 int reg_parm_stack_space ATTRIBUTE_UNUSED,
2392 tree fndecl,
2393 int flags,
2394 tree addr,
2395 const args_size &args_size)
2396 {
2397 if (!targetm.have_sibcall_epilogue ())
2398 {
2399 maybe_complain_about_tail_call
2400 (exp,
2401 "machine description does not have"
2402 " a sibcall_epilogue instruction pattern");
2403 return false;
2404 }
2405
2406 /* Doing sibling call optimization needs some work, since
2407 structure_value_addr can be allocated on the stack.
2408 It does not seem worth the effort since few optimizable
2409 sibling calls will return a structure. */
2410 if (structure_value_addr != NULL_RTX)
2411 {
2412 maybe_complain_about_tail_call (exp, "callee returns a structure");
2413 return false;
2414 }
2415
2416 #ifdef REG_PARM_STACK_SPACE
2417 	 /* If outgoing reg parm stack space changes, we cannot do a sibcall.  */
2418 if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2419 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
2420 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
2421 {
2422 maybe_complain_about_tail_call (exp,
2423 "inconsistent size of stack space"
2424 " allocated for arguments which are"
2425 " passed in registers");
2426 return false;
2427 }
2428 #endif
2429
2430 /* Check whether the target is able to optimize the call
2431 into a sibcall. */
2432 if (!targetm.function_ok_for_sibcall (fndecl, exp))
2433 {
2434 maybe_complain_about_tail_call (exp,
2435 "target is not able to optimize the"
2436 " call into a sibling call");
2437 return false;
2438 }
2439
2440 /* Functions that do not return exactly once may not be sibcall
2441 optimized. */
2442 if (flags & ECF_RETURNS_TWICE)
2443 {
2444 maybe_complain_about_tail_call (exp, "callee returns twice");
2445 return false;
2446 }
2447 if (flags & ECF_NORETURN)
2448 {
2449 maybe_complain_about_tail_call (exp, "callee does not return");
2450 return false;
2451 }
2452
2453 if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
2454 {
2455 maybe_complain_about_tail_call (exp, "volatile function type");
2456 return false;
2457 }
2458
2459 /* If the called function is nested in the current one, it might access
2460 some of the caller's arguments, but could clobber them beforehand if
2461 the argument areas are shared. */
2462 if (fndecl && decl_function_context (fndecl) == current_function_decl)
2463 {
2464 maybe_complain_about_tail_call (exp, "nested function");
2465 return false;
2466 }
2467
2468 /* If this function requires more stack slots than the current
2469 function, we cannot change it into a sibling call.
2470 crtl->args.pretend_args_size is not part of the
2471 stack allocated by our caller. */
2472 if (args_size.constant > (crtl->args.size - crtl->args.pretend_args_size))
2473 {
2474 maybe_complain_about_tail_call (exp,
2475 "callee required more stack slots"
2476 " than the caller");
2477 return false;
2478 }
2479
2480 /* If the callee pops its own arguments, then it must pop exactly
2481 the same number of arguments as the current function. */
2482 if (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2483 != targetm.calls.return_pops_args (current_function_decl,
2484 TREE_TYPE (current_function_decl),
2485 crtl->args.size))
2486 {
2487 maybe_complain_about_tail_call (exp,
2488 "inconsistent number of"
2489 " popped arguments");
2490 return false;
2491 }
2492
2493 if (!lang_hooks.decls.ok_for_sibcall (fndecl))
2494 {
2495 maybe_complain_about_tail_call (exp, "frontend does not support"
2496 " sibling call");
2497 return false;
2498 }
2499
2500 /* All checks passed. */
2501 return true;
2502 }
2503
2504 /* Generate all the code for a CALL_EXPR exp
2505 and return an rtx for its value.
2506 Store the value in TARGET (specified as an rtx) if convenient.
2507 If the value is stored in TARGET then TARGET is returned.
2508 If IGNORE is nonzero, then we ignore the value of the function call. */
2509
2510 rtx
2511 expand_call (tree exp, rtx target, int ignore)
2512 {
2513 /* Nonzero if we are currently expanding a call. */
2514 static int currently_expanding_call = 0;
2515
2516 /* RTX for the function to be called. */
2517 rtx funexp;
2518 /* Sequence of insns to perform a normal "call". */
2519 rtx_insn *normal_call_insns = NULL;
2520 /* Sequence of insns to perform a tail "call". */
2521 rtx_insn *tail_call_insns = NULL;
2522 /* Data type of the function. */
2523 tree funtype;
2524 tree type_arg_types;
2525 tree rettype;
2526 /* Declaration of the function being called,
2527 or 0 if the function is computed (not known by name). */
2528 tree fndecl = 0;
2529 /* The type of the function being called. */
2530 tree fntype;
2531 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
2532 bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
2533 int pass;
2534
2535 /* Register in which non-BLKmode value will be returned,
2536 or 0 if no value or if value is BLKmode. */
2537 rtx valreg;
2538 /* Register(s) in which bounds are returned. */
2539 rtx valbnd = NULL;
2540 /* Address where we should return a BLKmode value;
2541 0 if value not BLKmode. */
2542 rtx structure_value_addr = 0;
2543 /* Nonzero if that address is being passed by treating it as
2544 an extra, implicit first parameter. Otherwise,
2545 it is passed by being copied directly into struct_value_rtx. */
2546 int structure_value_addr_parm = 0;
2547 /* Holds the value of implicit argument for the struct value. */
2548 tree structure_value_addr_value = NULL_TREE;
2549 /* Size of aggregate value wanted, or zero if none wanted
2550 or if we are using the non-reentrant PCC calling convention
2551 or expecting the value in registers. */
2552 HOST_WIDE_INT struct_value_size = 0;
2553 /* Nonzero if called function returns an aggregate in memory PCC style,
2554 by returning the address of where to find it. */
2555 int pcc_struct_value = 0;
2556 rtx struct_value = 0;
2557
2558 /* Number of actual parameters in this call, including struct value addr. */
2559 int num_actuals;
2560 /* Number of named args. Args after this are anonymous ones
2561 and they must all go on the stack. */
2562 int n_named_args;
2563 /* Number of complex actual arguments that need to be split. */
2564 int num_complex_actuals = 0;
2565
2566 /* Vector of information about each argument.
2567 Arguments are numbered in the order they will be pushed,
2568 not the order they are written. */
2569 struct arg_data *args;
2570
2571 /* Total size in bytes of all the stack-parms scanned so far. */
2572 struct args_size args_size;
2573 struct args_size adjusted_args_size;
2574 /* Size of arguments before any adjustments (such as rounding). */
2575 int unadjusted_args_size;
2576 /* Data on reg parms scanned so far. */
2577 CUMULATIVE_ARGS args_so_far_v;
2578 cumulative_args_t args_so_far;
2579 /* Nonzero if a reg parm has been scanned. */
2580 int reg_parm_seen;
2582
2583 /* Nonzero if we must avoid push-insns in the args for this call.
2584 If stack space is allocated for register parameters, but not by the
2585 caller, then it is preallocated in the fixed part of the stack frame.
2586 So the entire argument block must then be preallocated (i.e., we
2587 ignore PUSH_ROUNDING in that case). */
2588
2589 int must_preallocate = !PUSH_ARGS;
2590
2591 /* Size of the stack reserved for parameter registers. */
2592 int reg_parm_stack_space = 0;
2593
2594 /* Address of space preallocated for stack parms
2595 (on machines that lack push insns), or 0 if space not preallocated. */
2596 rtx argblock = 0;
2597
2598 /* Mask of ECF_ and ERF_ flags. */
2599 int flags = 0;
2600 int return_flags = 0;
2601 #ifdef REG_PARM_STACK_SPACE
2602 /* Define the boundary of the register parm stack space that needs to be
2603 saved, if any. */
2604 int low_to_save, high_to_save;
2605 rtx save_area = 0; /* Place that it is saved */
2606 #endif
2607
2608 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2609 char *initial_stack_usage_map = stack_usage_map;
2610 char *stack_usage_map_buf = NULL;
2611
2612 int old_stack_allocated;
2613
2614 /* State variables to track stack modifications. */
2615 rtx old_stack_level = 0;
2616 int old_stack_arg_under_construction = 0;
2617 int old_pending_adj = 0;
2618 int old_inhibit_defer_pop = inhibit_defer_pop;
2619
2620 /* Some stack pointer alterations we make are performed via
2621 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2622 which we then also need to save/restore along the way. */
2623 int old_stack_pointer_delta = 0;
2624
2625 rtx call_fusage;
2626 tree addr = CALL_EXPR_FN (exp);
2627 int i;
2628 /* The alignment of the stack, in bits. */
2629 unsigned HOST_WIDE_INT preferred_stack_boundary;
2630 /* The alignment of the stack, in bytes. */
2631 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2632 /* The static chain value to use for this call. */
2633 rtx static_chain_value;
2634 /* See if this is "nothrow" function call. */
2635 if (TREE_NOTHROW (exp))
2636 flags |= ECF_NOTHROW;
2637
2638 /* See if we can find a DECL-node for the actual function, and get the
2639 function attributes (flags) from the function decl or type node. */
2640 fndecl = get_callee_fndecl (exp);
2641 if (fndecl)
2642 {
2643 fntype = TREE_TYPE (fndecl);
2644 flags |= flags_from_decl_or_type (fndecl);
2645 return_flags |= decl_return_flags (fndecl);
2646 }
2647 else
2648 {
2649 fntype = TREE_TYPE (TREE_TYPE (addr));
2650 flags |= flags_from_decl_or_type (fntype);
2651 }
2652 rettype = TREE_TYPE (exp);
2653
2654 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2655
2656 /* Warn if this value is an aggregate type,
2657 regardless of which calling convention we are using for it. */
2658 if (AGGREGATE_TYPE_P (rettype))
2659 warning (OPT_Waggregate_return, "function call has aggregate value");
2660
2661 	 /* If the result of a non-looping pure or const function call is
2662 ignored (or void), and none of its arguments are volatile, we can
2663 avoid expanding the call and just evaluate the arguments for
2664 side-effects. */
2665 if ((flags & (ECF_CONST | ECF_PURE))
2666 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2667 && (ignore || target == const0_rtx
2668 || TYPE_MODE (rettype) == VOIDmode))
2669 {
2670 bool volatilep = false;
2671 tree arg;
2672 call_expr_arg_iterator iter;
2673
2674 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2675 if (TREE_THIS_VOLATILE (arg))
2676 {
2677 volatilep = true;
2678 break;
2679 }
2680
2681 if (! volatilep)
2682 {
2683 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2684 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2685 return const0_rtx;
2686 }
2687 }
2688
2689 #ifdef REG_PARM_STACK_SPACE
2690 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2691 #endif
2692
2693 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2694 && reg_parm_stack_space > 0 && PUSH_ARGS)
2695 must_preallocate = 1;
2696
2697 /* Set up a place to return a structure. */
2698
2699 /* Cater to broken compilers. */
2700 if (aggregate_value_p (exp, fntype))
2701 {
2702 /* This call returns a big structure. */
2703 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2704
2705 #ifdef PCC_STATIC_STRUCT_RETURN
2706 {
2707 pcc_struct_value = 1;
2708 }
2709 #else /* not PCC_STATIC_STRUCT_RETURN */
2710 {
2711 struct_value_size = int_size_in_bytes (rettype);
2712
2713 /* Even if it is semantically safe to use the target as the return
2714 slot, it may be not sufficiently aligned for the return type. */
2715 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
2716 && target
2717 && MEM_P (target)
2718 && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
2719 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (rettype),
2720 MEM_ALIGN (target))))
2721 structure_value_addr = XEXP (target, 0);
2722 else
2723 {
2724 /* For variable-sized objects, we must be called with a target
2725 specified. If we were to allocate space on the stack here,
2726 we would have no way of knowing when to free it. */
2727 rtx d = assign_temp (rettype, 1, 1);
2728 structure_value_addr = XEXP (d, 0);
2729 target = 0;
2730 }
2731 }
2732 #endif /* not PCC_STATIC_STRUCT_RETURN */
2733 }
2734
2735 /* Figure out the amount to which the stack should be aligned. */
2736 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2737 if (fndecl)
2738 {
2739 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
2740 /* Without automatic stack alignment, we can't increase preferred
2741 stack boundary. With automatic stack alignment, it is
2742 unnecessary since unless we can guarantee that all callers will
2743 align the outgoing stack properly, callee has to align its
2744 stack anyway. */
2745 if (i
2746 && i->preferred_incoming_stack_boundary
2747 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2748 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2749 }
2750
2751 /* Operand 0 is a pointer-to-function; get the type of the function. */
2752 funtype = TREE_TYPE (addr);
2753 gcc_assert (POINTER_TYPE_P (funtype));
2754 funtype = TREE_TYPE (funtype);
2755
2756 /* Count whether there are actual complex arguments that need to be split
2757 into their real and imaginary parts. Munge the type_arg_types
2758 appropriately here as well. */
2759 if (targetm.calls.split_complex_arg)
2760 {
2761 call_expr_arg_iterator iter;
2762 tree arg;
2763 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2764 {
2765 tree type = TREE_TYPE (arg);
2766 if (type && TREE_CODE (type) == COMPLEX_TYPE
2767 && targetm.calls.split_complex_arg (type))
2768 num_complex_actuals++;
2769 }
2770 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2771 }
2772 else
2773 type_arg_types = TYPE_ARG_TYPES (funtype);
2774
2775 if (flags & ECF_MAY_BE_ALLOCA)
2776 cfun->calls_alloca = 1;
2777
2778 /* If struct_value_rtx is 0, it means pass the address
2779 as if it were an extra parameter. Put the argument expression
2780 in structure_value_addr_value. */
2781 if (structure_value_addr && struct_value == 0)
2782 {
2783 /* If structure_value_addr is a REG other than
2784 	 virtual_outgoing_args_rtx, we can always use it.  If it
2785 is not a REG, we must always copy it into a register.
2786 If it is virtual_outgoing_args_rtx, we must copy it to another
2787 register in some cases. */
2788 rtx temp = (!REG_P (structure_value_addr)
2789 || (ACCUMULATE_OUTGOING_ARGS
2790 && stack_arg_under_construction
2791 && structure_value_addr == virtual_outgoing_args_rtx)
2792 ? copy_addr_to_reg (convert_memory_address
2793 (Pmode, structure_value_addr))
2794 : structure_value_addr);
2795
2796 structure_value_addr_value =
2797 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2798 structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
2799 }
2800
2801 /* Count the arguments and set NUM_ACTUALS. */
2802 num_actuals =
2803 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2804
2805 /* Compute number of named args.
2806 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2807
2808 if (type_arg_types != 0)
2809 n_named_args
2810 = (list_length (type_arg_types)
2811 /* Count the struct value address, if it is passed as a parm. */
2812 + structure_value_addr_parm);
2813 else
2814 /* If we know nothing, treat all args as named. */
2815 n_named_args = num_actuals;
2816
2817 /* Start updating where the next arg would go.
2818
2819 On some machines (such as the PA) indirect calls have a different
2820 calling convention than normal calls. The fourth argument in
2821 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2822 or not. */
2823 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
2824 args_so_far = pack_cumulative_args (&args_so_far_v);
2825
2826 /* Now possibly adjust the number of named args.
2827 Normally, don't include the last named arg if anonymous args follow.
2828 We do include the last named arg if
2829 targetm.calls.strict_argument_naming() returns nonzero.
2830 (If no anonymous args follow, the result of list_length is actually
2831 one too large. This is harmless.)
2832
2833 If targetm.calls.pretend_outgoing_varargs_named() returns
2834 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2835 this machine will be able to place unnamed args that were passed
2836 in registers into the stack. So treat all args as named. This
2837 	 allows the insns emitted for a specific argument list to be
2838 independent of the function declaration.
2839
2840 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2841 we do not have any reliable way to pass unnamed args in
2842 registers, so we must force them into memory. */
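  /* Hypothetical example (not from the original comments): for a variadic
     callee such as int f (const char *, ...) called with three arguments and
     no struct-value parm, the raw count above gives n_named_args == 1; if
     neither hook returns nonzero, the last named arg is dropped
     (n_named_args == 0), whereas a nonzero pretend_outgoing_varargs_named
     makes all three actual arguments count as named.  */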
2843
2844 if (type_arg_types != 0
2845 && targetm.calls.strict_argument_naming (args_so_far))
2846 ;
2847 else if (type_arg_types != 0
2848 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
2849 /* Don't include the last named arg. */
2850 --n_named_args;
2851 else
2852 /* Treat all args as named. */
2853 n_named_args = num_actuals;
2854
2855 /* Make a vector to hold all the information about each arg. */
2856 args = XALLOCAVEC (struct arg_data, num_actuals);
2857 memset (args, 0, num_actuals * sizeof (struct arg_data));
2858
2859 /* Build up entries in the ARGS array, compute the size of the
2860 arguments into ARGS_SIZE, etc. */
2861 initialize_argument_information (num_actuals, args, &args_size,
2862 n_named_args, exp,
2863 structure_value_addr_value, fndecl, fntype,
2864 args_so_far, reg_parm_stack_space,
2865 &old_stack_level, &old_pending_adj,
2866 &must_preallocate, &flags,
2867 &try_tail_call, CALL_FROM_THUNK_P (exp));
2868
2869 if (args_size.var)
2870 must_preallocate = 1;
2871
2872 /* Now make final decision about preallocating stack space. */
2873 must_preallocate = finalize_must_preallocate (must_preallocate,
2874 num_actuals, args,
2875 &args_size);
2876
2877 /* If the structure value address will reference the stack pointer, we
2878 must stabilize it. We don't need to do this if we know that we are
2879 not going to adjust the stack pointer in processing this call. */
2880
2881 if (structure_value_addr
2882 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2883 || reg_mentioned_p (virtual_outgoing_args_rtx,
2884 structure_value_addr))
2885 && (args_size.var
2886 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2887 structure_value_addr = copy_to_reg (structure_value_addr);
2888
2889 /* Tail calls can make things harder to debug, and we've traditionally
2890 pushed these optimizations into -O2. Don't try if we're already
2891 expanding a call, as that means we're an argument. Don't try if
2892 	 there are cleanups, as we know there's code to follow the call.  */
2893
2894 if (currently_expanding_call++ != 0
2895 || !flag_optimize_sibling_calls
2896 || args_size.var
2897 || dbg_cnt (tail_call) == false)
2898 try_tail_call = 0;
2899
2900 /* If the user has marked the function as requiring tail-call
2901 optimization, attempt it. */
2902 if (must_tail_call)
2903 try_tail_call = 1;
2904
2905 	 /* Check the remaining reasons why a tail call optimization would fail.  */
2906 if (try_tail_call)
2907 try_tail_call = can_implement_as_sibling_call_p (exp,
2908 structure_value_addr,
2909 funtype,
2910 reg_parm_stack_space,
2911 fndecl,
2912 flags, addr, args_size);
2913
2914 /* Check if caller and callee disagree in promotion of function
2915 return value. */
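  /* Illustrative scenario (assumed promotion behaviour, not from the
     sources): if the caller's SImode result is promoted to DImode
     sign-extended while the callee's SImode result would be promoted
     zero-extended, the caller cannot simply leave the callee's return
     register untouched, so the tail call is rejected below.  */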
2916 if (try_tail_call)
2917 {
2918 machine_mode caller_mode, caller_promoted_mode;
2919 machine_mode callee_mode, callee_promoted_mode;
2920 int caller_unsignedp, callee_unsignedp;
2921 tree caller_res = DECL_RESULT (current_function_decl);
2922
2923 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2924 caller_mode = DECL_MODE (caller_res);
2925 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2926 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2927 caller_promoted_mode
2928 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2929 &caller_unsignedp,
2930 TREE_TYPE (current_function_decl), 1);
2931 callee_promoted_mode
2932 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
2933 &callee_unsignedp,
2934 funtype, 1);
2935 if (caller_mode != VOIDmode
2936 && (caller_promoted_mode != callee_promoted_mode
2937 || ((caller_mode != caller_promoted_mode
2938 || callee_mode != callee_promoted_mode)
2939 && (caller_unsignedp != callee_unsignedp
2940 || GET_MODE_BITSIZE (caller_mode)
2941 < GET_MODE_BITSIZE (callee_mode)))))
2942 {
2943 try_tail_call = 0;
2944 maybe_complain_about_tail_call (exp,
2945 "caller and callee disagree in"
2946 " promotion of function"
2947 " return value");
2948 }
2949 }
2950
2951 /* Ensure current function's preferred stack boundary is at least
2952 what we need. Stack alignment may also increase preferred stack
2953 boundary. */
2954 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
2955 crtl->preferred_stack_boundary = preferred_stack_boundary;
2956 else
2957 preferred_stack_boundary = crtl->preferred_stack_boundary;
2958
2959 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2960
2961 /* We want to make two insn chains; one for a sibling call, the other
2962 for a normal call. We will select one of the two chains after
2963 initial RTL generation is complete. */
2964 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2965 {
2966 int sibcall_failure = 0;
2967 /* We want to emit any pending stack adjustments before the tail
2968 recursion "call". That way we know any adjustment after the tail
2969 recursion call can be ignored if we indeed use the tail
2970 call expansion. */
2971 saved_pending_stack_adjust save;
2972 rtx_insn *insns, *before_call, *after_args;
2973 rtx next_arg_reg;
2974
2975 if (pass == 0)
2976 {
2977 /* State variables we need to save and restore between
2978 iterations. */
2979 save_pending_stack_adjust (&save);
2980 }
2981 if (pass)
2982 flags &= ~ECF_SIBCALL;
2983 else
2984 flags |= ECF_SIBCALL;
2985
2986 /* Other state variables that we must reinitialize each time
2987 through the loop (that are not initialized by the loop itself). */
2988 argblock = 0;
2989 call_fusage = 0;
2990
2991 /* Start a new sequence for the normal call case.
2992
2993 From this point on, if the sibling call fails, we want to set
2994 sibcall_failure instead of continuing the loop. */
2995 start_sequence ();
2996
2997 /* Don't let pending stack adjusts add up to too much.
2998 Also, do all pending adjustments now if there is any chance
2999 this might be a call to alloca or if we are expanding a sibling
3000 call sequence.
3001 Also do the adjustments before a throwing call, otherwise
3002 exception handling can fail; PR 19225. */
3003 if (pending_stack_adjust >= 32
3004 || (pending_stack_adjust > 0
3005 && (flags & ECF_MAY_BE_ALLOCA))
3006 || (pending_stack_adjust > 0
3007 && flag_exceptions && !(flags & ECF_NOTHROW))
3008 || pass == 0)
3009 do_pending_stack_adjust ();
3010
3011 /* Precompute any arguments as needed. */
3012 if (pass)
3013 precompute_arguments (num_actuals, args);
3014
3015 /* Now we are about to start emitting insns that can be deleted
3016 if a libcall is deleted. */
3017 if (pass && (flags & ECF_MALLOC))
3018 start_sequence ();
3019
3020 if (pass == 0 && crtl->stack_protect_guard)
3021 stack_protect_epilogue ();
3022
3023 adjusted_args_size = args_size;
3024 /* Compute the actual size of the argument block required. The variable
3025 and constant sizes must be combined, the size may have to be rounded,
3026 and there may be a minimum required size. When generating a sibcall
3027 pattern, do not round up, since we'll be re-using whatever space our
3028 caller provided. */
3029 unadjusted_args_size
3030 = compute_argument_block_size (reg_parm_stack_space,
3031 &adjusted_args_size,
3032 fndecl, fntype,
3033 (pass == 0 ? 0
3034 : preferred_stack_boundary));
3035
3036 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3037
3038 /* The argument block when performing a sibling call is the
3039 incoming argument block. */
3040 if (pass == 0)
3041 {
3042 argblock = crtl->args.internal_arg_pointer;
3043 if (STACK_GROWS_DOWNWARD)
3044 argblock
3045 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3046 else
3047 argblock
3048 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3049
3050 stored_args_map = sbitmap_alloc (args_size.constant);
3051 bitmap_clear (stored_args_map);
3052 }
3053
3054 /* If we have no actual push instructions, or shouldn't use them,
3055 make space for all args right now. */
3056 else if (adjusted_args_size.var != 0)
3057 {
3058 if (old_stack_level == 0)
3059 {
3060 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3061 old_stack_pointer_delta = stack_pointer_delta;
3062 old_pending_adj = pending_stack_adjust;
3063 pending_stack_adjust = 0;
3064 /* stack_arg_under_construction says whether a stack arg is
3065 being constructed at the old stack level. Pushing the stack
3066 gets a clean outgoing argument block. */
3067 old_stack_arg_under_construction = stack_arg_under_construction;
3068 stack_arg_under_construction = 0;
3069 }
3070 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
3071 if (flag_stack_usage_info)
3072 current_function_has_unbounded_dynamic_stack_size = 1;
3073 }
3074 else
3075 {
3076 /* Note that we must go through the motions of allocating an argument
3077 block even if the size is zero because we may be storing args
3078 in the area reserved for register arguments, which may be part of
3079 the stack frame. */
3080
3081 int needed = adjusted_args_size.constant;
3082
3083 /* Store the maximum argument space used. It will be pushed by
3084 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3085 checking). */
3086
3087 if (needed > crtl->outgoing_args_size)
3088 crtl->outgoing_args_size = needed;
3089
3090 if (must_preallocate)
3091 {
3092 if (ACCUMULATE_OUTGOING_ARGS)
3093 {
3094 /* Since the stack pointer will never be pushed, it is
3095 possible for the evaluation of a parm to clobber
3096 something we have already written to the stack.
3097 Since most function calls on RISC machines do not use
3098 the stack, this is uncommon, but must work correctly.
3099
3100 Therefore, we save any area of the stack that was already
3101 written and that we are using. Here we set up to do this
3102 by making a new stack usage map from the old one. The
3103 actual save will be done by store_one_arg.
3104
3105 Another approach might be to try to reorder the argument
3106 evaluations to avoid this conflicting stack usage. */
3107
3108 /* Since we will be writing into the entire argument area,
3109 the map must be allocated for its entire size, not just
3110 the part that is the responsibility of the caller. */
3111 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3112 needed += reg_parm_stack_space;
3113
3114 if (ARGS_GROW_DOWNWARD)
3115 highest_outgoing_arg_in_use
3116 = MAX (initial_highest_arg_in_use, needed + 1);
3117 else
3118 highest_outgoing_arg_in_use
3119 = MAX (initial_highest_arg_in_use, needed);
3120
3121 free (stack_usage_map_buf);
3122 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3123 stack_usage_map = stack_usage_map_buf;
3124
3125 if (initial_highest_arg_in_use)
3126 memcpy (stack_usage_map, initial_stack_usage_map,
3127 initial_highest_arg_in_use);
3128
3129 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3130 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3131 (highest_outgoing_arg_in_use
3132 - initial_highest_arg_in_use));
3133 needed = 0;
3134
3135 /* The address of the outgoing argument list must not be
3136 copied to a register here, because argblock would be left
3137 pointing to the wrong place after the call to
3138 allocate_dynamic_stack_space below. */
3139
3140 argblock = virtual_outgoing_args_rtx;
3141 }
3142 else
3143 {
3144 if (inhibit_defer_pop == 0)
3145 {
3146 /* Try to reuse some or all of the pending_stack_adjust
3147 to get this space. */
3148 needed
3149 = (combine_pending_stack_adjustment_and_call
3150 (unadjusted_args_size,
3151 &adjusted_args_size,
3152 preferred_unit_stack_boundary));
3153
3154 /* combine_pending_stack_adjustment_and_call computes
3155 an adjustment before the arguments are allocated.
3156 Account for them and see whether or not the stack
3157 needs to go up or down. */
3158 needed = unadjusted_args_size - needed;
3159
3160 if (needed < 0)
3161 {
3162 /* We're releasing stack space. */
3163 /* ??? We can avoid any adjustment at all if we're
3164 already aligned. FIXME. */
3165 pending_stack_adjust = -needed;
3166 do_pending_stack_adjust ();
3167 needed = 0;
3168 }
3169 else
3170 /* We need to allocate space. We'll do that in
3171 push_block below. */
3172 pending_stack_adjust = 0;
3173 }
3174
3175 /* Special case this because overhead of `push_block' in
3176 this case is non-trivial. */
3177 if (needed == 0)
3178 argblock = virtual_outgoing_args_rtx;
3179 else
3180 {
3181 argblock = push_block (GEN_INT (needed), 0, 0);
3182 if (ARGS_GROW_DOWNWARD)
3183 argblock = plus_constant (Pmode, argblock, needed);
3184 }
3185
3186 /* We only really need to call `copy_to_reg' in the case
3187 where push insns are going to be used to pass ARGBLOCK
3188 to a function call in ARGS. In that case, the stack
3189 pointer changes value from the allocation point to the
3190 call point, and hence the value of
3191 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
3192 as well always do it. */
3193 argblock = copy_to_reg (argblock);
3194 }
3195 }
3196 }
3197
3198 if (ACCUMULATE_OUTGOING_ARGS)
3199 {
3200 /* The save/restore code in store_one_arg handles all
3201 cases except one: a constructor call (including a C
3202 function returning a BLKmode struct) to initialize
3203 an argument. */
3204 if (stack_arg_under_construction)
3205 {
3206 rtx push_size
3207 = GEN_INT (adjusted_args_size.constant
3208 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
3209 : TREE_TYPE (fndecl))) ? 0
3210 : reg_parm_stack_space));
3211 if (old_stack_level == 0)
3212 {
3213 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3214 old_stack_pointer_delta = stack_pointer_delta;
3215 old_pending_adj = pending_stack_adjust;
3216 pending_stack_adjust = 0;
3217 /* stack_arg_under_construction says whether a stack
3218 arg is being constructed at the old stack level.
3219 Pushing the stack gets a clean outgoing argument
3220 block. */
3221 old_stack_arg_under_construction
3222 = stack_arg_under_construction;
3223 stack_arg_under_construction = 0;
3224 /* Make a new map for the new argument list. */
3225 free (stack_usage_map_buf);
3226 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
3227 stack_usage_map = stack_usage_map_buf;
3228 highest_outgoing_arg_in_use = 0;
3229 }
3230 /* We can pass TRUE as the 4th argument because we just
3231 saved the stack pointer and will restore it right after
3232 the call. */
3233 allocate_dynamic_stack_space (push_size, 0,
3234 BIGGEST_ALIGNMENT, true);
3235 }
3236
3237 /* If argument evaluation might modify the stack pointer,
3238 copy the address of the argument list to a register. */
3239 for (i = 0; i < num_actuals; i++)
3240 if (args[i].pass_on_stack)
3241 {
3242 argblock = copy_addr_to_reg (argblock);
3243 break;
3244 }
3245 }
3246
3247 compute_argument_addresses (args, argblock, num_actuals);
3248
3249 /* Stack is properly aligned, pops can't safely be deferred during
3250 the evaluation of the arguments. */
3251 NO_DEFER_POP;
3252
3253 /* Precompute all register parameters. It isn't safe to compute
3254 anything once we have started filling any specific hard regs.
3255 TLS symbols sometimes need a call to resolve. Precompute
3256 register parameters before any stack pointer manipulation
3257 to avoid unaligned stack in the called function. */
3258 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3259
3260 OK_DEFER_POP;
3261
3262 /* Perform stack alignment before the first push (the last arg). */
3263 if (argblock == 0
3264 && adjusted_args_size.constant > reg_parm_stack_space
3265 && adjusted_args_size.constant != unadjusted_args_size)
3266 {
3267 /* When the stack adjustment is pending, we get better code
3268 by combining the adjustments. */
3269 if (pending_stack_adjust
3270 && ! inhibit_defer_pop)
3271 {
3272 pending_stack_adjust
3273 = (combine_pending_stack_adjustment_and_call
3274 (unadjusted_args_size,
3275 &adjusted_args_size,
3276 preferred_unit_stack_boundary));
3277 do_pending_stack_adjust ();
3278 }
3279 else if (argblock == 0)
3280 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3281 - unadjusted_args_size));
3282 }
3283 /* Now that the stack is properly aligned, pops can't safely
3284 be deferred during the evaluation of the arguments. */
3285 NO_DEFER_POP;
3286
3287 /* Record the maximum pushed stack space size. We need to delay
3288 doing it this far to take into account the optimization done
3289 by combine_pending_stack_adjustment_and_call. */
3290 if (flag_stack_usage_info
3291 && !ACCUMULATE_OUTGOING_ARGS
3292 && pass
3293 && adjusted_args_size.var == 0)
3294 {
3295 int pushed = adjusted_args_size.constant + pending_stack_adjust;
3296 if (pushed > current_function_pushed_stack_size)
3297 current_function_pushed_stack_size = pushed;
3298 }
3299
3300 funexp = rtx_for_function_call (fndecl, addr);
3301
3302 if (CALL_EXPR_STATIC_CHAIN (exp))
3303 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
3304 else
3305 static_chain_value = 0;
3306
3307 #ifdef REG_PARM_STACK_SPACE
3308 /* Save the fixed argument area if it's part of the caller's frame and
3309 is clobbered by argument setup for this call. */
3310 if (ACCUMULATE_OUTGOING_ARGS && pass)
3311 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3312 &low_to_save, &high_to_save);
3313 #endif
3314
3315 /* Now store (and compute if necessary) all non-register parms.
3316 These come before register parms, since they can require block-moves,
3317 which could clobber the registers used for register parms.
3318 Parms which have partial registers are not stored here,
3319 but we do preallocate space here if they want that. */
3320
3321 for (i = 0; i < num_actuals; i++)
3322 {
3323 /* Delay bounds until all other args are stored. */
3324 if (POINTER_BOUNDS_P (args[i].tree_value))
3325 continue;
3326 else if (args[i].reg == 0 || args[i].pass_on_stack)
3327 {
3328 rtx_insn *before_arg = get_last_insn ();
3329
3330 /* We don't allow passing huge (> 2^30 B) arguments
3331 by value. It would cause an overflow later on. */
3332 if (adjusted_args_size.constant
3333 >= (1 << (HOST_BITS_PER_INT - 2)))
3334 {
3335 sorry ("passing too large argument on stack");
3336 continue;
3337 }
3338
3339 if (store_one_arg (&args[i], argblock, flags,
3340 adjusted_args_size.var != 0,
3341 reg_parm_stack_space)
3342 || (pass == 0
3343 && check_sibcall_argument_overlap (before_arg,
3344 &args[i], 1)))
3345 sibcall_failure = 1;
3346 }
3347
3348 if (args[i].stack)
3349 call_fusage
3350 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3351 gen_rtx_USE (VOIDmode, args[i].stack),
3352 call_fusage);
3353 }
3354
3355 /* If we have a parm that is passed in registers but not in memory
3356 and whose alignment does not permit a direct copy into registers,
3357 make a group of pseudos that correspond to each register that we
3358 will later fill. */
3359 if (STRICT_ALIGNMENT)
3360 store_unaligned_arguments_into_pseudos (args, num_actuals);
3361
3362 /* Now store any partially-in-registers parm.
3363 This is the last place a block-move can happen. */
3364 if (reg_parm_seen)
3365 for (i = 0; i < num_actuals; i++)
3366 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3367 {
3368 rtx_insn *before_arg = get_last_insn ();
3369
3370 /* On targets with weird calling conventions (e.g. PA) it's
3371 hard to ensure that all cases of argument overlap between
3372 stack and registers work. Play it safe and bail out. */
3373 if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
3374 {
3375 sibcall_failure = 1;
3376 break;
3377 }
3378
3379 if (store_one_arg (&args[i], argblock, flags,
3380 adjusted_args_size.var != 0,
3381 reg_parm_stack_space)
3382 || (pass == 0
3383 && check_sibcall_argument_overlap (before_arg,
3384 &args[i], 1)))
3385 sibcall_failure = 1;
3386 }
3387
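/* Notify the target, via the call_args hook, of each hard register that
   will carry an argument for this call; if no arguments are passed in
   registers, invoke the hook once with pc_rtx instead.  */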
3388 bool any_regs = false;
3389 for (i = 0; i < num_actuals; i++)
3390 if (args[i].reg != NULL_RTX)
3391 {
3392 any_regs = true;
3393 targetm.calls.call_args (args[i].reg, funtype);
3394 }
3395 if (!any_regs)
3396 targetm.calls.call_args (pc_rtx, funtype);
3397
3398 /* Figure out the register where the value, if any, will come back. */
3399 valreg = 0;
3400 valbnd = 0;
3401 if (TYPE_MODE (rettype) != VOIDmode
3402 && ! structure_value_addr)
3403 {
3404 if (pcc_struct_value)
3405 {
3406 valreg = hard_function_value (build_pointer_type (rettype),
3407 fndecl, NULL, (pass == 0));
3408 if (CALL_WITH_BOUNDS_P (exp))
3409 valbnd = targetm.calls.
3410 chkp_function_value_bounds (build_pointer_type (rettype),
3411 fndecl, (pass == 0));
3412 }
3413 else
3414 {
3415 valreg = hard_function_value (rettype, fndecl, fntype,
3416 (pass == 0));
3417 if (CALL_WITH_BOUNDS_P (exp))
3418 valbnd = targetm.calls.chkp_function_value_bounds (rettype,
3419 fndecl,
3420 (pass == 0));
3421 }
3422
3423 /* If VALREG is a PARALLEL whose first member has a zero
3424 offset, use that. This is for targets such as m68k that
3425 return the same value in multiple places. */
3426 if (GET_CODE (valreg) == PARALLEL)
3427 {
3428 rtx elem = XVECEXP (valreg, 0, 0);
3429 rtx where = XEXP (elem, 0);
3430 rtx offset = XEXP (elem, 1);
3431 if (offset == const0_rtx
3432 && GET_MODE (where) == GET_MODE (valreg))
3433 valreg = where;
3434 }
3435 }
3436
3437 /* Store all bounds not passed in registers. */
3438 for (i = 0; i < num_actuals; i++)
3439 {
3440 if (POINTER_BOUNDS_P (args[i].tree_value)
3441 && !args[i].reg)
3442 store_bounds (&args[i],
3443 args[i].pointer_arg == -1
3444 ? NULL
3445 : &args[args[i].pointer_arg]);
3446 }
3447
3448 /* If register arguments require space on the stack and stack space
3449 was not preallocated, allocate stack space here for arguments
3450 passed in registers. */
3451 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3452 && !ACCUMULATE_OUTGOING_ARGS
3453 && must_preallocate == 0 && reg_parm_stack_space > 0)
3454 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3455
3456 /* Pass the function the address in which to return a
3457 structure value. */
3458 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3459 {
3460 structure_value_addr
3461 = convert_memory_address (Pmode, structure_value_addr);
3462 emit_move_insn (struct_value,
3463 force_reg (Pmode,
3464 force_operand (structure_value_addr,
3465 NULL_RTX)));
3466
3467 if (REG_P (struct_value))
3468 use_reg (&call_fusage, struct_value);
3469 }
3470
3471 after_args = get_last_insn ();
3472 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
3473 static_chain_value, &call_fusage,
3474 reg_parm_seen, pass == 0);
3475
3476 load_register_parameters (args, num_actuals, &call_fusage, flags,
3477 pass == 0, &sibcall_failure);
3478
3479 /* Save a pointer to the last insn before the call, so that we can
3480 later safely search backwards to find the CALL_INSN. */
3481 before_call = get_last_insn ();
3482
3483 /* Set up next argument register. For sibling calls on machines
3484 with register windows this should be the incoming register. */
3485 if (pass == 0)
3486 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
3487 VOIDmode,
3488 void_type_node,
3489 true);
3490 else
3491 next_arg_reg = targetm.calls.function_arg (args_so_far,
3492 VOIDmode, void_type_node,
3493 true);
3494
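/* If the callee is known to return one of its arguments (ERF_RETURNS_ARG)
   and that argument is passed in a register of the same mode as VALREG,
   record the equivalence in CALL_FUSAGE.  */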
3495 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
3496 {
3497 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
3498 arg_nr = num_actuals - arg_nr - 1;
3499 if (arg_nr >= 0
3500 && arg_nr < num_actuals
3501 && args[arg_nr].reg
3502 && valreg
3503 && REG_P (valreg)
3504 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
3505 call_fusage
3506 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
3507 gen_rtx_SET (valreg, args[arg_nr].reg),
3508 call_fusage);
3509 }
3510 /* All arguments and registers used for the call must be set up by
3511 now! */
3512
3513 /* Stack must be properly aligned now. */
3514 gcc_assert (!pass
3515 || !(stack_pointer_delta % preferred_unit_stack_boundary));
3516
3517 /* Generate the actual call instruction. */
3518 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
3519 adjusted_args_size.constant, struct_value_size,
3520 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3521 flags, args_so_far);
3522
3523 if (flag_ipa_ra)
3524 {
3525 rtx_call_insn *last;
3526 rtx datum = NULL_RTX;
3527 if (fndecl != NULL_TREE)
3528 {
3529 datum = XEXP (DECL_RTL (fndecl), 0);
3530 gcc_assert (datum != NULL_RTX
3531 && GET_CODE (datum) == SYMBOL_REF);
3532 }
3533 last = last_call_insn ();
3534 add_reg_note (last, REG_CALL_DECL, datum);
3535 }
3536
3537 /* If the call setup or the call itself overlaps with anything
3538 of the argument setup we probably clobbered our call address.
3539 In that case we can't do sibcalls. */
3540 if (pass == 0
3541 && check_sibcall_argument_overlap (after_args, 0, 0))
3542 sibcall_failure = 1;
3543
3544 /* If a non-BLKmode value is returned at the most significant end
3545 of a register, shift the register right by the appropriate amount
3546 and update VALREG accordingly. BLKmode values are handled by the
3547 group load/store machinery below. */
3548 if (!structure_value_addr
3549 && !pcc_struct_value
3550 && TYPE_MODE (rettype) != VOIDmode
3551 && TYPE_MODE (rettype) != BLKmode
3552 && REG_P (valreg)
3553 && targetm.calls.return_in_msb (rettype))
3554 {
3555 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
3556 sibcall_failure = 1;
3557 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
3558 }
3559
3560 if (pass && (flags & ECF_MALLOC))
3561 {
3562 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3563 rtx_insn *last, *insns;
3564
3565 /* The return value from a malloc-like function is a pointer. */
3566 if (TREE_CODE (rettype) == POINTER_TYPE)
3567 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
3568
3569 emit_move_insn (temp, valreg);
3570
3571 /* The return value from a malloc-like function cannot alias
3572 anything else. */
3573 last = get_last_insn ();
3574 add_reg_note (last, REG_NOALIAS, temp);
3575
3576 /* Write out the sequence. */
3577 insns = get_insns ();
3578 end_sequence ();
3579 emit_insn (insns);
3580 valreg = temp;
3581 }
3582
3583 /* For calls to `setjmp', etc., inform
3584 function.c:setjmp_warnings that it should complain if
3585 nonvolatile values are live. For functions that cannot
3586 return, inform flow that control does not fall through. */
3587
3588 if ((flags & ECF_NORETURN) || pass == 0)
3589 {
3590 /* The barrier must be emitted
3591 immediately after the CALL_INSN. Some ports emit more
3592 than just a CALL_INSN above, so we must search for it here. */
3593
3594 rtx_insn *last = get_last_insn ();
3595 while (!CALL_P (last))
3596 {
3597 last = PREV_INSN (last);
3598 /* There was no CALL_INSN? */
3599 gcc_assert (last != before_call);
3600 }
3601
3602 emit_barrier_after (last);
3603
3604 /* Stack adjustments after a noreturn call are dead code.
3605 However when NO_DEFER_POP is in effect, we must preserve
3606 stack_pointer_delta. */
3607 if (inhibit_defer_pop == 0)
3608 {
3609 stack_pointer_delta = old_stack_allocated;
3610 pending_stack_adjust = 0;
3611 }
3612 }
3613
3614 /* If value type not void, return an rtx for the value. */
3615
3616 if (TYPE_MODE (rettype) == VOIDmode
3617 || ignore)
3618 target = const0_rtx;
3619 else if (structure_value_addr)
3620 {
3621 if (target == 0 || !MEM_P (target))
3622 {
3623 target
3624 = gen_rtx_MEM (TYPE_MODE (rettype),
3625 memory_address (TYPE_MODE (rettype),
3626 structure_value_addr));
3627 set_mem_attributes (target, rettype, 1);
3628 }
3629 }
3630 else if (pcc_struct_value)
3631 {
3632 /* This is the special C++ case where we need to
3633 know what the true target was. We take care to
3634 never use this value more than once in one expression. */
3635 target = gen_rtx_MEM (TYPE_MODE (rettype),
3636 copy_to_reg (valreg));
3637 set_mem_attributes (target, rettype, 1);
3638 }
3639 /* Handle calls that return values in multiple non-contiguous locations.
3640 The Irix 6 ABI has examples of this. */
3641 else if (GET_CODE (valreg) == PARALLEL)
3642 {
3643 if (target == 0)
3644 target = emit_group_move_into_temps (valreg);
3645 else if (rtx_equal_p (target, valreg))
3646 ;
3647 else if (GET_CODE (target) == PARALLEL)
3648 /* Handle the result of an emit_group_move_into_temps
3649 call in the previous pass. */
3650 emit_group_move (target, valreg);
3651 else
3652 emit_group_store (target, valreg, rettype,
3653 int_size_in_bytes (rettype));
3654 }
3655 else if (target
3656 && GET_MODE (target) == TYPE_MODE (rettype)
3657 && GET_MODE (target) == GET_MODE (valreg))
3658 {
3659 bool may_overlap = false;
3660
3661 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3662 reg to a plain register. */
3663 if (!REG_P (target) || HARD_REGISTER_P (target))
3664 valreg = avoid_likely_spilled_reg (valreg);
3665
3666 /* If TARGET is a MEM in the argument area, and we have
3667 saved part of the argument area, then we can't store
3668 directly into TARGET as it may get overwritten when we
3669 restore the argument save area below. Don't work too
3670 hard though and simply force TARGET to a register if it
3671 is a MEM; the optimizer is quite likely to sort it out. */
3672 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3673 for (i = 0; i < num_actuals; i++)
3674 if (args[i].save_area)
3675 {
3676 may_overlap = true;
3677 break;
3678 }
3679
3680 if (may_overlap)
3681 target = copy_to_reg (valreg);
3682 else
3683 {
3684 /* TARGET and VALREG cannot be equal at this point
3685 because the latter would not have
3686 REG_FUNCTION_VALUE_P true, while the former would if
3687 it were referring to the same register.
3688
3689 If they refer to the same register, this move will be
3690 a no-op, except when function inlining is being
3691 done. */
3692 emit_move_insn (target, valreg);
3693
3694 /* If we are setting a MEM, this code must be executed.
3695 Since it is emitted after the call insn, sibcall
3696 optimization cannot be performed in that case. */
3697 if (MEM_P (target))
3698 sibcall_failure = 1;
3699 }
3700 }
3701 else
3702 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3703
3704 /* If we promoted this return value, make the proper SUBREG.
3705 TARGET might be const0_rtx here, so be careful. */
3706 if (REG_P (target)
3707 && TYPE_MODE (rettype) != BLKmode
3708 && GET_MODE (target) != TYPE_MODE (rettype))
3709 {
3710 tree type = rettype;
3711 int unsignedp = TYPE_UNSIGNED (type);
3712 int offset = 0;
3713 machine_mode pmode;
3714
3715 /* Ensure we promote as expected, and get the new unsignedness. */
3716 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3717 funtype, 1);
3718 gcc_assert (GET_MODE (target) == pmode);
3719
3720 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3721 && (GET_MODE_SIZE (GET_MODE (target))
3722 > GET_MODE_SIZE (TYPE_MODE (type))))
3723 {
3724 offset = GET_MODE_SIZE (GET_MODE (target))
3725 - GET_MODE_SIZE (TYPE_MODE (type));
3726 if (! BYTES_BIG_ENDIAN)
3727 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3728 else if (! WORDS_BIG_ENDIAN)
3729 offset %= UNITS_PER_WORD;
3730 }
3731
3732 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3733 SUBREG_PROMOTED_VAR_P (target) = 1;
3734 SUBREG_PROMOTED_SET (target, unsignedp);
3735 }
3736
3737 /* If size of args is variable or this was a constructor call for a stack
3738 argument, restore saved stack-pointer value. */
3739
3740 if (old_stack_level)
3741 {
3742 rtx_insn *prev = get_last_insn ();
3743
3744 emit_stack_restore (SAVE_BLOCK, old_stack_level);
3745 stack_pointer_delta = old_stack_pointer_delta;
3746
3747 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
3748
3749 pending_stack_adjust = old_pending_adj;
3750 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3751 stack_arg_under_construction = old_stack_arg_under_construction;
3752 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3753 stack_usage_map = initial_stack_usage_map;
3754 sibcall_failure = 1;
3755 }
3756 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3757 {
3758 #ifdef REG_PARM_STACK_SPACE
3759 if (save_area)
3760 restore_fixed_argument_area (save_area, argblock,
3761 high_to_save, low_to_save);
3762 #endif
3763
3764 /* If we saved any argument areas, restore them. */
3765 for (i = 0; i < num_actuals; i++)
3766 if (args[i].save_area)
3767 {
3768 machine_mode save_mode = GET_MODE (args[i].save_area);
3769 rtx stack_area
3770 = gen_rtx_MEM (save_mode,
3771 memory_address (save_mode,
3772 XEXP (args[i].stack_slot, 0)));
3773
3774 if (save_mode != BLKmode)
3775 emit_move_insn (stack_area, args[i].save_area);
3776 else
3777 emit_block_move (stack_area, args[i].save_area,
3778 GEN_INT (args[i].locate.size.constant),
3779 BLOCK_OP_CALL_PARM);
3780 }
3781
3782 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3783 stack_usage_map = initial_stack_usage_map;
3784 }
3785
3786 /* If this was alloca, record the new stack level. */
3787 if (flags & ECF_MAY_BE_ALLOCA)
3788 record_new_stack_level ();
3789
3790 /* Free up storage we no longer need. */
3791 for (i = 0; i < num_actuals; ++i)
3792 free (args[i].aligned_regs);
3793
3794 targetm.calls.end_call_args ();
3795
3796 insns = get_insns ();
3797 end_sequence ();
3798
3799 if (pass == 0)
3800 {
3801 tail_call_insns = insns;
3802
3803 /* Restore the pending stack adjustment now that we have
3804 finished generating the sibling call sequence. */
3805
3806 restore_pending_stack_adjust (&save);
3807
3808 /* Prepare arg structure for next iteration. */
3809 for (i = 0; i < num_actuals; i++)
3810 {
3811 args[i].value = 0;
3812 args[i].aligned_regs = 0;
3813 args[i].stack = 0;
3814 }
3815
3816 sbitmap_free (stored_args_map);
3817 internal_arg_pointer_exp_state.scan_start = NULL;
3818 internal_arg_pointer_exp_state.cache.release ();
3819 }
3820 else
3821 {
3822 normal_call_insns = insns;
3823
3824 /* Verify that we've deallocated all the stack we used. */
3825 gcc_assert ((flags & ECF_NORETURN)
3826 || (old_stack_allocated
3827 == stack_pointer_delta - pending_stack_adjust));
3828 }
3829
3830 /* If something prevents making this a sibling call,
3831 zero out the sequence. */
3832 if (sibcall_failure)
3833 tail_call_insns = NULL;
3834 else
3835 break;
3836 }
3837
3838 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3839 arguments too, as argument area is now clobbered by the call. */
3840 if (tail_call_insns)
3841 {
3842 emit_insn (tail_call_insns);
3843 crtl->tail_call_emit = true;
3844 }
3845 else
3846 {
3847 emit_insn (normal_call_insns);
3848 if (try_tail_call)
3849 /* Ideally we'd emit a message for all of the ways that it could
3850 have failed. */
3851 maybe_complain_about_tail_call (exp, "tail call production failed");
3852 }
3853
3854 currently_expanding_call--;
3855
3856 free (stack_usage_map_buf);
3857
3858 /* Join result with returned bounds so caller may use them if needed. */
3859 target = chkp_join_splitted_slot (target, valbnd);
3860
3861 return target;
3862 }
3863
3864 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3865 this function's incoming arguments.
3866
3867 At the start of RTL generation we know the only REG_EQUIV notes
3868 in the rtl chain are those for incoming arguments, so we can look
3869 for REG_EQUIV notes between the start of the function and the
3870 NOTE_INSN_FUNCTION_BEG.
3871
3872 This is (slight) overkill. We could keep track of the highest
3873 argument we clobber and be more selective in removing notes, but it
3874 does not seem to be worth the effort. */
3875
3876 void
3877 fixup_tail_calls (void)
3878 {
3879 rtx_insn *insn;
3880
3881 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3882 {
3883 rtx note;
3884
3885 /* There are never REG_EQUIV notes for the incoming arguments
3886 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3887 if (NOTE_P (insn)
3888 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3889 break;
3890
3891 note = find_reg_note (insn, REG_EQUIV, 0);
3892 if (note)
3893 remove_note (insn, note);
3894 note = find_reg_note (insn, REG_EQUIV, 0);
3895 gcc_assert (!note);
3896 }
3897 }
3898
3899 /* Traverse a list of TYPES and expand all complex types into their
3900 components. */
3901 static tree
3902 split_complex_types (tree types)
3903 {
3904 tree p;
3905
3906 /* Before allocating memory, check for the common case of no complex types. */
3907 for (p = types; p; p = TREE_CHAIN (p))
3908 {
3909 tree type = TREE_VALUE (p);
3910 if (TREE_CODE (type) == COMPLEX_TYPE
3911 && targetm.calls.split_complex_arg (type))
3912 goto found;
3913 }
3914 return types;
3915
3916 found:
3917 types = copy_list (types);
3918
3919 for (p = types; p; p = TREE_CHAIN (p))
3920 {
3921 tree complex_type = TREE_VALUE (p);
3922
3923 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3924 && targetm.calls.split_complex_arg (complex_type))
3925 {
3926 tree next, imag;
3927
3928 /* Rewrite complex type with component type. */
3929 TREE_VALUE (p) = TREE_TYPE (complex_type);
3930 next = TREE_CHAIN (p);
3931
3932 /* Add another component type for the imaginary part. */
3933 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3934 TREE_CHAIN (p) = imag;
3935 TREE_CHAIN (imag) = next;
3936
3937 /* Skip the newly created node. */
3938 p = TREE_CHAIN (p);
3939 }
3940 }
3941
3942 return types;
3943 }
3944 \f
3945 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3946 The RETVAL parameter specifies whether the return value needs to be saved;
3947 the other parameters are documented in the emit_library_call function below. */
3948
3949 static rtx
3950 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3951 enum libcall_type fn_type,
3952 machine_mode outmode, int nargs, va_list p)
3953 {
3954 /* Total size in bytes of all the stack-parms scanned so far. */
3955 struct args_size args_size;
3956 /* Size of arguments before any adjustments (such as rounding). */
3957 struct args_size original_args_size;
3958 int argnum;
3959 rtx fun;
3960 /* TODO: choose the correct decl type of orgfun. Sadly this information
3961 isn't present here, so we default to the native calling ABI. */
3962 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3963 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
3964 int count;
3965 rtx argblock = 0;
3966 CUMULATIVE_ARGS args_so_far_v;
3967 cumulative_args_t args_so_far;
3968 struct arg
3969 {
3970 rtx value;
3971 machine_mode mode;
3972 rtx reg;
3973 int partial;
3974 struct locate_and_pad_arg_data locate;
3975 rtx save_area;
3976 };
3977 struct arg *argvec;
3978 int old_inhibit_defer_pop = inhibit_defer_pop;
3979 rtx call_fusage = 0;
3980 rtx mem_value = 0;
3981 rtx valreg;
3982 int pcc_struct_value = 0;
3983 int struct_value_size = 0;
3984 int flags;
3985 int reg_parm_stack_space = 0;
3986 int needed;
3987 rtx_insn *before_call;
3988 bool have_push_fusage;
3989 tree tfom; /* type_for_mode (outmode, 0) */
3990
3991 #ifdef REG_PARM_STACK_SPACE
3992 /* Define the boundary of the register parm stack space that needs to be
3993 saved, if any. */
3994 int low_to_save = 0, high_to_save = 0;
3995 rtx save_area = 0; /* Place that it is saved. */
3996 #endif
3997
3998 /* Size of the stack reserved for parameter registers. */
3999 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
4000 char *initial_stack_usage_map = stack_usage_map;
4001 char *stack_usage_map_buf = NULL;
4002
4003 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4004
4005 #ifdef REG_PARM_STACK_SPACE
4006 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
4007 #endif
4008
4009 /* By default, library functions cannot throw. */
4010 flags = ECF_NOTHROW;
4011
4012 switch (fn_type)
4013 {
4014 case LCT_NORMAL:
4015 break;
4016 case LCT_CONST:
4017 flags |= ECF_CONST;
4018 break;
4019 case LCT_PURE:
4020 flags |= ECF_PURE;
4021 break;
4022 case LCT_NORETURN:
4023 flags |= ECF_NORETURN;
4024 break;
4025 case LCT_THROW:
4026 flags &= ~ECF_NOTHROW;
4027 break;
4028 case LCT_RETURNS_TWICE:
4029 flags = ECF_RETURNS_TWICE;
4030 break;
4031 }
4032 fun = orgfun;
4033
4034 /* Ensure current function's preferred stack boundary is at least
4035 what we need. */
4036 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4037 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4038
4039 /* If this kind of value comes back in memory,
4040 decide where in memory it should come back. */
4041 if (outmode != VOIDmode)
4042 {
4043 tfom = lang_hooks.types.type_for_mode (outmode, 0);
4044 if (aggregate_value_p (tfom, 0))
4045 {
4046 #ifdef PCC_STATIC_STRUCT_RETURN
4047 rtx pointer_reg
4048 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
4049 mem_value = gen_rtx_MEM (outmode, pointer_reg);
4050 pcc_struct_value = 1;
4051 if (value == 0)
4052 value = gen_reg_rtx (outmode);
4053 #else /* not PCC_STATIC_STRUCT_RETURN */
4054 struct_value_size = GET_MODE_SIZE (outmode);
4055 if (value != 0 && MEM_P (value))
4056 mem_value = value;
4057 else
4058 mem_value = assign_temp (tfom, 1, 1);
4059 #endif
4060 /* This call returns a big structure. */
4061 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
4062 }
4063 }
4064 else
4065 tfom = void_type_node;
4066
4067 /* ??? Unfinished: must pass the memory address as an argument. */
4068
4069 /* Copy all the libcall-arguments out of the varargs data
4070 and into a vector ARGVEC.
4071
4072 Compute how to pass each argument. We only support a very small subset
4073 of the full argument passing conventions to limit complexity here since
4074 library functions shouldn't have many args. */
4075
4076 argvec = XALLOCAVEC (struct arg, nargs + 1);
4077 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
4078
4079 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
4080 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
4081 #else
4082 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
4083 #endif
4084 args_so_far = pack_cumulative_args (&args_so_far_v);
4085
4086 args_size.constant = 0;
4087 args_size.var = 0;
4088
4089 count = 0;
4090
4091 push_temp_slots ();
4092
4093 /* If there's a structure value address to be passed,
4094 either pass it in the special place, or pass it as an extra argument. */
4095 if (mem_value && struct_value == 0 && ! pcc_struct_value)
4096 {
4097 rtx addr = XEXP (mem_value, 0);
4098
4099 nargs++;
4100
4101 /* Make sure it is a reasonable operand for a move or push insn. */
4102 if (!REG_P (addr) && !MEM_P (addr)
4103 && !(CONSTANT_P (addr)
4104 && targetm.legitimate_constant_p (Pmode, addr)))
4105 addr = force_operand (addr, NULL_RTX);
4106
4107 argvec[count].value = addr;
4108 argvec[count].mode = Pmode;
4109 argvec[count].partial = 0;
4110
4111 argvec[count].reg = targetm.calls.function_arg (args_so_far,
4112 Pmode, NULL_TREE, true);
4113 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
4114 NULL_TREE, 1) == 0);
4115
4116 locate_and_pad_parm (Pmode, NULL_TREE,
4117 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4118 1,
4119 #else
4120 argvec[count].reg != 0,
4121 #endif
4122 reg_parm_stack_space, 0,
4123 NULL_TREE, &args_size, &argvec[count].locate);
4124
4125 if (argvec[count].reg == 0 || argvec[count].partial != 0
4126 || reg_parm_stack_space > 0)
4127 args_size.constant += argvec[count].locate.size.constant;
4128
4129 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
4130
4131 count++;
4132 }
4133
4134 for (; count < nargs; count++)
4135 {
4136 rtx val = va_arg (p, rtx);
4137 machine_mode mode = (machine_mode) va_arg (p, int);
4138 int unsigned_p = 0;
4139
4140 /* We cannot convert the arg value to the mode the library wants here;
4141 must do it earlier where we know the signedness of the arg. */
4142 gcc_assert (mode != BLKmode
4143 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
4144
4145 /* Make sure it is a reasonable operand for a move or push insn. */
4146 if (!REG_P (val) && !MEM_P (val)
4147 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
4148 val = force_operand (val, NULL_RTX);
4149
4150 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
4151 {
4152 rtx slot;
4153 int must_copy
4154 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
4155
4156 /* If this was a CONST function, it is now PURE since it now
4157 reads memory. */
4158 if (flags & ECF_CONST)
4159 {
4160 flags &= ~ECF_CONST;
4161 flags |= ECF_PURE;
4162 }
4163
4164 if (MEM_P (val) && !must_copy)
4165 {
4166 tree val_expr = MEM_EXPR (val);
4167 if (val_expr)
4168 mark_addressable (val_expr);
4169 slot = val;
4170 }
4171 else
4172 {
4173 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
4174 1, 1);
4175 emit_move_insn (slot, val);
4176 }
4177
4178 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4179 gen_rtx_USE (VOIDmode, slot),
4180 call_fusage);
4181 if (must_copy)
4182 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4183 gen_rtx_CLOBBER (VOIDmode,
4184 slot),
4185 call_fusage);
4186
4187 mode = Pmode;
4188 val = force_operand (XEXP (slot, 0), NULL_RTX);
4189 }
4190
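/* Promote the argument's mode as the target directs and convert the
   value to that mode before recording how it will be passed.  */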
4191 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
4192 argvec[count].mode = mode;
4193 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
4194 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
4195 NULL_TREE, true);
4196
4197 argvec[count].partial
4198 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
4199
4200 if (argvec[count].reg == 0
4201 || argvec[count].partial != 0
4202 || reg_parm_stack_space > 0)
4203 {
4204 locate_and_pad_parm (mode, NULL_TREE,
4205 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4206 1,
4207 #else
4208 argvec[count].reg != 0,
4209 #endif
4210 reg_parm_stack_space, argvec[count].partial,
4211 NULL_TREE, &args_size, &argvec[count].locate);
4212 args_size.constant += argvec[count].locate.size.constant;
4213 gcc_assert (!argvec[count].locate.size.var);
4214 }
4215 #ifdef BLOCK_REG_PADDING
4216 else
4217 /* The argument is passed entirely in registers. See at which
4218 end it should be padded. */
4219 argvec[count].locate.where_pad =
4220 BLOCK_REG_PADDING (mode, NULL_TREE,
4221 GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
4222 #endif
4223
4224 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
4225 }
4226
4227 /* If this machine requires an external definition for library
4228 functions, write one out. */
4229 assemble_external_libcall (fun);
4230
4231 original_args_size = args_size;
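/* Round the outgoing argument size so that, combined with the current
   stack pointer offset, it is a multiple of the preferred stack
   boundary in bytes.  */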
4232 args_size.constant = (((args_size.constant
4233 + stack_pointer_delta
4234 + STACK_BYTES - 1)
4235 / STACK_BYTES
4236 * STACK_BYTES)
4237 - stack_pointer_delta);
4238
4239 args_size.constant = MAX (args_size.constant,
4240 reg_parm_stack_space);
4241
4242 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4243 args_size.constant -= reg_parm_stack_space;
4244
4245 if (args_size.constant > crtl->outgoing_args_size)
4246 crtl->outgoing_args_size = args_size.constant;
4247
4248 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
4249 {
4250 int pushed = args_size.constant + pending_stack_adjust;
4251 if (pushed > current_function_pushed_stack_size)
4252 current_function_pushed_stack_size = pushed;
4253 }
4254
4255 if (ACCUMULATE_OUTGOING_ARGS)
4256 {
4257 /* Since the stack pointer will never be pushed, it is possible for
4258 the evaluation of a parm to clobber something we have already
4259 written to the stack. Since most function calls on RISC machines
4260 do not use the stack, this is uncommon, but must work correctly.
4261
4262 Therefore, we save any area of the stack that was already written
4263 and that we are using. Here we set up to do this by making a new
4264 stack usage map from the old one.
4265
4266 Another approach might be to try to reorder the argument
4267 evaluations to avoid this conflicting stack usage. */
4268
4269 needed = args_size.constant;
4270
4271 /* Since we will be writing into the entire argument area, the
4272 map must be allocated for its entire size, not just the part that
4273 is the responsibility of the caller. */
4274 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4275 needed += reg_parm_stack_space;
4276
4277 if (ARGS_GROW_DOWNWARD)
4278 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4279 needed + 1);
4280 else
4281 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
4282
4283 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4284 stack_usage_map = stack_usage_map_buf;
4285
4286 if (initial_highest_arg_in_use)
4287 memcpy (stack_usage_map, initial_stack_usage_map,
4288 initial_highest_arg_in_use);
4289
4290 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
4291 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4292 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
4293 needed = 0;
4294
4295 /* We must be careful to use virtual regs before they're instantiated,
4296 and real regs afterwards. Loop optimization, for example, can create
4297 new libcalls after we've instantiated the virtual regs, and if we
4298 use virtuals anyway, they won't match the rtl patterns. */
4299
4300 if (virtuals_instantiated)
4301 argblock = plus_constant (Pmode, stack_pointer_rtx,
4302 STACK_POINTER_OFFSET);
4303 else
4304 argblock = virtual_outgoing_args_rtx;
4305 }
4306 else
4307 {
4308 if (!PUSH_ARGS)
4309 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
4310 }
4311
4312 /* We push args individually in reverse order, perform stack alignment
4313 before the first push (the last arg). */
4314 if (argblock == 0)
4315 anti_adjust_stack (GEN_INT (args_size.constant
4316 - original_args_size.constant));
4317
4318 argnum = nargs - 1;
4319
4320 #ifdef REG_PARM_STACK_SPACE
4321 if (ACCUMULATE_OUTGOING_ARGS)
4322 {
4323 /* The argument list is the property of the called routine and it
4324 may clobber it. If the fixed area has been used for previous
4325 parameters, we must save and restore it. */
4326 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4327 &low_to_save, &high_to_save);
4328 }
4329 #endif
4330
4331 /* When expanding a normal call, args are stored in push order,
4332 which is the reverse of what we have here. */
4333 bool any_regs = false;
4334 for (int i = nargs; i-- > 0; )
4335 if (argvec[i].reg != NULL_RTX)
4336 {
4337 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
4338 any_regs = true;
4339 }
4340 if (!any_regs)
4341 targetm.calls.call_args (pc_rtx, NULL_TREE);
4342
4343 /* Push the args that need to be pushed. */
4344
4345 have_push_fusage = false;
4346
4347 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4348 are to be pushed. */
4349 for (count = 0; count < nargs; count++, argnum--)
4350 {
4351 machine_mode mode = argvec[argnum].mode;
4352 rtx val = argvec[argnum].value;
4353 rtx reg = argvec[argnum].reg;
4354 int partial = argvec[argnum].partial;
4355 unsigned int parm_align = argvec[argnum].locate.boundary;
4356 int lower_bound = 0, upper_bound = 0, i;
4357
4358 if (! (reg != 0 && partial == 0))
4359 {
4360 rtx use;
4361
4362 if (ACCUMULATE_OUTGOING_ARGS)
4363 {
4364 /* If this is being stored into a pre-allocated, fixed-size,
4365 stack area, save any previous data at that location. */
4366
4367 if (ARGS_GROW_DOWNWARD)
4368 {
4369 /* stack_slot is negative, but we want to index stack_usage_map
4370 with positive values. */
4371 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
4372 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4373 }
4374 else
4375 {
4376 lower_bound = argvec[argnum].locate.slot_offset.constant;
4377 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4378 }
4379
4380 i = lower_bound;
4381 /* Don't worry about things in the fixed argument area;
4382 it has already been saved. */
4383 if (i < reg_parm_stack_space)
4384 i = reg_parm_stack_space;
4385 while (i < upper_bound && stack_usage_map[i] == 0)
4386 i++;
4387
4388 if (i < upper_bound)
4389 {
4390 /* We need to make a save area. */
4391 unsigned int size
4392 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4393 machine_mode save_mode
4394 = mode_for_size (size, MODE_INT, 1);
4395 rtx adr
4396 = plus_constant (Pmode, argblock,
4397 argvec[argnum].locate.offset.constant);
4398 rtx stack_area
4399 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4400
4401 if (save_mode == BLKmode)
4402 {
4403 argvec[argnum].save_area
4404 = assign_stack_temp (BLKmode,
4405 argvec[argnum].locate.size.constant
4406 );
4407
4408 emit_block_move (validize_mem
4409 (copy_rtx (argvec[argnum].save_area)),
4410 stack_area,
4411 GEN_INT (argvec[argnum].locate.size.constant),
4412 BLOCK_OP_CALL_PARM);
4413 }
4414 else
4415 {
4416 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4417
4418 emit_move_insn (argvec[argnum].save_area, stack_area);
4419 }
4420 }
4421 }
4422
4423 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
4424 partial, reg, 0, argblock,
4425 GEN_INT (argvec[argnum].locate.offset.constant),
4426 reg_parm_stack_space,
4427 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
4428
4429 /* Now mark the segment we just used. */
4430 if (ACCUMULATE_OUTGOING_ARGS)
4431 for (i = lower_bound; i < upper_bound; i++)
4432 stack_usage_map[i] = 1;
4433
4434 NO_DEFER_POP;
4435
4436 /* Indicate argument access so that alias.c knows that these
4437 values are live. */
4438 if (argblock)
4439 use = plus_constant (Pmode, argblock,
4440 argvec[argnum].locate.offset.constant);
4441 else if (have_push_fusage)
4442 continue;
4443 else
4444 {
4445 /* When arguments are pushed, trying to tell alias.c where
4446 exactly this argument is won't work, because the
4447 auto-increment causes confusion. So we merely indicate
4448 that we access something with a known mode somewhere on
4449 the stack. */
4450 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4451 gen_rtx_SCRATCH (Pmode));
4452 have_push_fusage = true;
4453 }
4454 use = gen_rtx_MEM (argvec[argnum].mode, use);
4455 use = gen_rtx_USE (VOIDmode, use);
4456 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
4457 }
4458 }
4459
4460 argnum = nargs - 1;
4461
4462 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
4463
4464 /* Now load any reg parms into their regs. */
4465
4466 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4467 are to be pushed. */
4468 for (count = 0; count < nargs; count++, argnum--)
4469 {
4470 machine_mode mode = argvec[argnum].mode;
4471 rtx val = argvec[argnum].value;
4472 rtx reg = argvec[argnum].reg;
4473 int partial = argvec[argnum].partial;
4474 #ifdef BLOCK_REG_PADDING
4475 int size = 0;
4476 #endif
4477
4478 /* Handle calls that pass values in multiple non-contiguous
4479 locations. The PA64 has examples of this for library calls. */
4480 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4481 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
4482 else if (reg != 0 && partial == 0)
4483 {
4484 emit_move_insn (reg, val);
4485 #ifdef BLOCK_REG_PADDING
4486 size = GET_MODE_SIZE (argvec[argnum].mode);
4487
4488 /* Copied from load_register_parameters. */
4489
4490 /* Handle the case where we have a value that needs shifting
4491 up to the msb, e.g. a QImode value being padded
4492 upward on a BYTES_BIG_ENDIAN machine. */
4493 if (size < UNITS_PER_WORD
4494 && (argvec[argnum].locate.where_pad
4495 == (BYTES_BIG_ENDIAN ? upward : downward)))
4496 {
4497 rtx x;
4498 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4499
4500 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4501 report the whole reg as used. Strictly speaking, the
4502 call only uses SIZE bytes at the msb end, but it doesn't
4503 seem worth generating rtl to say that. */
4504 reg = gen_rtx_REG (word_mode, REGNO (reg));
4505 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4506 if (x != reg)
4507 emit_move_insn (reg, x);
4508 }
4509 #endif
4510 }
4511
4512 NO_DEFER_POP;
4513 }
4514
4515 /* Any regs containing parms remain in use through the call. */
4516 for (count = 0; count < nargs; count++)
4517 {
4518 rtx reg = argvec[count].reg;
4519 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4520 use_group_regs (&call_fusage, reg);
4521 else if (reg != 0)
4522 {
4523 int partial = argvec[count].partial;
4524 if (partial)
4525 {
4526 int nregs;
4527 gcc_assert (partial % UNITS_PER_WORD == 0);
4528 nregs = partial / UNITS_PER_WORD;
4529 use_regs (&call_fusage, REGNO (reg), nregs);
4530 }
4531 else
4532 use_reg (&call_fusage, reg);
4533 }
4534 }
4535
4536 /* Pass the function the address in which to return a structure value. */
4537 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4538 {
4539 emit_move_insn (struct_value,
4540 force_reg (Pmode,
4541 force_operand (XEXP (mem_value, 0),
4542 NULL_RTX)));
4543 if (REG_P (struct_value))
4544 use_reg (&call_fusage, struct_value);
4545 }
4546
4547 /* Don't allow popping to be deferred, since then
4548 cse'ing of library calls could delete a call and leave the pop. */
4549 NO_DEFER_POP;
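/* Determine the hard register, if any, in which the library call's
   value will be returned.  */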
4550 valreg = (mem_value == 0 && outmode != VOIDmode
4551 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
4552
4553 /* Stack must be properly aligned now. */
4554 gcc_assert (!(stack_pointer_delta
4555 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
4556
4557 before_call = get_last_insn ();
4558
4559 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4560 will set inhibit_defer_pop to that value. */
4561 /* The return type is needed to decide how many bytes the function pops.
4562 Signedness plays no role in that, so for simplicity, we pretend it's
4563 always signed. We also assume that the list of arguments passed has
4564 no impact, so we pretend it is unknown. */
4565
4566 emit_call_1 (fun, NULL,
4567 get_identifier (XSTR (orgfun, 0)),
4568 build_function_type (tfom, NULL_TREE),
4569 original_args_size.constant, args_size.constant,
4570 struct_value_size,
4571 targetm.calls.function_arg (args_so_far,
4572 VOIDmode, void_type_node, true),
4573 valreg,
4574 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
4575
4576 if (flag_ipa_ra)
4577 {
4578 rtx datum = orgfun;
4579 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
4580 rtx_call_insn *last = last_call_insn ();
4581 add_reg_note (last, REG_CALL_DECL, datum);
4582 }
4583
4584 /* Right-shift returned value if necessary. */
4585 if (!pcc_struct_value
4586 && TYPE_MODE (tfom) != BLKmode
4587 && targetm.calls.return_in_msb (tfom))
4588 {
4589 shift_return_value (TYPE_MODE (tfom), false, valreg);
4590 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4591 }
4592
4593 targetm.calls.end_call_args ();
4594
4595 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
4596 that it should complain if nonvolatile values are live. For
4597 functions that cannot return, inform flow that control does not
4598 fall through. */
4599 if (flags & ECF_NORETURN)
4600 {
4601 /* The barrier note must be emitted
4602 immediately after the CALL_INSN. Some ports emit more than
4603 just a CALL_INSN above, so we must search for it here. */
4604 rtx_insn *last = get_last_insn ();
4605 while (!CALL_P (last))
4606 {
4607 last = PREV_INSN (last);
4608 /* There was no CALL_INSN? */
4609 gcc_assert (last != before_call);
4610 }
4611
4612 emit_barrier_after (last);
4613 }
4614
4615 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
4616 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
4617 if (flags & ECF_NOTHROW)
4618 {
4619 rtx_insn *last = get_last_insn ();
4620 while (!CALL_P (last))
4621 {
4622 last = PREV_INSN (last);
4623 /* There was no CALL_INSN? */
4624 gcc_assert (last != before_call);
4625 }
4626
4627 make_reg_eh_region_note_nothrow_nononlocal (last);
4628 }
4629
4630 /* Now restore inhibit_defer_pop to its actual original value. */
4631 OK_DEFER_POP;
4632
4633 pop_temp_slots ();
4634
4635 /* Copy the value to the right place. */
4636 if (outmode != VOIDmode && retval)
4637 {
4638 if (mem_value)
4639 {
4640 if (value == 0)
4641 value = mem_value;
4642 if (value != mem_value)
4643 emit_move_insn (value, mem_value);
4644 }
4645 else if (GET_CODE (valreg) == PARALLEL)
4646 {
4647 if (value == 0)
4648 value = gen_reg_rtx (outmode);
4649 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4650 }
4651 else
4652 {
4653 /* Convert to the proper mode if a promotion has been active. */
4654 if (GET_MODE (valreg) != outmode)
4655 {
4656 int unsignedp = TYPE_UNSIGNED (tfom);
4657
4658 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4659 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4660 == GET_MODE (valreg));
4661 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4662 }
4663
4664 if (value != 0)
4665 emit_move_insn (value, valreg);
4666 else
4667 value = valreg;
4668 }
4669 }
4670
4671 if (ACCUMULATE_OUTGOING_ARGS)
4672 {
4673 #ifdef REG_PARM_STACK_SPACE
4674 if (save_area)
4675 restore_fixed_argument_area (save_area, argblock,
4676 high_to_save, low_to_save);
4677 #endif
4678
4679 /* If we saved any argument areas, restore them. */
4680 for (count = 0; count < nargs; count++)
4681 if (argvec[count].save_area)
4682 {
4683 machine_mode save_mode = GET_MODE (argvec[count].save_area);
4684 rtx adr = plus_constant (Pmode, argblock,
4685 argvec[count].locate.offset.constant);
4686 rtx stack_area = gen_rtx_MEM (save_mode,
4687 memory_address (save_mode, adr));
4688
4689 if (save_mode == BLKmode)
4690 emit_block_move (stack_area,
4691 validize_mem
4692 (copy_rtx (argvec[count].save_area)),
4693 GEN_INT (argvec[count].locate.size.constant),
4694 BLOCK_OP_CALL_PARM);
4695 else
4696 emit_move_insn (stack_area, argvec[count].save_area);
4697 }
4698
4699 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4700 stack_usage_map = initial_stack_usage_map;
4701 }
4702
4703 free (stack_usage_map_buf);
4704
4705 return value;
4706
4707 }
4708 \f
4709 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4710 (emitting the queue unless NO_QUEUE is nonzero),
4711 for a value of mode OUTMODE,
4712 with NARGS different arguments, passed as alternating rtx values
4713 and machine_modes to convert them to.
4714
4715 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4716 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
4717 other types of library calls. */
4718
4719 void
4720 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4721 machine_mode outmode, int nargs, ...)
4722 {
4723 va_list p;
4724
4725 va_start (p, nargs);
4726 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4727 va_end (p);
4728 }
4729 \f
4730 /* Like emit_library_call except that an extra argument, VALUE,
4731 comes second and says where to store the result.
4732 (If VALUE is zero, this function chooses a convenient way
4733 to return the value.
4734
4735 This function returns an rtx for where the value is to be found.
4736 If VALUE is nonzero, VALUE is returned. */
4737
4738 rtx
4739 emit_library_call_value (rtx orgfun, rtx value,
4740 enum libcall_type fn_type,
4741 machine_mode outmode, int nargs, ...)
4742 {
4743 rtx result;
4744 va_list p;
4745
4746 va_start (p, nargs);
4747 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4748 nargs, p);
4749 va_end (p);
4750
4751 return result;
4752 }
4753 \f
4754
4755 /* Store pointer bounds argument ARG into Bounds Table entry
4756 associated with PARM. */
4757 static void
4758 store_bounds (struct arg_data *arg, struct arg_data *parm)
4759 {
4760 rtx slot = NULL, ptr = NULL, addr = NULL;
4761
4762 /* We may pass bounds not associated with any pointer. */
4763 if (!parm)
4764 {
4765 gcc_assert (arg->special_slot);
4766 slot = arg->special_slot;
4767 ptr = const0_rtx;
4768 }
4769 /* Find pointer associated with bounds and where it is
4770 passed. */
4771 else
4772 {
4773 if (!parm->reg)
4774 {
4775 gcc_assert (!arg->special_slot);
4776
4777 addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
4778 }
4779 else if (REG_P (parm->reg))
4780 {
4781 gcc_assert (arg->special_slot);
4782 slot = arg->special_slot;
4783
4784 if (MEM_P (parm->value))
4785 addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
4786 else if (REG_P (parm->value))
4787 ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
4788 else
4789 {
4790 gcc_assert (!arg->pointer_offset);
4791 ptr = parm->value;
4792 }
4793 }
4794 else
4795 {
4796 gcc_assert (GET_CODE (parm->reg) == PARALLEL);
4797
4798 gcc_assert (arg->special_slot);
4799 slot = arg->special_slot;
4800
4801 if (parm->parallel_value)
4802 ptr = chkp_get_value_with_offs (parm->parallel_value,
4803 GEN_INT (arg->pointer_offset));
4804 else
4805 gcc_unreachable ();
4806 }
4807 }
4808
4809 /* Expand bounds. */
4810 if (!arg->value)
4811 arg->value = expand_normal (arg->tree_value);
4812
4813 targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
4814 }
4815
4816 /* Store a single argument for a function call
4817 into the register or memory area where it must be passed.
4818 *ARG describes the argument value and where to pass it.
4819
4820 ARGBLOCK is the address of the stack-block for all the arguments,
4821 or 0 on a machine where arguments are pushed individually.
4822
4823 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4824 so we must be careful about how the stack is used.
4825
4826 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4827 argument stack. When ACCUMULATE_OUTGOING_ARGS is set, it indicates
4828 that we need not worry about saving and restoring the stack.
4829
4830 FNDECL is the declaration of the function we are calling.
4831
4832 Return nonzero if this arg should cause sibcall failure,
4833 zero otherwise. */
4834
4835 static int
4836 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4837 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4838 {
4839 tree pval = arg->tree_value;
4840 rtx reg = 0;
4841 int partial = 0;
4842 int used = 0;
4843 int i, lower_bound = 0, upper_bound = 0;
4844 int sibcall_failure = 0;
4845
4846 if (TREE_CODE (pval) == ERROR_MARK)
4847 return 1;
4848
4849 /* Push a new temporary level for any temporaries we make for
4850 this argument. */
4851 push_temp_slots ();
4852
4853 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4854 {
4855 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4856 save any previous data at that location. */
4857 if (argblock && ! variable_size && arg->stack)
4858 {
4859 if (ARGS_GROW_DOWNWARD)
4860 {
4861 /* stack_slot is negative, but we want to index stack_usage_map
4862 with positive values. */
4863 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4864 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4865 else
4866 upper_bound = 0;
4867
4868 lower_bound = upper_bound - arg->locate.size.constant;
4869 }
4870 else
4871 {
4872 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4873 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4874 else
4875 lower_bound = 0;
4876
4877 upper_bound = lower_bound + arg->locate.size.constant;
4878 }
4879
4880 i = lower_bound;
4881 /* Don't worry about things in the fixed argument area;
4882 it has already been saved. */
4883 if (i < reg_parm_stack_space)
4884 i = reg_parm_stack_space;
4885 while (i < upper_bound && stack_usage_map[i] == 0)
4886 i++;
4887
4888 if (i < upper_bound)
4889 {
4890 /* We need to make a save area. */
4891 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4892 machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4893 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4894 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4895
4896 if (save_mode == BLKmode)
4897 {
4898 arg->save_area
4899 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
4900 preserve_temp_slots (arg->save_area);
4901 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
4902 stack_area,
4903 GEN_INT (arg->locate.size.constant),
4904 BLOCK_OP_CALL_PARM);
4905 }
4906 else
4907 {
4908 arg->save_area = gen_reg_rtx (save_mode);
4909 emit_move_insn (arg->save_area, stack_area);
4910 }
4911 }
4912 }
4913 }
4914
4915 /* If this isn't going to be placed on both the stack and in registers,
4916 set up the register and number of words. */
4917 if (! arg->pass_on_stack)
4918 {
4919 if (flags & ECF_SIBCALL)
4920 reg = arg->tail_call_reg;
4921 else
4922 reg = arg->reg;
4923 partial = arg->partial;
4924 }
4925
4926 /* Being passed entirely in a register. We shouldn't be called in
4927 this case. */
4928 gcc_assert (reg == 0 || partial != 0);
4929
4930 /* If this arg needs special alignment, don't load the registers
4931 here. */
4932 if (arg->n_aligned_regs != 0)
4933 reg = 0;
4934
4935 /* If this is being passed partially in a register, we can't evaluate
4936 it directly into its stack slot. Otherwise, we can. */
4937 if (arg->value == 0)
4938 {
4939 /* stack_arg_under_construction is nonzero if a function argument is
4940 being evaluated directly into the outgoing argument list and
4941 expand_call must take special action to preserve the argument list
4942 if it is called recursively.
4943
4944 For scalar function arguments stack_usage_map is sufficient to
4945 determine which stack slots must be saved and restored. Scalar
4946 arguments in general have pass_on_stack == 0.
4947
4948 If this argument is initialized by a function which takes the
4949 address of the argument (a C++ constructor or a C function
4950 returning a BLKmode structure), then stack_usage_map is
4951 insufficient and expand_call must push the stack around the
4952 function call. Such arguments have pass_on_stack == 1.
4953
4954 Note that it is always safe to set stack_arg_under_construction,
4955 but this generates suboptimal code if set when not needed. */
4956
4957 if (arg->pass_on_stack)
4958 stack_arg_under_construction++;
4959
4960 arg->value = expand_expr (pval,
4961 (partial
4962 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4963 ? NULL_RTX : arg->stack,
4964 VOIDmode, EXPAND_STACK_PARM);
4965
4966 /* If the mode doesn't agree (because we are promoting the object, or
4967 for any other reason), convert the mode. */
4968
4969 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4970 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4971 arg->value, arg->unsignedp);
4972
4973 if (arg->pass_on_stack)
4974 stack_arg_under_construction--;
4975 }
4976
4977 /* Check for overlap with already clobbered argument area. */
4978 if ((flags & ECF_SIBCALL)
4979 && MEM_P (arg->value)
4980 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4981 arg->locate.size.constant))
4982 sibcall_failure = 1;
4983
4984 /* Don't allow anything left on stack from computation
4985 of argument to alloca. */
4986 if (flags & ECF_MAY_BE_ALLOCA)
4987 do_pending_stack_adjust ();
4988
4989 if (arg->value == arg->stack)
4990 /* If the value is already in the stack slot, we are done. */
4991 ;
4992 else if (arg->mode != BLKmode)
4993 {
4994 int size;
4995 unsigned int parm_align;
4996
4997 /* Argument is a scalar, not entirely passed in registers.
4998 (If part is passed in registers, arg->partial says how much
4999 and emit_push_insn will take care of putting it there.)
5000
5001 Push it, and if its size is less than the
5002 amount of space allocated to it,
5003 also bump stack pointer by the additional space.
5004 Note that in C the default argument promotions
5005 will prevent such mismatches. */
5006
5007 size = GET_MODE_SIZE (arg->mode);
5008 /* Compute how much space the push instruction will push.
5009 On many machines, pushing a byte will advance the stack
5010 pointer by a halfword. */
5011 #ifdef PUSH_ROUNDING
5012 size = PUSH_ROUNDING (size);
5013 #endif
5014 used = size;
5015
5016 /* Compute how much space the argument should get:
5017 round up to a multiple of the alignment for arguments. */
5018 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
5019 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
5020 / (PARM_BOUNDARY / BITS_PER_UNIT))
5021 * (PARM_BOUNDARY / BITS_PER_UNIT));
5022
5023 /* Compute the alignment of the pushed argument. */
5024 parm_align = arg->locate.boundary;
5025 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
5026 {
5027 int pad = used - size;
5028 if (pad)
5029 {
5030 unsigned int pad_align = least_bit_hwi (pad) * BITS_PER_UNIT;
5031 parm_align = MIN (parm_align, pad_align);
5032 }
5033 }
5034
5035 /* This isn't already where we want it on the stack, so put it there.
5036 This can either be done with push or copy insns. */
5037 if (!emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
5038 parm_align, partial, reg, used - size, argblock,
5039 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
5040 ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
5041 sibcall_failure = 1;
5042
5043 /* Unless this is a partially-in-register argument, the argument is now
5044 in the stack. */
5045 if (partial == 0)
5046 arg->value = arg->stack;
5047 }
5048 else
5049 {
5050 /* BLKmode, at least partly to be pushed. */
5051
5052 unsigned int parm_align;
5053 int excess;
5054 rtx size_rtx;
5055
5056 /* Pushing a nonscalar.
5057 If part is passed in registers, PARTIAL says how much
5058 and emit_push_insn will take care of putting it there. */
5059
5060 /* Round its size up to a multiple
5061 of the allocation unit for arguments. */
5062
5063 if (arg->locate.size.var != 0)
5064 {
5065 excess = 0;
5066 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
5067 }
5068 else
5069 {
5070 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5071 for BLKmode is careful to avoid it. */
5072 excess = (arg->locate.size.constant
5073 - int_size_in_bytes (TREE_TYPE (pval))
5074 + partial);
5075 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
5076 NULL_RTX, TYPE_MODE (sizetype),
5077 EXPAND_NORMAL);
5078 }
5079
5080 parm_align = arg->locate.boundary;
5081
5082 /* When an argument is padded down, the block is aligned to
5083 PARM_BOUNDARY, but the actual argument isn't. */
5084 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
5085 {
5086 if (arg->locate.size.var)
5087 parm_align = BITS_PER_UNIT;
5088 else if (excess)
5089 {
5090 unsigned int excess_align = least_bit_hwi (excess) * BITS_PER_UNIT;
5091 parm_align = MIN (parm_align, excess_align);
5092 }
5093 }
5094
5095 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
5096 {
5097 /* emit_push_insn might not work properly if arg->value and
5098 argblock + arg->locate.offset areas overlap. */
5099 rtx x = arg->value;
5100 int i = 0;
5101
5102 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
5103 || (GET_CODE (XEXP (x, 0)) == PLUS
5104 && XEXP (XEXP (x, 0), 0) ==
5105 crtl->args.internal_arg_pointer
5106 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
5107 {
5108 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
5109 i = INTVAL (XEXP (XEXP (x, 0), 1));
5110
5111 /* arg->locate doesn't contain the pretend_args_size offset;
5112 it's part of argblock. Ensure we don't count it in I. */
5113 if (STACK_GROWS_DOWNWARD)
5114 i -= crtl->args.pretend_args_size;
5115 else
5116 i += crtl->args.pretend_args_size;
5117
5118 /* expand_call should ensure this. */
5119 gcc_assert (!arg->locate.offset.var
5120 && arg->locate.size.var == 0
5121 && CONST_INT_P (size_rtx));
5122
5123 if (arg->locate.offset.constant > i)
5124 {
5125 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
5126 sibcall_failure = 1;
5127 }
5128 else if (arg->locate.offset.constant < i)
5129 {
5130 /* Use arg->locate.size.constant instead of size_rtx
5131 because we only care about the part of the argument
5132 on the stack. */
5133 if (i < (arg->locate.offset.constant
5134 + arg->locate.size.constant))
5135 sibcall_failure = 1;
5136 }
5137 else
5138 {
5139 /* Even though they appear to be at the same location,
5140 if part of the outgoing argument is in registers,
5141 they aren't really at the same location. Check for
5142 this by making sure that the incoming size is the
5143 same as the outgoing size. */
5144 if (arg->locate.size.constant != INTVAL (size_rtx))
5145 sibcall_failure = 1;
5146 }
5147 }
5148 }
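/* To make the overlap test above concrete (offsets invented for
   illustration): if the incoming copy of the argument sits at bytes
   [16, 32) from argblock (I == 16, INTVAL (size_rtx) == 16) and the
   outgoing slot is [8, 24) (offset 8, arg->locate.size.constant == 16),
   the two regions overlap without coinciding, so emit_push_insn could
   clobber part of its own source while copying; sibcall_failure is set
   and a normal call is emitted instead.  */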
5149
5150 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5151 parm_align, partial, reg, excess, argblock,
5152 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
5153 ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
5154
5155 /* Unless this is a partially-in-register argument, the argument is now
5156 in the stack.
5157
5158 ??? Unlike the case above, in which we want the actual
5159 address of the data, so that we can load it directly into a
5160 register, here we want the address of the stack slot, so that
5161 it's properly aligned for word-by-word copying or something
5162 like that. It's not clear that this is always correct. */
5163 if (partial == 0)
5164 arg->value = arg->stack_slot;
5165 }
5166
5167 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5168 {
5169 tree type = TREE_TYPE (arg->tree_value);
5170 arg->parallel_value
5171 = emit_group_load_into_temps (arg->reg, arg->value, type,
5172 int_size_in_bytes (type));
5173 }
5174
5175 /* Mark all slots this store used. */
5176 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5177 && argblock && ! variable_size && arg->stack)
5178 for (i = lower_bound; i < upper_bound; i++)
5179 stack_usage_map[i] = 1;
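/* For instance (bounds invented for illustration), lower_bound == 8 and
   upper_bound == 12 would record that bytes 8..11 of the outgoing
   argument area are now occupied, so a later nested call that needs the
   same region can see it is live and save its contents first.  */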
5180
5181 /* Once we have pushed something, pops can't safely
5182 be deferred during the rest of the arguments. */
5183 NO_DEFER_POP;
5184
5185 /* Free any temporary slots made in processing this argument. */
5186 pop_temp_slots ();
5187
5188 return sibcall_failure;
5189 }
5190
5191 /* Return true if we do not know how to pass TYPE solely in registers. */
5192
5193 bool
5194 must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
5195 const_tree type)
5196 {
5197 if (!type)
5198 return false;
5199
5200 /* If the type has variable size... */
5201 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5202 return true;
5203
5204 /* If the type is marked as addressable (it is required
5205 to be constructed into the stack)... */
5206 if (TREE_ADDRESSABLE (type))
5207 return true;
5208
5209 return false;
5210 }
5211
5212 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
5213 takes trailing padding of a structure into account. */
5214 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
5215
5216 bool
5217 must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
5218 {
5219 if (!type)
5220 return false;
5221
5222 /* If the type has variable size... */
5223 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5224 return true;
5225
5226 /* If the type is marked as addressable (it is required
5227 to be constructed into the stack)... */
5228 if (TREE_ADDRESSABLE (type))
5229 return true;
5230
5231 /* If the padding and mode of the type are such that a copy into
5232 a register would put it into the wrong part of the register. */
5233 if (mode == BLKmode
5234 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5235 && (FUNCTION_ARG_PADDING (mode, type)
5236 == (BYTES_BIG_ENDIAN ? upward : downward)))
5237 return true;
5238
5239 return false;
5240 }
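/* A concrete reading of the padding check above (numbers invented for
   illustration): a 6-byte BLKmode aggregate with 32-bit PARM_BOUNDARY
   has 6 % 4 != 0, so if FUNCTION_ARG_PADDING is upward on a big-endian
   target (or downward on a little-endian one), a copy into a register
   would leave the useful bytes in the wrong part of the register, and
   the aggregate must be passed on the stack.  Backends typically select
   one of these helpers as their hook, along the lines of

     #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size_or_pad

   in their target-specific code.  */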