/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "stringpool.h"
#include "attribs.h"
#include "predict.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "diagnostic-core.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "langhooks.h"
#include "target.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
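
/* For example, a target whose PREFERRED_STACK_BOUNDARY is 128 bits has
   STACK_BYTES == 16, so outgoing argument blocks are rounded to
   multiples of 16 bytes.  */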

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If value is passed in neither reg nor stack, this field holds the
     number of a special slot to be used.  */
  rtx special_slot;
  /* For pointer bounds, holds the index of the parm the bounds are bound
     to.  -1 if there is no such pointer.  */
  int pointer_arg;
  /* If pointer_arg refers to a structure, then pointer_offset holds the
     offset of the pointer within this structure.  */
  int pointer_offset;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static void store_bounds (struct arg_data *, struct arg_data *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int,
                                             tree, tree,
                                             tree, tree, cumulative_args_t, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
\f
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((reg_parm_seen
               && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
      if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
    }

  if (static_chain_value != 0
      && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
          || DECL_STATIC_CHAIN (fndecl_or_type)))
    {
      rtx chain;

      chain = targetm.calls.static_chain (fndecl_or_type, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
        use_reg (call_fusage, chain);
    }

  return funexp;
}
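
/* With function CSE enabled, the SYMBOL_REF is loaded into a pseudo, so
   a sequence of calls to the same function can share one address load
   (roughly "reg = &f; call *reg; call *reg;" instead of two direct
   "call f" insns).  -fno-function-cse, or a target whose
   NO_FUNCTION_CSE default is nonzero, keeps the direct form.  */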

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx_insn *call_insn;
  rtx call, funmem;
  int already_popped = 0;
  HOST_WIDE_INT n_popped
    = targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations,
         if an entry gets garbage collected in one compilation, then
         adds a different (but equivalent) entry, while the other
         doesn't run the garbage collector at the same spot and then
         shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
        {
          tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
          if (t2)
            t = t2;
        }

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
                                     next_arg_reg, n_pop);
      else
        pat = GEN_SIBCALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
                               n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
                                  next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
                            n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg, funmem,
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (funmem, rounded_stack_size_rtx,
                                     next_arg_reg,
                                     GEN_INT (struct_value_size)));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg, funmem, rounded_stack_size_rtx,
                                        next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_CALL (funmem, rounded_stack_size_rtx, next_arg_reg,
                                  GEN_INT (struct_value_size)));
    }
  else
#endif
    gcc_unreachable ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = get_call_rtx_from (call_insn);
  if (call
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Mark instrumented calls.  */
  if (call && fntree)
    CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's bit
     for that.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        crtl->need_drap = true;
    }
  /* For noreturn calls when not accumulating outgoing args force
     REG_ARGS_SIZE note to prevent crossjumping of calls with different
     args sizes.  */
  else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
    add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & ECF_NORETURN)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
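
/* As an example of the popping bookkeeping above: for an i386 stdcall
   function with eight bytes of stack arguments the callee pops its own
   args, so return_pops_args yields n_popped == 8; the caller then emits
   no adjustment of its own for those bytes and instead records the
   change in stack_pointer_delta and the REG_ARGS_SIZE note.  */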

/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  /* For instrumentation clones we want to derive flags
     from the original name.  */
  if (cgraph_node::get (fndecl)
      && cgraph_node::get (fndecl)->instrumentation_clone)
    name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);

  if (fndecl && name_decl
      && IDENTIFIER_LENGTH (name_decl) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (name_decl) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (name_decl) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __, __x or __builtin_.  */
      if (name[0] == '_')
        {
          if (name[1] == '_'
              && name[2] == 'b'
              && !strncmp (name + 3, "uiltin_", 7))
            tname += 10;
          else if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE | ECF_LEAF;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_NORETURN;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork"))
               || (tname[0] == 'g' && tname[1] == 'e'
                   && !strcmp (tname, "getcontext")))
        flags |= ECF_RETURNS_TWICE | ECF_LEAF;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;
    }

  return flags;
}
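
/* So, for example, a file-scope declaration
     extern int vfork (void);
   is recognized by name alone and receives ECF_RETURNS_TWICE | ECF_LEAF,
   whereas, as the FIXME above notes, a block-scope declaration of the
   same function is (wrongly) not recognized.  */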

/* Similar to special_function_p; return a set of ERF_ flags for the
   function FNDECL.  */
static int
decl_return_flags (tree fndecl)
{
  tree attr;
  tree type = TREE_TYPE (fndecl);
  if (!type)
    return 0;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return 0;

  attr = TREE_VALUE (TREE_VALUE (attr));
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
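
/* For example, a "fn spec" attribute string starting with '1' describes
   a function that returns its first argument (as memcpy does) and maps
   to ERF_RETURNS_ARG with argument index 0, while a leading 'm' marks a
   malloc-like result and maps to ERF_NOALIAS.  */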

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT is an alloca call.  */

bool
gimple_alloca_call_p (const_gimple stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}

/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  tree fndecl;
  if (TREE_CODE (exp) == CALL_EXPR
      && (fndecl = get_callee_fndecl (exp))
      && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}

/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_TM_COMMIT:
        case BUILT_IN_TM_COMMIT_EH:
        case BUILT_IN_TM_ABORT:
        case BUILT_IN_TM_IRREVOCABLE:
        case BUILT_IN_TM_GETTMCLONE_IRR:
        case BUILT_IN_TM_MEMCPY:
        case BUILT_IN_TM_MEMMOVE:
        case BUILT_IN_TM_MEMSET:
        CASE_BUILT_IN_TM_STORE (1):
        CASE_BUILT_IN_TM_STORE (2):
        CASE_BUILT_IN_TM_STORE (4):
        CASE_BUILT_IN_TM_STORE (8):
        CASE_BUILT_IN_TM_STORE (FLOAT):
        CASE_BUILT_IN_TM_STORE (DOUBLE):
        CASE_BUILT_IN_TM_STORE (LDOUBLE):
        CASE_BUILT_IN_TM_STORE (M64):
        CASE_BUILT_IN_TM_STORE (M128):
        CASE_BUILT_IN_TM_STORE (M256):
        CASE_BUILT_IN_TM_LOAD (1):
        CASE_BUILT_IN_TM_LOAD (2):
        CASE_BUILT_IN_TM_LOAD (4):
        CASE_BUILT_IN_TM_LOAD (8):
        CASE_BUILT_IN_TM_LOAD (FLOAT):
        CASE_BUILT_IN_TM_LOAD (DOUBLE):
        CASE_BUILT_IN_TM_LOAD (LDOUBLE):
        CASE_BUILT_IN_TM_LOAD (M64):
        CASE_BUILT_IN_TM_LOAD (M128):
        CASE_BUILT_IN_TM_LOAD (M256):
        case BUILT_IN_TM_LOG:
        case BUILT_IN_TM_LOG_1:
        case BUILT_IN_TM_LOG_2:
        case BUILT_IN_TM_LOG_4:
        case BUILT_IN_TM_LOG_8:
        case BUILT_IN_TM_LOG_FLOAT:
        case BUILT_IN_TM_LOG_DOUBLE:
        case BUILT_IN_TM_LOG_LDOUBLE:
        case BUILT_IN_TM_LOG_M64:
        case BUILT_IN_TM_LOG_M128:
        case BUILT_IN_TM_LOG_M256:
          return true;
        default:
          break;
        }
    }
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
        flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
        flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
        flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
        flags |= ECF_LEAF;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (flag_tm)
        {
          if (is_tm_builtin (exp))
            flags |= ECF_TM_BUILTIN;
          else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
                   || lookup_attribute ("transaction_pure",
                                        TYPE_ATTRIBUTES (TREE_TYPE (exp))))
            flags |= ECF_TM_PURE;
        }

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
        flags |= ECF_CONST;

      if (flag_tm
          && ((flags & ECF_CONST) != 0
              || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
        flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
        flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
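
/* For example, a declaration such as
     extern void die (const char *) __attribute__ ((noreturn));
   has TREE_THIS_VOLATILE set by the front end, so its call sites receive
   ECF_NORETURN here; if it were also declared `const', the combination
   would yield ECF_LOOPING_CONST_OR_PURE as well.  */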

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (CALL_EXPR_FN (t) == NULL_TREE)
    flags = internal_fn_flags (CALL_EXPR_IFN (t));
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_normal (args[i].tree_value);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && set_src_cost (args[i].value, optimize_insn_for_speed_p ())
                    > COSTS_N_INSNS (1)
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        machine_mode save_mode;
        int delta;
        rtx addr;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        addr = plus_constant (Pmode, argblock, delta);
        stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx addr, stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  addr = plus_constant (Pmode, argblock, delta);
  stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && GET_CODE (args[i].reg) != PARALLEL
        && args[i].mode == BLKmode
        && MEM_P (args[i].value)
        && (MEM_ALIGN (args[i].value)
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, 0, 0,
                             word_mode, word);
          }
      }
}
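
/* For instance, for a 3-byte structure on a 32-bit BYTES_BIG_ENDIAN
   target, endian_correction is 32 - 24 = 8: the 24 data bits are stored
   at bit offset 8 so that the unused high-order byte of the word is
   skipped.  */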

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   argument that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree exp, tree struct_value_addr_value,
                                 tree fndecl, tree fntype,
                                 cumulative_args_t args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
  location_t loc = EXPR_LOCATION (exp);

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  bitmap_obstack_initialize (NULL);

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the back.  */

  i = num_actuals - 1;
  {
    int j = i, ptr_arg = -1;
    call_expr_arg_iterator iter;
    tree arg;
    bitmap slots = NULL;

    if (struct_value_addr_value)
      {
        args[j].tree_value = struct_value_addr_value;
        j--;

        /* If we pass a structure address then we need to
           create bounds for it.  Since the created bounds are
           a call statement, we expand it right here to avoid
           fixing all other places where it may be expanded.  */
        if (CALL_WITH_BOUNDS_P (exp))
          {
            args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
            args[j].tree_value
              = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
            expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
                              EXPAND_NORMAL, 0, false);
            args[j].pointer_arg = j + 1;
            j--;
          }
      }
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
        tree argtype = TREE_TYPE (arg);

        /* Remember the last param with a pointer and associate it
           with the following pointer bounds.  */
        if (CALL_WITH_BOUNDS_P (exp)
            && chkp_type_has_pointer (argtype))
          {
            if (slots)
              BITMAP_FREE (slots);
            ptr_arg = j;
            if (!BOUNDED_TYPE_P (argtype))
              {
                slots = BITMAP_ALLOC (NULL);
                chkp_find_bound_slots (argtype, slots);
              }
          }
        else if (POINTER_BOUNDS_TYPE_P (argtype))
          {
            /* We expect bounds in instrumented calls only.
               Otherwise it is a sign we lost the flag due to some
               optimization and may emit call args incorrectly.  */
            gcc_assert (CALL_WITH_BOUNDS_P (exp));

            /* For structures look for the next available pointer.  */
            if (ptr_arg != -1 && slots)
              {
                unsigned bnd_no = bitmap_first_set_bit (slots);
                args[j].pointer_offset =
                  bnd_no * POINTER_SIZE / BITS_PER_UNIT;

                bitmap_clear_bit (slots, bnd_no);

                /* Check we have no more pointers in the structure.  */
                if (bitmap_empty_p (slots))
                  BITMAP_FREE (slots);
              }
            args[j].pointer_arg = ptr_arg;

            /* Check we covered all pointers in the previous
               non-bounds arg.  */
            if (!slots)
              ptr_arg = -1;
          }
        else
          ptr_arg = -1;

        if (targetm.calls.split_complex_arg
            && argtype
            && TREE_CODE (argtype) == COMPLEX_TYPE
            && targetm.calls.split_complex_arg (argtype))
          {
            tree subtype = TREE_TYPE (argtype);
            args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
            j--;
            args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
          }
        else
          args[j].tree_value = arg;
        j--;
      }

    if (slots)
      BITMAP_FREE (slots);
  }

  bitmap_obstack_release (NULL);

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i--, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;
      machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
         we would pass the first field of the union or record.  We have
         already verified that the modes are the same.  */
      if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
          && TYPE_TRANSPARENT_AGGR (type))
        type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
                             type, argpos < n_named_args))
        {
          bool callee_copies;
          tree base = NULL_TREE;

          callee_copies
            = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              || (callee_copies
                  && !TREE_ADDRESSABLE (type)
                  && (base = get_base_address (args[i].tree_value))
                  && TREE_CODE (base) != SSA_NAME
                  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
            {
              /* We may have turned the parameter value into an SSA name.
                 Go back to the original parameter so we can take the
                 address.  */
              if (TREE_CODE (args[i].tree_value) == SSA_NAME)
                {
                  gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
                  args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
                  gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
                }
              /* Argument setup code may have copied the value to a register.
                 We revert that optimization now because the tail call code
                 must use the original location.  */
              if (TREE_CODE (args[i].tree_value) == PARM_DECL
                  && !MEM_P (DECL_RTL (args[i].tree_value))
                  && DECL_INCOMING_RTL (args[i].tree_value)
                  && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
                set_decl_rtl (args[i].tree_value,
                              DECL_INCOMING_RTL (args[i].tree_value));

              mark_addressable (args[i].tree_value);

              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr_loc (loc,
                                                             args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              if (*ecf_flags & ECF_CONST)
                *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
                  || (flag_stack_check == GENERIC_STACK_CHECK
                      && compare_tree_int (TYPE_SIZE_UNIT (type),
                                           STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (args[i].tree_value);

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  /* We can pass TRUE as the 4th argument because we just
                     saved the stack pointer and will restore it right after
                     the call.  */
                  copy = allocate_dynamic_stack_space (size_rtx,
                                                       TYPE_ALIGN (type),
                                                       TYPE_ALIGN (type),
                                                       true);
                  copy = gen_rtx_MEM (BLKmode, copy);
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 1, 0);

              store_expr (args[i].tree_value, copy, 0, false);

              /* Just change the const function to pure and then let
                 the next test clear the pure based on
                 callee_copies.  */
              if (*ecf_flags & ECF_CONST)
                {
                  *ecf_flags &= ~ECF_CONST;
                  *ecf_flags |= ECF_PURE;
                }

              if (!callee_copies && *ecf_flags & ECF_PURE)
                *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

              args[i].tree_value
                = build_fold_addr_expr_loc (loc, make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
            }
        }

      unsignedp = TYPE_UNSIGNED (type);
      mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
                                    fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
                                                argpos < n_named_args);

      if (args[i].reg && CONST_INT_P (args[i].reg))
        {
          args[i].special_slot = args[i].reg;
          args[i].reg = NULL;
        }

      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
        args[i].tail_call_reg
          = targetm.calls.function_incoming_arg (args_so_far, mode, type,
                                                 argpos < n_named_args);
      else
        args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
        args[i].partial
          = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                             argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* No stack allocation and padding for bounds.  */
      if (POINTER_BOUNDS_P (args[i].tree_value))
        ;
      /* Compute the stack-size of this argument.  */
      else if (args[i].reg == 0 || args[i].partial != 0
               || reg_parm_stack_space > 0
               || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             reg_parm_stack_space,
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
                                          type, argpos < n_named_args);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             tree fndecl ATTRIBUTE_UNUSED,
                             tree fntype ATTRIBUTE_UNUSED,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the
     frame will be already aligned.  Align to STACK_BOUNDARY in order to
     prevent backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
            args_size->var
              = size_binop (MINUS_EXPR, args_size->var,
                            ssize_int (reg_parm_stack_space));
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
        args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
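
/* For example, with args_size->constant == 20, stack_pointer_delta == 4
   and a 16-byte preferred boundary, the rounding above computes
   (20 + 4 + 15) / 16 * 16 - 4 == 28, so that after pushing 28 bytes the
   stack pointer is again a multiple of 16.  */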

/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
        continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
        = expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
        {
          int unsignedp = args[i].unsignedp;
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);

          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT
              && promote_mode (type, mode, &unsignedp) != args[i].mode)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
            }
        }
    }
}
1643
1644 /* Given the current state of MUST_PREALLOCATE and information about
1645 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1646 compute and return the final value for MUST_PREALLOCATE. */
1647
1648 static int
1649 finalize_must_preallocate (int must_preallocate, int num_actuals,
1650 struct arg_data *args, struct args_size *args_size)
1651 {
1652 /* See if we have or want to preallocate stack space.
1653
1654 If we would have to push a partially-in-regs parm
1655 before other stack parms, preallocate stack space instead.
1656
1657 If the size of some parm is not a multiple of the required stack
1658 alignment, we must preallocate.
1659
1660 If the total size of arguments that would otherwise create a copy in
1661 a temporary (such as a CALL) is more than half the total argument list
1662 size, preallocation is faster.
1663
1664 Another reason to preallocate is if we have a machine (like the m88k)
1665 where stack alignment is required to be maintained between every
1666 pair of insns, not just when the call is made. However, we assume here
1667 that such machines either do not have push insns (and hence preallocation
1668 would occur anyway) or the problem is taken care of with
1669 PUSH_ROUNDING. */
1670
1671 if (! must_preallocate)
1672 {
1673 int partial_seen = 0;
1674 int copy_to_evaluate_size = 0;
1675 int i;
1676
1677 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1678 {
1679 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1680 partial_seen = 1;
1681 else if (partial_seen && args[i].reg == 0)
1682 must_preallocate = 1;
1683 /* We preallocate in case there are bounds passed
1684 in the bounds table to have precomputed address
1685 for bounds association. */
1686 else if (POINTER_BOUNDS_P (args[i].tree_value)
1687 && !args[i].reg)
1688 must_preallocate = 1;
1689
1690 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1691 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1692 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1693 || TREE_CODE (args[i].tree_value) == COND_EXPR
1694 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1695 copy_to_evaluate_size
1696 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1697 }
1698
1699 if (copy_to_evaluate_size * 2 >= args_size->constant
1700 && args_size->constant > 0)
1701 must_preallocate = 1;
1702 }
1703 return must_preallocate;
1704 }
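/* Worked example of the size heuristic above (numbers purely
   illustrative): for a 100-byte argument list in which one BLKmode
   CALL_EXPR argument occupies 60 bytes, COPY_TO_EVALUATE_SIZE is 60
   and 60 * 2 = 120 >= 100, so MUST_PREALLOCATE is set; copying more
   than half of the argument block through temporaries is assumed to
   be slower than preallocating the block outright.  */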
1705
1706 /* If we preallocated stack space, compute the address of each argument
1707 and store it into the ARGS array.
1708
1709 We need not ensure it is a valid memory address here; it will be
1710 validized when it is used.
1711
1712 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1713
1714 static void
1715 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1716 {
1717 if (argblock)
1718 {
1719 rtx arg_reg = argblock;
1720 int i, arg_offset = 0;
1721
1722 if (GET_CODE (argblock) == PLUS)
1723 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1724
1725 for (i = 0; i < num_actuals; i++)
1726 {
1727 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1728 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1729 rtx addr;
1730 unsigned int align, boundary;
1731 unsigned int units_on_stack = 0;
1732 machine_mode partial_mode = VOIDmode;
1733
1734 /* Skip this parm if it will not be passed on the stack. */
1735 if (! args[i].pass_on_stack
1736 && args[i].reg != 0
1737 && args[i].partial == 0)
1738 continue;
1739
1740 /* Pointer Bounds are never passed on the stack. */
1741 if (POINTER_BOUNDS_P (args[i].tree_value))
1742 continue;
1743
1744 if (CONST_INT_P (offset))
1745 addr = plus_constant (Pmode, arg_reg, INTVAL (offset));
1746 else
1747 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1748
1749 addr = plus_constant (Pmode, addr, arg_offset);
1750
1751 if (args[i].partial != 0)
1752 {
1753 /* Only part of the parameter is being passed on the stack.
1754 Generate a simple memory reference of the correct size. */
1755 units_on_stack = args[i].locate.size.constant;
1756 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1757 MODE_INT, 1);
1758 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1759 set_mem_size (args[i].stack, units_on_stack);
1760 }
1761 else
1762 {
1763 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1764 set_mem_attributes (args[i].stack,
1765 TREE_TYPE (args[i].tree_value), 1);
1766 }
1767 align = BITS_PER_UNIT;
1768 boundary = args[i].locate.boundary;
1769 if (args[i].locate.where_pad != downward)
1770 align = boundary;
1771 else if (CONST_INT_P (offset))
1772 {
1773 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1774 align = align & -align;
1775 }
1776 set_mem_align (args[i].stack, align);
1777
1778 if (CONST_INT_P (slot_offset))
1779 addr = plus_constant (Pmode, arg_reg, INTVAL (slot_offset));
1780 else
1781 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1782
1783 addr = plus_constant (Pmode, addr, arg_offset);
1784
1785 if (args[i].partial != 0)
1786 {
1787 /* Only part of the parameter is being passed on the stack.
1788 Generate a simple memory reference of the correct size.
1789 */
1790 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1791 set_mem_size (args[i].stack_slot, units_on_stack);
1792 }
1793 else
1794 {
1795 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1796 set_mem_attributes (args[i].stack_slot,
1797 TREE_TYPE (args[i].tree_value), 1);
1798 }
1799 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1800
1801 /* Function incoming arguments may overlap with sibling call
1802 outgoing arguments and we cannot allow reordering of reads
1803 from function arguments with stores to outgoing arguments
1804 of sibling calls. */
1805 set_mem_alias_set (args[i].stack, 0);
1806 set_mem_alias_set (args[i].stack_slot, 0);
1807 }
1808 }
1809 }
1810
1811 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1812 in a call instruction.
1813
1814 FNDECL is the tree node for the target function. For an indirect call
1815 FNDECL will be NULL_TREE.
1816
1817 ADDR is the operand 0 of CALL_EXPR for this call. */
1818
1819 static rtx
1820 rtx_for_function_call (tree fndecl, tree addr)
1821 {
1822 rtx funexp;
1823
1824 /* Get the function to call, in the form of RTL. */
1825 if (fndecl)
1826 {
1827 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1828 TREE_USED (fndecl) = 1;
1829
1830 /* Get a SYMBOL_REF rtx for the function address. */
1831 funexp = XEXP (DECL_RTL (fndecl), 0);
1832 }
1833 else
1834 /* Generate an rtx (probably a pseudo-register) for the address. */
1835 {
1836 push_temp_slots ();
1837 funexp = expand_normal (addr);
1838 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1839 }
1840 return funexp;
1841 }
1842
1843 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
1844 static struct
1845 {
1846 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
1847 or NULL_RTX if none has been scanned yet. */
1848 rtx_insn *scan_start;
1849 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
1850 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
1851 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
1852 with fixed offset, or PC if this is with variable or unknown offset. */
1853 vec<rtx> cache;
1854 } internal_arg_pointer_exp_state;
1855
1856 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
1857
1858 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
1859 the tail call sequence, starting with first insn that hasn't been
1860 scanned yet, and note for each pseudo on the LHS whether it is based
1861 on crtl->args.internal_arg_pointer or not, and what offset from that
1862 	 pointer it has.  */
1863
1864 static void
1865 internal_arg_pointer_based_exp_scan (void)
1866 {
1867 rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
1868
1869 if (scan_start == NULL_RTX)
1870 insn = get_insns ();
1871 else
1872 insn = NEXT_INSN (scan_start);
1873
1874 while (insn)
1875 {
1876 rtx set = single_set (insn);
1877 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
1878 {
1879 rtx val = NULL_RTX;
1880 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
1881 /* Punt on pseudos set multiple times. */
1882 if (idx < internal_arg_pointer_exp_state.cache.length ()
1883 && (internal_arg_pointer_exp_state.cache[idx]
1884 != NULL_RTX))
1885 val = pc_rtx;
1886 else
1887 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
1888 if (val != NULL_RTX)
1889 {
1890 if (idx >= internal_arg_pointer_exp_state.cache.length ())
1891 internal_arg_pointer_exp_state.cache
1892 .safe_grow_cleared (idx + 1);
1893 internal_arg_pointer_exp_state.cache[idx] = val;
1894 }
1895 }
1896 if (NEXT_INSN (insn) == NULL_RTX)
1897 scan_start = insn;
1898 insn = NEXT_INSN (insn);
1899 }
1900
1901 internal_arg_pointer_exp_state.scan_start = scan_start;
1902 }
1903
1904 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
1905 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
1906 it with fixed offset, or PC if this is with variable or unknown offset.
1907 TOPLEVEL is true if the function is invoked at the topmost level. */
1908
1909 static rtx
1910 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
1911 {
1912 if (CONSTANT_P (rtl))
1913 return NULL_RTX;
1914
1915 if (rtl == crtl->args.internal_arg_pointer)
1916 return const0_rtx;
1917
1918 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
1919 return NULL_RTX;
1920
1921 if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
1922 {
1923 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
1924 if (val == NULL_RTX || val == pc_rtx)
1925 return val;
1926 return plus_constant (Pmode, val, INTVAL (XEXP (rtl, 1)));
1927 }
1928
1929 /* When called at the topmost level, scan pseudo assignments in between the
1930 last scanned instruction in the tail call sequence and the latest insn
1931 in that sequence. */
1932 if (toplevel)
1933 internal_arg_pointer_based_exp_scan ();
1934
1935 if (REG_P (rtl))
1936 {
1937 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
1938 if (idx < internal_arg_pointer_exp_state.cache.length ())
1939 return internal_arg_pointer_exp_state.cache[idx];
1940
1941 return NULL_RTX;
1942 }
1943
1944 subrtx_iterator::array_type array;
1945 FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
1946 {
1947 const_rtx x = *iter;
1948 if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
1949 return pc_rtx;
1950 if (MEM_P (x))
1951 iter.skip_subrtxes ();
1952 }
1953
1954 return NULL_RTX;
1955 }
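/* Sketch of the possible return values (illustrative pseudo-RTL,
   where ARGP stands for crtl->args.internal_arg_pointer):

     (plus (reg ARGP) (const_int 16))   -> (const_int 16)
     (reg ARGP)                         -> (const_int 0)
     a pseudo set from ARGP plus some run-time amount -> pc_rtx  */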
1956
1957 /* Return true if and only if SIZE storage units (usually bytes)
1958 starting from address ADDR overlap with the already-clobbered argument
1959 area.  This function is used to determine whether we should give up
1960 on a sibcall.  */
1961
1962 static bool
1963 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1964 {
1965 HOST_WIDE_INT i;
1966 rtx val;
1967
1968 if (bitmap_empty_p (stored_args_map))
1969 return false;
1970 val = internal_arg_pointer_based_exp (addr, true);
1971 if (val == NULL_RTX)
1972 return false;
1973 else if (val == pc_rtx)
1974 return true;
1975 else
1976 i = INTVAL (val);
1977 #ifdef STACK_GROWS_DOWNWARD
1978 i -= crtl->args.pretend_args_size;
1979 #else
1980 i += crtl->args.pretend_args_size;
1981 #endif
1982
1983 #ifdef ARGS_GROW_DOWNWARD
1984 i = -i - size;
1985 #endif
1986 if (size > 0)
1987 {
1988 unsigned HOST_WIDE_INT k;
1989
1990 for (k = 0; k < size; k++)
1991 if (i + k < SBITMAP_SIZE (stored_args_map)
1992 && bitmap_bit_p (stored_args_map, i + k))
1993 return true;
1994 }
1995
1996 return false;
1997 }
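/* Worked example (illustrative numbers): with a fixed offset of 24,
   a pretend_args_size of 8, a downward-growing stack and upward-growing
   args, I becomes 24 - 8 = 16, so a SIZE of 4 overlaps the clobbered
   area iff one of bits 16..19 is set in stored_args_map.  */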
1998
1999 /* Do the register loads required for any wholly-register parms or any
2000 parms which are passed both on the stack and in a register. Their
2001 expressions were already evaluated.
2002
2003 Mark all register-parms as living through the call, putting these USE
2004 insns in the CALL_INSN_FUNCTION_USAGE field.
2005
2006 When IS_SIBCALL, perform the check_sibcall_argument_overlap
2007 checking, setting *SIBCALL_FAILURE if appropriate. */
2008
2009 static void
2010 load_register_parameters (struct arg_data *args, int num_actuals,
2011 rtx *call_fusage, int flags, int is_sibcall,
2012 int *sibcall_failure)
2013 {
2014 int i, j;
2015
2016 for (i = 0; i < num_actuals; i++)
2017 {
2018 rtx reg = ((flags & ECF_SIBCALL)
2019 ? args[i].tail_call_reg : args[i].reg);
2020 if (reg)
2021 {
2022 int partial = args[i].partial;
2023 int nregs;
2024 int size = 0;
2025 rtx_insn *before_arg = get_last_insn ();
2026 /* Set non-negative if we must move a word at a time, even if
2027 	     just one word (e.g., partial == 4 && mode == DFmode).  Set
2028 to -1 if we just use a normal move insn. This value can be
2029 zero if the argument is a zero size structure. */
2030 nregs = -1;
2031 if (GET_CODE (reg) == PARALLEL)
2032 ;
2033 else if (partial)
2034 {
2035 gcc_assert (partial % UNITS_PER_WORD == 0);
2036 nregs = partial / UNITS_PER_WORD;
2037 }
2038 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2039 {
2040 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2041 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2042 }
2043 else
2044 size = GET_MODE_SIZE (args[i].mode);
2045
2046 /* Handle calls that pass values in multiple non-contiguous
2047 locations. The Irix 6 ABI has examples of this. */
2048
2049 if (GET_CODE (reg) == PARALLEL)
2050 emit_group_move (reg, args[i].parallel_value);
2051
2052 	  /* In the simple case, just do the move.  If normal partial, store_one_arg
2053 has already loaded the register for us. In all other cases,
2054 load the register(s) from memory. */
2055
2056 else if (nregs == -1)
2057 {
2058 emit_move_insn (reg, args[i].value);
2059 #ifdef BLOCK_REG_PADDING
2060 /* Handle case where we have a value that needs shifting
2061 up to the msb. eg. a QImode value and we're padding
2062 upward on a BYTES_BIG_ENDIAN machine. */
2063 if (size < UNITS_PER_WORD
2064 && (args[i].locate.where_pad
2065 == (BYTES_BIG_ENDIAN ? upward : downward)))
2066 {
2067 rtx x;
2068 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2069
2070 /* Assigning REG here rather than a temp makes CALL_FUSAGE
2071 report the whole reg as used. Strictly speaking, the
2072 call only uses SIZE bytes at the msb end, but it doesn't
2073 seem worth generating rtl to say that. */
2074 reg = gen_rtx_REG (word_mode, REGNO (reg));
2075 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
2076 if (x != reg)
2077 emit_move_insn (reg, x);
2078 }
2079 #endif
2080 }
2081
2082 /* If we have pre-computed the values to put in the registers in
2083 the case of non-aligned structures, copy them in now. */
2084
2085 else if (args[i].n_aligned_regs != 0)
2086 for (j = 0; j < args[i].n_aligned_regs; j++)
2087 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2088 args[i].aligned_regs[j]);
2089
2090 else if (partial == 0 || args[i].pass_on_stack)
2091 {
2092 rtx mem = validize_mem (copy_rtx (args[i].value));
2093
2094 	      /* Check for overlap with the already-clobbered argument area,
2095 	         provided that this has a non-zero size.  */
2096 if (is_sibcall
2097 && (size == 0
2098 || mem_overlaps_already_clobbered_arg_p
2099 (XEXP (args[i].value, 0), size)))
2100 *sibcall_failure = 1;
2101
2102 if (size % UNITS_PER_WORD == 0
2103 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2104 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2105 else
2106 {
2107 if (nregs > 1)
2108 move_block_to_reg (REGNO (reg), mem, nregs - 1,
2109 args[i].mode);
2110 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2111 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
2112 unsigned int bitsize = size * BITS_PER_UNIT - bitoff;
2113 rtx x = extract_bit_field (mem, bitsize, bitoff, 1,
2114 dest, word_mode, word_mode);
2115 if (BYTES_BIG_ENDIAN)
2116 x = expand_shift (LSHIFT_EXPR, word_mode, x,
2117 BITS_PER_WORD - bitsize, dest, 1);
2118 if (x != dest)
2119 emit_move_insn (dest, x);
2120 }
2121
2122 /* Handle a BLKmode that needs shifting. */
2123 if (nregs == 1 && size < UNITS_PER_WORD
2124 #ifdef BLOCK_REG_PADDING
2125 && args[i].locate.where_pad == downward
2126 #else
2127 && BYTES_BIG_ENDIAN
2128 #endif
2129 )
2130 {
2131 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
2132 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2133 enum tree_code dir = (BYTES_BIG_ENDIAN
2134 ? RSHIFT_EXPR : LSHIFT_EXPR);
2135 rtx x;
2136
2137 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2138 if (x != dest)
2139 emit_move_insn (dest, x);
2140 }
2141 }
2142
2143 /* When a parameter is a block, and perhaps in other cases, it is
2144 possible that it did a load from an argument slot that was
2145 already clobbered. */
2146 if (is_sibcall
2147 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2148 *sibcall_failure = 1;
2149
2150 /* Handle calls that pass values in multiple non-contiguous
2151 locations. The Irix 6 ABI has examples of this. */
2152 if (GET_CODE (reg) == PARALLEL)
2153 use_group_regs (call_fusage, reg);
2154 else if (nregs == -1)
2155 use_reg_mode (call_fusage, reg,
2156 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
2157 else if (nregs > 0)
2158 use_regs (call_fusage, REGNO (reg), nregs);
2159 }
2160 }
2161 }
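/* Worked example of the misaligned BLKmode tail handling above
   (illustrative, assuming UNITS_PER_WORD == 4): a 5-byte BLKmode
   argument gives nregs = 2; the first word is block-moved, the final
   byte is fetched by extract_bit_field at bit offset 32 with bitsize
   5 * 8 - 32 = 8, and on a BYTES_BIG_ENDIAN target it is then shifted
   left by 32 - 8 = 24 bits so that it lands at the most significant
   end of the last register.  */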
2162
2163 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
2164 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2165 bytes, then we would need to push some additional bytes to pad the
2166 	 arguments.  So, we compute an adjustment to the stack pointer for an
2167 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2168 bytes. Then, when the arguments are pushed the stack will be perfectly
2169 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
2170 be popped after the call. Returns the adjustment. */
2171
2172 static int
2173 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
2174 struct args_size *args_size,
2175 unsigned int preferred_unit_stack_boundary)
2176 {
2177 /* The number of bytes to pop so that the stack will be
2178 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
2179 HOST_WIDE_INT adjustment;
2180 /* The alignment of the stack after the arguments are pushed, if we
2181 	 just pushed the arguments without adjusting the stack here.  */
2182 unsigned HOST_WIDE_INT unadjusted_alignment;
2183
2184 unadjusted_alignment
2185 = ((stack_pointer_delta + unadjusted_args_size)
2186 % preferred_unit_stack_boundary);
2187
2188 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2189 as possible -- leaving just enough left to cancel out the
2190 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2191 PENDING_STACK_ADJUST is non-negative, and congruent to
2192 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2193
2194 /* Begin by trying to pop all the bytes. */
2195 unadjusted_alignment
2196 = (unadjusted_alignment
2197 - (pending_stack_adjust % preferred_unit_stack_boundary));
2198 adjustment = pending_stack_adjust;
2199 /* Push enough additional bytes that the stack will be aligned
2200 after the arguments are pushed. */
2201 if (preferred_unit_stack_boundary > 1)
2202 {
2203 if (unadjusted_alignment > 0)
2204 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2205 else
2206 adjustment += unadjusted_alignment;
2207 }
2208
2209 	  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2210 bytes after the call. The right number is the entire
2211 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2212 by the arguments in the first place. */
2213 args_size->constant
2214 = pending_stack_adjust - adjustment + unadjusted_args_size;
2215
2216 return adjustment;
2217 }
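/* Worked example (illustrative numbers): with a preferred unit
   boundary of 16, a stack_pointer_delta of 8, UNADJUSTED_ARGS_SIZE
   of 4 and PENDING_STACK_ADJUST of 20, the unadjusted alignment is
   (8 + 4) % 16 = 12, reduced by 20 % 16 = 4 to 8; the initial
   adjustment of 20 is therefore lowered by 16 - 8 = 8 to 12.
   Popping 12 bytes leaves the stack exactly 4 bytes short of the
   boundary, which the arguments then fill, and ARGS_SIZE->CONSTANT
   becomes 20 - 12 + 4 = 12.  */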
2218
2219 /* Scan expression X to check whether it dereferences any argument
2220 slots which we have already clobbered with tail call arguments
2221 (as noted in the stored_args_map bitmap).
2222 Return nonzero if X dereferences such argument slots,
2223 zero otherwise.  */
2224
2225 static int
2226 check_sibcall_argument_overlap_1 (rtx x)
2227 {
2228 RTX_CODE code;
2229 int i, j;
2230 const char *fmt;
2231
2232 if (x == NULL_RTX)
2233 return 0;
2234
2235 code = GET_CODE (x);
2236
2237 /* We need not check the operands of the CALL expression itself. */
2238 if (code == CALL)
2239 return 0;
2240
2241 if (code == MEM)
2242 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
2243 GET_MODE_SIZE (GET_MODE (x)));
2244
2245 /* Scan all subexpressions. */
2246 fmt = GET_RTX_FORMAT (code);
2247 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2248 {
2249 if (*fmt == 'e')
2250 {
2251 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2252 return 1;
2253 }
2254 else if (*fmt == 'E')
2255 {
2256 for (j = 0; j < XVECLEN (x, i); j++)
2257 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2258 return 1;
2259 }
2260 }
2261 return 0;
2262 }
2263
2264 /* Scan the sequence after INSN to check whether it dereferences any
2265 argument slots we have already clobbered with tail call arguments (as
2266 noted in the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is
2267 nonzero, add the stack slots for ARG to the stored_args_map bitmap
2268 afterwards (when ARG is a register, MARK_STORED_ARGS_MAP should be 0).
2269 Return nonzero if the sequence after INSN dereferences such slots.  */
2270
2271 static int
2272 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
2273 int mark_stored_args_map)
2274 {
2275 int low, high;
2276
2277 if (insn == NULL_RTX)
2278 insn = get_insns ();
2279 else
2280 insn = NEXT_INSN (insn);
2281
2282 for (; insn; insn = NEXT_INSN (insn))
2283 if (INSN_P (insn)
2284 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2285 break;
2286
2287 if (mark_stored_args_map)
2288 {
2289 #ifdef ARGS_GROW_DOWNWARD
2290 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2291 #else
2292 low = arg->locate.slot_offset.constant;
2293 #endif
2294
2295 for (high = low + arg->locate.size.constant; low < high; low++)
2296 bitmap_set_bit (stored_args_map, low);
2297 }
2298 return insn != NULL_RTX;
2299 }
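/* For instance (illustrative): an argument with slot_offset 8 and
   size 4, on a target where args do not grow downward, marks bits
   8 through 11 in stored_args_map; if any insn after INSN then reads
   one of those slots, the nonzero return value makes the caller
   abandon the sibcall.  */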
2300
2301 /* Given that a function returns a value of mode MODE at the most
2302 significant end of hard register VALUE, shift VALUE left or right
2303 as specified by LEFT_P. Return true if some action was needed. */
2304
2305 bool
2306 shift_return_value (machine_mode mode, bool left_p, rtx value)
2307 {
2308 HOST_WIDE_INT shift;
2309
2310 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2311 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
2312 if (shift == 0)
2313 return false;
2314
2315 /* Use ashr rather than lshr for right shifts. This is for the benefit
2316 of the MIPS port, which requires SImode values to be sign-extended
2317 when stored in 64-bit registers. */
2318 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
2319 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
2320 gcc_unreachable ();
2321 return true;
2322 }
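/* Example (illustrative): a function returning an SImode value at the
   most significant end of a 64-bit hard register gives a shift of
   64 - 32 = 32; when LEFT_P is false the arithmetic right shift keeps
   the value sign-extended in the wider register, as MIPS requires for
   SImode values held in 64-bit registers.  */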
2323
2324 /* If X is a likely-spilled register value, copy it to a pseudo
2325 register and return that register. Return X otherwise. */
2326
2327 static rtx
2328 avoid_likely_spilled_reg (rtx x)
2329 {
2330 rtx new_rtx;
2331
2332 if (REG_P (x)
2333 && HARD_REGISTER_P (x)
2334 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
2335 {
2336 /* Make sure that we generate a REG rather than a CONCAT.
2337 Moves into CONCATs can need nontrivial instructions,
2338 and the whole point of this function is to avoid
2339 using the hard register directly in such a situation. */
2340 generating_concat_p = 0;
2341 new_rtx = gen_reg_rtx (GET_MODE (x));
2342 generating_concat_p = 1;
2343 emit_move_insn (new_rtx, x);
2344 return new_rtx;
2345 }
2346 return x;
2347 }
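/* E.g. (illustrative): when a value is returned in a hard register
   whose class the back end reports as likely spilled, copying it into
   a fresh pseudo here keeps later code from extending the hard
   register's lifetime and constraining the register allocator.  */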
2348
2349 /* Generate all the code for a CALL_EXPR exp
2350 and return an rtx for its value.
2351 Store the value in TARGET (specified as an rtx) if convenient.
2352 If the value is stored in TARGET then TARGET is returned.
2353 If IGNORE is nonzero, then we ignore the value of the function call. */
2354
2355 rtx
2356 expand_call (tree exp, rtx target, int ignore)
2357 {
2358 /* Nonzero if we are currently expanding a call. */
2359 static int currently_expanding_call = 0;
2360
2361 /* RTX for the function to be called. */
2362 rtx funexp;
2363 /* Sequence of insns to perform a normal "call". */
2364 rtx_insn *normal_call_insns = NULL;
2365 /* Sequence of insns to perform a tail "call". */
2366 rtx_insn *tail_call_insns = NULL;
2367 /* Data type of the function. */
2368 tree funtype;
2369 tree type_arg_types;
2370 tree rettype;
2371 /* Declaration of the function being called,
2372 or 0 if the function is computed (not known by name). */
2373 tree fndecl = 0;
2374 /* The type of the function being called. */
2375 tree fntype;
2376 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
2377 int pass;
2378
2379 /* Register in which non-BLKmode value will be returned,
2380 or 0 if no value or if value is BLKmode. */
2381 rtx valreg;
2382 /* Register(s) in which bounds are returned. */
2383 rtx valbnd = NULL;
2384 /* Address where we should return a BLKmode value;
2385 0 if value not BLKmode. */
2386 rtx structure_value_addr = 0;
2387 /* Nonzero if that address is being passed by treating it as
2388 an extra, implicit first parameter. Otherwise,
2389 it is passed by being copied directly into struct_value_rtx. */
2390 int structure_value_addr_parm = 0;
2391 /* Holds the value of implicit argument for the struct value. */
2392 tree structure_value_addr_value = NULL_TREE;
2393 /* Size of aggregate value wanted, or zero if none wanted
2394 or if we are using the non-reentrant PCC calling convention
2395 or expecting the value in registers. */
2396 HOST_WIDE_INT struct_value_size = 0;
2397 /* Nonzero if called function returns an aggregate in memory PCC style,
2398 by returning the address of where to find it. */
2399 int pcc_struct_value = 0;
2400 rtx struct_value = 0;
2401
2402 /* Number of actual parameters in this call, including struct value addr. */
2403 int num_actuals;
2404 /* Number of named args. Args after this are anonymous ones
2405 and they must all go on the stack. */
2406 int n_named_args;
2407 /* Number of complex actual arguments that need to be split. */
2408 int num_complex_actuals = 0;
2409
2410 /* Vector of information about each argument.
2411 Arguments are numbered in the order they will be pushed,
2412 not the order they are written. */
2413 struct arg_data *args;
2414
2415 /* Total size in bytes of all the stack-parms scanned so far. */
2416 struct args_size args_size;
2417 struct args_size adjusted_args_size;
2418 /* Size of arguments before any adjustments (such as rounding). */
2419 int unadjusted_args_size;
2420 /* Data on reg parms scanned so far. */
2421 CUMULATIVE_ARGS args_so_far_v;
2422 cumulative_args_t args_so_far;
2423 /* Nonzero if a reg parm has been scanned. */
2424 int reg_parm_seen;
2425 /* Nonzero if this is an indirect function call. */
2426
2427 /* Nonzero if we must avoid push-insns in the args for this call.
2428 If stack space is allocated for register parameters, but not by the
2429 caller, then it is preallocated in the fixed part of the stack frame.
2430 So the entire argument block must then be preallocated (i.e., we
2431 ignore PUSH_ROUNDING in that case). */
2432
2433 int must_preallocate = !PUSH_ARGS;
2434
2435 /* Size of the stack reserved for parameter registers. */
2436 int reg_parm_stack_space = 0;
2437
2438 /* Address of space preallocated for stack parms
2439 (on machines that lack push insns), or 0 if space not preallocated. */
2440 rtx argblock = 0;
2441
2442 /* Mask of ECF_ and ERF_ flags. */
2443 int flags = 0;
2444 int return_flags = 0;
2445 #ifdef REG_PARM_STACK_SPACE
2446 /* Define the boundary of the register parm stack space that needs to be
2447 saved, if any. */
2448 int low_to_save, high_to_save;
2449 rtx save_area = 0; /* Place that it is saved */
2450 #endif
2451
2452 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2453 char *initial_stack_usage_map = stack_usage_map;
2454 char *stack_usage_map_buf = NULL;
2455
2456 int old_stack_allocated;
2457
2458 /* State variables to track stack modifications. */
2459 rtx old_stack_level = 0;
2460 int old_stack_arg_under_construction = 0;
2461 int old_pending_adj = 0;
2462 int old_inhibit_defer_pop = inhibit_defer_pop;
2463
2464 /* Some stack pointer alterations we make are performed via
2465 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2466 which we then also need to save/restore along the way. */
2467 int old_stack_pointer_delta = 0;
2468
2469 rtx call_fusage;
2470 tree addr = CALL_EXPR_FN (exp);
2471 int i;
2472 /* The alignment of the stack, in bits. */
2473 unsigned HOST_WIDE_INT preferred_stack_boundary;
2474 /* The alignment of the stack, in bytes. */
2475 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2476 /* The static chain value to use for this call. */
2477 rtx static_chain_value;
2478 /* See if this is "nothrow" function call. */
2479 if (TREE_NOTHROW (exp))
2480 flags |= ECF_NOTHROW;
2481
2482 /* See if we can find a DECL-node for the actual function, and get the
2483 function attributes (flags) from the function decl or type node. */
2484 fndecl = get_callee_fndecl (exp);
2485 if (fndecl)
2486 {
2487 fntype = TREE_TYPE (fndecl);
2488 flags |= flags_from_decl_or_type (fndecl);
2489 return_flags |= decl_return_flags (fndecl);
2490 }
2491 else
2492 {
2493 fntype = TREE_TYPE (TREE_TYPE (addr));
2494 flags |= flags_from_decl_or_type (fntype);
2495 }
2496 rettype = TREE_TYPE (exp);
2497
2498 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2499
2500 /* Warn if this value is an aggregate type,
2501 regardless of which calling convention we are using for it. */
2502 if (AGGREGATE_TYPE_P (rettype))
2503 warning (OPT_Waggregate_return, "function call has aggregate value");
2504
2505   /* If the result of a non-looping pure or const function call is
2506 ignored (or void), and none of its arguments are volatile, we can
2507 avoid expanding the call and just evaluate the arguments for
2508 side-effects. */
2509 if ((flags & (ECF_CONST | ECF_PURE))
2510 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2511 && (ignore || target == const0_rtx
2512 || TYPE_MODE (rettype) == VOIDmode))
2513 {
2514 bool volatilep = false;
2515 tree arg;
2516 call_expr_arg_iterator iter;
2517
2518 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2519 if (TREE_THIS_VOLATILE (arg))
2520 {
2521 volatilep = true;
2522 break;
2523 }
2524
2525 if (! volatilep)
2526 {
2527 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2528 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2529 return const0_rtx;
2530 }
2531 }
2532
2533 #ifdef REG_PARM_STACK_SPACE
2534 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2535 #endif
2536
2537 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2538 && reg_parm_stack_space > 0 && PUSH_ARGS)
2539 must_preallocate = 1;
2540
2541 /* Set up a place to return a structure. */
2542
2543 /* Cater to broken compilers. */
2544 if (aggregate_value_p (exp, fntype))
2545 {
2546 /* This call returns a big structure. */
2547 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2548
2549 #ifdef PCC_STATIC_STRUCT_RETURN
2550 {
2551 pcc_struct_value = 1;
2552 }
2553 #else /* not PCC_STATIC_STRUCT_RETURN */
2554 {
2555 struct_value_size = int_size_in_bytes (rettype);
2556
2557 /* Even if it is semantically safe to use the target as the return
2558 	 slot, it may not be sufficiently aligned for the return type.  */
2559 if (CALL_EXPR_RETURN_SLOT_OPT (exp)
2560 && target
2561 && MEM_P (target)
2562 && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
2563 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (rettype),
2564 MEM_ALIGN (target))))
2565 structure_value_addr = XEXP (target, 0);
2566 else
2567 {
2568 /* For variable-sized objects, we must be called with a target
2569 specified. If we were to allocate space on the stack here,
2570 we would have no way of knowing when to free it. */
2571 rtx d = assign_temp (rettype, 1, 1);
2572 structure_value_addr = XEXP (d, 0);
2573 target = 0;
2574 }
2575 }
2576 #endif /* not PCC_STATIC_STRUCT_RETURN */
2577 }
2578
2579 /* Figure out the amount to which the stack should be aligned. */
2580 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2581 if (fndecl)
2582 {
2583 struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
2584       /* Without automatic stack alignment, we can't increase the preferred
2585 	 stack boundary.  With automatic stack alignment, it is
2586 	 unnecessary, since unless we can guarantee that all callers will
2587 	 align the outgoing stack properly, the callee has to align its
2588 	 own stack anyway.  */
2589 if (i
2590 && i->preferred_incoming_stack_boundary
2591 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2592 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2593 }
2594
2595 /* Operand 0 is a pointer-to-function; get the type of the function. */
2596 funtype = TREE_TYPE (addr);
2597 gcc_assert (POINTER_TYPE_P (funtype));
2598 funtype = TREE_TYPE (funtype);
2599
2600 /* Count whether there are actual complex arguments that need to be split
2601 into their real and imaginary parts. Munge the type_arg_types
2602 appropriately here as well. */
2603 if (targetm.calls.split_complex_arg)
2604 {
2605 call_expr_arg_iterator iter;
2606 tree arg;
2607 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2608 {
2609 tree type = TREE_TYPE (arg);
2610 if (type && TREE_CODE (type) == COMPLEX_TYPE
2611 && targetm.calls.split_complex_arg (type))
2612 num_complex_actuals++;
2613 }
2614 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2615 }
2616 else
2617 type_arg_types = TYPE_ARG_TYPES (funtype);
2618
2619 if (flags & ECF_MAY_BE_ALLOCA)
2620 cfun->calls_alloca = 1;
2621
2622 /* If struct_value_rtx is 0, it means pass the address
2623 as if it were an extra parameter. Put the argument expression
2624 in structure_value_addr_value. */
2625 if (structure_value_addr && struct_value == 0)
2626 {
2627 /* If structure_value_addr is a REG other than
2628 	 virtual_outgoing_args_rtx, we can always use it.  If it
2629 is not a REG, we must always copy it into a register.
2630 If it is virtual_outgoing_args_rtx, we must copy it to another
2631 register in some cases. */
2632 rtx temp = (!REG_P (structure_value_addr)
2633 || (ACCUMULATE_OUTGOING_ARGS
2634 && stack_arg_under_construction
2635 && structure_value_addr == virtual_outgoing_args_rtx)
2636 ? copy_addr_to_reg (convert_memory_address
2637 (Pmode, structure_value_addr))
2638 : structure_value_addr);
2639
2640 structure_value_addr_value =
2641 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2642 structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
2643 }
2644
2645 /* Count the arguments and set NUM_ACTUALS. */
2646 num_actuals =
2647 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2648
2649 /* Compute number of named args.
2650 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2651
2652 if (type_arg_types != 0)
2653 n_named_args
2654 = (list_length (type_arg_types)
2655 /* Count the struct value address, if it is passed as a parm. */
2656 + structure_value_addr_parm);
2657 else
2658 /* If we know nothing, treat all args as named. */
2659 n_named_args = num_actuals;
2660
2661 /* Start updating where the next arg would go.
2662
2663 On some machines (such as the PA) indirect calls have a different
2664 calling convention than normal calls. The fourth argument in
2665 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2666 or not. */
2667 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
2668 args_so_far = pack_cumulative_args (&args_so_far_v);
2669
2670 /* Now possibly adjust the number of named args.
2671 Normally, don't include the last named arg if anonymous args follow.
2672 We do include the last named arg if
2673 targetm.calls.strict_argument_naming() returns nonzero.
2674 (If no anonymous args follow, the result of list_length is actually
2675 one too large. This is harmless.)
2676
2677 If targetm.calls.pretend_outgoing_varargs_named() returns
2678 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2679 this machine will be able to place unnamed args that were passed
2680 in registers into the stack. So treat all args as named. This
2681 allows the insns emitting for a specific argument list to be
2682 independent of the function declaration.
2683
2684 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2685 we do not have any reliable way to pass unnamed args in
2686 registers, so we must force them into memory. */
2687
2688 if (type_arg_types != 0
2689 && targetm.calls.strict_argument_naming (args_so_far))
2690 ;
2691 else if (type_arg_types != 0
2692 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
2693 /* Don't include the last named arg. */
2694 --n_named_args;
2695 else
2696 /* Treat all args as named. */
2697 n_named_args = num_actuals;
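  /* Illustrative example: for a varargs callee such as
     int f (int, ...) called with three actuals, list_length of the
     type list gives 1, so only the first argument starts out named;
     the hooks above then decide whether that last named arg is
     dropped, kept, or whether all three actuals are treated as
     named.  */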
2698
2699 /* Make a vector to hold all the information about each arg. */
2700 args = XALLOCAVEC (struct arg_data, num_actuals);
2701 memset (args, 0, num_actuals * sizeof (struct arg_data));
2702
2703 /* Build up entries in the ARGS array, compute the size of the
2704 arguments into ARGS_SIZE, etc. */
2705 initialize_argument_information (num_actuals, args, &args_size,
2706 n_named_args, exp,
2707 structure_value_addr_value, fndecl, fntype,
2708 args_so_far, reg_parm_stack_space,
2709 &old_stack_level, &old_pending_adj,
2710 &must_preallocate, &flags,
2711 &try_tail_call, CALL_FROM_THUNK_P (exp));
2712
2713 if (args_size.var)
2714 must_preallocate = 1;
2715
2716 /* Now make final decision about preallocating stack space. */
2717 must_preallocate = finalize_must_preallocate (must_preallocate,
2718 num_actuals, args,
2719 &args_size);
2720
2721 /* If the structure value address will reference the stack pointer, we
2722 must stabilize it. We don't need to do this if we know that we are
2723 not going to adjust the stack pointer in processing this call. */
2724
2725 if (structure_value_addr
2726 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2727 || reg_mentioned_p (virtual_outgoing_args_rtx,
2728 structure_value_addr))
2729 && (args_size.var
2730 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2731 structure_value_addr = copy_to_reg (structure_value_addr);
2732
2733 /* Tail calls can make things harder to debug, and we've traditionally
2734 pushed these optimizations into -O2. Don't try if we're already
2735 expanding a call, as that means we're an argument. Don't try if
2736 there's cleanups, as we know there's code to follow the call. */
2737
2738 if (currently_expanding_call++ != 0
2739 || !flag_optimize_sibling_calls
2740 || args_size.var
2741 || dbg_cnt (tail_call) == false)
2742 try_tail_call = 0;
2743
2744   /* Other reasons for tail call optimization to fail.  */
2745 if (
2746 #ifdef HAVE_sibcall_epilogue
2747 !HAVE_sibcall_epilogue
2748 #else
2749 1
2750 #endif
2751 || !try_tail_call
2752 /* Doing sibling call optimization needs some work, since
2753 structure_value_addr can be allocated on the stack.
2754 It does not seem worth the effort since few optimizable
2755 sibling calls will return a structure. */
2756 || structure_value_addr != NULL_RTX
2757 #ifdef REG_PARM_STACK_SPACE
2758       /* If outgoing reg parm stack space changes, we cannot do a sibcall.  */
2759 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2760 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2761 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl))
2762 #endif
2763 /* Check whether the target is able to optimize the call
2764 into a sibcall. */
2765 || !targetm.function_ok_for_sibcall (fndecl, exp)
2766 /* Functions that do not return exactly once may not be sibcall
2767 optimized. */
2768 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2769 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2770 /* If the called function is nested in the current one, it might access
2771 some of the caller's arguments, but could clobber them beforehand if
2772 the argument areas are shared. */
2773 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2774 /* If this function requires more stack slots than the current
2775 function, we cannot change it into a sibling call.
2776 crtl->args.pretend_args_size is not part of the
2777 stack allocated by our caller. */
2778 || args_size.constant > (crtl->args.size
2779 - crtl->args.pretend_args_size)
2780 /* If the callee pops its own arguments, then it must pop exactly
2781 the same number of arguments as the current function. */
2782 || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2783 != targetm.calls.return_pops_args (current_function_decl,
2784 TREE_TYPE (current_function_decl),
2785 crtl->args.size))
2786 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2787 try_tail_call = 0;
2788
2789 /* Check if caller and callee disagree in promotion of function
2790 return value. */
2791 if (try_tail_call)
2792 {
2793 machine_mode caller_mode, caller_promoted_mode;
2794 machine_mode callee_mode, callee_promoted_mode;
2795 int caller_unsignedp, callee_unsignedp;
2796 tree caller_res = DECL_RESULT (current_function_decl);
2797
2798 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2799 caller_mode = DECL_MODE (caller_res);
2800 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2801 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2802 caller_promoted_mode
2803 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2804 &caller_unsignedp,
2805 TREE_TYPE (current_function_decl), 1);
2806 callee_promoted_mode
2807 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
2808 &callee_unsignedp,
2809 funtype, 1);
2810 if (caller_mode != VOIDmode
2811 && (caller_promoted_mode != callee_promoted_mode
2812 || ((caller_mode != caller_promoted_mode
2813 || callee_mode != callee_promoted_mode)
2814 && (caller_unsignedp != callee_unsignedp
2815 || GET_MODE_BITSIZE (caller_mode)
2816 < GET_MODE_BITSIZE (callee_mode)))))
2817 try_tail_call = 0;
2818 }
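  /* Illustrative mismatch: if the caller returns a signed char and the
     callee an unsigned char, both promoted to SImode, the promoted
     modes agree but the signedness differs, so the caller cannot
     safely reuse the value left in the callee's return register and
     the tail call is abandoned.  */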
2819
2820 /* Ensure current function's preferred stack boundary is at least
2821 what we need. Stack alignment may also increase preferred stack
2822 boundary. */
2823 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
2824 crtl->preferred_stack_boundary = preferred_stack_boundary;
2825 else
2826 preferred_stack_boundary = crtl->preferred_stack_boundary;
2827
2828 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2829
2830 /* We want to make two insn chains; one for a sibling call, the other
2831 for a normal call. We will select one of the two chains after
2832 initial RTL generation is complete. */
2833 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2834 {
2835 int sibcall_failure = 0;
2836 /* We want to emit any pending stack adjustments before the tail
2837 recursion "call". That way we know any adjustment after the tail
2838 recursion call can be ignored if we indeed use the tail
2839 call expansion. */
2840 saved_pending_stack_adjust save;
2841 rtx_insn *insns, *before_call, *after_args;
2842 rtx next_arg_reg;
2843
2844 if (pass == 0)
2845 {
2846 /* State variables we need to save and restore between
2847 iterations. */
2848 save_pending_stack_adjust (&save);
2849 }
2850 if (pass)
2851 flags &= ~ECF_SIBCALL;
2852 else
2853 flags |= ECF_SIBCALL;
2854
2855 /* Other state variables that we must reinitialize each time
2856 through the loop (that are not initialized by the loop itself). */
2857 argblock = 0;
2858 call_fusage = 0;
2859
2860 /* Start a new sequence for the normal call case.
2861
2862 From this point on, if the sibling call fails, we want to set
2863 sibcall_failure instead of continuing the loop. */
2864 start_sequence ();
2865
2866 /* Don't let pending stack adjusts add up to too much.
2867 Also, do all pending adjustments now if there is any chance
2868 this might be a call to alloca or if we are expanding a sibling
2869 call sequence.
2870 Also do the adjustments before a throwing call, otherwise
2871 exception handling can fail; PR 19225. */
2872 if (pending_stack_adjust >= 32
2873 || (pending_stack_adjust > 0
2874 && (flags & ECF_MAY_BE_ALLOCA))
2875 || (pending_stack_adjust > 0
2876 && flag_exceptions && !(flags & ECF_NOTHROW))
2877 || pass == 0)
2878 do_pending_stack_adjust ();
2879
2880 /* Precompute any arguments as needed. */
2881 if (pass)
2882 precompute_arguments (num_actuals, args);
2883
2884 /* Now we are about to start emitting insns that can be deleted
2885 if a libcall is deleted. */
2886 if (pass && (flags & ECF_MALLOC))
2887 start_sequence ();
2888
2889 if (pass == 0 && crtl->stack_protect_guard)
2890 stack_protect_epilogue ();
2891
2892 adjusted_args_size = args_size;
2893 /* Compute the actual size of the argument block required. The variable
2894 and constant sizes must be combined, the size may have to be rounded,
2895 and there may be a minimum required size. When generating a sibcall
2896 pattern, do not round up, since we'll be re-using whatever space our
2897 caller provided. */
2898 unadjusted_args_size
2899 = compute_argument_block_size (reg_parm_stack_space,
2900 &adjusted_args_size,
2901 fndecl, fntype,
2902 (pass == 0 ? 0
2903 : preferred_stack_boundary));
2904
2905 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2906
2907 /* The argument block when performing a sibling call is the
2908 incoming argument block. */
2909 if (pass == 0)
2910 {
2911 argblock = crtl->args.internal_arg_pointer;
2912 argblock
2913 #ifdef STACK_GROWS_DOWNWARD
2914 = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
2915 #else
2916 = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
2917 #endif
2918 stored_args_map = sbitmap_alloc (args_size.constant);
2919 bitmap_clear (stored_args_map);
2920 }
2921
2922 /* If we have no actual push instructions, or shouldn't use them,
2923 make space for all args right now. */
2924 else if (adjusted_args_size.var != 0)
2925 {
2926 if (old_stack_level == 0)
2927 {
2928 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2929 old_stack_pointer_delta = stack_pointer_delta;
2930 old_pending_adj = pending_stack_adjust;
2931 pending_stack_adjust = 0;
2932 /* stack_arg_under_construction says whether a stack arg is
2933 being constructed at the old stack level. Pushing the stack
2934 gets a clean outgoing argument block. */
2935 old_stack_arg_under_construction = stack_arg_under_construction;
2936 stack_arg_under_construction = 0;
2937 }
2938 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2939 if (flag_stack_usage_info)
2940 current_function_has_unbounded_dynamic_stack_size = 1;
2941 }
2942 else
2943 {
2944 /* Note that we must go through the motions of allocating an argument
2945 block even if the size is zero because we may be storing args
2946 in the area reserved for register arguments, which may be part of
2947 the stack frame. */
2948
2949 int needed = adjusted_args_size.constant;
2950
2951 /* Store the maximum argument space used. It will be pushed by
2952 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2953 checking). */
2954
2955 if (needed > crtl->outgoing_args_size)
2956 crtl->outgoing_args_size = needed;
2957
2958 if (must_preallocate)
2959 {
2960 if (ACCUMULATE_OUTGOING_ARGS)
2961 {
2962 /* Since the stack pointer will never be pushed, it is
2963 possible for the evaluation of a parm to clobber
2964 something we have already written to the stack.
2965 Since most function calls on RISC machines do not use
2966 the stack, this is uncommon, but must work correctly.
2967
2968 Therefore, we save any area of the stack that was already
2969 written and that we are using. Here we set up to do this
2970 by making a new stack usage map from the old one. The
2971 actual save will be done by store_one_arg.
2972
2973 Another approach might be to try to reorder the argument
2974 evaluations to avoid this conflicting stack usage. */
2975
2976 /* Since we will be writing into the entire argument area,
2977 the map must be allocated for its entire size, not just
2978 the part that is the responsibility of the caller. */
2979 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2980 needed += reg_parm_stack_space;
2981
2982 #ifdef ARGS_GROW_DOWNWARD
2983 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2984 needed + 1);
2985 #else
2986 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2987 needed);
2988 #endif
2989 free (stack_usage_map_buf);
2990 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2991 stack_usage_map = stack_usage_map_buf;
2992
2993 if (initial_highest_arg_in_use)
2994 memcpy (stack_usage_map, initial_stack_usage_map,
2995 initial_highest_arg_in_use);
2996
2997 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2998 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2999 (highest_outgoing_arg_in_use
3000 - initial_highest_arg_in_use));
3001 needed = 0;
3002
3003 /* The address of the outgoing argument list must not be
3004 copied to a register here, because argblock would be left
3005 pointing to the wrong place after the call to
3006 allocate_dynamic_stack_space below. */
3007
3008 argblock = virtual_outgoing_args_rtx;
3009 }
3010 else
3011 {
3012 if (inhibit_defer_pop == 0)
3013 {
3014 /* Try to reuse some or all of the pending_stack_adjust
3015 to get this space. */
3016 needed
3017 = (combine_pending_stack_adjustment_and_call
3018 (unadjusted_args_size,
3019 &adjusted_args_size,
3020 preferred_unit_stack_boundary));
3021
3022 /* combine_pending_stack_adjustment_and_call computes
3023 an adjustment before the arguments are allocated.
3024 Account for them and see whether or not the stack
3025 needs to go up or down. */
3026 needed = unadjusted_args_size - needed;
3027
3028 if (needed < 0)
3029 {
3030 /* We're releasing stack space. */
3031 /* ??? We can avoid any adjustment at all if we're
3032 already aligned. FIXME. */
3033 pending_stack_adjust = -needed;
3034 do_pending_stack_adjust ();
3035 needed = 0;
3036 }
3037 else
3038 /* We need to allocate space. We'll do that in
3039 push_block below. */
3040 pending_stack_adjust = 0;
3041 }
3042
3043 /* Special case this because overhead of `push_block' in
3044 this case is non-trivial. */
3045 if (needed == 0)
3046 argblock = virtual_outgoing_args_rtx;
3047 else
3048 {
3049 argblock = push_block (GEN_INT (needed), 0, 0);
3050 #ifdef ARGS_GROW_DOWNWARD
3051 argblock = plus_constant (Pmode, argblock, needed);
3052 #endif
3053 }
3054
3055 /* We only really need to call `copy_to_reg' in the case
3056 where push insns are going to be used to pass ARGBLOCK
3057 to a function call in ARGS. In that case, the stack
3058 pointer changes value from the allocation point to the
3059 call point, and hence the value of
3060 		 VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But we might
3061 		 as well always do it.  */
3062 argblock = copy_to_reg (argblock);
3063 }
3064 }
3065 }
3066
3067 if (ACCUMULATE_OUTGOING_ARGS)
3068 {
3069 /* The save/restore code in store_one_arg handles all
3070 cases except one: a constructor call (including a C
3071 function returning a BLKmode struct) to initialize
3072 an argument. */
3073 if (stack_arg_under_construction)
3074 {
3075 rtx push_size
3076 = GEN_INT (adjusted_args_size.constant
3077 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
3078 : TREE_TYPE (fndecl))) ? 0
3079 : reg_parm_stack_space));
3080 if (old_stack_level == 0)
3081 {
3082 emit_stack_save (SAVE_BLOCK, &old_stack_level);
3083 old_stack_pointer_delta = stack_pointer_delta;
3084 old_pending_adj = pending_stack_adjust;
3085 pending_stack_adjust = 0;
3086 /* stack_arg_under_construction says whether a stack
3087 arg is being constructed at the old stack level.
3088 Pushing the stack gets a clean outgoing argument
3089 block. */
3090 old_stack_arg_under_construction
3091 = stack_arg_under_construction;
3092 stack_arg_under_construction = 0;
3093 /* Make a new map for the new argument list. */
3094 free (stack_usage_map_buf);
3095 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
3096 stack_usage_map = stack_usage_map_buf;
3097 highest_outgoing_arg_in_use = 0;
3098 }
3099 /* We can pass TRUE as the 4th argument because we just
3100 saved the stack pointer and will restore it right after
3101 the call. */
3102 allocate_dynamic_stack_space (push_size, 0,
3103 BIGGEST_ALIGNMENT, true);
3104 }
3105
3106 /* If argument evaluation might modify the stack pointer,
3107 copy the address of the argument list to a register. */
3108 for (i = 0; i < num_actuals; i++)
3109 if (args[i].pass_on_stack)
3110 {
3111 argblock = copy_addr_to_reg (argblock);
3112 break;
3113 }
3114 }
3115
3116 compute_argument_addresses (args, argblock, num_actuals);
3117
3118 /* Perform stack alignment before the first push (the last arg). */
3119 if (argblock == 0
3120 && adjusted_args_size.constant > reg_parm_stack_space
3121 && adjusted_args_size.constant != unadjusted_args_size)
3122 {
3123 /* When the stack adjustment is pending, we get better code
3124 by combining the adjustments. */
3125 if (pending_stack_adjust
3126 && ! inhibit_defer_pop)
3127 {
3128 pending_stack_adjust
3129 = (combine_pending_stack_adjustment_and_call
3130 (unadjusted_args_size,
3131 &adjusted_args_size,
3132 preferred_unit_stack_boundary));
3133 do_pending_stack_adjust ();
3134 }
3135 else if (argblock == 0)
3136 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3137 - unadjusted_args_size));
3138 }
3139 /* Now that the stack is properly aligned, pops can't safely
3140 be deferred during the evaluation of the arguments. */
3141 NO_DEFER_POP;
3142
3143 /* Record the maximum pushed stack space size. We need to delay
3144 doing it this far to take into account the optimization done
3145 by combine_pending_stack_adjustment_and_call. */
3146 if (flag_stack_usage_info
3147 && !ACCUMULATE_OUTGOING_ARGS
3148 && pass
3149 && adjusted_args_size.var == 0)
3150 {
3151 int pushed = adjusted_args_size.constant + pending_stack_adjust;
3152 if (pushed > current_function_pushed_stack_size)
3153 current_function_pushed_stack_size = pushed;
3154 }
3155
3156 funexp = rtx_for_function_call (fndecl, addr);
3157
3158 /* Precompute all register parameters. It isn't safe to compute anything
3159 once we have started filling any specific hard regs. */
3160 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3161
3162 if (CALL_EXPR_STATIC_CHAIN (exp))
3163 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
3164 else
3165 static_chain_value = 0;
3166
3167 #ifdef REG_PARM_STACK_SPACE
3168 /* Save the fixed argument area if it's part of the caller's frame and
3169 is clobbered by argument setup for this call. */
3170 if (ACCUMULATE_OUTGOING_ARGS && pass)
3171 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3172 &low_to_save, &high_to_save);
3173 #endif
3174
3175 /* Now store (and compute if necessary) all non-register parms.
3176 These come before register parms, since they can require block-moves,
3177 which could clobber the registers used for register parms.
3178 Parms which have partial registers are not stored here,
3179 but we do preallocate space here if they want that. */
3180
3181 for (i = 0; i < num_actuals; i++)
3182 {
3183 /* Delay bounds until all other args are stored. */
3184 if (POINTER_BOUNDS_P (args[i].tree_value))
3185 continue;
3186 else if (args[i].reg == 0 || args[i].pass_on_stack)
3187 {
3188 rtx_insn *before_arg = get_last_insn ();
3189
3190 /* We don't allow passing huge (> 2^30 B) arguments
3191 by value. It would cause an overflow later on. */
3192 if (adjusted_args_size.constant
3193 >= (1 << (HOST_BITS_PER_INT - 2)))
3194 {
3195 sorry ("passing too large argument on stack");
3196 continue;
3197 }
3198
3199 if (store_one_arg (&args[i], argblock, flags,
3200 adjusted_args_size.var != 0,
3201 reg_parm_stack_space)
3202 || (pass == 0
3203 && check_sibcall_argument_overlap (before_arg,
3204 &args[i], 1)))
3205 sibcall_failure = 1;
3206 }
3207
3208 if (args[i].stack)
3209 call_fusage
3210 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3211 gen_rtx_USE (VOIDmode, args[i].stack),
3212 call_fusage);
3213 }
3214
3215 /* If we have a parm that is passed in registers but not in memory
3216 and whose alignment does not permit a direct copy into registers,
3217 make a group of pseudos that correspond to each register that we
3218 will later fill. */
3219 if (STRICT_ALIGNMENT)
3220 store_unaligned_arguments_into_pseudos (args, num_actuals);
3221
3222 /* Now store any partially-in-registers parm.
3223 This is the last place a block-move can happen. */
3224 if (reg_parm_seen)
3225 for (i = 0; i < num_actuals; i++)
3226 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3227 {
3228 rtx_insn *before_arg = get_last_insn ();
3229
3230 if (store_one_arg (&args[i], argblock, flags,
3231 adjusted_args_size.var != 0,
3232 reg_parm_stack_space)
3233 || (pass == 0
3234 && check_sibcall_argument_overlap (before_arg,
3235 &args[i], 1)))
3236 sibcall_failure = 1;
3237 }
3238
3239 bool any_regs = false;
3240 for (i = 0; i < num_actuals; i++)
3241 if (args[i].reg != NULL_RTX)
3242 {
3243 any_regs = true;
3244 targetm.calls.call_args (args[i].reg, funtype);
3245 }
3246 if (!any_regs)
3247 targetm.calls.call_args (pc_rtx, funtype);
3248
3249 /* Figure out the register where the value, if any, will come back. */
3250 valreg = 0;
3251 valbnd = 0;
3252 if (TYPE_MODE (rettype) != VOIDmode
3253 && ! structure_value_addr)
3254 {
3255 if (pcc_struct_value)
3256 {
3257 valreg = hard_function_value (build_pointer_type (rettype),
3258 fndecl, NULL, (pass == 0));
3259 if (CALL_WITH_BOUNDS_P (exp))
3260 valbnd = targetm.calls.
3261 chkp_function_value_bounds (build_pointer_type (rettype),
3262 fndecl, (pass == 0));
3263 }
3264 else
3265 {
3266 valreg = hard_function_value (rettype, fndecl, fntype,
3267 (pass == 0));
3268 if (CALL_WITH_BOUNDS_P (exp))
3269 valbnd = targetm.calls.chkp_function_value_bounds (rettype,
3270 fndecl,
3271 (pass == 0));
3272 }
3273
3274 /* If VALREG is a PARALLEL whose first member has a zero
3275 offset, use that. This is for targets such as m68k that
3276 return the same value in multiple places. */
3277 if (GET_CODE (valreg) == PARALLEL)
3278 {
3279 rtx elem = XVECEXP (valreg, 0, 0);
3280 rtx where = XEXP (elem, 0);
3281 rtx offset = XEXP (elem, 1);
3282 if (offset == const0_rtx
3283 && GET_MODE (where) == GET_MODE (valreg))
3284 valreg = where;
3285 }
3286 }
3287
3288 /* Store all bounds not passed in registers. */
3289 for (i = 0; i < num_actuals; i++)
3290 {
3291 if (POINTER_BOUNDS_P (args[i].tree_value)
3292 && !args[i].reg)
3293 store_bounds (&args[i],
3294 args[i].pointer_arg == -1
3295 ? NULL
3296 : &args[args[i].pointer_arg]);
3297 }
3298
3299 /* If register arguments require space on the stack and stack space
3300 was not preallocated, allocate stack space here for arguments
3301 passed in registers. */
3302 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3303 && !ACCUMULATE_OUTGOING_ARGS
3304 && must_preallocate == 0 && reg_parm_stack_space > 0)
3305 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3306
3307 /* Pass the function the address in which to return a
3308 structure value. */
3309 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3310 {
3311 structure_value_addr
3312 = convert_memory_address (Pmode, structure_value_addr);
3313 emit_move_insn (struct_value,
3314 force_reg (Pmode,
3315 force_operand (structure_value_addr,
3316 NULL_RTX)));
3317
3318 if (REG_P (struct_value))
3319 use_reg (&call_fusage, struct_value);
3320 }
3321
3322 after_args = get_last_insn ();
3323 funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
3324 static_chain_value, &call_fusage,
3325 reg_parm_seen, pass == 0);
3326
3327 load_register_parameters (args, num_actuals, &call_fusage, flags,
3328 pass == 0, &sibcall_failure);
3329
3330 /* Save a pointer to the last insn before the call, so that we can
3331 later safely search backwards to find the CALL_INSN. */
3332 before_call = get_last_insn ();
3333
3334 /* Set up next argument register. For sibling calls on machines
3335 with register windows this should be the incoming register. */
3336 if (pass == 0)
3337 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
3338 VOIDmode,
3339 void_type_node,
3340 true);
3341 else
3342 next_arg_reg = targetm.calls.function_arg (args_so_far,
3343 VOIDmode, void_type_node,
3344 true);
3345
3346 if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
3347 {
3348 int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
3349 arg_nr = num_actuals - arg_nr - 1;
3350 if (arg_nr >= 0
3351 && arg_nr < num_actuals
3352 && args[arg_nr].reg
3353 && valreg
3354 && REG_P (valreg)
3355 && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
3356 call_fusage
3357 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
3358 gen_rtx_SET (VOIDmode, valreg, args[arg_nr].reg),
3359 call_fusage);
3360 }
3361 /* All arguments and registers used for the call must be set up by
3362 now! */
3363
3364 /* Stack must be properly aligned now. */
3365 gcc_assert (!pass
3366 || !(stack_pointer_delta % preferred_unit_stack_boundary));
3367
3368 /* Generate the actual call instruction. */
3369 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
3370 adjusted_args_size.constant, struct_value_size,
3371 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3372 flags, args_so_far);
3373
3374 if (flag_ipa_ra)
3375 {
3376 rtx_call_insn *last;
3377 rtx datum = NULL_RTX;
3378 if (fndecl != NULL_TREE)
3379 {
3380 datum = XEXP (DECL_RTL (fndecl), 0);
3381 gcc_assert (datum != NULL_RTX
3382 && GET_CODE (datum) == SYMBOL_REF);
3383 }
3384 last = last_call_insn ();
3385 add_reg_note (last, REG_CALL_DECL, datum);
3386 }
3387
3388 /* If the call setup or the call itself overlaps with anything
3389 of the argument setup we probably clobbered our call address.
3390 In that case we can't do sibcalls. */
3391 if (pass == 0
3392 && check_sibcall_argument_overlap (after_args, 0, 0))
3393 sibcall_failure = 1;
3394
3395 /* If a non-BLKmode value is returned at the most significant end
3396 of a register, shift the register right by the appropriate amount
3397 and update VALREG accordingly. BLKmode values are handled by the
3398 group load/store machinery below. */
3399 if (!structure_value_addr
3400 && !pcc_struct_value
3401 && TYPE_MODE (rettype) != VOIDmode
3402 && TYPE_MODE (rettype) != BLKmode
3403 && REG_P (valreg)
3404 && targetm.calls.return_in_msb (rettype))
3405 {
3406 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
3407 sibcall_failure = 1;
3408 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
3409 }
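/* As a concrete illustration (assuming a big-endian target whose
return_in_msb hook is true): an HImode value coming back in a
32-bit VALREG sits in the top 16 bits, so shift_return_value
emits a 16-bit right shift, after which VALREG is re-created in
HImode so the value reads normally from the least significant
end. */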
3410
3411 if (pass && (flags & ECF_MALLOC))
3412 {
3413 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3414 rtx_insn *last, *insns;
3415
3416 /* The return value from a malloc-like function is a pointer. */
3417 if (TREE_CODE (rettype) == POINTER_TYPE)
3418 mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
3419
3420 emit_move_insn (temp, valreg);
3421
3422 /* The return value from a malloc-like function cannot alias
3423 anything else. */
3424 last = get_last_insn ();
3425 add_reg_note (last, REG_NOALIAS, temp);
3426
3427 /* Write out the sequence. */
3428 insns = get_insns ();
3429 end_sequence ();
3430 emit_insn (insns);
3431 valreg = temp;
3432 }
3433
3434 /* For calls to `setjmp', etc., inform
3435 function.c:setjmp_warnings that it should complain if
3436 nonvolatile values are live. For functions that cannot
3437 return, inform flow that control does not fall through. */
3438
3439 if ((flags & ECF_NORETURN) || pass == 0)
3440 {
3441 /* The barrier must be emitted
3442 immediately after the CALL_INSN. Some ports emit more
3443 than just a CALL_INSN above, so we must search for it here. */
3444
3445 rtx_insn *last = get_last_insn ();
3446 while (!CALL_P (last))
3447 {
3448 last = PREV_INSN (last);
3449 /* There was no CALL_INSN? */
3450 gcc_assert (last != before_call);
3451 }
3452
3453 emit_barrier_after (last);
3454
3455 /* Stack adjustments after a noreturn call are dead code.
3456 However when NO_DEFER_POP is in effect, we must preserve
3457 stack_pointer_delta. */
3458 if (inhibit_defer_pop == 0)
3459 {
3460 stack_pointer_delta = old_stack_allocated;
3461 pending_stack_adjust = 0;
3462 }
3463 }
3464
3465 /* If value type not void, return an rtx for the value. */
3466
3467 if (TYPE_MODE (rettype) == VOIDmode
3468 || ignore)
3469 target = const0_rtx;
3470 else if (structure_value_addr)
3471 {
3472 if (target == 0 || !MEM_P (target))
3473 {
3474 target
3475 = gen_rtx_MEM (TYPE_MODE (rettype),
3476 memory_address (TYPE_MODE (rettype),
3477 structure_value_addr));
3478 set_mem_attributes (target, rettype, 1);
3479 }
3480 }
3481 else if (pcc_struct_value)
3482 {
3483 /* This is the special C++ case where we need to
3484 know what the true target was. We take care to
3485 never use this value more than once in one expression. */
3486 target = gen_rtx_MEM (TYPE_MODE (rettype),
3487 copy_to_reg (valreg));
3488 set_mem_attributes (target, rettype, 1);
3489 }
3490 /* Handle calls that return values in multiple non-contiguous locations.
3491 The Irix 6 ABI has examples of this. */
3492 else if (GET_CODE (valreg) == PARALLEL)
3493 {
3494 if (target == 0)
3495 target = emit_group_move_into_temps (valreg);
3496 else if (rtx_equal_p (target, valreg))
3497 ;
3498 else if (GET_CODE (target) == PARALLEL)
3499 /* Handle the result of an emit_group_move_into_temps
3500 call in the previous pass. */
3501 emit_group_move (target, valreg);
3502 else
3503 emit_group_store (target, valreg, rettype,
3504 int_size_in_bytes (rettype));
3505 }
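/* For instance, a 16-byte structure returned in two floating-point
registers might come back as (register numbers illustrative)

(parallel [(expr_list (reg:DF 32) (const_int 0))
(expr_list (reg:DF 33) (const_int 8))])

i.e. bytes 0-7 in reg 32 and bytes 8-15 in reg 33, which
emit_group_store scatters into TARGET piece by piece. */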
3506 else if (target
3507 && GET_MODE (target) == TYPE_MODE (rettype)
3508 && GET_MODE (target) == GET_MODE (valreg))
3509 {
3510 bool may_overlap = false;
3511
3512 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3513 reg to a plain register. */
3514 if (!REG_P (target) || HARD_REGISTER_P (target))
3515 valreg = avoid_likely_spilled_reg (valreg);
3516
3517 /* If TARGET is a MEM in the argument area, and we have
3518 saved part of the argument area, then we can't store
3519 directly into TARGET as it may get overwritten when we
3520 restore the argument save area below. Don't work too
3521 hard though and simply force TARGET to a register if it
3522 is a MEM; the optimizer is quite likely to sort it out. */
3523 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3524 for (i = 0; i < num_actuals; i++)
3525 if (args[i].save_area)
3526 {
3527 may_overlap = true;
3528 break;
3529 }
3530
3531 if (may_overlap)
3532 target = copy_to_reg (valreg);
3533 else
3534 {
3535 /* TARGET and VALREG cannot be equal at this point
3536 because the latter would not have
3537 REG_FUNCTION_VALUE_P true, while the former would if
3538 it were referring to the same register.
3539
3540 If they refer to the same register, this move will be
3541 a no-op, except when function inlining is being
3542 done. */
3543 emit_move_insn (target, valreg);
3544
3545 /* If we are setting a MEM, this code must be executed.
3546 Since it is emitted after the call insn, sibcall
3547 optimization cannot be performed in that case. */
3548 if (MEM_P (target))
3549 sibcall_failure = 1;
3550 }
3551 }
3552 else
3553 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3554
3555 /* If we promoted this return value, make the proper SUBREG.
3556 TARGET might be const0_rtx here, so be careful. */
3557 if (REG_P (target)
3558 && TYPE_MODE (rettype) != BLKmode
3559 && GET_MODE (target) != TYPE_MODE (rettype))
3560 {
3561 tree type = rettype;
3562 int unsignedp = TYPE_UNSIGNED (type);
3563 int offset = 0;
3564 machine_mode pmode;
3565
3566 /* Ensure we promote as expected, and get the new unsignedness. */
3567 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3568 funtype, 1);
3569 gcc_assert (GET_MODE (target) == pmode);
3570
3571 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3572 && (GET_MODE_SIZE (GET_MODE (target))
3573 > GET_MODE_SIZE (TYPE_MODE (type))))
3574 {
3575 offset = GET_MODE_SIZE (GET_MODE (target))
3576 - GET_MODE_SIZE (TYPE_MODE (type));
3577 if (! BYTES_BIG_ENDIAN)
3578 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3579 else if (! WORDS_BIG_ENDIAN)
3580 offset %= UNITS_PER_WORD;
3581 }
3582
3583 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3584 SUBREG_PROMOTED_VAR_P (target) = 1;
3585 SUBREG_PROMOTED_SET (target, unsignedp);
3586 }
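/* A worked case, assuming SImode promoted to DImode on a 64-bit
target with both BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN: OFFSET
starts as 8 - 4 = 4 and neither adjustment above applies, so the
SUBREG reads the four bytes at byte offset 4 -- exactly where the
SImode payload of the promoted DImode register lives on a
big-endian machine. */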
3587
3588 /* If size of args is variable or this was a constructor call for a stack
3589 argument, restore saved stack-pointer value. */
3590
3591 if (old_stack_level)
3592 {
3593 rtx_insn *prev = get_last_insn ();
3594
3595 emit_stack_restore (SAVE_BLOCK, old_stack_level);
3596 stack_pointer_delta = old_stack_pointer_delta;
3597
3598 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
3599
3600 pending_stack_adjust = old_pending_adj;
3601 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3602 stack_arg_under_construction = old_stack_arg_under_construction;
3603 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3604 stack_usage_map = initial_stack_usage_map;
3605 sibcall_failure = 1;
3606 }
3607 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3608 {
3609 #ifdef REG_PARM_STACK_SPACE
3610 if (save_area)
3611 restore_fixed_argument_area (save_area, argblock,
3612 high_to_save, low_to_save);
3613 #endif
3614
3615 /* If we saved any argument areas, restore them. */
3616 for (i = 0; i < num_actuals; i++)
3617 if (args[i].save_area)
3618 {
3619 machine_mode save_mode = GET_MODE (args[i].save_area);
3620 rtx stack_area
3621 = gen_rtx_MEM (save_mode,
3622 memory_address (save_mode,
3623 XEXP (args[i].stack_slot, 0)));
3624
3625 if (save_mode != BLKmode)
3626 emit_move_insn (stack_area, args[i].save_area);
3627 else
3628 emit_block_move (stack_area, args[i].save_area,
3629 GEN_INT (args[i].locate.size.constant),
3630 BLOCK_OP_CALL_PARM);
3631 }
3632
3633 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3634 stack_usage_map = initial_stack_usage_map;
3635 }
3636
3637 /* If this was alloca, record the new stack level for nonlocal gotos.
3638 Check for the handler slots since we might not have a save area
3639 for non-local gotos. */
3640
3641 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3642 update_nonlocal_goto_save_area ();
3643
3644 /* Free up storage we no longer need. */
3645 for (i = 0; i < num_actuals; ++i)
3646 free (args[i].aligned_regs);
3647
3648 targetm.calls.end_call_args ();
3649
3650 insns = get_insns ();
3651 end_sequence ();
3652
3653 if (pass == 0)
3654 {
3655 tail_call_insns = insns;
3656
3657 /* Restore the pending stack adjustment now that we have
3658 finished generating the sibling call sequence. */
3659
3660 restore_pending_stack_adjust (&save);
3661
3662 /* Prepare arg structure for next iteration. */
3663 for (i = 0; i < num_actuals; i++)
3664 {
3665 args[i].value = 0;
3666 args[i].aligned_regs = 0;
3667 args[i].stack = 0;
3668 }
3669
3670 sbitmap_free (stored_args_map);
3671 internal_arg_pointer_exp_state.scan_start = NULL;
3672 internal_arg_pointer_exp_state.cache.release ();
3673 }
3674 else
3675 {
3676 normal_call_insns = insns;
3677
3678 /* Verify that we've deallocated all the stack we used. */
3679 gcc_assert ((flags & ECF_NORETURN)
3680 || (old_stack_allocated
3681 == stack_pointer_delta - pending_stack_adjust));
3682 }
3683
3684 /* If something prevents making this a sibling call,
3685 zero out the sequence. */
3686 if (sibcall_failure)
3687 tail_call_insns = NULL;
3688 else
3689 break;
3690 }
3691
3692 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3693 arguments too, as the argument area is now clobbered by the call. */
3694 if (tail_call_insns)
3695 {
3696 emit_insn (tail_call_insns);
3697 crtl->tail_call_emit = true;
3698 }
3699 else
3700 emit_insn (normal_call_insns);
3701
3702 currently_expanding_call--;
3703
3704 free (stack_usage_map_buf);
3705
3706 /* Join the result with the returned bounds so the caller may use them if needed. */
3707 target = chkp_join_splitted_slot (target, valbnd);
3708
3709 return target;
3710 }
3711
3712 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3713 this function's incoming arguments.
3714
3715 At the start of RTL generation we know the only REG_EQUIV notes
3716 in the rtl chain are those for incoming arguments, so we can look
3717 for REG_EQUIV notes between the start of the function and the
3718 NOTE_INSN_FUNCTION_BEG.
3719
3720 This is (slight) overkill. We could keep track of the highest
3721 argument we clobber and be more selective in removing notes, but it
3722 does not seem to be worth the effort. */
3723
3724 void
3725 fixup_tail_calls (void)
3726 {
3727 rtx_insn *insn;
3728
3729 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3730 {
3731 rtx note;
3732
3733 /* There are never REG_EQUIV notes for the incoming arguments
3734 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3735 if (NOTE_P (insn)
3736 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3737 break;
3738
3739 note = find_reg_note (insn, REG_EQUIV, 0);
3740 if (note)
3741 remove_note (insn, note);
3742 note = find_reg_note (insn, REG_EQUIV, 0);
3743 gcc_assert (!note);
3744 }
3745 }
3746
3747 /* Traverse a list of TYPES and expand all complex types into their
3748 components. */
3749 static tree
3750 split_complex_types (tree types)
3751 {
3752 tree p;
3753
3754 /* Before allocating memory, check for the common case of no complex types. */
3755 for (p = types; p; p = TREE_CHAIN (p))
3756 {
3757 tree type = TREE_VALUE (p);
3758 if (TREE_CODE (type) == COMPLEX_TYPE
3759 && targetm.calls.split_complex_arg (type))
3760 goto found;
3761 }
3762 return types;
3763
3764 found:
3765 types = copy_list (types);
3766
3767 for (p = types; p; p = TREE_CHAIN (p))
3768 {
3769 tree complex_type = TREE_VALUE (p);
3770
3771 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3772 && targetm.calls.split_complex_arg (complex_type))
3773 {
3774 tree next, imag;
3775
3776 /* Rewrite complex type with component type. */
3777 TREE_VALUE (p) = TREE_TYPE (complex_type);
3778 next = TREE_CHAIN (p);
3779
3780 /* Add another component type for the imaginary part. */
3781 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3782 TREE_CHAIN (p) = imag;
3783 TREE_CHAIN (imag) = next;
3784
3785 /* Skip the newly created node. */
3786 p = TREE_CHAIN (p);
3787 }
3788 }
3789
3790 return types;
3791 }
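/* For example, if targetm.calls.split_complex_arg accepts
complex double, the type list (complex double, int) becomes
(double, double, int): the original node is rewritten to the
component type and a second node for the imaginary part is
spliced in right behind it. */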
3792 \f
3793 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3794 The RETVAL parameter specifies whether the return value needs to be saved;
3795 the other parameters are documented in the emit_library_call function below. */
3796
3797 static rtx
3798 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3799 enum libcall_type fn_type,
3800 machine_mode outmode, int nargs, va_list p)
3801 {
3802 /* Total size in bytes of all the stack-parms scanned so far. */
3803 struct args_size args_size;
3804 /* Size of arguments before any adjustments (such as rounding). */
3805 struct args_size original_args_size;
3806 int argnum;
3807 rtx fun;
3808 /* TODO: choose the correct decl type of orgfun. Sadly this information
3809 isn't present here, so we default to the native calling ABI. */
3810 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
3811 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
3812 int count;
3813 rtx argblock = 0;
3814 CUMULATIVE_ARGS args_so_far_v;
3815 cumulative_args_t args_so_far;
3816 struct arg
3817 {
3818 rtx value;
3819 machine_mode mode;
3820 rtx reg;
3821 int partial;
3822 struct locate_and_pad_arg_data locate;
3823 rtx save_area;
3824 };
3825 struct arg *argvec;
3826 int old_inhibit_defer_pop = inhibit_defer_pop;
3827 rtx call_fusage = 0;
3828 rtx mem_value = 0;
3829 rtx valreg;
3830 int pcc_struct_value = 0;
3831 int struct_value_size = 0;
3832 int flags;
3833 int reg_parm_stack_space = 0;
3834 int needed;
3835 rtx_insn *before_call;
3836 bool have_push_fusage;
3837 tree tfom; /* type_for_mode (outmode, 0) */
3838
3839 #ifdef REG_PARM_STACK_SPACE
3840 /* Define the boundary of the register parm stack space that needs to be
3841 saved, if any. */
3842 int low_to_save = 0, high_to_save = 0;
3843 rtx save_area = 0; /* Place that it is saved. */
3844 #endif
3845
3846 /* Size of the stack reserved for parameter registers. */
3847 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3848 char *initial_stack_usage_map = stack_usage_map;
3849 char *stack_usage_map_buf = NULL;
3850
3851 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3852
3853 #ifdef REG_PARM_STACK_SPACE
3854 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3855 #endif
3856
3857 /* By default, library functions cannot throw. */
3858 flags = ECF_NOTHROW;
3859
3860 switch (fn_type)
3861 {
3862 case LCT_NORMAL:
3863 break;
3864 case LCT_CONST:
3865 flags |= ECF_CONST;
3866 break;
3867 case LCT_PURE:
3868 flags |= ECF_PURE;
3869 break;
3870 case LCT_NORETURN:
3871 flags |= ECF_NORETURN;
3872 break;
3873 case LCT_THROW:
3874 flags = ECF_NORETURN;
3875 break;
3876 case LCT_RETURNS_TWICE:
3877 flags = ECF_RETURNS_TWICE;
3878 break;
3879 }
3880 fun = orgfun;
3881
3882 /* Ensure current function's preferred stack boundary is at least
3883 what we need. */
3884 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3885 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3886
3887 /* If this kind of value comes back in memory,
3888 decide where in memory it should come back. */
3889 if (outmode != VOIDmode)
3890 {
3891 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3892 if (aggregate_value_p (tfom, 0))
3893 {
3894 #ifdef PCC_STATIC_STRUCT_RETURN
3895 rtx pointer_reg
3896 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3897 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3898 pcc_struct_value = 1;
3899 if (value == 0)
3900 value = gen_reg_rtx (outmode);
3901 #else /* not PCC_STATIC_STRUCT_RETURN */
3902 struct_value_size = GET_MODE_SIZE (outmode);
3903 if (value != 0 && MEM_P (value))
3904 mem_value = value;
3905 else
3906 mem_value = assign_temp (tfom, 1, 1);
3907 #endif
3908 /* This call returns a big structure. */
3909 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3910 }
3911 }
3912 else
3913 tfom = void_type_node;
3914
3915 /* ??? Unfinished: must pass the memory address as an argument. */
3916
3917 /* Copy all the libcall-arguments out of the varargs data
3918 and into a vector ARGVEC.
3919
3920 Compute how to pass each argument. We only support a very small subset
3921 of the full argument passing conventions to limit complexity here since
3922 library functions shouldn't have many args. */
3923
3924 argvec = XALLOCAVEC (struct arg, nargs + 1);
3925 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3926
3927 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3928 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
3929 #else
3930 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
3931 #endif
3932 args_so_far = pack_cumulative_args (&args_so_far_v);
3933
3934 args_size.constant = 0;
3935 args_size.var = 0;
3936
3937 count = 0;
3938
3939 push_temp_slots ();
3940
3941 /* If there's a structure value address to be passed,
3942 either pass it in the special place, or pass it as an extra argument. */
3943 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3944 {
3945 rtx addr = XEXP (mem_value, 0);
3946
3947 nargs++;
3948
3949 /* Make sure it is a reasonable operand for a move or push insn. */
3950 if (!REG_P (addr) && !MEM_P (addr)
3951 && !(CONSTANT_P (addr)
3952 && targetm.legitimate_constant_p (Pmode, addr)))
3953 addr = force_operand (addr, NULL_RTX);
3954
3955 argvec[count].value = addr;
3956 argvec[count].mode = Pmode;
3957 argvec[count].partial = 0;
3958
3959 argvec[count].reg = targetm.calls.function_arg (args_so_far,
3960 Pmode, NULL_TREE, true);
3961 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
3962 NULL_TREE, 1) == 0);
3963
3964 locate_and_pad_parm (Pmode, NULL_TREE,
3965 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3966 1,
3967 #else
3968 argvec[count].reg != 0,
3969 #endif
3970 reg_parm_stack_space, 0,
3971 NULL_TREE, &args_size, &argvec[count].locate);
3972
3973 if (argvec[count].reg == 0 || argvec[count].partial != 0
3974 || reg_parm_stack_space > 0)
3975 args_size.constant += argvec[count].locate.size.constant;
3976
3977 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
3978
3979 count++;
3980 }
3981
3982 for (; count < nargs; count++)
3983 {
3984 rtx val = va_arg (p, rtx);
3985 machine_mode mode = (machine_mode) va_arg (p, int);
3986 int unsigned_p = 0;
3987
3988 /* We cannot convert the arg value to the mode the library wants here;
3989 must do it earlier where we know the signedness of the arg. */
3990 gcc_assert (mode != BLKmode
3991 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3992
3993 /* Make sure it is a reasonable operand for a move or push insn. */
3994 if (!REG_P (val) && !MEM_P (val)
3995 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
3996 val = force_operand (val, NULL_RTX);
3997
3998 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
3999 {
4000 rtx slot;
4001 int must_copy
4002 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
4003
4004 /* If this was a CONST function, it is now PURE since it now
4005 reads memory. */
4006 if (flags & ECF_CONST)
4007 {
4008 flags &= ~ECF_CONST;
4009 flags |= ECF_PURE;
4010 }
4011
4012 if (MEM_P (val) && !must_copy)
4013 {
4014 tree val_expr = MEM_EXPR (val);
4015 if (val_expr)
4016 mark_addressable (val_expr);
4017 slot = val;
4018 }
4019 else
4020 {
4021 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
4022 1, 1);
4023 emit_move_insn (slot, val);
4024 }
4025
4026 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4027 gen_rtx_USE (VOIDmode, slot),
4028 call_fusage);
4029 if (must_copy)
4030 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4031 gen_rtx_CLOBBER (VOIDmode,
4032 slot),
4033 call_fusage);
4034
4035 mode = Pmode;
4036 val = force_operand (XEXP (slot, 0), NULL_RTX);
4037 }
4038
4039 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
4040 argvec[count].mode = mode;
4041 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
4042 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
4043 NULL_TREE, true);
4044
4045 argvec[count].partial
4046 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
4047
4048 if (argvec[count].reg == 0
4049 || argvec[count].partial != 0
4050 || reg_parm_stack_space > 0)
4051 {
4052 locate_and_pad_parm (mode, NULL_TREE,
4053 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4054 1,
4055 #else
4056 argvec[count].reg != 0,
4057 #endif
4058 reg_parm_stack_space, argvec[count].partial,
4059 NULL_TREE, &args_size, &argvec[count].locate);
4060 args_size.constant += argvec[count].locate.size.constant;
4061 gcc_assert (!argvec[count].locate.size.var);
4062 }
4063 #ifdef BLOCK_REG_PADDING
4064 else
4065 /* The argument is passed entirely in registers. See at which
4066 end it should be padded. */
4067 argvec[count].locate.where_pad =
4068 BLOCK_REG_PADDING (mode, NULL_TREE,
4069 GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
4070 #endif
4071
4072 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
4073 }
4074
4075 /* If this machine requires an external definition for library
4076 functions, write one out. */
4077 assemble_external_libcall (fun);
4078
4079 original_args_size = args_size;
4080 args_size.constant = (((args_size.constant
4081 + stack_pointer_delta
4082 + STACK_BYTES - 1)
4083 / STACK_BYTES
4084 * STACK_BYTES)
4085 - stack_pointer_delta);
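/* A worked example of the rounding above, assuming STACK_BYTES == 16
and stack_pointer_delta == 4: a raw args_size.constant of 20 gives

(20 + 4 + 16 - 1) / 16 * 16 - 4 == 32 - 4 == 28

so after the 28 bytes of arguments are pushed, the total delta
4 + 28 == 32 is again a multiple of the 16-byte boundary. */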
4086
4087 args_size.constant = MAX (args_size.constant,
4088 reg_parm_stack_space);
4089
4090 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4091 args_size.constant -= reg_parm_stack_space;
4092
4093 if (args_size.constant > crtl->outgoing_args_size)
4094 crtl->outgoing_args_size = args_size.constant;
4095
4096 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
4097 {
4098 int pushed = args_size.constant + pending_stack_adjust;
4099 if (pushed > current_function_pushed_stack_size)
4100 current_function_pushed_stack_size = pushed;
4101 }
4102
4103 if (ACCUMULATE_OUTGOING_ARGS)
4104 {
4105 /* Since the stack pointer will never be pushed, it is possible for
4106 the evaluation of a parm to clobber something we have already
4107 written to the stack. Since most function calls on RISC machines
4108 do not use the stack, this is uncommon, but must work correctly.
4109
4110 Therefore, we save any area of the stack that was already written
4111 and that we are using. Here we set up to do this by making a new
4112 stack usage map from the old one.
4113
4114 Another approach might be to try to reorder the argument
4115 evaluations to avoid this conflicting stack usage. */
4116
4117 needed = args_size.constant;
4118
4119 /* Since we will be writing into the entire argument area, the
4120 map must be allocated for its entire size, not just the part that
4121 is the responsibility of the caller. */
4122 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4123 needed += reg_parm_stack_space;
4124
4125 #ifdef ARGS_GROW_DOWNWARD
4126 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4127 needed + 1);
4128 #else
4129 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
4130 needed);
4131 #endif
4132 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
4133 stack_usage_map = stack_usage_map_buf;
4134
4135 if (initial_highest_arg_in_use)
4136 memcpy (stack_usage_map, initial_stack_usage_map,
4137 initial_highest_arg_in_use);
4138
4139 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
4140 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
4141 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
4142 needed = 0;
4143
4144 /* We must be careful to use virtual regs before they're instantiated,
4145 and real regs afterwards. Loop optimization, for example, can create
4146 new libcalls after we've instantiated the virtual regs, and if we
4147 use virtuals anyway, they won't match the rtl patterns. */
4148
4149 if (virtuals_instantiated)
4150 argblock = plus_constant (Pmode, stack_pointer_rtx,
4151 STACK_POINTER_OFFSET);
4152 else
4153 argblock = virtual_outgoing_args_rtx;
4154 }
4155 else
4156 {
4157 if (!PUSH_ARGS)
4158 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
4159 }
4160
4161 /* We push args individually in reverse order, perform stack alignment
4162 before the first push (the last arg). */
4163 if (argblock == 0)
4164 anti_adjust_stack (GEN_INT (args_size.constant
4165 - original_args_size.constant));
4166
4167 argnum = nargs - 1;
4168
4169 #ifdef REG_PARM_STACK_SPACE
4170 if (ACCUMULATE_OUTGOING_ARGS)
4171 {
4172 /* The argument list is the property of the called routine and the
4173 callee may clobber it. If the fixed area has been used for previous
4174 parameters, we must save and restore it. */
4175 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4176 &low_to_save, &high_to_save);
4177 }
4178 #endif
4179
4180 /* When expanding a normal call, args are stored in push order,
4181 which is the reverse of what we have here. */
4182 bool any_regs = false;
4183 for (int i = nargs; i-- > 0; )
4184 if (argvec[i].reg != NULL_RTX)
4185 {
4186 targetm.calls.call_args (argvec[i].reg, NULL_TREE);
4187 any_regs = true;
4188 }
4189 if (!any_regs)
4190 targetm.calls.call_args (pc_rtx, NULL_TREE);
4191
4192 /* Push the args that need to be pushed. */
4193
4194 have_push_fusage = false;
4195
4196 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4197 are to be pushed. */
4198 for (count = 0; count < nargs; count++, argnum--)
4199 {
4200 machine_mode mode = argvec[argnum].mode;
4201 rtx val = argvec[argnum].value;
4202 rtx reg = argvec[argnum].reg;
4203 int partial = argvec[argnum].partial;
4204 unsigned int parm_align = argvec[argnum].locate.boundary;
4205 int lower_bound = 0, upper_bound = 0, i;
4206
4207 if (! (reg != 0 && partial == 0))
4208 {
4209 rtx use;
4210
4211 if (ACCUMULATE_OUTGOING_ARGS)
4212 {
4213 /* If this is being stored into a pre-allocated, fixed-size,
4214 stack area, save any previous data at that location. */
4215
4216 #ifdef ARGS_GROW_DOWNWARD
4217 /* stack_slot is negative, but we want to index stack_usage_map
4218 with positive values. */
4219 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
4220 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4221 #else
4222 lower_bound = argvec[argnum].locate.slot_offset.constant;
4223 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4224 #endif
4225
4226 i = lower_bound;
4227 /* Don't worry about things in the fixed argument area;
4228 it has already been saved. */
4229 if (i < reg_parm_stack_space)
4230 i = reg_parm_stack_space;
4231 while (i < upper_bound && stack_usage_map[i] == 0)
4232 i++;
4233
4234 if (i < upper_bound)
4235 {
4236 /* We need to make a save area. */
4237 unsigned int size
4238 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4239 machine_mode save_mode
4240 = mode_for_size (size, MODE_INT, 1);
4241 rtx adr
4242 = plus_constant (Pmode, argblock,
4243 argvec[argnum].locate.offset.constant);
4244 rtx stack_area
4245 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4246
4247 if (save_mode == BLKmode)
4248 {
4249 argvec[argnum].save_area
4250 = assign_stack_temp (BLKmode,
4251 argvec[argnum].locate.size.constant
4252 );
4253
4254 emit_block_move (validize_mem
4255 (copy_rtx (argvec[argnum].save_area)),
4256 stack_area,
4257 GEN_INT (argvec[argnum].locate.size.constant),
4258 BLOCK_OP_CALL_PARM);
4259 }
4260 else
4261 {
4262 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4263
4264 emit_move_insn (argvec[argnum].save_area, stack_area);
4265 }
4266 }
4267 }
4268
4269 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
4270 partial, reg, 0, argblock,
4271 GEN_INT (argvec[argnum].locate.offset.constant),
4272 reg_parm_stack_space,
4273 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
4274
4275 /* Now mark the segment we just used. */
4276 if (ACCUMULATE_OUTGOING_ARGS)
4277 for (i = lower_bound; i < upper_bound; i++)
4278 stack_usage_map[i] = 1;
4279
4280 NO_DEFER_POP;
4281
4282 /* Indicate argument access so that alias.c knows that these
4283 values are live. */
4284 if (argblock)
4285 use = plus_constant (Pmode, argblock,
4286 argvec[argnum].locate.offset.constant);
4287 else if (have_push_fusage)
4288 continue;
4289 else
4290 {
4291 /* When arguments are pushed, trying to tell alias.c where
4292 exactly this argument is won't work, because the
4293 auto-increment causes confusion. So we merely indicate
4294 that we access something with a known mode somewhere on
4295 the stack. */
4296 use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4297 gen_rtx_SCRATCH (Pmode));
4298 have_push_fusage = true;
4299 }
4300 use = gen_rtx_MEM (argvec[argnum].mode, use);
4301 use = gen_rtx_USE (VOIDmode, use);
4302 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
4303 }
4304 }
4305
4306 argnum = nargs - 1;
4307
4308 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
4309
4310 /* Now load any reg parms into their regs. */
4311
4312 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4313 are to be pushed. */
4314 for (count = 0; count < nargs; count++, argnum--)
4315 {
4316 machine_mode mode = argvec[argnum].mode;
4317 rtx val = argvec[argnum].value;
4318 rtx reg = argvec[argnum].reg;
4319 int partial = argvec[argnum].partial;
4320 #ifdef BLOCK_REG_PADDING
4321 int size = 0;
4322 #endif
4323
4324 /* Handle calls that pass values in multiple non-contiguous
4325 locations. The PA64 has examples of this for library calls. */
4326 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4327 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
4328 else if (reg != 0 && partial == 0)
4329 {
4330 emit_move_insn (reg, val);
4331 #ifdef BLOCK_REG_PADDING
4332 size = GET_MODE_SIZE (argvec[argnum].mode);
4333
4334 /* Copied from load_register_parameters. */
4335
4336 /* Handle the case where we have a value that needs shifting
4337 up to the msb, e.g. a QImode value when we're padding
4338 upward on a BYTES_BIG_ENDIAN machine. */
4339 if (size < UNITS_PER_WORD
4340 && (argvec[argnum].locate.where_pad
4341 == (BYTES_BIG_ENDIAN ? upward : downward)))
4342 {
4343 rtx x;
4344 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4345
4346 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4347 report the whole reg as used. Strictly speaking, the
4348 call only uses SIZE bytes at the msb end, but it doesn't
4349 seem worth generating rtl to say that. */
4350 reg = gen_rtx_REG (word_mode, REGNO (reg));
4351 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4352 if (x != reg)
4353 emit_move_insn (reg, x);
4354 }
4355 #endif
4356 }
4357
4358 NO_DEFER_POP;
4359 }
4360
4361 /* Any regs containing parms remain in use through the call. */
4362 for (count = 0; count < nargs; count++)
4363 {
4364 rtx reg = argvec[count].reg;
4365 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4366 use_group_regs (&call_fusage, reg);
4367 else if (reg != 0)
4368 {
4369 int partial = argvec[count].partial;
4370 if (partial)
4371 {
4372 int nregs;
4373 gcc_assert (partial % UNITS_PER_WORD == 0);
4374 nregs = partial / UNITS_PER_WORD;
4375 use_regs (&call_fusage, REGNO (reg), nregs);
4376 }
4377 else
4378 use_reg (&call_fusage, reg);
4379 }
4380 }
4381
4382 /* Pass the function the address in which to return a structure value. */
4383 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4384 {
4385 emit_move_insn (struct_value,
4386 force_reg (Pmode,
4387 force_operand (XEXP (mem_value, 0),
4388 NULL_RTX)));
4389 if (REG_P (struct_value))
4390 use_reg (&call_fusage, struct_value);
4391 }
4392
4393 /* Don't allow popping to be deferred, since then
4394 cse'ing of library calls could delete a call and leave the pop. */
4395 NO_DEFER_POP;
4396 valreg = (mem_value == 0 && outmode != VOIDmode
4397 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
4398
4399 /* Stack must be properly aligned now. */
4400 gcc_assert (!(stack_pointer_delta
4401 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
4402
4403 before_call = get_last_insn ();
4404
4405 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4406 will set inhibit_defer_pop to that value. */
4407 /* The return type is needed to decide how many bytes the function pops.
4408 Signedness plays no role in that, so for simplicity, we pretend it's
4409 always signed. We also assume that the list of arguments passed has
4410 no impact, so we pretend it is unknown. */
4411
4412 emit_call_1 (fun, NULL,
4413 get_identifier (XSTR (orgfun, 0)),
4414 build_function_type (tfom, NULL_TREE),
4415 original_args_size.constant, args_size.constant,
4416 struct_value_size,
4417 targetm.calls.function_arg (args_so_far,
4418 VOIDmode, void_type_node, true),
4419 valreg,
4420 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
4421
4422 if (flag_ipa_ra)
4423 {
4424 rtx last, datum = orgfun;
4425 gcc_assert (GET_CODE (datum) == SYMBOL_REF);
4426 last = last_call_insn ();
4427 add_reg_note (last, REG_CALL_DECL, datum);
4428 }
4429
4430 /* Right-shift returned value if necessary. */
4431 if (!pcc_struct_value
4432 && TYPE_MODE (tfom) != BLKmode
4433 && targetm.calls.return_in_msb (tfom))
4434 {
4435 shift_return_value (TYPE_MODE (tfom), false, valreg);
4436 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4437 }
4438
4439 targetm.calls.end_call_args ();
4440
4441 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
4442 that it should complain if nonvolatile values are live. For
4443 functions that cannot return, inform flow that control does not
4444 fall through. */
4445 if (flags & ECF_NORETURN)
4446 {
4447 /* The barrier must be emitted
4448 immediately after the CALL_INSN. Some ports emit more than
4449 just a CALL_INSN above, so we must search for it here. */
4450 rtx_insn *last = get_last_insn ();
4451 while (!CALL_P (last))
4452 {
4453 last = PREV_INSN (last);
4454 /* There was no CALL_INSN? */
4455 gcc_assert (last != before_call);
4456 }
4457
4458 emit_barrier_after (last);
4459 }
4460
4461 /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
4462 and LCT_RETURNS_TWICE, cannot perform non-local gotos. */
4463 if (flags & ECF_NOTHROW)
4464 {
4465 rtx_insn *last = get_last_insn ();
4466 while (!CALL_P (last))
4467 {
4468 last = PREV_INSN (last);
4469 /* There was no CALL_INSN? */
4470 gcc_assert (last != before_call);
4471 }
4472
4473 make_reg_eh_region_note_nothrow_nononlocal (last);
4474 }
4475
4476 /* Now restore inhibit_defer_pop to its actual original value. */
4477 OK_DEFER_POP;
4478
4479 pop_temp_slots ();
4480
4481 /* Copy the value to the right place. */
4482 if (outmode != VOIDmode && retval)
4483 {
4484 if (mem_value)
4485 {
4486 if (value == 0)
4487 value = mem_value;
4488 if (value != mem_value)
4489 emit_move_insn (value, mem_value);
4490 }
4491 else if (GET_CODE (valreg) == PARALLEL)
4492 {
4493 if (value == 0)
4494 value = gen_reg_rtx (outmode);
4495 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4496 }
4497 else
4498 {
4499 /* Convert to the proper mode if a promotion has been active. */
4500 if (GET_MODE (valreg) != outmode)
4501 {
4502 int unsignedp = TYPE_UNSIGNED (tfom);
4503
4504 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4505 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4506 == GET_MODE (valreg));
4507 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4508 }
4509
4510 if (value != 0)
4511 emit_move_insn (value, valreg);
4512 else
4513 value = valreg;
4514 }
4515 }
4516
4517 if (ACCUMULATE_OUTGOING_ARGS)
4518 {
4519 #ifdef REG_PARM_STACK_SPACE
4520 if (save_area)
4521 restore_fixed_argument_area (save_area, argblock,
4522 high_to_save, low_to_save);
4523 #endif
4524
4525 /* If we saved any argument areas, restore them. */
4526 for (count = 0; count < nargs; count++)
4527 if (argvec[count].save_area)
4528 {
4529 machine_mode save_mode = GET_MODE (argvec[count].save_area);
4530 rtx adr = plus_constant (Pmode, argblock,
4531 argvec[count].locate.offset.constant);
4532 rtx stack_area = gen_rtx_MEM (save_mode,
4533 memory_address (save_mode, adr));
4534
4535 if (save_mode == BLKmode)
4536 emit_block_move (stack_area,
4537 validize_mem
4538 (copy_rtx (argvec[count].save_area)),
4539 GEN_INT (argvec[count].locate.size.constant),
4540 BLOCK_OP_CALL_PARM);
4541 else
4542 emit_move_insn (stack_area, argvec[count].save_area);
4543 }
4544
4545 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4546 stack_usage_map = initial_stack_usage_map;
4547 }
4548
4549 free (stack_usage_map_buf);
4550
4551 return value;
4552
4553 }
4554 \f
4555 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4556 (emitting the queue unless NO_QUEUE is nonzero),
4557 for a value of mode OUTMODE,
4558 with NARGS different arguments, passed as alternating rtx values
4559 and machine_modes to convert them to.
4560
4561 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4562 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
4563 other types of library calls. */
4564
4565 void
4566 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4567 machine_mode outmode, int nargs, ...)
4568 {
4569 va_list p;
4570
4571 va_start (p, nargs);
4572 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4573 va_end (p);
4574 }
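/* A typical use looks like the following sketch (it assumes the
target's memset_libfunc has been initialized and that OBJECT and
SIZE are existing rtx values; the operand names are illustrative):

emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
XEXP (object, 0), Pmode,
const0_rtx, TYPE_MODE (integer_type_node),
size, TYPE_MODE (sizetype));

NARGS (rtx value, machine_mode) pairs follow the fixed
arguments. */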
4575 \f
4576 /* Like emit_library_call except that an extra argument, VALUE,
4577 comes second and says where to store the result.
4578 (If VALUE is zero, this function chooses a convenient way
4579 to return the value.)
4580
4581 This function returns an rtx for where the value is to be found.
4582 If VALUE is nonzero, VALUE is returned. */
4583
4584 rtx
4585 emit_library_call_value (rtx orgfun, rtx value,
4586 enum libcall_type fn_type,
4587 machine_mode outmode, int nargs, ...)
4588 {
4589 rtx result;
4590 va_list p;
4591
4592 va_start (p, nargs);
4593 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4594 nargs, p);
4595 va_end (p);
4596
4597 return result;
4598 }
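/* A sketch of a value-returning use (assuming memcmp_libfunc is set
up for the target and ADDR1, ADDR2 and LEN are existing rtx values;
the names are illustrative):

rtx res = emit_library_call_value (memcmp_libfunc, NULL_RTX,
LCT_PURE, SImode, 3,
addr1, Pmode,
addr2, Pmode,
len, TYPE_MODE (sizetype));

Passing NULL_RTX for VALUE lets the function pick a convenient
location for the SImode result; RES is that location. */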
4599 \f
4600
4601 /* Store pointer bounds argument ARG into Bounds Table entry
4602 associated with PARM. */
4603 static void
4604 store_bounds (struct arg_data *arg, struct arg_data *parm)
4605 {
4606 rtx slot = NULL, ptr = NULL, addr = NULL;
4607
4608 /* We may pass bounds not associated with any pointer. */
4609 if (!parm)
4610 {
4611 gcc_assert (arg->special_slot);
4612 slot = arg->special_slot;
4613 ptr = const0_rtx;
4614 }
4615 /* Find the pointer associated with the bounds and where it is
4616 passed. */
4617 else
4618 {
4619 if (!parm->reg)
4620 {
4621 gcc_assert (!arg->special_slot);
4622
4623 addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
4624 }
4625 else if (REG_P (parm->reg))
4626 {
4627 gcc_assert (arg->special_slot);
4628 slot = arg->special_slot;
4629
4630 if (MEM_P (parm->value))
4631 addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
4632 else if (REG_P (parm->value))
4633 ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
4634 else
4635 {
4636 gcc_assert (!arg->pointer_offset);
4637 ptr = parm->value;
4638 }
4639 }
4640 else
4641 {
4642 gcc_assert (GET_CODE (parm->reg) == PARALLEL);
4643
4644 gcc_assert (arg->special_slot);
4645 slot = arg->special_slot;
4646
4647 if (parm->parallel_value)
4648 ptr = chkp_get_value_with_offs (parm->parallel_value,
4649 GEN_INT (arg->pointer_offset));
4650 else
4651 gcc_unreachable ();
4652 }
4653 }
4654
4655 /* Expand bounds. */
4656 if (!arg->value)
4657 arg->value = expand_normal (arg->tree_value);
4658
4659 targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
4660 }
4661
4662 /* Store a single argument for a function call
4663 into the register or memory area where it must be passed.
4664 *ARG describes the argument value and where to pass it.
4665
4666 ARGBLOCK is the address of the stack-block for all the arguments,
4667 or 0 on a machine where arguments are pushed individually.
4668
4669 FLAGS with ECF_MAY_BE_ALLOCA set says this could be a call to `alloca',
4670 so we must be careful about how the stack is used.
4671
4672 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4673 argument stack. When ACCUMULATE_OUTGOING_ARGS, this indicates
4674 that we need not worry about saving and restoring the stack.
4675
4676 REG_PARM_STACK_SPACE is the size of the area reserved for arguments passed in registers.
4677
4678 Return nonzero if this arg should cause sibcall failure,
4679 zero otherwise. */
4680
4681 static int
4682 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4683 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4684 {
4685 tree pval = arg->tree_value;
4686 rtx reg = 0;
4687 int partial = 0;
4688 int used = 0;
4689 int i, lower_bound = 0, upper_bound = 0;
4690 int sibcall_failure = 0;
4691
4692 if (TREE_CODE (pval) == ERROR_MARK)
4693 return 1;
4694
4695 /* Push a new temporary level for any temporaries we make for
4696 this argument. */
4697 push_temp_slots ();
4698
4699 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4700 {
4701 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4702 save any previous data at that location. */
4703 if (argblock && ! variable_size && arg->stack)
4704 {
4705 #ifdef ARGS_GROW_DOWNWARD
4706 /* stack_slot is negative, but we want to index stack_usage_map
4707 with positive values. */
4708 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4709 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4710 else
4711 upper_bound = 0;
4712
4713 lower_bound = upper_bound - arg->locate.size.constant;
4714 #else
4715 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4716 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4717 else
4718 lower_bound = 0;
4719
4720 upper_bound = lower_bound + arg->locate.size.constant;
4721 #endif
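/* For example, when ARGS_GROW_DOWNWARD, a slot at offset -16
holding an 8-byte argument occupies the bytes at offsets
-16 .. -9: upper_bound == 17 and lower_bound == 9, so entries
9 .. 16 of stack_usage_map (entry I tracking the byte at offset
-I) are the ones checked and later marked. */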
4722
4723 i = lower_bound;
4724 /* Don't worry about things in the fixed argument area;
4725 it has already been saved. */
4726 if (i < reg_parm_stack_space)
4727 i = reg_parm_stack_space;
4728 while (i < upper_bound && stack_usage_map[i] == 0)
4729 i++;
4730
4731 if (i < upper_bound)
4732 {
4733 /* We need to make a save area. */
4734 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4735 machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4736 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4737 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4738
4739 if (save_mode == BLKmode)
4740 {
4741 arg->save_area
4742 = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
4743 preserve_temp_slots (arg->save_area);
4744 emit_block_move (validize_mem (copy_rtx (arg->save_area)),
4745 stack_area,
4746 GEN_INT (arg->locate.size.constant),
4747 BLOCK_OP_CALL_PARM);
4748 }
4749 else
4750 {
4751 arg->save_area = gen_reg_rtx (save_mode);
4752 emit_move_insn (arg->save_area, stack_area);
4753 }
4754 }
4755 }
4756 }
4757
4758 /* If this isn't going to be placed on both the stack and in registers,
4759 set up the register and number of words. */
4760 if (! arg->pass_on_stack)
4761 {
4762 if (flags & ECF_SIBCALL)
4763 reg = arg->tail_call_reg;
4764 else
4765 reg = arg->reg;
4766 partial = arg->partial;
4767 }
4768
4769 /* Being passed entirely in a register. We shouldn't be called in
4770 this case. */
4771 gcc_assert (reg == 0 || partial != 0);
4772
4773 /* If this arg needs special alignment, don't load the registers
4774 here. */
4775 if (arg->n_aligned_regs != 0)
4776 reg = 0;
4777
4778 /* If this is being passed partially in a register, we can't evaluate
4779 it directly into its stack slot. Otherwise, we can. */
4780 if (arg->value == 0)
4781 {
4782 /* stack_arg_under_construction is nonzero if a function argument is
4783 being evaluated directly into the outgoing argument list and
4784 expand_call must take special action to preserve the argument list
4785 if it is called recursively.
4786
4787 For scalar function arguments stack_usage_map is sufficient to
4788 determine which stack slots must be saved and restored. Scalar
4789 arguments in general have pass_on_stack == 0.
4790
4791 If this argument is initialized by a function which takes the
4792 address of the argument (a C++ constructor or a C function
4793 returning a BLKmode structure), then stack_usage_map is
4794 insufficient and expand_call must push the stack around the
4795 function call. Such arguments have pass_on_stack == 1.
4796
4797 Note that it is always safe to set stack_arg_under_construction,
4798 but this generates suboptimal code if set when not needed. */
4799
4800 if (arg->pass_on_stack)
4801 stack_arg_under_construction++;
4802
4803 arg->value = expand_expr (pval,
4804 (partial
4805 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4806 ? NULL_RTX : arg->stack,
4807 VOIDmode, EXPAND_STACK_PARM);
4808
4809 /* If we are promoting the object (or if for any other reason the
4810 mode doesn't agree), convert the mode. */
4811
4812 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4813 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4814 arg->value, arg->unsignedp);
4815
4816 if (arg->pass_on_stack)
4817 stack_arg_under_construction--;
4818 }
4819
4820 /* Check for overlap with already clobbered argument area. */
4821 if ((flags & ECF_SIBCALL)
4822 && MEM_P (arg->value)
4823 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4824 arg->locate.size.constant))
4825 sibcall_failure = 1;
4826
4827 /* Don't allow anything left on stack from computation
4828 of argument to alloca. */
4829 if (flags & ECF_MAY_BE_ALLOCA)
4830 do_pending_stack_adjust ();
4831
4832 if (arg->value == arg->stack)
4833 /* If the value is already in the stack slot, we are done. */
4834 ;
4835 else if (arg->mode != BLKmode)
4836 {
4837 int size;
4838 unsigned int parm_align;
4839
4840 /* Argument is a scalar, not entirely passed in registers.
4841 (If part is passed in registers, arg->partial says how much
4842 and emit_push_insn will take care of putting it there.)
4843
4844 Push it, and if its size is less than the
4845 amount of space allocated to it,
4846 also bump the stack pointer by the additional space.
4847 Note that in C the default argument promotions
4848 will prevent such mismatches. */
4849
4850 size = GET_MODE_SIZE (arg->mode);
4851 /* Compute how much space the push instruction will push.
4852 On many machines, pushing a byte will advance the stack
4853 pointer by a halfword. */
4854 #ifdef PUSH_ROUNDING
4855 size = PUSH_ROUNDING (size);
4856 #endif
4857 used = size;
4858
4859 /* Compute how much space the argument should get:
4860 round up to a multiple of the alignment for arguments. */
4861 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4862 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4863 / (PARM_BOUNDARY / BITS_PER_UNIT))
4864 * (PARM_BOUNDARY / BITS_PER_UNIT));
4865
4866 /* Compute the alignment of the pushed argument. */
4867 parm_align = arg->locate.boundary;
4868 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4869 {
4870 int pad = used - size;
4871 if (pad)
4872 {
4873 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4874 parm_align = MIN (parm_align, pad_align);
4875 }
4876 }
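/* E.g. a 1-byte QImode argument with PARM_BOUNDARY == 32 (and
ignoring PUSH_ROUNDING) gets USED == 4; if it is padded downward,
PAD == 3 and pad_align == (3 & -3) * BITS_PER_UNIT == 8, so the
pushed byte itself is only guaranteed 1-byte alignment. */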
4877
4878 /* This isn't already where we want it on the stack, so put it there.
4879 This can either be done with push or copy insns. */
4880 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4881 parm_align, partial, reg, used - size, argblock,
4882 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4883 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4884
4885 /* Unless this is a partially-in-register argument, the argument is now
4886 in the stack. */
4887 if (partial == 0)
4888 arg->value = arg->stack;
4889 }
4890 else
4891 {
4892 /* BLKmode, at least partly to be pushed. */
4893
4894 unsigned int parm_align;
4895 int excess;
4896 rtx size_rtx;
4897
4898 /* Pushing a nonscalar.
4899 If part is passed in registers, PARTIAL says how much
4900 and emit_push_insn will take care of putting it there. */
4901
4902 /* Round its size up to a multiple
4903 of the allocation unit for arguments. */
4904
4905 if (arg->locate.size.var != 0)
4906 {
4907 excess = 0;
4908 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4909 }
4910 else
4911 {
4912 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4913 for BLKmode is careful to avoid it. */
4914 excess = (arg->locate.size.constant
4915 - int_size_in_bytes (TREE_TYPE (pval))
4916 + partial);
4917 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4918 NULL_RTX, TYPE_MODE (sizetype),
4919 EXPAND_NORMAL);
4920 }
4921
4922 parm_align = arg->locate.boundary;
4923
4924 /* When an argument is padded down, the block is aligned to
4925 PARM_BOUNDARY, but the actual argument isn't. */
4926 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4927 {
4928 if (arg->locate.size.var)
4929 parm_align = BITS_PER_UNIT;
4930 else if (excess)
4931 {
4932 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4933 parm_align = MIN (parm_align, excess_align);
4934 }
4935 }
4936
4937 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4938 {
4939 /* emit_push_insn might not work properly if arg->value and
4940 argblock + arg->locate.offset areas overlap. */
4941 rtx x = arg->value;
4942 int i = 0;
4943
4944 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4945 || (GET_CODE (XEXP (x, 0)) == PLUS
4946 && XEXP (XEXP (x, 0), 0) ==
4947 crtl->args.internal_arg_pointer
4948 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4949 {
4950 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
4951 i = INTVAL (XEXP (XEXP (x, 0), 1));
4952
4953 /* expand_call should ensure this. */
4954 gcc_assert (!arg->locate.offset.var
4955 && arg->locate.size.var == 0
4956 && CONST_INT_P (size_rtx));
4957
4958 if (arg->locate.offset.constant > i)
4959 {
4960 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4961 sibcall_failure = 1;
4962 }
4963 else if (arg->locate.offset.constant < i)
4964 {
4965 /* Use arg->locate.size.constant instead of size_rtx
4966 because we only care about the part of the argument
4967 on the stack. */
4968 if (i < (arg->locate.offset.constant
4969 + arg->locate.size.constant))
4970 sibcall_failure = 1;
4971 }
4972 else
4973 {
4974 /* Even though they appear to be at the same location,
4975 if part of the outgoing argument is in registers,
4976 they aren't really at the same location. Check for
4977 this by making sure that the incoming size is the
4978 same as the outgoing size. */
4979 if (arg->locate.size.constant != INTVAL (size_rtx))
4980 sibcall_failure = 1;
4981 }
4982 }
4983 }
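/* As a concrete case: if ARG->VALUE lives in the incoming argument
area at offset 8 with an 8-byte size_rtx, and its outgoing slot
starts at offset 4 with locate.size.constant >= 8, then
offset.constant (4) < I (8) and I < 4 + locate.size.constant, so
the push would overwrite its own source and the sibcall is
abandoned. */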
4984
4985 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4986 parm_align, partial, reg, excess, argblock,
4987 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4988 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4989
4990 /* Unless this is a partially-in-register argument, the argument is now
4991 in the stack.
4992
4993 ??? Unlike the case above, in which we want the actual
4994 address of the data, so that we can load it directly into a
4995 register, here we want the address of the stack slot, so that
4996 it's properly aligned for word-by-word copying or something
4997 like that. It's not clear that this is always correct. */
4998 if (partial == 0)
4999 arg->value = arg->stack_slot;
5000 }
5001
5002 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5003 {
5004 tree type = TREE_TYPE (arg->tree_value);
5005 arg->parallel_value
5006 = emit_group_load_into_temps (arg->reg, arg->value, type,
5007 int_size_in_bytes (type));
5008 }
5009
5010 /* Mark all slots this store used. */
5011 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5012 && argblock && ! variable_size && arg->stack)
5013 for (i = lower_bound; i < upper_bound; i++)
5014 stack_usage_map[i] = 1;
5015
5016 /* Once we have pushed something, pops can't safely
5017 be deferred during the rest of the arguments. */
5018 NO_DEFER_POP;
5019
5020 /* Free any temporary slots made in processing this argument. */
5021 pop_temp_slots ();
5022
5023 return sibcall_failure;
5024 }
5025
5026 /* Nonzero if we do not know how to pass TYPE solely in registers. */
5027
5028 bool
5029 must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
5030 const_tree type)
5031 {
5032 if (!type)
5033 return false;
5034
5035 /* If the type has variable size... */
5036 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5037 return true;
5038
5039 /* If the type is marked as addressable (it is required
5040 to be constructed into the stack)... */
5041 if (TREE_ADDRESSABLE (type))
5042 return true;
5043
5044 return false;
5045 }
5046
5047 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
5048 takes trailing padding of a structure into account. */
5049 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
5050
5051 bool
5052 must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
5053 {
5054 if (!type)
5055 return false;
5056
5057 /* If the type has variable size... */
5058 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5059 return true;
5060
5061 /* If the type is marked as addressable (it is required
5062 to be constructed into the stack)... */
5063 if (TREE_ADDRESSABLE (type))
5064 return true;
5065
5066 /* If the padding and mode of the type are such that a copy into
5067 a register would put it into the wrong part of the register. */
5068 if (mode == BLKmode
5069 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5070 && (FUNCTION_ARG_PADDING (mode, type)
5071 == (BYTES_BIG_ENDIAN ? upward : downward)))
5072 return true;
5073
5074 return false;
5075 }
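/* For example, with the default FUNCTION_ARG_PADDING on a
big-endian target with PARM_BOUNDARY == 32, a 6-byte BLKmode
structure trips the test above: 6 % 4 != 0 and the padding
direction is upward, so a register copy would leave its tail in
the wrong part of the last register, and the structure must be
passed on the stack instead. */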