1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 2011, 2012 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "optabs.h"
32 #include "libfuncs.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "diagnostic-core.h"
36 #include "output.h"
37 #include "tm_p.h"
38 #include "timevar.h"
39 #include "sbitmap.h"
40 #include "langhooks.h"
41 #include "target.h"
42 #include "cgraph.h"
43 #include "except.h"
44 #include "dbgcnt.h"
45 #include "tree-flow.h"
46
47 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
48 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
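/* Editorial worked example: on a target with PREFERRED_STACK_BOUNDARY == 128
   and BITS_PER_UNIT == 8, STACK_BYTES evaluates to 128 / 8 == 16 bytes.  */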
49
50 /* Data structure and subroutines used within expand_call. */
51
52 struct arg_data
53 {
54 /* Tree node for this argument. */
55 tree tree_value;
56 /* Mode for value; TYPE_MODE unless promoted. */
57 enum machine_mode mode;
58 /* Current RTL value for argument, or 0 if it isn't precomputed. */
59 rtx value;
60     /* Initially-computed RTL value for argument; only for const functions. */
61 rtx initial_value;
62     /* Register to pass this argument in, 0 if passed on stack, or a
63 PARALLEL if the arg is to be copied into multiple non-contiguous
64 registers. */
65 rtx reg;
66 /* Register to pass this argument in when generating tail call sequence.
67 This is not the same register as for normal calls on machines with
68 register windows. */
69 rtx tail_call_reg;
70 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
71 form for emit_group_move. */
72 rtx parallel_value;
73 /* If REG was promoted from the actual mode of the argument expression,
74 indicates whether the promotion is sign- or zero-extended. */
75 int unsignedp;
76 /* Number of bytes to put in registers. 0 means put the whole arg
77 in registers. Also 0 if not passed in registers. */
78 int partial;
79 /* Nonzero if argument must be passed on stack.
80 Note that some arguments may be passed on the stack
81 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
82 pass_on_stack identifies arguments that *cannot* go in registers. */
83 int pass_on_stack;
84 /* Some fields packaged up for locate_and_pad_parm. */
85 struct locate_and_pad_arg_data locate;
86 /* Location on the stack at which parameter should be stored. The store
87 has already been done if STACK == VALUE. */
88 rtx stack;
89 /* Location on the stack of the start of this argument slot. This can
90 differ from STACK if this arg pads downward. This location is known
91 to be aligned to TARGET_FUNCTION_ARG_BOUNDARY. */
92 rtx stack_slot;
93 /* Place that this stack area has been saved, if needed. */
94 rtx save_area;
95 /* If an argument's alignment does not permit direct copying into registers,
96 copy in smaller-sized pieces into pseudos. These are stored in a
97 block pointed to by this field. The next field says how many
98 word-sized pseudos we made. */
99 rtx *aligned_regs;
100 int n_aligned_regs;
101 };
102
103 /* A vector of one char per byte of stack space.  A byte is nonzero if
104 the corresponding stack location has been used.
105 This vector is used to prevent a function call within an argument from
106 clobbering any stack already set up. */
107 static char *stack_usage_map;
108
109 /* Size of STACK_USAGE_MAP. */
110 static int highest_outgoing_arg_in_use;
111
112 /* A bitmap of virtual-incoming stack space.  A bit is set if the corresponding
113    stack location has already had a tail call argument stored into it.
114    This bitmap is used to prevent sibling call optimization if the function
115    tries to use its parent's incoming argument slots when they have
116    already been overwritten with tail call arguments. */
117 static sbitmap stored_args_map;
118
119 /* stack_arg_under_construction is nonzero when an argument may be
120 initialized with a constructor call (including a C function that
121 returns a BLKmode struct) and expand_call must take special action
122 to make sure the object being constructed does not overlap the
123 argument list for the constructor call. */
124 static int stack_arg_under_construction;
125
126 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
127 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
128 cumulative_args_t);
129 static void precompute_register_parameters (int, struct arg_data *, int *);
130 static int store_one_arg (struct arg_data *, rtx, int, int, int);
131 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
132 static int finalize_must_preallocate (int, int, struct arg_data *,
133 struct args_size *);
134 static void precompute_arguments (int, struct arg_data *);
135 static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
136 static void initialize_argument_information (int, struct arg_data *,
137 struct args_size *, int,
138 tree, tree,
139 tree, tree, cumulative_args_t, int,
140 rtx *, int *, int *, int *,
141 bool *, bool);
142 static void compute_argument_addresses (struct arg_data *, rtx, int);
143 static rtx rtx_for_function_call (tree, tree);
144 static void load_register_parameters (struct arg_data *, int, rtx *, int,
145 int, int *);
146 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
147 enum machine_mode, int, va_list);
148 static int special_function_p (const_tree, int);
149 static int check_sibcall_argument_overlap_1 (rtx);
150 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
151
152 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
153 unsigned int);
154 static tree split_complex_types (tree);
155
156 #ifdef REG_PARM_STACK_SPACE
157 static rtx save_fixed_argument_area (int, rtx, int *, int *);
158 static void restore_fixed_argument_area (rtx, rtx, int, int);
159 #endif
160 \f
161 /* Force FUNEXP into a form suitable for the address of a CALL,
162 and return that as an rtx. Also load the static chain register
163 if FNDECL is a nested function.
164
165 CALL_FUSAGE points to a variable holding the prospective
166 CALL_INSN_FUNCTION_USAGE information. */
167
168 rtx
169 prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
170 rtx *call_fusage, int reg_parm_seen, int sibcallp)
171 {
172 /* Make a valid memory address and copy constants through pseudo-regs,
173 but not for a constant address if -fno-function-cse. */
174 if (GET_CODE (funexp) != SYMBOL_REF)
175 /* If we are using registers for parameters, force the
176 function address into a register now. */
177 funexp = ((reg_parm_seen
178 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
179 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
180 : memory_address (FUNCTION_MODE, funexp));
181 else if (! sibcallp)
182 {
183 #ifndef NO_FUNCTION_CSE
184 if (optimize && ! flag_no_function_cse)
185 funexp = force_reg (Pmode, funexp);
186 #endif
187 }
188
189 if (static_chain_value != 0)
190 {
191 rtx chain;
192
193 gcc_assert (fndecl);
194 chain = targetm.calls.static_chain (fndecl, false);
195 static_chain_value = convert_memory_address (Pmode, static_chain_value);
196
197 emit_move_insn (chain, static_chain_value);
198 if (REG_P (chain))
199 use_reg (call_fusage, chain);
200 }
201
202 return funexp;
203 }
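/* Editorial sketch, not part of the original source: a typical caller such
   as expand_call prepares the address once and threads the resulting
   CALL_INSN_FUNCTION_USAGE list through to emit_call_1.  The variable
   names below are hypothetical.

     rtx call_fusage = NULL_RTX;
     funexp = prepare_call_address (fndecl, funexp, static_chain_value,
                                    &call_fusage, reg_parm_seen,
                                    0);  (nonzero when emitting a sibling call)

   After the argument registers are loaded, CALL_FUSAGE is handed to
   emit_call_1 so the USE list ends up on the call insn.  */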
204
205 /* Generate instructions to call function FUNEXP,
206 and optionally pop the results.
207 The CALL_INSN is the first insn generated.
208
209 FNDECL is the declaration node of the function. This is given to the
210 hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
211 its own args.
212
213 FUNTYPE is the data type of the function. This is given to the hook
214 TARGET_RETURN_POPS_ARGS to determine whether this function pops its
215 own args. We used to allow an identifier for library functions, but
216 that doesn't work when the return type is an aggregate type and the
217 calling convention says that the pointer to this aggregate is to be
218 popped by the callee.
219
220 STACK_SIZE is the number of bytes of arguments on the stack,
221 ROUNDED_STACK_SIZE is that number rounded up to
222 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
223 both to put into the call insn and to generate explicit popping
224 code if necessary.
225
226 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
227 It is zero if this call doesn't want a structure value.
228
229 NEXT_ARG_REG is the rtx that results from executing
230 targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
231 just after all the args have had their registers assigned.
232 This could be whatever you like, but normally it is the first
233 arg-register beyond those used for args in this call,
234 or 0 if all the arg-registers are used in this call.
235 It is passed on to `gen_call' so you can put this info in the call insn.
236
237 VALREG is a hard register in which a value is returned,
238 or 0 if the call does not return a value.
239
240 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
241 the args to this call were processed.
242 We restore `inhibit_defer_pop' to that value.
243
244 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
245 denote registers used by the called function. */
246
247 static void
248 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
249 tree funtype ATTRIBUTE_UNUSED,
250 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
251 HOST_WIDE_INT rounded_stack_size,
252 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
253 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
254 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
255 cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
256 {
257 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
258 rtx call_insn, call, funmem;
259 int already_popped = 0;
260 HOST_WIDE_INT n_popped
261 = targetm.calls.return_pops_args (fndecl, funtype, stack_size);
262
263 #ifdef CALL_POPS_ARGS
264 n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
265 #endif
266
267 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
268 and we don't want to load it into a register as an optimization,
269 because prepare_call_address already did it if it should be done. */
270 if (GET_CODE (funexp) != SYMBOL_REF)
271 funexp = memory_address (FUNCTION_MODE, funexp);
272
273 funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
274 if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
275 {
276 tree t = fndecl;
277
278 	 /* Although a built-in FUNCTION_DECL and its non-__builtin
279 	    counterpart compare equal and get a shared mem_attrs, they
280 	    can produce different dump output in compare-debug compilations:
281 	    one compilation may garbage-collect an entry and then add a
282 	    different (but equivalent) one, while the other compilation
283 	    does not run the garbage collector at that spot and keeps
284 	    sharing the mem_attr with the equivalent entry. */
285 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
286 {
287 tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
288 if (t2)
289 t = t2;
290 }
291
292 set_mem_expr (funmem, t);
293 }
294 else if (fntree)
295 set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
296
297 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
298 if ((ecf_flags & ECF_SIBCALL)
299 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
300 && (n_popped > 0 || stack_size == 0))
301 {
302 rtx n_pop = GEN_INT (n_popped);
303 rtx pat;
304
305 /* If this subroutine pops its own args, record that in the call insn
306 if possible, for the sake of frame pointer elimination. */
307
308 if (valreg)
309 pat = GEN_SIBCALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
310 next_arg_reg, n_pop);
311 else
312 pat = GEN_SIBCALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
313 n_pop);
314
315 emit_call_insn (pat);
316 already_popped = 1;
317 }
318 else
319 #endif
320
321 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
322 /* If the target has "call" or "call_value" insns, then prefer them
323 if no arguments are actually popped. If the target does not have
324 "call" or "call_value" insns, then we must use the popping versions
325 even if the call has no arguments to pop. */
326 #if defined (HAVE_call) && defined (HAVE_call_value)
327 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
328 && n_popped > 0)
329 #else
330 if (HAVE_call_pop && HAVE_call_value_pop)
331 #endif
332 {
333 rtx n_pop = GEN_INT (n_popped);
334 rtx pat;
335
336 /* If this subroutine pops its own args, record that in the call insn
337 if possible, for the sake of frame pointer elimination. */
338
339 if (valreg)
340 pat = GEN_CALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
341 next_arg_reg, n_pop);
342 else
343 pat = GEN_CALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
344 n_pop);
345
346 emit_call_insn (pat);
347 already_popped = 1;
348 }
349 else
350 #endif
351
352 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
353 if ((ecf_flags & ECF_SIBCALL)
354 && HAVE_sibcall && HAVE_sibcall_value)
355 {
356 if (valreg)
357 emit_call_insn (GEN_SIBCALL_VALUE (valreg, funmem,
358 rounded_stack_size_rtx,
359 next_arg_reg, NULL_RTX));
360 else
361 emit_call_insn (GEN_SIBCALL (funmem, rounded_stack_size_rtx,
362 next_arg_reg,
363 GEN_INT (struct_value_size)));
364 }
365 else
366 #endif
367
368 #if defined (HAVE_call) && defined (HAVE_call_value)
369 if (HAVE_call && HAVE_call_value)
370 {
371 if (valreg)
372 emit_call_insn (GEN_CALL_VALUE (valreg, funmem, rounded_stack_size_rtx,
373 next_arg_reg, NULL_RTX));
374 else
375 emit_call_insn (GEN_CALL (funmem, rounded_stack_size_rtx, next_arg_reg,
376 GEN_INT (struct_value_size)));
377 }
378 else
379 #endif
380 gcc_unreachable ();
381
382 /* Find the call we just emitted. */
383 call_insn = last_call_insn ();
384
385   /* Some targets create a fresh MEM instead of reusing the one provided
386 above. Set its MEM_EXPR. */
387 call = PATTERN (call_insn);
388 if (GET_CODE (call) == PARALLEL)
389 call = XVECEXP (call, 0, 0);
390 if (GET_CODE (call) == SET)
391 call = SET_SRC (call);
392 if (GET_CODE (call) == CALL
393 && MEM_P (XEXP (call, 0))
394 && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
395 && MEM_EXPR (funmem) != NULL_TREE)
396 set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
397
398 /* Put the register usage information there. */
399 add_function_usage_to (call_insn, call_fusage);
400
401 /* If this is a const call, then set the insn's unchanging bit. */
402 if (ecf_flags & ECF_CONST)
403 RTL_CONST_CALL_P (call_insn) = 1;
404
405 /* If this is a pure call, then set the insn's unchanging bit. */
406 if (ecf_flags & ECF_PURE)
407 RTL_PURE_CALL_P (call_insn) = 1;
408
409   /* If this is a const or pure call that can loop forever, then set the
         insn's corresponding bit. */
410 if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
411 RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
412
413 /* Create a nothrow REG_EH_REGION note, if needed. */
414 make_reg_eh_region_note (call_insn, ecf_flags, 0);
415
416 if (ecf_flags & ECF_NORETURN)
417 add_reg_note (call_insn, REG_NORETURN, const0_rtx);
418
419 if (ecf_flags & ECF_RETURNS_TWICE)
420 {
421 add_reg_note (call_insn, REG_SETJMP, const0_rtx);
422 cfun->calls_setjmp = 1;
423 }
424
425 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
426
427 /* Restore this now, so that we do defer pops for this call's args
428 if the context of the call as a whole permits. */
429 inhibit_defer_pop = old_inhibit_defer_pop;
430
431 if (n_popped > 0)
432 {
433 if (!already_popped)
434 CALL_INSN_FUNCTION_USAGE (call_insn)
435 = gen_rtx_EXPR_LIST (VOIDmode,
436 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
437 CALL_INSN_FUNCTION_USAGE (call_insn));
438 rounded_stack_size -= n_popped;
439 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
440 stack_pointer_delta -= n_popped;
441
442 add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
443
444       /* If popping is needed, stack realignment must use DRAP.  */
445 if (SUPPORTS_STACK_ALIGNMENT)
446 crtl->need_drap = true;
447 }
448   /* For noreturn calls when not accumulating outgoing args, force a
449      REG_ARGS_SIZE note to prevent crossjumping of calls with different
450      arg sizes. */
451 else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
452 add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
453
454 if (!ACCUMULATE_OUTGOING_ARGS)
455 {
456 /* If returning from the subroutine does not automatically pop the args,
457 we need an instruction to pop them sooner or later.
458 Perhaps do it now; perhaps just record how much space to pop later.
459
460 If returning from the subroutine does pop the args, indicate that the
461 stack pointer will be changed. */
462
463 if (rounded_stack_size != 0)
464 {
465 if (ecf_flags & ECF_NORETURN)
466 /* Just pretend we did the pop. */
467 stack_pointer_delta -= rounded_stack_size;
468 else if (flag_defer_pop && inhibit_defer_pop == 0
469 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
470 pending_stack_adjust += rounded_stack_size;
471 else
472 adjust_stack (rounded_stack_size_rtx);
473 }
474 }
475 /* When we accumulate outgoing args, we must avoid any stack manipulations.
476 Restore the stack pointer to its original value now. Usually
477 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
478 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
479 popping variants of functions exist as well.
480
481 ??? We may optimize similar to defer_pop above, but it is
482 probably not worthwhile.
483
484 ??? It will be worthwhile to enable combine_stack_adjustments even for
485 such machines. */
486 else if (n_popped)
487 anti_adjust_stack (GEN_INT (n_popped));
488 }
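/* Editorial worked example with hypothetical numbers: for a callee that
   pops its own 16 bytes of arguments, targetm.calls.return_pops_args
   returns 16.  The code above then records a CLOBBER of the stack pointer
   in CALL_INSN_FUNCTION_USAGE (unless a popping call pattern was already
   emitted), reduces rounded_stack_size and stack_pointer_delta by 16, and
   attaches a REG_ARGS_SIZE note, so no explicit pop insn is emitted for
   those 16 bytes later on.  */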
489
490 /* Determine if the function identified by NAME and FNDECL is one with
491 special properties we wish to know about.
492
493 For example, if the function might return more than one time (setjmp), then
494 set RETURNS_TWICE to a nonzero value.
495
496 Similarly set NORETURN if the function is in the longjmp family.
497
498 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
499    space from the stack, such as alloca. */
500
501 static int
502 special_function_p (const_tree fndecl, int flags)
503 {
504 if (fndecl && DECL_NAME (fndecl)
505 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
506 /* Exclude functions not at the file scope, or not `extern',
507 since they are not the magic functions we would otherwise
508 think they are.
509 FIXME: this should be handled with attributes, not with this
510 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
511 because you can declare fork() inside a function if you
512 wish. */
513 && (DECL_CONTEXT (fndecl) == NULL_TREE
514 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
515 && TREE_PUBLIC (fndecl))
516 {
517 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
518 const char *tname = name;
519
520 /* We assume that alloca will always be called by name. It
521 makes no sense to pass it as a pointer-to-function to
522 anything that does not understand its behavior. */
523 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
524 && name[0] == 'a'
525 && ! strcmp (name, "alloca"))
526 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
527 && name[0] == '_'
528 && ! strcmp (name, "__builtin_alloca"))))
529 flags |= ECF_MAY_BE_ALLOCA;
530
531 /* Disregard prefix _, __, __x or __builtin_. */
532 if (name[0] == '_')
533 {
534 if (name[1] == '_'
535 && name[2] == 'b'
536 && !strncmp (name + 3, "uiltin_", 7))
537 tname += 10;
538 else if (name[1] == '_' && name[2] == 'x')
539 tname += 3;
540 else if (name[1] == '_')
541 tname += 2;
542 else
543 tname += 1;
544 }
545
546 if (tname[0] == 's')
547 {
548 if ((tname[1] == 'e'
549 && (! strcmp (tname, "setjmp")
550 || ! strcmp (tname, "setjmp_syscall")))
551 || (tname[1] == 'i'
552 && ! strcmp (tname, "sigsetjmp"))
553 || (tname[1] == 'a'
554 && ! strcmp (tname, "savectx")))
555 flags |= ECF_RETURNS_TWICE;
556
557 if (tname[1] == 'i'
558 && ! strcmp (tname, "siglongjmp"))
559 flags |= ECF_NORETURN;
560 }
561 else if ((tname[0] == 'q' && tname[1] == 's'
562 && ! strcmp (tname, "qsetjmp"))
563 || (tname[0] == 'v' && tname[1] == 'f'
564 && ! strcmp (tname, "vfork"))
565 || (tname[0] == 'g' && tname[1] == 'e'
566 && !strcmp (tname, "getcontext")))
567 flags |= ECF_RETURNS_TWICE;
568
569 else if (tname[0] == 'l' && tname[1] == 'o'
570 && ! strcmp (tname, "longjmp"))
571 flags |= ECF_NORETURN;
572 }
573
574 return flags;
575 }
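/* Editorial examples of the name checks above: "alloca" and
   "__builtin_alloca" yield ECF_MAY_BE_ALLOCA; "setjmp", "setjmp_syscall",
   "sigsetjmp", "savectx", "qsetjmp", "vfork" and "getcontext" yield
   ECF_RETURNS_TWICE; "longjmp" and "siglongjmp" yield ECF_NORETURN.
   The prefixes _, __, __x and __builtin_ are stripped first, so
   e.g. "_setjmp" is recognized as well.  */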
576
577 /* Return nonzero when FNDECL represents a call to setjmp. */
578
579 int
580 setjmp_call_p (const_tree fndecl)
581 {
582 if (DECL_IS_RETURNS_TWICE (fndecl))
583 return ECF_RETURNS_TWICE;
584 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
585 }
586
587
588 /* Return true if STMT is an alloca call. */
589
590 bool
591 gimple_alloca_call_p (const_gimple stmt)
592 {
593 tree fndecl;
594
595 if (!is_gimple_call (stmt))
596 return false;
597
598 fndecl = gimple_call_fndecl (stmt);
599 if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
600 return true;
601
602 return false;
603 }
604
605 /* Return true when EXP contains an alloca call. */
606
607 bool
608 alloca_call_p (const_tree exp)
609 {
610 if (TREE_CODE (exp) == CALL_EXPR
611 && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
612 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
613 && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
614 & ECF_MAY_BE_ALLOCA))
615 return true;
616 return false;
617 }
618
619 /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
620 function. Return FALSE otherwise. */
621
622 static bool
623 is_tm_builtin (const_tree fndecl)
624 {
625 if (fndecl == NULL)
626 return false;
627
628 if (decl_is_tm_clone (fndecl))
629 return true;
630
631 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
632 {
633 switch (DECL_FUNCTION_CODE (fndecl))
634 {
635 case BUILT_IN_TM_COMMIT:
636 case BUILT_IN_TM_COMMIT_EH:
637 case BUILT_IN_TM_ABORT:
638 case BUILT_IN_TM_IRREVOCABLE:
639 case BUILT_IN_TM_GETTMCLONE_IRR:
640 case BUILT_IN_TM_MEMCPY:
641 case BUILT_IN_TM_MEMMOVE:
642 case BUILT_IN_TM_MEMSET:
643 CASE_BUILT_IN_TM_STORE (1):
644 CASE_BUILT_IN_TM_STORE (2):
645 CASE_BUILT_IN_TM_STORE (4):
646 CASE_BUILT_IN_TM_STORE (8):
647 CASE_BUILT_IN_TM_STORE (FLOAT):
648 CASE_BUILT_IN_TM_STORE (DOUBLE):
649 CASE_BUILT_IN_TM_STORE (LDOUBLE):
650 CASE_BUILT_IN_TM_STORE (M64):
651 CASE_BUILT_IN_TM_STORE (M128):
652 CASE_BUILT_IN_TM_STORE (M256):
653 CASE_BUILT_IN_TM_LOAD (1):
654 CASE_BUILT_IN_TM_LOAD (2):
655 CASE_BUILT_IN_TM_LOAD (4):
656 CASE_BUILT_IN_TM_LOAD (8):
657 CASE_BUILT_IN_TM_LOAD (FLOAT):
658 CASE_BUILT_IN_TM_LOAD (DOUBLE):
659 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
660 CASE_BUILT_IN_TM_LOAD (M64):
661 CASE_BUILT_IN_TM_LOAD (M128):
662 CASE_BUILT_IN_TM_LOAD (M256):
663 case BUILT_IN_TM_LOG:
664 case BUILT_IN_TM_LOG_1:
665 case BUILT_IN_TM_LOG_2:
666 case BUILT_IN_TM_LOG_4:
667 case BUILT_IN_TM_LOG_8:
668 case BUILT_IN_TM_LOG_FLOAT:
669 case BUILT_IN_TM_LOG_DOUBLE:
670 case BUILT_IN_TM_LOG_LDOUBLE:
671 case BUILT_IN_TM_LOG_M64:
672 case BUILT_IN_TM_LOG_M128:
673 case BUILT_IN_TM_LOG_M256:
674 return true;
675 default:
676 break;
677 }
678 }
679 return false;
680 }
681
682 /* Detect flags (function attributes) from the function decl or type node. */
683
684 int
685 flags_from_decl_or_type (const_tree exp)
686 {
687 int flags = 0;
688
689 if (DECL_P (exp))
690 {
691 /* The function exp may have the `malloc' attribute. */
692 if (DECL_IS_MALLOC (exp))
693 flags |= ECF_MALLOC;
694
695 /* The function exp may have the `returns_twice' attribute. */
696 if (DECL_IS_RETURNS_TWICE (exp))
697 flags |= ECF_RETURNS_TWICE;
698
699 /* Process the pure and const attributes. */
700 if (TREE_READONLY (exp))
701 flags |= ECF_CONST;
702 if (DECL_PURE_P (exp))
703 flags |= ECF_PURE;
704 if (DECL_LOOPING_CONST_OR_PURE_P (exp))
705 flags |= ECF_LOOPING_CONST_OR_PURE;
706
707 if (DECL_IS_NOVOPS (exp))
708 flags |= ECF_NOVOPS;
709 if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
710 flags |= ECF_LEAF;
711
712 if (TREE_NOTHROW (exp))
713 flags |= ECF_NOTHROW;
714
715 if (flag_tm)
716 {
717 if (is_tm_builtin (exp))
718 flags |= ECF_TM_BUILTIN;
719 else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
720 || lookup_attribute ("transaction_pure",
721 TYPE_ATTRIBUTES (TREE_TYPE (exp))))
722 flags |= ECF_TM_PURE;
723 }
724
725 flags = special_function_p (exp, flags);
726 }
727 else if (TYPE_P (exp))
728 {
729 if (TYPE_READONLY (exp))
730 flags |= ECF_CONST;
731
732 if (flag_tm
733 && ((flags & ECF_CONST) != 0
734 || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
735 flags |= ECF_TM_PURE;
736 }
737
738 if (TREE_THIS_VOLATILE (exp))
739 {
740 flags |= ECF_NORETURN;
741 if (flags & (ECF_CONST|ECF_PURE))
742 flags |= ECF_LOOPING_CONST_OR_PURE;
743 }
744
745 return flags;
746 }
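/* Editorial examples, assuming the usual mapping of attributes onto tree
   flags (function names below are hypothetical):

     void *f (int) __attribute__ ((malloc));     ECF_MALLOC
     int g (int) __attribute__ ((const));        ECF_CONST (TREE_READONLY)
     int h (int) __attribute__ ((pure));         ECF_PURE (DECL_PURE_P)
     void i (void) __attribute__ ((noreturn));   ECF_NORETURN
                                                 (TREE_THIS_VOLATILE)

   A decl that is both const (or pure) and noreturn additionally gets
   ECF_LOOPING_CONST_OR_PURE, per the TREE_THIS_VOLATILE test above.  */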
747
748 /* Detect flags from a CALL_EXPR. */
749
750 int
751 call_expr_flags (const_tree t)
752 {
753 int flags;
754 tree decl = get_callee_fndecl (t);
755
756 if (decl)
757 flags = flags_from_decl_or_type (decl);
758 else
759 {
760 t = TREE_TYPE (CALL_EXPR_FN (t));
761 if (t && TREE_CODE (t) == POINTER_TYPE)
762 flags = flags_from_decl_or_type (TREE_TYPE (t));
763 else
764 flags = 0;
765 }
766
767 return flags;
768 }
769
770 /* Precompute all register parameters as described by ARGS, storing values
771 into fields within the ARGS array.
772
773    NUM_ACTUALS indicates the total number of elements in the ARGS array.
774
775 Set REG_PARM_SEEN if we encounter a register parameter. */
776
777 static void
778 precompute_register_parameters (int num_actuals, struct arg_data *args,
779 int *reg_parm_seen)
780 {
781 int i;
782
783 *reg_parm_seen = 0;
784
785 for (i = 0; i < num_actuals; i++)
786 if (args[i].reg != 0 && ! args[i].pass_on_stack)
787 {
788 *reg_parm_seen = 1;
789
790 if (args[i].value == 0)
791 {
792 push_temp_slots ();
793 args[i].value = expand_normal (args[i].tree_value);
794 preserve_temp_slots (args[i].value);
795 pop_temp_slots ();
796 }
797
798 /* If we are to promote the function arg to a wider mode,
799 do it now. */
800
801 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
802 args[i].value
803 = convert_modes (args[i].mode,
804 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
805 args[i].value, args[i].unsignedp);
806
807 /* If the value is a non-legitimate constant, force it into a
808 pseudo now. TLS symbols sometimes need a call to resolve. */
809 if (CONSTANT_P (args[i].value)
810 && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
811 args[i].value = force_reg (args[i].mode, args[i].value);
812
813 /* If we're going to have to load the value by parts, pull the
814 parts into pseudos. The part extraction process can involve
815 non-trivial computation. */
816 if (GET_CODE (args[i].reg) == PARALLEL)
817 {
818 tree type = TREE_TYPE (args[i].tree_value);
819 args[i].parallel_value
820 = emit_group_load_into_temps (args[i].reg, args[i].value,
821 type, int_size_in_bytes (type));
822 }
823
824 /* If the value is expensive, and we are inside an appropriately
825 short loop, put the value into a pseudo and then put the pseudo
826 into the hard reg.
827
828 For small register classes, also do this if this call uses
829 register parameters. This is to avoid reload conflicts while
830 	 loading the parameter registers. */
831
832 else if ((! (REG_P (args[i].value)
833 || (GET_CODE (args[i].value) == SUBREG
834 && REG_P (SUBREG_REG (args[i].value)))))
835 && args[i].mode != BLKmode
836 && set_src_cost (args[i].value, optimize_insn_for_speed_p ())
837 > COSTS_N_INSNS (1)
838 && ((*reg_parm_seen
839 && targetm.small_register_classes_for_mode_p (args[i].mode))
840 || optimize))
841 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
842 }
843 }
844
845 #ifdef REG_PARM_STACK_SPACE
846
847 /* The argument list is the property of the called routine, which may
848    clobber it.  If the fixed area has been used for previous
849    parameters, we must save and restore it. */
850
851 static rtx
852 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
853 {
854 int low;
855 int high;
856
857 /* Compute the boundary of the area that needs to be saved, if any. */
858 high = reg_parm_stack_space;
859 #ifdef ARGS_GROW_DOWNWARD
860 high += 1;
861 #endif
862 if (high > highest_outgoing_arg_in_use)
863 high = highest_outgoing_arg_in_use;
864
865 for (low = 0; low < high; low++)
866 if (stack_usage_map[low] != 0)
867 {
868 int num_to_save;
869 enum machine_mode save_mode;
870 int delta;
871 rtx stack_area;
872 rtx save_area;
873
874 while (stack_usage_map[--high] == 0)
875 ;
876
877 *low_to_save = low;
878 *high_to_save = high;
879
880 num_to_save = high - low + 1;
881 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
882
883 /* If we don't have the required alignment, must do this
884 in BLKmode. */
885 if ((low & (MIN (GET_MODE_SIZE (save_mode),
886 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
887 save_mode = BLKmode;
888
889 #ifdef ARGS_GROW_DOWNWARD
890 delta = -high;
891 #else
892 delta = low;
893 #endif
894 stack_area = gen_rtx_MEM (save_mode,
895 memory_address (save_mode,
896 plus_constant (argblock,
897 delta)));
898
899 set_mem_align (stack_area, PARM_BOUNDARY);
900 if (save_mode == BLKmode)
901 {
902 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
903 emit_block_move (validize_mem (save_area), stack_area,
904 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
905 }
906 else
907 {
908 save_area = gen_reg_rtx (save_mode);
909 emit_move_insn (save_area, stack_area);
910 }
911
912 return save_area;
913 }
914
915 return NULL_RTX;
916 }
917
918 static void
919 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
920 {
921 enum machine_mode save_mode = GET_MODE (save_area);
922 int delta;
923 rtx stack_area;
924
925 #ifdef ARGS_GROW_DOWNWARD
926 delta = -high_to_save;
927 #else
928 delta = low_to_save;
929 #endif
930 stack_area = gen_rtx_MEM (save_mode,
931 memory_address (save_mode,
932 plus_constant (argblock, delta)));
933 set_mem_align (stack_area, PARM_BOUNDARY);
934
935 if (save_mode != BLKmode)
936 emit_move_insn (stack_area, save_area);
937 else
938 emit_block_move (stack_area, validize_mem (save_area),
939 GEN_INT (high_to_save - low_to_save + 1),
940 BLOCK_OP_CALL_PARM);
941 }
942 #endif /* REG_PARM_STACK_SPACE */
943
944 /* If any elements in ARGS refer to parameters that are to be passed in
945 registers, but not in memory, and whose alignment does not permit a
946    direct copy into registers, copy the values into a group of pseudos
947 which we will later copy into the appropriate hard registers.
948
949 Pseudos for each unaligned argument will be stored into the array
950 args[argnum].aligned_regs. The caller is responsible for deallocating
951 the aligned_regs array if it is nonzero. */
952
953 static void
954 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
955 {
956 int i, j;
957
958 for (i = 0; i < num_actuals; i++)
959 if (args[i].reg != 0 && ! args[i].pass_on_stack
960 && args[i].mode == BLKmode
961 && MEM_P (args[i].value)
962 && (MEM_ALIGN (args[i].value)
963 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
964 {
965 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
966 int endian_correction = 0;
967
968 if (args[i].partial)
969 {
970 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
971 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
972 }
973 else
974 {
975 args[i].n_aligned_regs
976 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
977 }
978
979 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
980
981 /* Structures smaller than a word are normally aligned to the
982 least significant byte. On a BYTES_BIG_ENDIAN machine,
983 this means we must skip the empty high order bytes when
984 calculating the bit offset. */
985 if (bytes < UNITS_PER_WORD
986 #ifdef BLOCK_REG_PADDING
987 && (BLOCK_REG_PADDING (args[i].mode,
988 TREE_TYPE (args[i].tree_value), 1)
989 == downward)
990 #else
991 && BYTES_BIG_ENDIAN
992 #endif
993 )
994 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
995
996 for (j = 0; j < args[i].n_aligned_regs; j++)
997 {
998 rtx reg = gen_reg_rtx (word_mode);
999 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1000 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1001
1002 args[i].aligned_regs[j] = reg;
1003 word = extract_bit_field (word, bitsize, 0, 1, false, NULL_RTX,
1004 word_mode, word_mode);
1005
1006 /* There is no need to restrict this code to loading items
1007 in TYPE_ALIGN sized hunks. The bitfield instructions can
1008 load up entire word sized registers efficiently.
1009
1010 ??? This may not be needed anymore.
1011 	     We used to emit a clobber here but that doesn't let later
1012 passes optimize the instructions we emit. By storing 0 into
1013 the register later passes know the first AND to zero out the
1014 bitfield being set in the register is unnecessary. The store
1015 of 0 will be deleted as will at least the first AND. */
1016
1017 emit_move_insn (reg, const0_rtx);
1018
1019 bytes -= bitsize / BITS_PER_UNIT;
1020 store_bit_field (reg, bitsize, endian_correction, 0, 0,
1021 word_mode, word);
1022 }
1023 }
1024 }
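/* Editorial worked example: for a 3-byte BLKmode argument with 32-bit
   words, bytes == 3 gives bitsize == 24; on a big-endian target
   endian_correction == 32 - 24 == 8, i.e. the store above skips the 8
   empty high-order bits, matching the comment that small structures are
   aligned to the least significant byte.  */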
1025
1026 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1027 CALL_EXPR EXP.
1028
1029 NUM_ACTUALS is the total number of parameters.
1030
1031 N_NAMED_ARGS is the total number of named arguments.
1032
1033 STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1034 value, or null.
1035
1036    FNDECL is the tree node for the target of this call (if known).
1037
1038 ARGS_SO_FAR holds state needed by the target to know where to place
1039 the next argument.
1040
1041 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1042 for arguments which are passed in registers.
1043
1044    OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1045 and may be modified by this routine.
1046
1047 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1048    flags which may be modified by this routine.
1049
1050    MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference argument
1051 that requires allocation of stack space.
1052
1053 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1054 the thunked-to function. */
1055
1056 static void
1057 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1058 struct arg_data *args,
1059 struct args_size *args_size,
1060 int n_named_args ATTRIBUTE_UNUSED,
1061 tree exp, tree struct_value_addr_value,
1062 tree fndecl, tree fntype,
1063 cumulative_args_t args_so_far,
1064 int reg_parm_stack_space,
1065 rtx *old_stack_level, int *old_pending_adj,
1066 int *must_preallocate, int *ecf_flags,
1067 bool *may_tailcall, bool call_from_thunk_p)
1068 {
1069 CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
1070 location_t loc = EXPR_LOCATION (exp);
1071 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1072 int inc;
1073
1074 /* Count arg position in order args appear. */
1075 int argpos;
1076
1077 int i;
1078
1079 args_size->constant = 0;
1080 args_size->var = 0;
1081
1082 /* In this loop, we consider args in the order they are written.
1083 We fill up ARGS from the front or from the back if necessary
1084 so that in any case the first arg to be pushed ends up at the front. */
1085
1086 if (PUSH_ARGS_REVERSED)
1087 {
1088 i = num_actuals - 1, inc = -1;
1089 /* In this case, must reverse order of args
1090 so that we compute and push the last arg first. */
1091 }
1092 else
1093 {
1094 i = 0, inc = 1;
1095 }
1096
1097 /* First fill in the actual arguments in the ARGS array, splitting
1098 complex arguments if necessary. */
1099 {
1100 int j = i;
1101 call_expr_arg_iterator iter;
1102 tree arg;
1103
1104 if (struct_value_addr_value)
1105 {
1106 args[j].tree_value = struct_value_addr_value;
1107 j += inc;
1108 }
1109 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1110 {
1111 tree argtype = TREE_TYPE (arg);
1112 if (targetm.calls.split_complex_arg
1113 && argtype
1114 && TREE_CODE (argtype) == COMPLEX_TYPE
1115 && targetm.calls.split_complex_arg (argtype))
1116 {
1117 tree subtype = TREE_TYPE (argtype);
1118 args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1119 j += inc;
1120 args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1121 }
1122 else
1123 args[j].tree_value = arg;
1124 j += inc;
1125 }
1126 }
1127
1128 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1129 for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
1130 {
1131 tree type = TREE_TYPE (args[i].tree_value);
1132 int unsignedp;
1133 enum machine_mode mode;
1134
1135 /* Replace erroneous argument with constant zero. */
1136 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1137 args[i].tree_value = integer_zero_node, type = integer_type_node;
1138
1139 /* If TYPE is a transparent union or record, pass things the way
1140 we would pass the first field of the union or record. We have
1141 already verified that the modes are the same. */
1142 if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1143 && TYPE_TRANSPARENT_AGGR (type))
1144 type = TREE_TYPE (first_field (type));
1145
1146 /* Decide where to pass this arg.
1147
1148 args[i].reg is nonzero if all or part is passed in registers.
1149
1150 args[i].partial is nonzero if part but not all is passed in registers,
1151 and the exact value says how many bytes are passed in registers.
1152
1153 args[i].pass_on_stack is nonzero if the argument must at least be
1154 computed on the stack. It may then be loaded back into registers
1155 if args[i].reg is nonzero.
1156
1157 These decisions are driven by the FUNCTION_... macros and must agree
1158 with those made by function.c. */
1159
1160 /* See if this argument should be passed by invisible reference. */
1161 if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
1162 type, argpos < n_named_args))
1163 {
1164 bool callee_copies;
1165 tree base = NULL_TREE;
1166
1167 callee_copies
1168 = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
1169 type, argpos < n_named_args);
1170
1171 /* If we're compiling a thunk, pass through invisible references
1172 instead of making a copy. */
1173 if (call_from_thunk_p
1174 || (callee_copies
1175 && !TREE_ADDRESSABLE (type)
1176 && (base = get_base_address (args[i].tree_value))
1177 && TREE_CODE (base) != SSA_NAME
1178 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1179 {
1180 mark_addressable (args[i].tree_value);
1181
1182 /* We can't use sibcalls if a callee-copied argument is
1183 stored in the current function's frame. */
1184 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1185 *may_tailcall = false;
1186
1187 args[i].tree_value = build_fold_addr_expr_loc (loc,
1188 args[i].tree_value);
1189 type = TREE_TYPE (args[i].tree_value);
1190
1191 if (*ecf_flags & ECF_CONST)
1192 *ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
1193 }
1194 else
1195 {
1196 /* We make a copy of the object and pass the address to the
1197 function being called. */
1198 rtx copy;
1199
1200 if (!COMPLETE_TYPE_P (type)
1201 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
1202 || (flag_stack_check == GENERIC_STACK_CHECK
1203 && compare_tree_int (TYPE_SIZE_UNIT (type),
1204 STACK_CHECK_MAX_VAR_SIZE) > 0))
1205 {
1206 /* This is a variable-sized object. Make space on the stack
1207 for it. */
1208 rtx size_rtx = expr_size (args[i].tree_value);
1209
1210 if (*old_stack_level == 0)
1211 {
1212 emit_stack_save (SAVE_BLOCK, old_stack_level);
1213 *old_pending_adj = pending_stack_adjust;
1214 pending_stack_adjust = 0;
1215 }
1216
1217 /* We can pass TRUE as the 4th argument because we just
1218 saved the stack pointer and will restore it right after
1219 the call. */
1220 copy = allocate_dynamic_stack_space (size_rtx,
1221 TYPE_ALIGN (type),
1222 TYPE_ALIGN (type),
1223 true);
1224 copy = gen_rtx_MEM (BLKmode, copy);
1225 set_mem_attributes (copy, type, 1);
1226 }
1227 else
1228 copy = assign_temp (type, 0, 1, 0);
1229
1230 store_expr (args[i].tree_value, copy, 0, false);
1231
1232 /* Just change the const function to pure and then let
1233 the next test clear the pure based on
1234 callee_copies. */
1235 if (*ecf_flags & ECF_CONST)
1236 {
1237 *ecf_flags &= ~ECF_CONST;
1238 *ecf_flags |= ECF_PURE;
1239 }
1240
1241 if (!callee_copies && *ecf_flags & ECF_PURE)
1242 *ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
1243
1244 args[i].tree_value
1245 = build_fold_addr_expr_loc (loc, make_tree (type, copy));
1246 type = TREE_TYPE (args[i].tree_value);
1247 *may_tailcall = false;
1248 }
1249 }
1250
1251 unsignedp = TYPE_UNSIGNED (type);
1252 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
1253 fndecl ? TREE_TYPE (fndecl) : fntype, 0);
1254
1255 args[i].unsignedp = unsignedp;
1256 args[i].mode = mode;
1257
1258 args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
1259 argpos < n_named_args);
1260
1261 /* If this is a sibling call and the machine has register windows, the
1262 	 register window has to be unwound before calling the routine, so
1263 arguments have to go into the incoming registers. */
1264 if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
1265 args[i].tail_call_reg
1266 = targetm.calls.function_incoming_arg (args_so_far, mode, type,
1267 argpos < n_named_args);
1268 else
1269 args[i].tail_call_reg = args[i].reg;
1270
1271 if (args[i].reg)
1272 args[i].partial
1273 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1274 argpos < n_named_args);
1275
1276 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1277
1278 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1279 it means that we are to pass this arg in the register(s) designated
1280 by the PARALLEL, but also to pass it in the stack. */
1281 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1282 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1283 args[i].pass_on_stack = 1;
1284
1285 /* If this is an addressable type, we must preallocate the stack
1286 since we must evaluate the object into its final location.
1287
1288 If this is to be passed in both registers and the stack, it is simpler
1289 to preallocate. */
1290 if (TREE_ADDRESSABLE (type)
1291 || (args[i].pass_on_stack && args[i].reg != 0))
1292 *must_preallocate = 1;
1293
1294 /* Compute the stack-size of this argument. */
1295 if (args[i].reg == 0 || args[i].partial != 0
1296 || reg_parm_stack_space > 0
1297 || args[i].pass_on_stack)
1298 locate_and_pad_parm (mode, type,
1299 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1300 1,
1301 #else
1302 args[i].reg != 0,
1303 #endif
1304 args[i].pass_on_stack ? 0 : args[i].partial,
1305 fndecl, args_size, &args[i].locate);
1306 #ifdef BLOCK_REG_PADDING
1307 else
1308 /* The argument is passed entirely in registers. See at which
1309 end it should be padded. */
1310 args[i].locate.where_pad =
1311 BLOCK_REG_PADDING (mode, type,
1312 int_size_in_bytes (type) <= UNITS_PER_WORD);
1313 #endif
1314
1315 /* Update ARGS_SIZE, the total stack space for args so far. */
1316
1317 args_size->constant += args[i].locate.size.constant;
1318 if (args[i].locate.size.var)
1319 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1320
1321 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1322 have been used, etc. */
1323
1324 targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
1325 type, argpos < n_named_args);
1326 }
1327 }
1328
1329 /* Update ARGS_SIZE to contain the total size for the argument block.
1330 Return the original constant component of the argument block's size.
1331
1332 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1333 for arguments passed in registers. */
1334
1335 static int
1336 compute_argument_block_size (int reg_parm_stack_space,
1337 struct args_size *args_size,
1338 tree fndecl ATTRIBUTE_UNUSED,
1339 tree fntype ATTRIBUTE_UNUSED,
1340 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1341 {
1342 int unadjusted_args_size = args_size->constant;
1343
1344 /* For accumulate outgoing args mode we don't need to align, since the frame
1345 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1346 backends from generating misaligned frame sizes. */
1347 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1348 preferred_stack_boundary = STACK_BOUNDARY;
1349
1350 /* Compute the actual size of the argument block required. The variable
1351 and constant sizes must be combined, the size may have to be rounded,
1352 and there may be a minimum required size. */
1353
1354 if (args_size->var)
1355 {
1356 args_size->var = ARGS_SIZE_TREE (*args_size);
1357 args_size->constant = 0;
1358
1359 preferred_stack_boundary /= BITS_PER_UNIT;
1360 if (preferred_stack_boundary > 1)
1361 {
1362 /* We don't handle this case yet. To handle it correctly we have
1363 to add the delta, round and subtract the delta.
1364 Currently no machine description requires this support. */
1365 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1366 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1367 }
1368
1369 if (reg_parm_stack_space > 0)
1370 {
1371 args_size->var
1372 = size_binop (MAX_EXPR, args_size->var,
1373 ssize_int (reg_parm_stack_space));
1374
1375 /* The area corresponding to register parameters is not to count in
1376 the size of the block we need. So make the adjustment. */
1377 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1378 args_size->var
1379 = size_binop (MINUS_EXPR, args_size->var,
1380 ssize_int (reg_parm_stack_space));
1381 }
1382 }
1383 else
1384 {
1385 preferred_stack_boundary /= BITS_PER_UNIT;
1386 if (preferred_stack_boundary < 1)
1387 preferred_stack_boundary = 1;
1388 args_size->constant = (((args_size->constant
1389 + stack_pointer_delta
1390 + preferred_stack_boundary - 1)
1391 / preferred_stack_boundary
1392 * preferred_stack_boundary)
1393 - stack_pointer_delta);
1394
1395 args_size->constant = MAX (args_size->constant,
1396 reg_parm_stack_space);
1397
1398 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
1399 args_size->constant -= reg_parm_stack_space;
1400 }
1401 return unadjusted_args_size;
1402 }
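/* Editorial worked example with hypothetical numbers: with
   args_size->constant == 9, stack_pointer_delta == 4 and a preferred
   boundary of 16 bytes, the rounding above yields
   ((9 + 4 + 15) / 16) * 16 - 4 == 12, so that after pushing 12 bytes the
   total adjustment (12 + 4) is again a multiple of 16.  */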
1403
1404 /* Precompute parameters as needed for a function call.
1405
1406 FLAGS is mask of ECF_* constants.
1407
1408 NUM_ACTUALS is the number of arguments.
1409
1410 ARGS is an array containing information for each argument; this
1411 routine fills in the INITIAL_VALUE and VALUE fields for each
1412 precomputed argument. */
1413
1414 static void
1415 precompute_arguments (int num_actuals, struct arg_data *args)
1416 {
1417 int i;
1418
1419 /* If this is a libcall, then precompute all arguments so that we do not
1420 get extraneous instructions emitted as part of the libcall sequence. */
1421
1422 /* If we preallocated the stack space, and some arguments must be passed
1423 on the stack, then we must precompute any parameter which contains a
1424 function call which will store arguments on the stack.
1425 Otherwise, evaluating the parameter may clobber previous parameters
1426 which have already been stored into the stack. (we have code to avoid
1427    such a case by saving the outgoing stack arguments, but it results in
1428 worse code) */
1429 if (!ACCUMULATE_OUTGOING_ARGS)
1430 return;
1431
1432 for (i = 0; i < num_actuals; i++)
1433 {
1434 tree type;
1435 enum machine_mode mode;
1436
1437 if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
1438 continue;
1439
1440 /* If this is an addressable type, we cannot pre-evaluate it. */
1441 type = TREE_TYPE (args[i].tree_value);
1442 gcc_assert (!TREE_ADDRESSABLE (type));
1443
1444 args[i].initial_value = args[i].value
1445 = expand_normal (args[i].tree_value);
1446
1447 mode = TYPE_MODE (type);
1448 if (mode != args[i].mode)
1449 {
1450 int unsignedp = args[i].unsignedp;
1451 args[i].value
1452 = convert_modes (args[i].mode, mode,
1453 args[i].value, args[i].unsignedp);
1454
1455 /* CSE will replace this only if it contains args[i].value
1456 pseudo, so convert it down to the declared mode using
1457 a SUBREG. */
1458 if (REG_P (args[i].value)
1459 && GET_MODE_CLASS (args[i].mode) == MODE_INT
1460 && promote_mode (type, mode, &unsignedp) != args[i].mode)
1461 {
1462 args[i].initial_value
1463 = gen_lowpart_SUBREG (mode, args[i].value);
1464 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1465 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1466 args[i].unsignedp);
1467 }
1468 }
1469 }
1470 }
1471
1472 /* Given the current state of MUST_PREALLOCATE and information about
1473 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1474 compute and return the final value for MUST_PREALLOCATE. */
1475
1476 static int
1477 finalize_must_preallocate (int must_preallocate, int num_actuals,
1478 struct arg_data *args, struct args_size *args_size)
1479 {
1480 /* See if we have or want to preallocate stack space.
1481
1482 If we would have to push a partially-in-regs parm
1483 before other stack parms, preallocate stack space instead.
1484
1485 If the size of some parm is not a multiple of the required stack
1486 alignment, we must preallocate.
1487
1488 If the total size of arguments that would otherwise create a copy in
1489 a temporary (such as a CALL) is more than half the total argument list
1490 size, preallocation is faster.
1491
1492 Another reason to preallocate is if we have a machine (like the m88k)
1493 where stack alignment is required to be maintained between every
1494 pair of insns, not just when the call is made. However, we assume here
1495 that such machines either do not have push insns (and hence preallocation
1496 would occur anyway) or the problem is taken care of with
1497 PUSH_ROUNDING. */
1498
1499 if (! must_preallocate)
1500 {
1501 int partial_seen = 0;
1502 int copy_to_evaluate_size = 0;
1503 int i;
1504
1505 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1506 {
1507 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1508 partial_seen = 1;
1509 else if (partial_seen && args[i].reg == 0)
1510 must_preallocate = 1;
1511
1512 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1513 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1514 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1515 || TREE_CODE (args[i].tree_value) == COND_EXPR
1516 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1517 copy_to_evaluate_size
1518 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1519 }
1520
1521 if (copy_to_evaluate_size * 2 >= args_size->constant
1522 && args_size->constant > 0)
1523 must_preallocate = 1;
1524 }
1525 return must_preallocate;
1526 }
1527
1528 /* If we preallocated stack space, compute the address of each argument
1529 and store it into the ARGS array.
1530
1531 We need not ensure it is a valid memory address here; it will be
1532 validized when it is used.
1533
1534 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1535
1536 static void
1537 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1538 {
1539 if (argblock)
1540 {
1541 rtx arg_reg = argblock;
1542 int i, arg_offset = 0;
1543
1544 if (GET_CODE (argblock) == PLUS)
1545 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1546
1547 for (i = 0; i < num_actuals; i++)
1548 {
1549 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1550 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1551 rtx addr;
1552 unsigned int align, boundary;
1553 unsigned int units_on_stack = 0;
1554 enum machine_mode partial_mode = VOIDmode;
1555
1556 /* Skip this parm if it will not be passed on the stack. */
1557 if (! args[i].pass_on_stack
1558 && args[i].reg != 0
1559 && args[i].partial == 0)
1560 continue;
1561
1562 if (CONST_INT_P (offset))
1563 addr = plus_constant (arg_reg, INTVAL (offset));
1564 else
1565 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1566
1567 addr = plus_constant (addr, arg_offset);
1568
1569 if (args[i].partial != 0)
1570 {
1571 /* Only part of the parameter is being passed on the stack.
1572 Generate a simple memory reference of the correct size. */
1573 units_on_stack = args[i].locate.size.constant;
1574 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1575 MODE_INT, 1);
1576 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1577 set_mem_size (args[i].stack, units_on_stack);
1578 }
1579 else
1580 {
1581 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1582 set_mem_attributes (args[i].stack,
1583 TREE_TYPE (args[i].tree_value), 1);
1584 }
1585 align = BITS_PER_UNIT;
1586 boundary = args[i].locate.boundary;
1587 if (args[i].locate.where_pad != downward)
1588 align = boundary;
1589 else if (CONST_INT_P (offset))
1590 {
1591 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1592 align = align & -align;
1593 }
1594 set_mem_align (args[i].stack, align);
1595
1596 if (CONST_INT_P (slot_offset))
1597 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1598 else
1599 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1600
1601 addr = plus_constant (addr, arg_offset);
1602
1603 if (args[i].partial != 0)
1604 {
1605 /* Only part of the parameter is being passed on the stack.
1606 Generate a simple memory reference of the correct size.
1607 */
1608 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1609 set_mem_size (args[i].stack_slot, units_on_stack);
1610 }
1611 else
1612 {
1613 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1614 set_mem_attributes (args[i].stack_slot,
1615 TREE_TYPE (args[i].tree_value), 1);
1616 }
1617 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1618
1619 /* Function incoming arguments may overlap with sibling call
1620 outgoing arguments and we cannot allow reordering of reads
1621 from function arguments with stores to outgoing arguments
1622 of sibling calls. */
1623 set_mem_alias_set (args[i].stack, 0);
1624 set_mem_alias_set (args[i].stack_slot, 0);
1625 }
1626 }
1627 }
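/* Editorial note on the alignment computation above: for a downward-padded
   argument, align is (INTVAL (offset) * BITS_PER_UNIT | boundary) with the
   lowest set bit then extracted by "align & -align".  E.g. a 4-byte offset
   with a 64-bit boundary gives 32 | 64 == 96, and 96 & -96 == 32, so the
   stack MEM is marked as only 32-bit aligned.  */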
1628
1629 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1630 in a call instruction.
1631
1632 FNDECL is the tree node for the target function. For an indirect call
1633 FNDECL will be NULL_TREE.
1634
1635 ADDR is the operand 0 of CALL_EXPR for this call. */
1636
1637 static rtx
1638 rtx_for_function_call (tree fndecl, tree addr)
1639 {
1640 rtx funexp;
1641
1642 /* Get the function to call, in the form of RTL. */
1643 if (fndecl)
1644 {
1645 if (!TREE_USED (fndecl) && fndecl != current_function_decl)
1646 TREE_USED (fndecl) = 1;
1647
1648 /* Get a SYMBOL_REF rtx for the function address. */
1649 funexp = XEXP (DECL_RTL (fndecl), 0);
1650 }
1651 else
1652 /* Generate an rtx (probably a pseudo-register) for the address. */
1653 {
1654 push_temp_slots ();
1655 funexp = expand_normal (addr);
1656 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1657 }
1658 return funexp;
1659 }
1660
1661 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
1662 static struct
1663 {
1664 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
1665 or NULL_RTX if none has been scanned yet. */
1666 rtx scan_start;
1667 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
1668 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
1669 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
1670 with fixed offset, or PC if this is with variable or unknown offset. */
1671 VEC(rtx, heap) *cache;
1672 } internal_arg_pointer_exp_state;
1673
1674 static rtx internal_arg_pointer_based_exp (rtx, bool);
1675
1676 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
1677 the tail call sequence, starting with first insn that hasn't been
1678 scanned yet, and note for each pseudo on the LHS whether it is based
1679 on crtl->args.internal_arg_pointer or not, and what offset from that
1680 pointer it has. */
1681
1682 static void
1683 internal_arg_pointer_based_exp_scan (void)
1684 {
1685 rtx insn, scan_start = internal_arg_pointer_exp_state.scan_start;
1686
1687 if (scan_start == NULL_RTX)
1688 insn = get_insns ();
1689 else
1690 insn = NEXT_INSN (scan_start);
1691
1692 while (insn)
1693 {
1694 rtx set = single_set (insn);
1695 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
1696 {
1697 rtx val = NULL_RTX;
1698 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
1699 /* Punt on pseudos set multiple times. */
1700 if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache)
1701 && (VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx)
1702 != NULL_RTX))
1703 val = pc_rtx;
1704 else
1705 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
1706 if (val != NULL_RTX)
1707 {
1708 if (idx
1709 >= VEC_length (rtx, internal_arg_pointer_exp_state.cache))
1710 VEC_safe_grow_cleared (rtx, heap,
1711 internal_arg_pointer_exp_state.cache,
1712 idx + 1);
1713 VEC_replace (rtx, internal_arg_pointer_exp_state.cache,
1714 idx, val);
1715 }
1716 }
1717 if (NEXT_INSN (insn) == NULL_RTX)
1718 scan_start = insn;
1719 insn = NEXT_INSN (insn);
1720 }
1721
1722 internal_arg_pointer_exp_state.scan_start = scan_start;
1723 }
1724
1725 /* Helper function for internal_arg_pointer_based_exp, called through
1726 for_each_rtx. Return 1 if *LOC is a register based on
1727 crtl->args.internal_arg_pointer. Return -1 if *LOC is not based on it
1728 and the subexpressions need not be examined. Otherwise return 0. */
1729
1730 static int
1731 internal_arg_pointer_based_exp_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
1732 {
1733 if (REG_P (*loc) && internal_arg_pointer_based_exp (*loc, false) != NULL_RTX)
1734 return 1;
1735 if (MEM_P (*loc))
1736 return -1;
1737 return 0;
1738 }
1739
1740 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
1741 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
1742 it with fixed offset, or PC if this is with variable or unknown offset.
1743 TOPLEVEL is true if the function is invoked at the topmost level. */
1744
1745 static rtx
1746 internal_arg_pointer_based_exp (rtx rtl, bool toplevel)
1747 {
1748 if (CONSTANT_P (rtl))
1749 return NULL_RTX;
1750
1751 if (rtl == crtl->args.internal_arg_pointer)
1752 return const0_rtx;
1753
1754 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
1755 return NULL_RTX;
1756
1757 if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
1758 {
1759 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
1760 if (val == NULL_RTX || val == pc_rtx)
1761 return val;
1762 return plus_constant (val, INTVAL (XEXP (rtl, 1)));
1763 }
1764
1765 /* When called at the topmost level, scan pseudo assignments in between the
1766 last scanned instruction in the tail call sequence and the latest insn
1767 in that sequence. */
1768 if (toplevel)
1769 internal_arg_pointer_based_exp_scan ();
1770
1771 if (REG_P (rtl))
1772 {
1773 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
1774 if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache))
1775 return VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx);
1776
1777 return NULL_RTX;
1778 }
1779
1780 if (for_each_rtx (&rtl, internal_arg_pointer_based_exp_1, NULL))
1781 return pc_rtx;
1782
1783 return NULL_RTX;
1784 }
1785
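/* A few illustrative cases for internal_arg_pointer_based_exp (the RTL is
   sketched by hand, not taken from any particular target):
   crtl->args.internal_arg_pointer itself yields (const_int 0); if pseudo P
   is cached as (const_int 4), then (plus (reg P) (const_int 8)) yields
   (const_int 12); a hard register or a CONSTANT_P expression yields
   NULL_RTX; and an expression that uses such an argument-pointer-based
   register outside of a MEM address, say inside a MULT, yields pc_rtx,
   meaning the offset is unknown.  */
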
1786 /* Return true if and only if SIZE storage units (usually bytes)
1787 starting from address ADDR overlap with already clobbered argument
1788 area. This function is used to determine if we should give up a
1789 sibcall. */
1790
1791 static bool
1792 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1793 {
1794 HOST_WIDE_INT i;
1795 rtx val;
1796
1797 if (sbitmap_empty_p (stored_args_map))
1798 return false;
1799 val = internal_arg_pointer_based_exp (addr, true);
1800 if (val == NULL_RTX)
1801 return false;
1802 else if (val == pc_rtx)
1803 return true;
1804 else
1805 i = INTVAL (val);
1806 #ifdef STACK_GROWS_DOWNWARD
1807 i -= crtl->args.pretend_args_size;
1808 #else
1809 i += crtl->args.pretend_args_size;
1810 #endif
1811
1812 #ifdef ARGS_GROW_DOWNWARD
1813 i = -i - size;
1814 #endif
1815 if (size > 0)
1816 {
1817 unsigned HOST_WIDE_INT k;
1818
1819 for (k = 0; k < size; k++)
1820 if (i + k < stored_args_map->n_bits
1821 && TEST_BIT (stored_args_map, i + k))
1822 return true;
1823 }
1824
1825 return false;
1826 }
1827
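/* A hypothetical example of the test above: on a target where the stack
   grows downward, the argument area grows upward and
   crtl->args.pretend_args_size is 0, an ADDR that
   internal_arg_pointer_based_exp resolves to (const_int 8) with SIZE == 4
   checks bits 8 through 11 of stored_args_map, and the overlap is reported
   as soon as any of those bytes has already been stored to by the tail
   call's outgoing arguments.  */
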
1828 /* Do the register loads required for any wholly-register parms or any
1829 parms which are passed both on the stack and in a register. Their
1830 expressions were already evaluated.
1831
1832 Mark all register-parms as living through the call, putting these USE
1833 insns in the CALL_INSN_FUNCTION_USAGE field.
1834
1835 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1836 checking, setting *SIBCALL_FAILURE if appropriate. */
1837
1838 static void
1839 load_register_parameters (struct arg_data *args, int num_actuals,
1840 rtx *call_fusage, int flags, int is_sibcall,
1841 int *sibcall_failure)
1842 {
1843 int i, j;
1844
1845 for (i = 0; i < num_actuals; i++)
1846 {
1847 rtx reg = ((flags & ECF_SIBCALL)
1848 ? args[i].tail_call_reg : args[i].reg);
1849 if (reg)
1850 {
1851 int partial = args[i].partial;
1852 int nregs;
1853 int size = 0;
1854 rtx before_arg = get_last_insn ();
1855 /* Set non-negative if we must move a word at a time, even if
1856 just one word (e.g., partial == 4 && mode == DFmode). Set
1857 to -1 if we just use a normal move insn. This value can be
1858 zero if the argument is a zero size structure. */
1859 nregs = -1;
1860 if (GET_CODE (reg) == PARALLEL)
1861 ;
1862 else if (partial)
1863 {
1864 gcc_assert (partial % UNITS_PER_WORD == 0);
1865 nregs = partial / UNITS_PER_WORD;
1866 }
1867 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1868 {
1869 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1870 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1871 }
1872 else
1873 size = GET_MODE_SIZE (args[i].mode);
1874
1875 /* Handle calls that pass values in multiple non-contiguous
1876 locations. The Irix 6 ABI has examples of this. */
1877
1878 if (GET_CODE (reg) == PARALLEL)
1879 emit_group_move (reg, args[i].parallel_value);
1880
1881 /* If simple case, just do move. If normal partial, store_one_arg
1882 has already loaded the register for us. In all other cases,
1883 load the register(s) from memory. */
1884
1885 else if (nregs == -1)
1886 {
1887 emit_move_insn (reg, args[i].value);
1888 #ifdef BLOCK_REG_PADDING
1889 /* Handle case where we have a value that needs shifting
1890 up to the msb, e.g. a QImode value and we're padding
1891 upward on a BYTES_BIG_ENDIAN machine. */
1892 if (size < UNITS_PER_WORD
1893 && (args[i].locate.where_pad
1894 == (BYTES_BIG_ENDIAN ? upward : downward)))
1895 {
1896 rtx x;
1897 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1898
1899 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1900 report the whole reg as used. Strictly speaking, the
1901 call only uses SIZE bytes at the msb end, but it doesn't
1902 seem worth generating rtl to say that. */
1903 reg = gen_rtx_REG (word_mode, REGNO (reg));
1904 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
1905 if (x != reg)
1906 emit_move_insn (reg, x);
1907 }
1908 #endif
1909 }
1910
1911 /* If we have pre-computed the values to put in the registers in
1912 the case of non-aligned structures, copy them in now. */
1913
1914 else if (args[i].n_aligned_regs != 0)
1915 for (j = 0; j < args[i].n_aligned_regs; j++)
1916 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1917 args[i].aligned_regs[j]);
1918
1919 else if (partial == 0 || args[i].pass_on_stack)
1920 {
1921 rtx mem = validize_mem (args[i].value);
1922
1923 /* Check for overlap with already clobbered argument area,
1924 providing that this has non-zero size. */
1925 if (is_sibcall
1926 && (size == 0
1927 || mem_overlaps_already_clobbered_arg_p
1928 (XEXP (args[i].value, 0), size)))
1929 *sibcall_failure = 1;
1930
1931 /* Handle a BLKmode that needs shifting. */
1932 if (nregs == 1 && size < UNITS_PER_WORD
1933 #ifdef BLOCK_REG_PADDING
1934 && args[i].locate.where_pad == downward
1935 #else
1936 && BYTES_BIG_ENDIAN
1937 #endif
1938 )
1939 {
1940 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1941 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1942 rtx x = gen_reg_rtx (word_mode);
1943 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1944 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1945 : LSHIFT_EXPR;
1946
1947 emit_move_insn (x, tem);
1948 x = expand_shift (dir, word_mode, x, shift, ri, 1);
1949 if (x != ri)
1950 emit_move_insn (ri, x);
1951 }
1952 else
1953 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1954 }
1955
1956 /* When a parameter is a block, and perhaps in other cases, it is
1957 possible that it did a load from an argument slot that was
1958 already clobbered. */
1959 if (is_sibcall
1960 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1961 *sibcall_failure = 1;
1962
1963 /* Handle calls that pass values in multiple non-contiguous
1964 locations. The Irix 6 ABI has examples of this. */
1965 if (GET_CODE (reg) == PARALLEL)
1966 use_group_regs (call_fusage, reg);
1967 else if (nregs == -1)
1968 use_reg_mode (call_fusage, reg,
1969 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
1970 else if (nregs > 0)
1971 use_regs (call_fusage, REGNO (reg), nregs);
1972 }
1973 }
1974 }
1975
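/* An illustration of the BLOCK_REG_PADDING shifting above, with made-up
   parameters: on a big-endian target with 4-byte words that pads small
   arguments upward, a 1-byte (QImode) argument loaded into a word register
   must end up in the most significant byte, so it is shifted left by
   (UNITS_PER_WORD - size) * BITS_PER_UNIT = (4 - 1) * 8 = 24 bits before
   the call.  */
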
1976 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1977 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1978 bytes, then we would need to push some additional bytes to pad the
1979 arguments. So, we compute an adjustment to the stack pointer for an
1980 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1981 bytes. Then, when the arguments are pushed the stack will be perfectly
1982 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1983 be popped after the call. Returns the adjustment. */
1984
1985 static int
1986 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1987 struct args_size *args_size,
1988 unsigned int preferred_unit_stack_boundary)
1989 {
1990 /* The number of bytes to pop so that the stack will be
1991 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1992 HOST_WIDE_INT adjustment;
1993 /* The alignment of the stack after the arguments are pushed, if we
1994 just pushed the arguments without adjusting the stack here. */
1995 unsigned HOST_WIDE_INT unadjusted_alignment;
1996
1997 unadjusted_alignment
1998 = ((stack_pointer_delta + unadjusted_args_size)
1999 % preferred_unit_stack_boundary);
2000
2001 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2002 as possible -- leaving just enough left to cancel out the
2003 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
2004 PENDING_STACK_ADJUST is non-negative, and congruent to
2005 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2006
2007 /* Begin by trying to pop all the bytes. */
2008 unadjusted_alignment
2009 = (unadjusted_alignment
2010 - (pending_stack_adjust % preferred_unit_stack_boundary));
2011 adjustment = pending_stack_adjust;
2012 /* Push enough additional bytes that the stack will be aligned
2013 after the arguments are pushed. */
2014 if (preferred_unit_stack_boundary > 1)
2015 {
2016 if (unadjusted_alignment > 0)
2017 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2018 else
2019 adjustment += unadjusted_alignment;
2020 }
2021
2022 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2023 bytes after the call. The right number is the entire
2024 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2025 by the arguments in the first place. */
2026 args_size->constant
2027 = pending_stack_adjust - adjustment + unadjusted_args_size;
2028
2029 return adjustment;
2030 }
2031
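/* A worked example for combine_pending_stack_adjustment_and_call, with
   made-up numbers: suppose PREFERRED_UNIT_STACK_BOUNDARY is 16,
   stack_pointer_delta is 0, UNADJUSTED_ARGS_SIZE is 20 and
   PENDING_STACK_ADJUST is 28.  UNADJUSTED_ALIGNMENT starts as
   (0 + 20) % 16 = 4 and becomes 4 - (28 % 16) = -8 once we try to pop
   everything, so the adjustment is reduced from 28 to 20.  Popping only 20
   bytes leaves the stack under-aligned by exactly the 20 bytes of
   arguments about to be pushed, pushing them restores 16-byte alignment,
   and ARGS_SIZE->CONSTANT becomes 28 - 20 + 20 = 28 bytes to pop after the
   call.  */
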
2032 /* Scan the expression X to see whether it dereferences any argument
2033 slots that we have already clobbered with tail call arguments (as
2034 noted in the stored_args_map bitmap).
2035 Return nonzero if X dereferences such an argument slot,
2036 zero otherwise. */
2037
2038 static int
2039 check_sibcall_argument_overlap_1 (rtx x)
2040 {
2041 RTX_CODE code;
2042 int i, j;
2043 const char *fmt;
2044
2045 if (x == NULL_RTX)
2046 return 0;
2047
2048 code = GET_CODE (x);
2049
2050 /* We need not check the operands of the CALL expression itself. */
2051 if (code == CALL)
2052 return 0;
2053
2054 if (code == MEM)
2055 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
2056 GET_MODE_SIZE (GET_MODE (x)));
2057
2058 /* Scan all subexpressions. */
2059 fmt = GET_RTX_FORMAT (code);
2060 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2061 {
2062 if (*fmt == 'e')
2063 {
2064 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2065 return 1;
2066 }
2067 else if (*fmt == 'E')
2068 {
2069 for (j = 0; j < XVECLEN (x, i); j++)
2070 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2071 return 1;
2072 }
2073 }
2074 return 0;
2075 }
2076
2077 /* Scan the sequence after INSN to see whether it dereferences any argument
2078 slots that we have already clobbered with tail call arguments (as noted in
2079 the stored_args_map bitmap). If MARK_STORED_ARGS_MAP is nonzero, add the
2080 stack slots for ARG to the stored_args_map bitmap afterwards (when ARG is a
2081 register, MARK_STORED_ARGS_MAP should be 0). Return nonzero if the sequence
2082 after INSN dereferences such an argument slot, zero otherwise. */
2083
2084 static int
2085 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
2086 {
2087 int low, high;
2088
2089 if (insn == NULL_RTX)
2090 insn = get_insns ();
2091 else
2092 insn = NEXT_INSN (insn);
2093
2094 for (; insn; insn = NEXT_INSN (insn))
2095 if (INSN_P (insn)
2096 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2097 break;
2098
2099 if (mark_stored_args_map)
2100 {
2101 #ifdef ARGS_GROW_DOWNWARD
2102 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2103 #else
2104 low = arg->locate.slot_offset.constant;
2105 #endif
2106
2107 for (high = low + arg->locate.size.constant; low < high; low++)
2108 SET_BIT (stored_args_map, low);
2109 }
2110 return insn != NULL_RTX;
2111 }
2112
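/* For instance (offsets invented for illustration), when the argument
   area grows upward and ARG occupies the slot at offset 8 with a size of
   4 bytes, the loop above sets bits 8 through 11 of stored_args_map; any
   later argument whose computation reads those bytes is then caught by
   mem_overlaps_already_clobbered_arg_p and the sibcall is abandoned in
   favor of a normal call.  */
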
2113 /* Given that a function returns a value of mode MODE at the most
2114 significant end of hard register VALUE, shift VALUE left or right
2115 as specified by LEFT_P. Return true if some action was needed. */
2116
2117 bool
2118 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
2119 {
2120 HOST_WIDE_INT shift;
2121
2122 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2123 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
2124 if (shift == 0)
2125 return false;
2126
2127 /* Use ashr rather than lshr for right shifts. This is for the benefit
2128 of the MIPS port, which requires SImode values to be sign-extended
2129 when stored in 64-bit registers. */
2130 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
2131 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
2132 gcc_unreachable ();
2133 return true;
2134 }
2135
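/* For example (modes chosen only for illustration): if the target returns
   a QImode value in the most significant end of a 32-bit hard register,
   MODE is QImode, GET_MODE (VALUE) is SImode, and the shift count computed
   above is 32 - 8 = 24; with LEFT_P false the value is arithmetically
   shifted right by 24 bits so that it lands in the low byte of VALUE.  */
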
2136 /* If X is a likely-spilled register value, copy it to a pseudo
2137 register and return that register. Return X otherwise. */
2138
2139 static rtx
2140 avoid_likely_spilled_reg (rtx x)
2141 {
2142 rtx new_rtx;
2143
2144 if (REG_P (x)
2145 && HARD_REGISTER_P (x)
2146 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
2147 {
2148 /* Make sure that we generate a REG rather than a CONCAT.
2149 Moves into CONCATs can need nontrivial instructions,
2150 and the whole point of this function is to avoid
2151 using the hard register directly in such a situation. */
2152 generating_concat_p = 0;
2153 new_rtx = gen_reg_rtx (GET_MODE (x));
2154 generating_concat_p = 1;
2155 emit_move_insn (new_rtx, x);
2156 return new_rtx;
2157 }
2158 return x;
2159 }
2160
2161 /* Generate all the code for a CALL_EXPR exp
2162 and return an rtx for its value.
2163 Store the value in TARGET (specified as an rtx) if convenient.
2164 If the value is stored in TARGET then TARGET is returned.
2165 If IGNORE is nonzero, then we ignore the value of the function call. */
2166
2167 rtx
2168 expand_call (tree exp, rtx target, int ignore)
2169 {
2170 /* Nonzero if we are currently expanding a call. */
2171 static int currently_expanding_call = 0;
2172
2173 /* RTX for the function to be called. */
2174 rtx funexp;
2175 /* Sequence of insns to perform a normal "call". */
2176 rtx normal_call_insns = NULL_RTX;
2177 /* Sequence of insns to perform a tail "call". */
2178 rtx tail_call_insns = NULL_RTX;
2179 /* Data type of the function. */
2180 tree funtype;
2181 tree type_arg_types;
2182 tree rettype;
2183 /* Declaration of the function being called,
2184 or 0 if the function is computed (not known by name). */
2185 tree fndecl = 0;
2186 /* The type of the function being called. */
2187 tree fntype;
2188 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
2189 int pass;
2190
2191 /* Register in which non-BLKmode value will be returned,
2192 or 0 if no value or if value is BLKmode. */
2193 rtx valreg;
2194 /* Address where we should return a BLKmode value;
2195 0 if value not BLKmode. */
2196 rtx structure_value_addr = 0;
2197 /* Nonzero if that address is being passed by treating it as
2198 an extra, implicit first parameter. Otherwise,
2199 it is passed by being copied directly into struct_value_rtx. */
2200 int structure_value_addr_parm = 0;
2201 /* Holds the value of implicit argument for the struct value. */
2202 tree structure_value_addr_value = NULL_TREE;
2203 /* Size of aggregate value wanted, or zero if none wanted
2204 or if we are using the non-reentrant PCC calling convention
2205 or expecting the value in registers. */
2206 HOST_WIDE_INT struct_value_size = 0;
2207 /* Nonzero if called function returns an aggregate in memory PCC style,
2208 by returning the address of where to find it. */
2209 int pcc_struct_value = 0;
2210 rtx struct_value = 0;
2211
2212 /* Number of actual parameters in this call, including struct value addr. */
2213 int num_actuals;
2214 /* Number of named args. Args after this are anonymous ones
2215 and they must all go on the stack. */
2216 int n_named_args;
2217 /* Number of complex actual arguments that need to be split. */
2218 int num_complex_actuals = 0;
2219
2220 /* Vector of information about each argument.
2221 Arguments are numbered in the order they will be pushed,
2222 not the order they are written. */
2223 struct arg_data *args;
2224
2225 /* Total size in bytes of all the stack-parms scanned so far. */
2226 struct args_size args_size;
2227 struct args_size adjusted_args_size;
2228 /* Size of arguments before any adjustments (such as rounding). */
2229 int unadjusted_args_size;
2230 /* Data on reg parms scanned so far. */
2231 CUMULATIVE_ARGS args_so_far_v;
2232 cumulative_args_t args_so_far;
2233 /* Nonzero if a reg parm has been scanned. */
2234 int reg_parm_seen;
2235 /* Nonzero if this is an indirect function call. */
2236
2237 /* Nonzero if we must avoid push-insns in the args for this call.
2238 If stack space is allocated for register parameters, but not by the
2239 caller, then it is preallocated in the fixed part of the stack frame.
2240 So the entire argument block must then be preallocated (i.e., we
2241 ignore PUSH_ROUNDING in that case). */
2242
2243 int must_preallocate = !PUSH_ARGS;
2244
2245 /* Size of the stack reserved for parameter registers. */
2246 int reg_parm_stack_space = 0;
2247
2248 /* Address of space preallocated for stack parms
2249 (on machines that lack push insns), or 0 if space not preallocated. */
2250 rtx argblock = 0;
2251
2252 /* Mask of ECF_ flags. */
2253 int flags = 0;
2254 #ifdef REG_PARM_STACK_SPACE
2255 /* Define the boundary of the register parm stack space that needs to be
2256 saved, if any. */
2257 int low_to_save, high_to_save;
2258 rtx save_area = 0; /* Place that it is saved */
2259 #endif
2260
2261 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2262 char *initial_stack_usage_map = stack_usage_map;
2263 char *stack_usage_map_buf = NULL;
2264
2265 int old_stack_allocated;
2266
2267 /* State variables to track stack modifications. */
2268 rtx old_stack_level = 0;
2269 int old_stack_arg_under_construction = 0;
2270 int old_pending_adj = 0;
2271 int old_inhibit_defer_pop = inhibit_defer_pop;
2272
2273 /* Some stack pointer alterations we make are performed via
2274 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2275 which we then also need to save/restore along the way. */
2276 int old_stack_pointer_delta = 0;
2277
2278 rtx call_fusage;
2279 tree addr = CALL_EXPR_FN (exp);
2280 int i;
2281 /* The alignment of the stack, in bits. */
2282 unsigned HOST_WIDE_INT preferred_stack_boundary;
2283 /* The alignment of the stack, in bytes. */
2284 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2285 /* The static chain value to use for this call. */
2286 rtx static_chain_value;
2287 /* See if this is a "nothrow" function call. */
2288 if (TREE_NOTHROW (exp))
2289 flags |= ECF_NOTHROW;
2290
2291 /* See if we can find a DECL-node for the actual function, and get the
2292 function attributes (flags) from the function decl or type node. */
2293 fndecl = get_callee_fndecl (exp);
2294 if (fndecl)
2295 {
2296 fntype = TREE_TYPE (fndecl);
2297 flags |= flags_from_decl_or_type (fndecl);
2298 }
2299 else
2300 {
2301 fntype = TREE_TYPE (TREE_TYPE (addr));
2302 flags |= flags_from_decl_or_type (fntype);
2303 }
2304 rettype = TREE_TYPE (exp);
2305
2306 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2307
2308 /* Warn if this value is an aggregate type,
2309 regardless of which calling convention we are using for it. */
2310 if (AGGREGATE_TYPE_P (rettype))
2311 warning (OPT_Waggregate_return, "function call has aggregate value");
2312
2313 /* If the result of a non-looping pure or const function call is
2314 ignored (or void), and none of its arguments are volatile, we can
2315 avoid expanding the call and just evaluate the arguments for
2316 side-effects. */
2317 if ((flags & (ECF_CONST | ECF_PURE))
2318 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2319 && (ignore || target == const0_rtx
2320 || TYPE_MODE (rettype) == VOIDmode))
2321 {
2322 bool volatilep = false;
2323 tree arg;
2324 call_expr_arg_iterator iter;
2325
2326 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2327 if (TREE_THIS_VOLATILE (arg))
2328 {
2329 volatilep = true;
2330 break;
2331 }
2332
2333 if (! volatilep)
2334 {
2335 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2336 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2337 return const0_rtx;
2338 }
2339 }
2340
2341 #ifdef REG_PARM_STACK_SPACE
2342 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2343 #endif
2344
2345 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2346 && reg_parm_stack_space > 0 && PUSH_ARGS)
2347 must_preallocate = 1;
2348
2349 /* Set up a place to return a structure. */
2350
2351 /* Cater to broken compilers. */
2352 if (aggregate_value_p (exp, fntype))
2353 {
2354 /* This call returns a big structure. */
2355 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2356
2357 #ifdef PCC_STATIC_STRUCT_RETURN
2358 {
2359 pcc_struct_value = 1;
2360 }
2361 #else /* not PCC_STATIC_STRUCT_RETURN */
2362 {
2363 struct_value_size = int_size_in_bytes (rettype);
2364
2365 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2366 structure_value_addr = XEXP (target, 0);
2367 else
2368 {
2369 /* For variable-sized objects, we must be called with a target
2370 specified. If we were to allocate space on the stack here,
2371 we would have no way of knowing when to free it. */
2372 rtx d = assign_temp (rettype, 0, 1, 1);
2373
2374 mark_temp_addr_taken (d);
2375 structure_value_addr = XEXP (d, 0);
2376 target = 0;
2377 }
2378 }
2379 #endif /* not PCC_STATIC_STRUCT_RETURN */
2380 }
2381
2382 /* Figure out the amount to which the stack should be aligned. */
2383 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2384 if (fndecl)
2385 {
2386 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2387 /* Without automatic stack alignment, we can't increase preferred
2388 stack boundary. With automatic stack alignment, it is
2389 unnecessary, since unless we can guarantee that all callers will
2390 align the outgoing stack properly, the callee has to align its
2391 stack anyway. */
2392 if (i
2393 && i->preferred_incoming_stack_boundary
2394 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2395 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2396 }
2397
2398 /* Operand 0 is a pointer-to-function; get the type of the function. */
2399 funtype = TREE_TYPE (addr);
2400 gcc_assert (POINTER_TYPE_P (funtype));
2401 funtype = TREE_TYPE (funtype);
2402
2403 /* Count whether there are actual complex arguments that need to be split
2404 into their real and imaginary parts. Munge the type_arg_types
2405 appropriately here as well. */
2406 if (targetm.calls.split_complex_arg)
2407 {
2408 call_expr_arg_iterator iter;
2409 tree arg;
2410 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2411 {
2412 tree type = TREE_TYPE (arg);
2413 if (type && TREE_CODE (type) == COMPLEX_TYPE
2414 && targetm.calls.split_complex_arg (type))
2415 num_complex_actuals++;
2416 }
2417 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2418 }
2419 else
2420 type_arg_types = TYPE_ARG_TYPES (funtype);
2421
2422 if (flags & ECF_MAY_BE_ALLOCA)
2423 cfun->calls_alloca = 1;
2424
2425 /* If struct_value_rtx is 0, it means pass the address
2426 as if it were an extra parameter. Put the argument expression
2427 in structure_value_addr_value. */
2428 if (structure_value_addr && struct_value == 0)
2429 {
2430 /* If structure_value_addr is a REG other than
2431 virtual_outgoing_args_rtx, we can always use it. If it
2432 is not a REG, we must always copy it into a register.
2433 If it is virtual_outgoing_args_rtx, we must copy it to another
2434 register in some cases. */
2435 rtx temp = (!REG_P (structure_value_addr)
2436 || (ACCUMULATE_OUTGOING_ARGS
2437 && stack_arg_under_construction
2438 && structure_value_addr == virtual_outgoing_args_rtx)
2439 ? copy_addr_to_reg (convert_memory_address
2440 (Pmode, structure_value_addr))
2441 : structure_value_addr);
2442
2443 structure_value_addr_value =
2444 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2445 structure_value_addr_parm = 1;
2446 }
2447
2448 /* Count the arguments and set NUM_ACTUALS. */
2449 num_actuals =
2450 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2451
2452 /* Compute number of named args.
2453 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2454
2455 if (type_arg_types != 0)
2456 n_named_args
2457 = (list_length (type_arg_types)
2458 /* Count the struct value address, if it is passed as a parm. */
2459 + structure_value_addr_parm);
2460 else
2461 /* If we know nothing, treat all args as named. */
2462 n_named_args = num_actuals;
2463
2464 /* Start updating where the next arg would go.
2465
2466 On some machines (such as the PA) indirect calls have a different
2467 calling convention than normal calls. The fourth argument in
2468 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2469 or not. */
2470 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
2471 args_so_far = pack_cumulative_args (&args_so_far_v);
2472
2473 /* Now possibly adjust the number of named args.
2474 Normally, don't include the last named arg if anonymous args follow.
2475 We do include the last named arg if
2476 targetm.calls.strict_argument_naming() returns nonzero.
2477 (If no anonymous args follow, the result of list_length is actually
2478 one too large. This is harmless.)
2479
2480 If targetm.calls.pretend_outgoing_varargs_named() returns
2481 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2482 this machine will be able to place unnamed args that were passed
2483 in registers into the stack. So treat all args as named. This
2484 allows the insns emitted for a specific argument list to be
2485 independent of the function declaration.
2486
2487 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2488 we do not have any reliable way to pass unnamed args in
2489 registers, so we must force them into memory. */
2490
2491 if (type_arg_types != 0
2492 && targetm.calls.strict_argument_naming (args_so_far))
2493 ;
2494 else if (type_arg_types != 0
2495 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
2496 /* Don't include the last named arg. */
2497 --n_named_args;
2498 else
2499 /* Treat all args as named. */
2500 n_named_args = num_actuals;
2501
2502 /* Make a vector to hold all the information about each arg. */
2503 args = XALLOCAVEC (struct arg_data, num_actuals);
2504 memset (args, 0, num_actuals * sizeof (struct arg_data));
2505
2506 /* Build up entries in the ARGS array, compute the size of the
2507 arguments into ARGS_SIZE, etc. */
2508 initialize_argument_information (num_actuals, args, &args_size,
2509 n_named_args, exp,
2510 structure_value_addr_value, fndecl, fntype,
2511 args_so_far, reg_parm_stack_space,
2512 &old_stack_level, &old_pending_adj,
2513 &must_preallocate, &flags,
2514 &try_tail_call, CALL_FROM_THUNK_P (exp));
2515
2516 if (args_size.var)
2517 must_preallocate = 1;
2518
2519 /* Now make final decision about preallocating stack space. */
2520 must_preallocate = finalize_must_preallocate (must_preallocate,
2521 num_actuals, args,
2522 &args_size);
2523
2524 /* If the structure value address will reference the stack pointer, we
2525 must stabilize it. We don't need to do this if we know that we are
2526 not going to adjust the stack pointer in processing this call. */
2527
2528 if (structure_value_addr
2529 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2530 || reg_mentioned_p (virtual_outgoing_args_rtx,
2531 structure_value_addr))
2532 && (args_size.var
2533 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2534 structure_value_addr = copy_to_reg (structure_value_addr);
2535
2536 /* Tail calls can make things harder to debug, and we've traditionally
2537 pushed these optimizations into -O2. Don't try if we're already
2538 expanding a call, as that means we're an argument. Don't try if
2539 there are cleanups, as we know there's code to follow the call. */
2540
2541 if (currently_expanding_call++ != 0
2542 || !flag_optimize_sibling_calls
2543 || args_size.var
2544 || dbg_cnt (tail_call) == false)
2545 try_tail_call = 0;
2546
2547 /* Other reasons for tail call optimization to fail. */
2548 if (
2549 #ifdef HAVE_sibcall_epilogue
2550 !HAVE_sibcall_epilogue
2551 #else
2552 1
2553 #endif
2554 || !try_tail_call
2555 /* Doing sibling call optimization needs some work, since
2556 structure_value_addr can be allocated on the stack.
2557 It does not seem worth the effort since few optimizable
2558 sibling calls will return a structure. */
2559 || structure_value_addr != NULL_RTX
2560 #ifdef REG_PARM_STACK_SPACE
2561 /* If outgoing reg parm stack space changes, we can not do sibcall. */
2562 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2563 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2564 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
2565 #endif
2566 /* Check whether the target is able to optimize the call
2567 into a sibcall. */
2568 || !targetm.function_ok_for_sibcall (fndecl, exp)
2569 /* Functions that do not return exactly once may not be sibcall
2570 optimized. */
2571 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2572 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2573 /* If the called function is nested in the current one, it might access
2574 some of the caller's arguments, but could clobber them beforehand if
2575 the argument areas are shared. */
2576 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2577 /* If this function requires more stack slots than the current
2578 function, we cannot change it into a sibling call.
2579 crtl->args.pretend_args_size is not part of the
2580 stack allocated by our caller. */
2581 || args_size.constant > (crtl->args.size
2582 - crtl->args.pretend_args_size)
2583 /* If the callee pops its own arguments, then it must pop exactly
2584 the same number of arguments as the current function. */
2585 || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2586 != targetm.calls.return_pops_args (current_function_decl,
2587 TREE_TYPE (current_function_decl),
2588 crtl->args.size))
2589 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2590 try_tail_call = 0;
2591
2592 /* Check if caller and callee disagree in promotion of function
2593 return value. */
2594 if (try_tail_call)
2595 {
2596 enum machine_mode caller_mode, caller_promoted_mode;
2597 enum machine_mode callee_mode, callee_promoted_mode;
2598 int caller_unsignedp, callee_unsignedp;
2599 tree caller_res = DECL_RESULT (current_function_decl);
2600
2601 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2602 caller_mode = DECL_MODE (caller_res);
2603 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2604 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2605 caller_promoted_mode
2606 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2607 &caller_unsignedp,
2608 TREE_TYPE (current_function_decl), 1);
2609 callee_promoted_mode
2610 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
2611 &callee_unsignedp,
2612 funtype, 1);
2613 if (caller_mode != VOIDmode
2614 && (caller_promoted_mode != callee_promoted_mode
2615 || ((caller_mode != caller_promoted_mode
2616 || callee_mode != callee_promoted_mode)
2617 && (caller_unsignedp != callee_unsignedp
2618 || GET_MODE_BITSIZE (caller_mode)
2619 < GET_MODE_BITSIZE (callee_mode)))))
2620 try_tail_call = 0;
2621 }
2622
2623 /* Ensure current function's preferred stack boundary is at least
2624 what we need. Stack alignment may also increase preferred stack
2625 boundary. */
2626 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
2627 crtl->preferred_stack_boundary = preferred_stack_boundary;
2628 else
2629 preferred_stack_boundary = crtl->preferred_stack_boundary;
2630
2631 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2632
2633 /* We want to make two insn chains; one for a sibling call, the other
2634 for a normal call. We will select one of the two chains after
2635 initial RTL generation is complete. */
2636 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2637 {
2638 int sibcall_failure = 0;
2639 /* We want to emit any pending stack adjustments before the tail
2640 recursion "call". That way we know any adjustment after the tail
2641 recursion call can be ignored if we indeed use the tail
2642 call expansion. */
2643 int save_pending_stack_adjust = 0;
2644 int save_stack_pointer_delta = 0;
2645 rtx insns;
2646 rtx before_call, next_arg_reg, after_args;
2647
2648 if (pass == 0)
2649 {
2650 /* State variables we need to save and restore between
2651 iterations. */
2652 save_pending_stack_adjust = pending_stack_adjust;
2653 save_stack_pointer_delta = stack_pointer_delta;
2654 }
2655 if (pass)
2656 flags &= ~ECF_SIBCALL;
2657 else
2658 flags |= ECF_SIBCALL;
2659
2660 /* Other state variables that we must reinitialize each time
2661 through the loop (that are not initialized by the loop itself). */
2662 argblock = 0;
2663 call_fusage = 0;
2664
2665 /* Start a new sequence for the normal call case.
2666
2667 From this point on, if the sibling call fails, we want to set
2668 sibcall_failure instead of continuing the loop. */
2669 start_sequence ();
2670
2671 /* Don't let pending stack adjusts add up to too much.
2672 Also, do all pending adjustments now if there is any chance
2673 this might be a call to alloca or if we are expanding a sibling
2674 call sequence.
2675 Also do the adjustments before a throwing call, otherwise
2676 exception handling can fail; PR 19225. */
2677 if (pending_stack_adjust >= 32
2678 || (pending_stack_adjust > 0
2679 && (flags & ECF_MAY_BE_ALLOCA))
2680 || (pending_stack_adjust > 0
2681 && flag_exceptions && !(flags & ECF_NOTHROW))
2682 || pass == 0)
2683 do_pending_stack_adjust ();
2684
2685 /* Precompute any arguments as needed. */
2686 if (pass)
2687 precompute_arguments (num_actuals, args);
2688
2689 /* Now we are about to start emitting insns that can be deleted
2690 if a libcall is deleted. */
2691 if (pass && (flags & ECF_MALLOC))
2692 start_sequence ();
2693
2694 if (pass == 0 && crtl->stack_protect_guard)
2695 stack_protect_epilogue ();
2696
2697 adjusted_args_size = args_size;
2698 /* Compute the actual size of the argument block required. The variable
2699 and constant sizes must be combined, the size may have to be rounded,
2700 and there may be a minimum required size. When generating a sibcall
2701 pattern, do not round up, since we'll be re-using whatever space our
2702 caller provided. */
2703 unadjusted_args_size
2704 = compute_argument_block_size (reg_parm_stack_space,
2705 &adjusted_args_size,
2706 fndecl, fntype,
2707 (pass == 0 ? 0
2708 : preferred_stack_boundary));
2709
2710 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2711
2712 /* The argument block when performing a sibling call is the
2713 incoming argument block. */
2714 if (pass == 0)
2715 {
2716 argblock = crtl->args.internal_arg_pointer;
2717 argblock
2718 #ifdef STACK_GROWS_DOWNWARD
2719 = plus_constant (argblock, crtl->args.pretend_args_size);
2720 #else
2721 = plus_constant (argblock, -crtl->args.pretend_args_size);
2722 #endif
2723 stored_args_map = sbitmap_alloc (args_size.constant);
2724 sbitmap_zero (stored_args_map);
2725 }
2726
2727 /* If we have no actual push instructions, or shouldn't use them,
2728 make space for all args right now. */
2729 else if (adjusted_args_size.var != 0)
2730 {
2731 if (old_stack_level == 0)
2732 {
2733 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2734 old_stack_pointer_delta = stack_pointer_delta;
2735 old_pending_adj = pending_stack_adjust;
2736 pending_stack_adjust = 0;
2737 /* stack_arg_under_construction says whether a stack arg is
2738 being constructed at the old stack level. Pushing the stack
2739 gets a clean outgoing argument block. */
2740 old_stack_arg_under_construction = stack_arg_under_construction;
2741 stack_arg_under_construction = 0;
2742 }
2743 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2744 if (flag_stack_usage_info)
2745 current_function_has_unbounded_dynamic_stack_size = 1;
2746 }
2747 else
2748 {
2749 /* Note that we must go through the motions of allocating an argument
2750 block even if the size is zero because we may be storing args
2751 in the area reserved for register arguments, which may be part of
2752 the stack frame. */
2753
2754 int needed = adjusted_args_size.constant;
2755
2756 /* Store the maximum argument space used. It will be pushed by
2757 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2758 checking). */
2759
2760 if (needed > crtl->outgoing_args_size)
2761 crtl->outgoing_args_size = needed;
2762
2763 if (must_preallocate)
2764 {
2765 if (ACCUMULATE_OUTGOING_ARGS)
2766 {
2767 /* Since the stack pointer will never be pushed, it is
2768 possible for the evaluation of a parm to clobber
2769 something we have already written to the stack.
2770 Since most function calls on RISC machines do not use
2771 the stack, this is uncommon, but must work correctly.
2772
2773 Therefore, we save any area of the stack that was already
2774 written and that we are using. Here we set up to do this
2775 by making a new stack usage map from the old one. The
2776 actual save will be done by store_one_arg.
2777
2778 Another approach might be to try to reorder the argument
2779 evaluations to avoid this conflicting stack usage. */
2780
2781 /* Since we will be writing into the entire argument area,
2782 the map must be allocated for its entire size, not just
2783 the part that is the responsibility of the caller. */
2784 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2785 needed += reg_parm_stack_space;
2786
2787 #ifdef ARGS_GROW_DOWNWARD
2788 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2789 needed + 1);
2790 #else
2791 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2792 needed);
2793 #endif
2794 free (stack_usage_map_buf);
2795 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2796 stack_usage_map = stack_usage_map_buf;
2797
2798 if (initial_highest_arg_in_use)
2799 memcpy (stack_usage_map, initial_stack_usage_map,
2800 initial_highest_arg_in_use);
2801
2802 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2803 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2804 (highest_outgoing_arg_in_use
2805 - initial_highest_arg_in_use));
2806 needed = 0;
2807
2808 /* The address of the outgoing argument list must not be
2809 copied to a register here, because argblock would be left
2810 pointing to the wrong place after the call to
2811 allocate_dynamic_stack_space below. */
2812
2813 argblock = virtual_outgoing_args_rtx;
2814 }
2815 else
2816 {
2817 if (inhibit_defer_pop == 0)
2818 {
2819 /* Try to reuse some or all of the pending_stack_adjust
2820 to get this space. */
2821 needed
2822 = (combine_pending_stack_adjustment_and_call
2823 (unadjusted_args_size,
2824 &adjusted_args_size,
2825 preferred_unit_stack_boundary));
2826
2827 /* combine_pending_stack_adjustment_and_call computes
2828 an adjustment before the arguments are allocated.
2829 Account for them and see whether or not the stack
2830 needs to go up or down. */
2831 needed = unadjusted_args_size - needed;
2832
2833 if (needed < 0)
2834 {
2835 /* We're releasing stack space. */
2836 /* ??? We can avoid any adjustment at all if we're
2837 already aligned. FIXME. */
2838 pending_stack_adjust = -needed;
2839 do_pending_stack_adjust ();
2840 needed = 0;
2841 }
2842 else
2843 /* We need to allocate space. We'll do that in
2844 push_block below. */
2845 pending_stack_adjust = 0;
2846 }
2847
2848 /* Special case this because overhead of `push_block' in
2849 this case is non-trivial. */
2850 if (needed == 0)
2851 argblock = virtual_outgoing_args_rtx;
2852 else
2853 {
2854 argblock = push_block (GEN_INT (needed), 0, 0);
2855 #ifdef ARGS_GROW_DOWNWARD
2856 argblock = plus_constant (argblock, needed);
2857 #endif
2858 }
2859
2860 /* We only really need to call `copy_to_reg' in the case
2861 where push insns are going to be used to pass ARGBLOCK
2862 to a function call in ARGS. In that case, the stack
2863 pointer changes value from the allocation point to the
2864 call point, and hence the value of
2865 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2866 as well always do it. */
2867 argblock = copy_to_reg (argblock);
2868 }
2869 }
2870 }
2871
2872 if (ACCUMULATE_OUTGOING_ARGS)
2873 {
2874 /* The save/restore code in store_one_arg handles all
2875 cases except one: a constructor call (including a C
2876 function returning a BLKmode struct) to initialize
2877 an argument. */
2878 if (stack_arg_under_construction)
2879 {
2880 rtx push_size
2881 = GEN_INT (adjusted_args_size.constant
2882 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
2883 : TREE_TYPE (fndecl))) ? 0
2884 : reg_parm_stack_space));
2885 if (old_stack_level == 0)
2886 {
2887 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2888 old_stack_pointer_delta = stack_pointer_delta;
2889 old_pending_adj = pending_stack_adjust;
2890 pending_stack_adjust = 0;
2891 /* stack_arg_under_construction says whether a stack
2892 arg is being constructed at the old stack level.
2893 Pushing the stack gets a clean outgoing argument
2894 block. */
2895 old_stack_arg_under_construction
2896 = stack_arg_under_construction;
2897 stack_arg_under_construction = 0;
2898 /* Make a new map for the new argument list. */
2899 free (stack_usage_map_buf);
2900 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2901 stack_usage_map = stack_usage_map_buf;
2902 highest_outgoing_arg_in_use = 0;
2903 }
2904 /* We can pass TRUE as the 4th argument because we just
2905 saved the stack pointer and will restore it right after
2906 the call. */
2907 allocate_dynamic_stack_space (push_size, 0,
2908 BIGGEST_ALIGNMENT, true);
2909 }
2910
2911 /* If argument evaluation might modify the stack pointer,
2912 copy the address of the argument list to a register. */
2913 for (i = 0; i < num_actuals; i++)
2914 if (args[i].pass_on_stack)
2915 {
2916 argblock = copy_addr_to_reg (argblock);
2917 break;
2918 }
2919 }
2920
2921 compute_argument_addresses (args, argblock, num_actuals);
2922
2923 /* If we push args individually in reverse order, perform stack alignment
2924 before the first push (the last arg). */
2925 if (PUSH_ARGS_REVERSED && argblock == 0
2926 && adjusted_args_size.constant != unadjusted_args_size)
2927 {
2928 /* When the stack adjustment is pending, we get better code
2929 by combining the adjustments. */
2930 if (pending_stack_adjust
2931 && ! inhibit_defer_pop)
2932 {
2933 pending_stack_adjust
2934 = (combine_pending_stack_adjustment_and_call
2935 (unadjusted_args_size,
2936 &adjusted_args_size,
2937 preferred_unit_stack_boundary));
2938 do_pending_stack_adjust ();
2939 }
2940 else if (argblock == 0)
2941 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2942 - unadjusted_args_size));
2943 }
2944 /* Now that the stack is properly aligned, pops can't safely
2945 be deferred during the evaluation of the arguments. */
2946 NO_DEFER_POP;
2947
2948 /* Record the maximum pushed stack space size. We need to delay
2949 doing it this far to take into account the optimization done
2950 by combine_pending_stack_adjustment_and_call. */
2951 if (flag_stack_usage_info
2952 && !ACCUMULATE_OUTGOING_ARGS
2953 && pass
2954 && adjusted_args_size.var == 0)
2955 {
2956 int pushed = adjusted_args_size.constant + pending_stack_adjust;
2957 if (pushed > current_function_pushed_stack_size)
2958 current_function_pushed_stack_size = pushed;
2959 }
2960
2961 funexp = rtx_for_function_call (fndecl, addr);
2962
2963 /* Figure out the register where the value, if any, will come back. */
2964 valreg = 0;
2965 if (TYPE_MODE (rettype) != VOIDmode
2966 && ! structure_value_addr)
2967 {
2968 if (pcc_struct_value)
2969 valreg = hard_function_value (build_pointer_type (rettype),
2970 fndecl, NULL, (pass == 0));
2971 else
2972 valreg = hard_function_value (rettype, fndecl, fntype,
2973 (pass == 0));
2974
2975 /* If VALREG is a PARALLEL whose first member has a zero
2976 offset, use that. This is for targets such as m68k that
2977 return the same value in multiple places. */
2978 if (GET_CODE (valreg) == PARALLEL)
2979 {
2980 rtx elem = XVECEXP (valreg, 0, 0);
2981 rtx where = XEXP (elem, 0);
2982 rtx offset = XEXP (elem, 1);
2983 if (offset == const0_rtx
2984 && GET_MODE (where) == GET_MODE (valreg))
2985 valreg = where;
2986 }
2987 }
2988
2989 /* Precompute all register parameters. It isn't safe to compute anything
2990 once we have started filling any specific hard regs. */
2991 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2992
2993 if (CALL_EXPR_STATIC_CHAIN (exp))
2994 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2995 else
2996 static_chain_value = 0;
2997
2998 #ifdef REG_PARM_STACK_SPACE
2999 /* Save the fixed argument area if it's part of the caller's frame and
3000 is clobbered by argument setup for this call. */
3001 if (ACCUMULATE_OUTGOING_ARGS && pass)
3002 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3003 &low_to_save, &high_to_save);
3004 #endif
3005
3006 /* Now store (and compute if necessary) all non-register parms.
3007 These come before register parms, since they can require block-moves,
3008 which could clobber the registers used for register parms.
3009 Parms which have partial registers are not stored here,
3010 but we do preallocate space here if they want that. */
3011
3012 for (i = 0; i < num_actuals; i++)
3013 {
3014 if (args[i].reg == 0 || args[i].pass_on_stack)
3015 {
3016 rtx before_arg = get_last_insn ();
3017
3018 if (store_one_arg (&args[i], argblock, flags,
3019 adjusted_args_size.var != 0,
3020 reg_parm_stack_space)
3021 || (pass == 0
3022 && check_sibcall_argument_overlap (before_arg,
3023 &args[i], 1)))
3024 sibcall_failure = 1;
3025 }
3026
3027 if (args[i].stack)
3028 call_fusage
3029 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3030 gen_rtx_USE (VOIDmode, args[i].stack),
3031 call_fusage);
3032 }
3033
3034 /* If we have a parm that is passed in registers but not in memory
3035 and whose alignment does not permit a direct copy into registers,
3036 make a group of pseudos that correspond to each register that we
3037 will later fill. */
3038 if (STRICT_ALIGNMENT)
3039 store_unaligned_arguments_into_pseudos (args, num_actuals);
3040
3041 /* Now store any partially-in-registers parm.
3042 This is the last place a block-move can happen. */
3043 if (reg_parm_seen)
3044 for (i = 0; i < num_actuals; i++)
3045 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3046 {
3047 rtx before_arg = get_last_insn ();
3048
3049 if (store_one_arg (&args[i], argblock, flags,
3050 adjusted_args_size.var != 0,
3051 reg_parm_stack_space)
3052 || (pass == 0
3053 && check_sibcall_argument_overlap (before_arg,
3054 &args[i], 1)))
3055 sibcall_failure = 1;
3056 }
3057
3058 /* If we pushed args in forward order, perform stack alignment
3059 after pushing the last arg. */
3060 if (!PUSH_ARGS_REVERSED && argblock == 0)
3061 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3062 - unadjusted_args_size));
3063
3064 /* If register arguments require space on the stack and stack space
3065 was not preallocated, allocate stack space here for arguments
3066 passed in registers. */
3067 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3068 && !ACCUMULATE_OUTGOING_ARGS
3069 && must_preallocate == 0 && reg_parm_stack_space > 0)
3070 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3071
3072 /* Pass the function the address in which to return a
3073 structure value. */
3074 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3075 {
3076 structure_value_addr
3077 = convert_memory_address (Pmode, structure_value_addr);
3078 emit_move_insn (struct_value,
3079 force_reg (Pmode,
3080 force_operand (structure_value_addr,
3081 NULL_RTX)));
3082
3083 if (REG_P (struct_value))
3084 use_reg (&call_fusage, struct_value);
3085 }
3086
3087 after_args = get_last_insn ();
3088 funexp = prepare_call_address (fndecl, funexp, static_chain_value,
3089 &call_fusage, reg_parm_seen, pass == 0);
3090
3091 load_register_parameters (args, num_actuals, &call_fusage, flags,
3092 pass == 0, &sibcall_failure);
3093
3094 /* Save a pointer to the last insn before the call, so that we can
3095 later safely search backwards to find the CALL_INSN. */
3096 before_call = get_last_insn ();
3097
3098 /* Set up next argument register. For sibling calls on machines
3099 with register windows this should be the incoming register. */
3100 if (pass == 0)
3101 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
3102 VOIDmode,
3103 void_type_node,
3104 true);
3105 else
3106 next_arg_reg = targetm.calls.function_arg (args_so_far,
3107 VOIDmode, void_type_node,
3108 true);
3109
3110 /* All arguments and registers used for the call must be set up by
3111 now! */
3112
3113 /* Stack must be properly aligned now. */
3114 gcc_assert (!pass
3115 || !(stack_pointer_delta % preferred_unit_stack_boundary));
3116
3117 /* Generate the actual call instruction. */
3118 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
3119 adjusted_args_size.constant, struct_value_size,
3120 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3121 flags, args_so_far);
3122
3123 /* If the call setup or the call itself overlaps with anything
3124 of the argument setup we probably clobbered our call address.
3125 In that case we can't do sibcalls. */
3126 if (pass == 0
3127 && check_sibcall_argument_overlap (after_args, 0, 0))
3128 sibcall_failure = 1;
3129
3130 /* If a non-BLKmode value is returned at the most significant end
3131 of a register, shift the register right by the appropriate amount
3132 and update VALREG accordingly. BLKmode values are handled by the
3133 group load/store machinery below. */
3134 if (!structure_value_addr
3135 && !pcc_struct_value
3136 && TYPE_MODE (rettype) != BLKmode
3137 && targetm.calls.return_in_msb (rettype))
3138 {
3139 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
3140 sibcall_failure = 1;
3141 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
3142 }
3143
3144 if (pass && (flags & ECF_MALLOC))
3145 {
3146 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3147 rtx last, insns;
3148
3149 /* The return value from a malloc-like function is a pointer. */
3150 if (TREE_CODE (rettype) == POINTER_TYPE)
3151 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3152
3153 emit_move_insn (temp, valreg);
3154
3155 /* The return value from a malloc-like function can not alias
3156 anything else. */
3157 last = get_last_insn ();
3158 add_reg_note (last, REG_NOALIAS, temp);
3159
3160 /* Write out the sequence. */
3161 insns = get_insns ();
3162 end_sequence ();
3163 emit_insn (insns);
3164 valreg = temp;
3165 }
3166
3167 /* For calls to `setjmp', etc., inform
3168 function.c:setjmp_warnings that it should complain if
3169 nonvolatile values are live. For functions that cannot
3170 return, inform flow that control does not fall through. */
3171
3172 if ((flags & ECF_NORETURN) || pass == 0)
3173 {
3174 /* The barrier must be emitted
3175 immediately after the CALL_INSN. Some ports emit more
3176 than just a CALL_INSN above, so we must search for it here. */
3177
3178 rtx last = get_last_insn ();
3179 while (!CALL_P (last))
3180 {
3181 last = PREV_INSN (last);
3182 /* There was no CALL_INSN? */
3183 gcc_assert (last != before_call);
3184 }
3185
3186 emit_barrier_after (last);
3187
3188 /* Stack adjustments after a noreturn call are dead code.
3189 However when NO_DEFER_POP is in effect, we must preserve
3190 stack_pointer_delta. */
3191 if (inhibit_defer_pop == 0)
3192 {
3193 stack_pointer_delta = old_stack_allocated;
3194 pending_stack_adjust = 0;
3195 }
3196 }
3197
3198 /* If value type not void, return an rtx for the value. */
3199
3200 if (TYPE_MODE (rettype) == VOIDmode
3201 || ignore)
3202 target = const0_rtx;
3203 else if (structure_value_addr)
3204 {
3205 if (target == 0 || !MEM_P (target))
3206 {
3207 target
3208 = gen_rtx_MEM (TYPE_MODE (rettype),
3209 memory_address (TYPE_MODE (rettype),
3210 structure_value_addr));
3211 set_mem_attributes (target, rettype, 1);
3212 }
3213 }
3214 else if (pcc_struct_value)
3215 {
3216 /* This is the special C++ case where we need to
3217 know what the true target was. We take care to
3218 never use this value more than once in one expression. */
3219 target = gen_rtx_MEM (TYPE_MODE (rettype),
3220 copy_to_reg (valreg));
3221 set_mem_attributes (target, rettype, 1);
3222 }
3223 /* Handle calls that return values in multiple non-contiguous locations.
3224 The Irix 6 ABI has examples of this. */
3225 else if (GET_CODE (valreg) == PARALLEL)
3226 {
3227 if (target == 0)
3228 {
3229 /* This will only be assigned once, so it can be readonly. */
3230 tree nt = build_qualified_type (rettype,
3231 (TYPE_QUALS (rettype)
3232 | TYPE_QUAL_CONST));
3233
3234 target = assign_temp (nt, 0, 1, 1);
3235 }
3236
3237 if (! rtx_equal_p (target, valreg))
3238 emit_group_store (target, valreg, rettype,
3239 int_size_in_bytes (rettype));
3240
3241 /* We can not support sibling calls for this case. */
3242 sibcall_failure = 1;
3243 }
3244 else if (target
3245 && GET_MODE (target) == TYPE_MODE (rettype)
3246 && GET_MODE (target) == GET_MODE (valreg))
3247 {
3248 bool may_overlap = false;
3249
3250 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3251 reg to a plain register. */
3252 if (!REG_P (target) || HARD_REGISTER_P (target))
3253 valreg = avoid_likely_spilled_reg (valreg);
3254
3255 /* If TARGET is a MEM in the argument area, and we have
3256 saved part of the argument area, then we can't store
3257 directly into TARGET as it may get overwritten when we
3258 restore the argument save area below. Don't work too
3259 hard though and simply force TARGET to a register if it
3260 is a MEM; the optimizer is quite likely to sort it out. */
3261 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3262 for (i = 0; i < num_actuals; i++)
3263 if (args[i].save_area)
3264 {
3265 may_overlap = true;
3266 break;
3267 }
3268
3269 if (may_overlap)
3270 target = copy_to_reg (valreg);
3271 else
3272 {
3273 /* TARGET and VALREG cannot be equal at this point
3274 because the latter would not have
3275 REG_FUNCTION_VALUE_P true, while the former would if
3276 it were referring to the same register.
3277
3278 If they refer to the same register, this move will be
3279 a no-op, except when function inlining is being
3280 done. */
3281 emit_move_insn (target, valreg);
3282
3283 /* If we are setting a MEM, this code must be executed.
3284 Since it is emitted after the call insn, sibcall
3285 optimization cannot be performed in that case. */
3286 if (MEM_P (target))
3287 sibcall_failure = 1;
3288 }
3289 }
3290 else if (TYPE_MODE (rettype) == BLKmode)
3291 {
3292 rtx val = valreg;
3293 if (GET_MODE (val) != BLKmode)
3294 val = avoid_likely_spilled_reg (val);
3295 target = copy_blkmode_from_reg (target, val, rettype);
3296
3297 /* We cannot support sibling calls for this case. */
3298 sibcall_failure = 1;
3299 }
3300 else
3301 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3302
3303 /* If we promoted this return value, make the proper SUBREG.
3304 TARGET might be const0_rtx here, so be careful. */
3305 if (REG_P (target)
3306 && TYPE_MODE (rettype) != BLKmode
3307 && GET_MODE (target) != TYPE_MODE (rettype))
3308 {
3309 tree type = rettype;
3310 int unsignedp = TYPE_UNSIGNED (type);
3311 int offset = 0;
3312 enum machine_mode pmode;
3313
3314 /* Ensure we promote as expected, and get the new unsignedness. */
3315 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3316 funtype, 1);
3317 gcc_assert (GET_MODE (target) == pmode);
3318
3319 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3320 && (GET_MODE_SIZE (GET_MODE (target))
3321 > GET_MODE_SIZE (TYPE_MODE (type))))
3322 {
3323 offset = GET_MODE_SIZE (GET_MODE (target))
3324 - GET_MODE_SIZE (TYPE_MODE (type));
3325 if (! BYTES_BIG_ENDIAN)
3326 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3327 else if (! WORDS_BIG_ENDIAN)
3328 offset %= UNITS_PER_WORD;
3329 }
3330
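/* Worked example (illustrative only; UNITS_PER_WORD == 4 assumed): on a
   target where both WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN hold, a QImode
   value promoted to an SImode register lives in the most significant
   byte, so the offset computed above is 4 - 1 = 3, which makes the
   SUBREG below select the low-order byte of the promoted register. */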
3331 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3332 SUBREG_PROMOTED_VAR_P (target) = 1;
3333 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3334 }
3335
3336 /* If size of args is variable or this was a constructor call for a stack
3337 argument, restore saved stack-pointer value. */
3338
3339 if (old_stack_level)
3340 {
3341 rtx prev = get_last_insn ();
3342
3343 emit_stack_restore (SAVE_BLOCK, old_stack_level);
3344 stack_pointer_delta = old_stack_pointer_delta;
3345
3346 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
3347
3348 pending_stack_adjust = old_pending_adj;
3349 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3350 stack_arg_under_construction = old_stack_arg_under_construction;
3351 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3352 stack_usage_map = initial_stack_usage_map;
3353 sibcall_failure = 1;
3354 }
3355 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3356 {
3357 #ifdef REG_PARM_STACK_SPACE
3358 if (save_area)
3359 restore_fixed_argument_area (save_area, argblock,
3360 high_to_save, low_to_save);
3361 #endif
3362
3363 /* If we saved any argument areas, restore them. */
3364 for (i = 0; i < num_actuals; i++)
3365 if (args[i].save_area)
3366 {
3367 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3368 rtx stack_area
3369 = gen_rtx_MEM (save_mode,
3370 memory_address (save_mode,
3371 XEXP (args[i].stack_slot, 0)));
3372
3373 if (save_mode != BLKmode)
3374 emit_move_insn (stack_area, args[i].save_area);
3375 else
3376 emit_block_move (stack_area, args[i].save_area,
3377 GEN_INT (args[i].locate.size.constant),
3378 BLOCK_OP_CALL_PARM);
3379 }
3380
3381 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3382 stack_usage_map = initial_stack_usage_map;
3383 }
3384
3385 /* If this was alloca, record the new stack level for nonlocal gotos.
3386 Check for the handler slots since we might not have a save area
3387 for non-local gotos. */
3388
3389 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3390 update_nonlocal_goto_save_area ();
3391
3392 /* Free up storage we no longer need. */
3393 for (i = 0; i < num_actuals; ++i)
3394 free (args[i].aligned_regs);
3395
3396 insns = get_insns ();
3397 end_sequence ();
3398
3399 if (pass == 0)
3400 {
3401 tail_call_insns = insns;
3402
3403 /* Restore the pending stack adjustment now that we have
3404 finished generating the sibling call sequence. */
3405
3406 pending_stack_adjust = save_pending_stack_adjust;
3407 stack_pointer_delta = save_stack_pointer_delta;
3408
3409 /* Prepare arg structure for next iteration. */
3410 for (i = 0; i < num_actuals; i++)
3411 {
3412 args[i].value = 0;
3413 args[i].aligned_regs = 0;
3414 args[i].stack = 0;
3415 }
3416
3417 sbitmap_free (stored_args_map);
3418 internal_arg_pointer_exp_state.scan_start = NULL_RTX;
3419 VEC_free (rtx, heap, internal_arg_pointer_exp_state.cache);
3420 }
3421 else
3422 {
3423 normal_call_insns = insns;
3424
3425 /* Verify that we've deallocated all the stack we used. */
3426 gcc_assert ((flags & ECF_NORETURN)
3427 || (old_stack_allocated
3428 == stack_pointer_delta - pending_stack_adjust));
3429 }
3430
3431 /* If something prevents making this a sibling call,
3432 zero out the sequence. */
3433 if (sibcall_failure)
3434 tail_call_insns = NULL_RTX;
3435 else
3436 break;
3437 }
3438
3439 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3440 arguments too, as argument area is now clobbered by the call. */
3441 if (tail_call_insns)
3442 {
3443 emit_insn (tail_call_insns);
3444 crtl->tail_call_emit = true;
3445 }
3446 else
3447 emit_insn (normal_call_insns);
3448
3449 currently_expanding_call--;
3450
3451 free (stack_usage_map_buf);
3452
3453 return target;
3454 }
3455
3456 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3457 this function's incoming arguments.
3458
3459 At the start of RTL generation we know the only REG_EQUIV notes
3460 in the rtl chain are those for incoming arguments, so we can look
3461 for REG_EQUIV notes between the start of the function and the
3462 NOTE_INSN_FUNCTION_BEG.
3463
3464 This is (slight) overkill. We could keep track of the highest
3465 argument we clobber and be more selective in removing notes, but it
3466 does not seem to be worth the effort. */
3467
3468 void
3469 fixup_tail_calls (void)
3470 {
3471 rtx insn;
3472
3473 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3474 {
3475 rtx note;
3476
3477 /* There are never REG_EQUIV notes for the incoming arguments
3478 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3479 if (NOTE_P (insn)
3480 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3481 break;
3482
3483 note = find_reg_note (insn, REG_EQUIV, 0);
3484 if (note)
3485 remove_note (insn, note);
3486 note = find_reg_note (insn, REG_EQUIV, 0);
3487 gcc_assert (!note);
3488 }
3489 }
3490
3491 /* Traverse a list of TYPES and expand all complex types into their
3492 components. */
3493 static tree
3494 split_complex_types (tree types)
3495 {
3496 tree p;
3497
3498 /* Before allocating memory, check for the common case of no complex. */
3499 for (p = types; p; p = TREE_CHAIN (p))
3500 {
3501 tree type = TREE_VALUE (p);
3502 if (TREE_CODE (type) == COMPLEX_TYPE
3503 && targetm.calls.split_complex_arg (type))
3504 goto found;
3505 }
3506 return types;
3507
3508 found:
3509 types = copy_list (types);
3510
3511 for (p = types; p; p = TREE_CHAIN (p))
3512 {
3513 tree complex_type = TREE_VALUE (p);
3514
3515 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3516 && targetm.calls.split_complex_arg (complex_type))
3517 {
3518 tree next, imag;
3519
3520 /* Rewrite complex type with component type. */
3521 TREE_VALUE (p) = TREE_TYPE (complex_type);
3522 next = TREE_CHAIN (p);
3523
3524 /* Add another component type for the imaginary part. */
3525 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3526 TREE_CHAIN (p) = imag;
3527 TREE_CHAIN (imag) = next;
3528
3529 /* Skip the newly created node. */
3530 p = TREE_CHAIN (p);
3531 }
3532 }
3533
3534 return types;
3535 }
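/* Illustration (hypothetical target; assumes split_complex_arg returns
   true for complex double): an argument-type chain of

     complex double, int

   is rewritten by split_complex_types into

     double, double, int

   i.e. the complex entry is replaced by its component type and an extra
   entry for the imaginary part is spliced in immediately after it. */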
3536 \f
3537 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3538 The RETVAL parameter specifies whether the return value needs to be saved;
3539 the other parameters are documented in the emit_library_call function below. */
3540
3541 static rtx
3542 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3543 enum libcall_type fn_type,
3544 enum machine_mode outmode, int nargs, va_list p)
3545 {
3546 /* Total size in bytes of all the stack-parms scanned so far. */
3547 struct args_size args_size;
3548 /* Size of arguments before any adjustments (such as rounding). */
3549 struct args_size original_args_size;
3550 int argnum;
3551 rtx fun;
3552 /* TODO: choose the correct decl type of orgfun. Sadly this information
3553 isn't present here, so we default to the native calling ABI. */
3554 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
3555 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* Library calls default to the host calling ABI? */
3556 int inc;
3557 int count;
3558 rtx argblock = 0;
3559 CUMULATIVE_ARGS args_so_far_v;
3560 cumulative_args_t args_so_far;
3561 struct arg
3562 {
3563 rtx value;
3564 enum machine_mode mode;
3565 rtx reg;
3566 int partial;
3567 struct locate_and_pad_arg_data locate;
3568 rtx save_area;
3569 };
3570 struct arg *argvec;
3571 int old_inhibit_defer_pop = inhibit_defer_pop;
3572 rtx call_fusage = 0;
3573 rtx mem_value = 0;
3574 rtx valreg;
3575 int pcc_struct_value = 0;
3576 int struct_value_size = 0;
3577 int flags;
3578 int reg_parm_stack_space = 0;
3579 int needed;
3580 rtx before_call;
3581 tree tfom; /* type_for_mode (outmode, 0) */
3582
3583 #ifdef REG_PARM_STACK_SPACE
3584 /* Define the boundary of the register parm stack space that needs to be
3585 saved, if any. */
3586 int low_to_save = 0, high_to_save = 0;
3587 rtx save_area = 0; /* Place that it is saved. */
3588 #endif
3589
3590 /* Initial state of the stack usage map, saved so it can be restored after the call. */
3591 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3592 char *initial_stack_usage_map = stack_usage_map;
3593 char *stack_usage_map_buf = NULL;
3594
3595 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3596
3597 #ifdef REG_PARM_STACK_SPACE
3598 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3599 #endif
3600
3601 /* By default, library functions cannot throw. */
3602 flags = ECF_NOTHROW;
3603
3604 switch (fn_type)
3605 {
3606 case LCT_NORMAL:
3607 break;
3608 case LCT_CONST:
3609 flags |= ECF_CONST;
3610 break;
3611 case LCT_PURE:
3612 flags |= ECF_PURE;
3613 break;
3614 case LCT_NORETURN:
3615 flags |= ECF_NORETURN;
3616 break;
3617 case LCT_THROW:
3618 flags = ECF_NORETURN;
3619 break;
3620 case LCT_RETURNS_TWICE:
3621 flags = ECF_RETURNS_TWICE;
3622 break;
3623 }
3624 fun = orgfun;
3625
3626 /* Ensure current function's preferred stack boundary is at least
3627 what we need. */
3628 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3629 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3630
3631 /* If this kind of value comes back in memory,
3632 decide where in memory it should come back. */
3633 if (outmode != VOIDmode)
3634 {
3635 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3636 if (aggregate_value_p (tfom, 0))
3637 {
3638 #ifdef PCC_STATIC_STRUCT_RETURN
3639 rtx pointer_reg
3640 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3641 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3642 pcc_struct_value = 1;
3643 if (value == 0)
3644 value = gen_reg_rtx (outmode);
3645 #else /* not PCC_STATIC_STRUCT_RETURN */
3646 struct_value_size = GET_MODE_SIZE (outmode);
3647 if (value != 0 && MEM_P (value))
3648 mem_value = value;
3649 else
3650 mem_value = assign_temp (tfom, 0, 1, 1);
3651 #endif
3652 /* This call returns a big structure. */
3653 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3654 }
3655 }
3656 else
3657 tfom = void_type_node;
3658
3659 /* ??? Unfinished: must pass the memory address as an argument. */
3660
3661 /* Copy all the libcall-arguments out of the varargs data
3662 and into a vector ARGVEC.
3663
3664 Compute how to pass each argument. We only support a very small subset
3665 of the full argument passing conventions to limit complexity here since
3666 library functions shouldn't have many args. */
3667
3668 argvec = XALLOCAVEC (struct arg, nargs + 1);
3669 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3670
3671 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3672 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
3673 #else
3674 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
3675 #endif
3676 args_so_far = pack_cumulative_args (&args_so_far_v);
3677
3678 args_size.constant = 0;
3679 args_size.var = 0;
3680
3681 count = 0;
3682
3683 push_temp_slots ();
3684
3685 /* If there's a structure value address to be passed,
3686 either pass it in the special place, or pass it as an extra argument. */
3687 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3688 {
3689 rtx addr = XEXP (mem_value, 0);
3690
3691 nargs++;
3692
3693 /* Make sure it is a reasonable operand for a move or push insn. */
3694 if (!REG_P (addr) && !MEM_P (addr)
3695 && !(CONSTANT_P (addr)
3696 && targetm.legitimate_constant_p (Pmode, addr)))
3697 addr = force_operand (addr, NULL_RTX);
3698
3699 argvec[count].value = addr;
3700 argvec[count].mode = Pmode;
3701 argvec[count].partial = 0;
3702
3703 argvec[count].reg = targetm.calls.function_arg (args_so_far,
3704 Pmode, NULL_TREE, true);
3705 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
3706 NULL_TREE, 1) == 0);
3707
3708 locate_and_pad_parm (Pmode, NULL_TREE,
3709 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3710 1,
3711 #else
3712 argvec[count].reg != 0,
3713 #endif
3714 0, NULL_TREE, &args_size, &argvec[count].locate);
3715
3716 if (argvec[count].reg == 0 || argvec[count].partial != 0
3717 || reg_parm_stack_space > 0)
3718 args_size.constant += argvec[count].locate.size.constant;
3719
3720 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
3721
3722 count++;
3723 }
3724
3725 for (; count < nargs; count++)
3726 {
3727 rtx val = va_arg (p, rtx);
3728 enum machine_mode mode = (enum machine_mode) va_arg (p, int);
3729 int unsigned_p = 0;
3730
3731 /* We cannot convert the arg value to the mode the library wants here;
3732 must do it earlier where we know the signedness of the arg. */
3733 gcc_assert (mode != BLKmode
3734 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3735
3736 /* Make sure it is a reasonable operand for a move or push insn. */
3737 if (!REG_P (val) && !MEM_P (val)
3738 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
3739 val = force_operand (val, NULL_RTX);
3740
3741 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
3742 {
3743 rtx slot;
3744 int must_copy
3745 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
3746
3747 /* If this was a CONST function, it is now PURE since it now
3748 reads memory. */
3749 if (flags & ECF_CONST)
3750 {
3751 flags &= ~ECF_CONST;
3752 flags |= ECF_PURE;
3753 }
3754
3755 if (MEM_P (val) && !must_copy)
3756 {
3757 tree val_expr = MEM_EXPR (val);
3758 if (val_expr)
3759 mark_addressable (val_expr);
3760 slot = val;
3761 }
3762 else
3763 {
3764 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3765 0, 1, 1);
3766 emit_move_insn (slot, val);
3767 }
3768
3769 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3770 gen_rtx_USE (VOIDmode, slot),
3771 call_fusage);
3772 if (must_copy)
3773 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3774 gen_rtx_CLOBBER (VOIDmode,
3775 slot),
3776 call_fusage);
3777
3778 mode = Pmode;
3779 val = force_operand (XEXP (slot, 0), NULL_RTX);
3780 }
3781
3782 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
3783 argvec[count].mode = mode;
3784 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
3785 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
3786 NULL_TREE, true);
3787
3788 argvec[count].partial
3789 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
3790
3791 if (argvec[count].reg == 0
3792 || argvec[count].partial != 0
3793 || reg_parm_stack_space > 0)
3794 {
3795 locate_and_pad_parm (mode, NULL_TREE,
3796 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3797 1,
3798 #else
3799 argvec[count].reg != 0,
3800 #endif
3801 argvec[count].partial,
3802 NULL_TREE, &args_size, &argvec[count].locate);
3803 args_size.constant += argvec[count].locate.size.constant;
3804 gcc_assert (!argvec[count].locate.size.var);
3805 }
3806 #ifdef BLOCK_REG_PADDING
3807 else
3808 /* The argument is passed entirely in registers. See at which
3809 end it should be padded. */
3810 argvec[count].locate.where_pad =
3811 BLOCK_REG_PADDING (mode, NULL_TREE,
3812 GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
3813 #endif
3814
3815 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
3816 }
3817
3818 /* If this machine requires an external definition for library
3819 functions, write one out. */
3820 assemble_external_libcall (fun);
3821
3822 original_args_size = args_size;
3823 args_size.constant = (((args_size.constant
3824 + stack_pointer_delta
3825 + STACK_BYTES - 1)
3826 / STACK_BYTES
3827 * STACK_BYTES)
3828 - stack_pointer_delta);
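/* Worked example (illustrative numbers only): with STACK_BYTES == 16,
   stack_pointer_delta == 4 and 10 bytes of argument data, the
   expression above yields ((10 + 4 + 15) / 16) * 16 - 4 == 12, so
   that once the arguments are pushed the combined adjustment of
   4 + 12 == 16 bytes leaves the stack aligned to the preferred
   boundary. */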
3829
3830 args_size.constant = MAX (args_size.constant,
3831 reg_parm_stack_space);
3832
3833 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3834 args_size.constant -= reg_parm_stack_space;
3835
3836 if (args_size.constant > crtl->outgoing_args_size)
3837 crtl->outgoing_args_size = args_size.constant;
3838
3839 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
3840 {
3841 int pushed = args_size.constant + pending_stack_adjust;
3842 if (pushed > current_function_pushed_stack_size)
3843 current_function_pushed_stack_size = pushed;
3844 }
3845
3846 if (ACCUMULATE_OUTGOING_ARGS)
3847 {
3848 /* Since the stack pointer will never be pushed, it is possible for
3849 the evaluation of a parm to clobber something we have already
3850 written to the stack. Since most function calls on RISC machines
3851 do not use the stack, this is uncommon, but must work correctly.
3852
3853 Therefore, we save any area of the stack that was already written
3854 and that we are using. Here we set up to do this by making a new
3855 stack usage map from the old one.
3856
3857 Another approach might be to try to reorder the argument
3858 evaluations to avoid this conflicting stack usage. */
3859
3860 needed = args_size.constant;
3861
3862 /* Since we will be writing into the entire argument area, the
3863 map must be allocated for its entire size, not just the part that
3864 is the responsibility of the caller. */
3865 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3866 needed += reg_parm_stack_space;
3867
3868 #ifdef ARGS_GROW_DOWNWARD
3869 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3870 needed + 1);
3871 #else
3872 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3873 needed);
3874 #endif
3875 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3876 stack_usage_map = stack_usage_map_buf;
3877
3878 if (initial_highest_arg_in_use)
3879 memcpy (stack_usage_map, initial_stack_usage_map,
3880 initial_highest_arg_in_use);
3881
3882 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3883 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3884 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3885 needed = 0;
3886
3887 /* We must be careful to use virtual regs before they're instantiated,
3888 and real regs afterwards. Loop optimization, for example, can create
3889 new libcalls after we've instantiated the virtual regs, and if we
3890 use virtuals anyway, they won't match the rtl patterns. */
3891
3892 if (virtuals_instantiated)
3893 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3894 else
3895 argblock = virtual_outgoing_args_rtx;
3896 }
3897 else
3898 {
3899 if (!PUSH_ARGS)
3900 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3901 }
3902
3903 /* If we push args individually in reverse order, perform stack alignment
3904 before the first push (the last arg). */
3905 if (argblock == 0 && PUSH_ARGS_REVERSED)
3906 anti_adjust_stack (GEN_INT (args_size.constant
3907 - original_args_size.constant));
3908
3909 if (PUSH_ARGS_REVERSED)
3910 {
3911 inc = -1;
3912 argnum = nargs - 1;
3913 }
3914 else
3915 {
3916 inc = 1;
3917 argnum = 0;
3918 }
3919
3920 #ifdef REG_PARM_STACK_SPACE
3921 if (ACCUMULATE_OUTGOING_ARGS)
3922 {
3923 /* The argument list is the property of the called routine and it
3924 may clobber it. If the fixed area has been used for previous
3925 parameters, we must save and restore it. */
3926 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3927 &low_to_save, &high_to_save);
3928 }
3929 #endif
3930
3931 /* Push the args that need to be pushed. */
3932
3933 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3934 are to be pushed. */
3935 for (count = 0; count < nargs; count++, argnum += inc)
3936 {
3937 enum machine_mode mode = argvec[argnum].mode;
3938 rtx val = argvec[argnum].value;
3939 rtx reg = argvec[argnum].reg;
3940 int partial = argvec[argnum].partial;
3941 unsigned int parm_align = argvec[argnum].locate.boundary;
3942 int lower_bound = 0, upper_bound = 0, i;
3943
3944 if (! (reg != 0 && partial == 0))
3945 {
3946 rtx use;
3947
3948 if (ACCUMULATE_OUTGOING_ARGS)
3949 {
3950 /* If this is being stored into a pre-allocated, fixed-size
3951 stack area, save any previous data at that location. */
3952
3953 #ifdef ARGS_GROW_DOWNWARD
3954 /* stack_slot is negative, but we want to index stack_usage_map
3955 with positive values. */
3956 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
3957 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3958 #else
3959 lower_bound = argvec[argnum].locate.slot_offset.constant;
3960 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3961 #endif
3962
3963 i = lower_bound;
3964 /* Don't worry about things in the fixed argument area;
3965 it has already been saved. */
3966 if (i < reg_parm_stack_space)
3967 i = reg_parm_stack_space;
3968 while (i < upper_bound && stack_usage_map[i] == 0)
3969 i++;
3970
3971 if (i < upper_bound)
3972 {
3973 /* We need to make a save area. */
3974 unsigned int size
3975 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3976 enum machine_mode save_mode
3977 = mode_for_size (size, MODE_INT, 1);
3978 rtx adr
3979 = plus_constant (argblock,
3980 argvec[argnum].locate.offset.constant);
3981 rtx stack_area
3982 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3983
3984 if (save_mode == BLKmode)
3985 {
3986 argvec[argnum].save_area
3987 = assign_stack_temp (BLKmode,
3988 argvec[argnum].locate.size.constant,
3989 0);
3990
3991 emit_block_move (validize_mem (argvec[argnum].save_area),
3992 stack_area,
3993 GEN_INT (argvec[argnum].locate.size.constant),
3994 BLOCK_OP_CALL_PARM);
3995 }
3996 else
3997 {
3998 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3999
4000 emit_move_insn (argvec[argnum].save_area, stack_area);
4001 }
4002 }
4003 }
4004
4005 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
4006 partial, reg, 0, argblock,
4007 GEN_INT (argvec[argnum].locate.offset.constant),
4008 reg_parm_stack_space,
4009 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
4010
4011 /* Now mark the segment we just used. */
4012 if (ACCUMULATE_OUTGOING_ARGS)
4013 for (i = lower_bound; i < upper_bound; i++)
4014 stack_usage_map[i] = 1;
4015
4016 NO_DEFER_POP;
4017
4018 /* Indicate argument access so that alias.c knows that these
4019 values are live. */
4020 if (argblock)
4021 use = plus_constant (argblock,
4022 argvec[argnum].locate.offset.constant);
4023 else
4024 /* When arguments are pushed, trying to tell alias.c where
4025 exactly this argument is won't work, because the
4026 auto-increment causes confusion. So we merely indicate
4027 that we access something with a known mode somewhere on
4028 the stack. */
4029 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
4030 gen_rtx_SCRATCH (Pmode));
4031 use = gen_rtx_MEM (argvec[argnum].mode, use);
4032 use = gen_rtx_USE (VOIDmode, use);
4033 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
4034 }
4035 }
4036
4037 /* If we pushed args in forward order, perform stack alignment
4038 after pushing the last arg. */
4039 if (argblock == 0 && !PUSH_ARGS_REVERSED)
4040 anti_adjust_stack (GEN_INT (args_size.constant
4041 - original_args_size.constant));
4042
4043 if (PUSH_ARGS_REVERSED)
4044 argnum = nargs - 1;
4045 else
4046 argnum = 0;
4047
4048 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
4049
4050 /* Now load any reg parms into their regs. */
4051
4052 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4053 are to be pushed. */
4054 for (count = 0; count < nargs; count++, argnum += inc)
4055 {
4056 enum machine_mode mode = argvec[argnum].mode;
4057 rtx val = argvec[argnum].value;
4058 rtx reg = argvec[argnum].reg;
4059 int partial = argvec[argnum].partial;
4060 #ifdef BLOCK_REG_PADDING
4061 int size = 0;
4062 #endif
4063
4064 /* Handle calls that pass values in multiple non-contiguous
4065 locations. The PA64 has examples of this for library calls. */
4066 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4067 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
4068 else if (reg != 0 && partial == 0)
4069 {
4070 emit_move_insn (reg, val);
4071 #ifdef BLOCK_REG_PADDING
4072 size = GET_MODE_SIZE (argvec[argnum].mode);
4073
4074 /* Copied from load_register_parameters. */
4075
4076 /* Handle the case where we have a value that needs shifting
4077 up to the msb, e.g. a QImode value when we're padding
4078 upward on a BYTES_BIG_ENDIAN machine. */
4079 if (size < UNITS_PER_WORD
4080 && (argvec[argnum].locate.where_pad
4081 == (BYTES_BIG_ENDIAN ? upward : downward)))
4082 {
4083 rtx x;
4084 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4085
4086 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4087 report the whole reg as used. Strictly speaking, the
4088 call only uses SIZE bytes at the msb end, but it doesn't
4089 seem worth generating rtl to say that. */
4090 reg = gen_rtx_REG (word_mode, REGNO (reg));
4091 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4092 if (x != reg)
4093 emit_move_insn (reg, x);
4094 }
4095 #endif
4096 }
4097
4098 NO_DEFER_POP;
4099 }
4100
4101 /* Any regs containing parms remain in use through the call. */
4102 for (count = 0; count < nargs; count++)
4103 {
4104 rtx reg = argvec[count].reg;
4105 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4106 use_group_regs (&call_fusage, reg);
4107 else if (reg != 0)
4108 {
4109 int partial = argvec[count].partial;
4110 if (partial)
4111 {
4112 int nregs;
4113 gcc_assert (partial % UNITS_PER_WORD == 0);
4114 nregs = partial / UNITS_PER_WORD;
4115 use_regs (&call_fusage, REGNO (reg), nregs);
4116 }
4117 else
4118 use_reg (&call_fusage, reg);
4119 }
4120 }
4121
4122 /* Pass the function the address in which to return a structure value. */
4123 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4124 {
4125 emit_move_insn (struct_value,
4126 force_reg (Pmode,
4127 force_operand (XEXP (mem_value, 0),
4128 NULL_RTX)));
4129 if (REG_P (struct_value))
4130 use_reg (&call_fusage, struct_value);
4131 }
4132
4133 /* Don't allow popping to be deferred, since then
4134 cse'ing of library calls could delete a call and leave the pop. */
4135 NO_DEFER_POP;
4136 valreg = (mem_value == 0 && outmode != VOIDmode
4137 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
4138
4139 /* Stack must be properly aligned now. */
4140 gcc_assert (!(stack_pointer_delta
4141 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
4142
4143 before_call = get_last_insn ();
4144
4145 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4146 will set inhibit_defer_pop to that value. */
4147 /* The return type is needed to decide how many bytes the function pops.
4148 Signedness plays no role in that, so for simplicity, we pretend it's
4149 always signed. We also assume that the list of arguments passed has
4150 no impact, so we pretend it is unknown. */
4151
4152 emit_call_1 (fun, NULL,
4153 get_identifier (XSTR (orgfun, 0)),
4154 build_function_type (tfom, NULL_TREE),
4155 original_args_size.constant, args_size.constant,
4156 struct_value_size,
4157 targetm.calls.function_arg (args_so_far,
4158 VOIDmode, void_type_node, true),
4159 valreg,
4160 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
4161
4162 /* Right-shift returned value if necessary. */
4163 if (!pcc_struct_value
4164 && TYPE_MODE (tfom) != BLKmode
4165 && targetm.calls.return_in_msb (tfom))
4166 {
4167 shift_return_value (TYPE_MODE (tfom), false, valreg);
4168 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4169 }
4170
4171 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
4172 that it should complain if nonvolatile values are live. For
4173 functions that cannot return, inform flow that control does not
4174 fall through. */
4175
4176 if (flags & ECF_NORETURN)
4177 {
4178 /* The barrier must be emitted
4179 immediately after the CALL_INSN. Some ports emit more than
4180 just a CALL_INSN above, so we must search for it here. */
4181
4182 rtx last = get_last_insn ();
4183 while (!CALL_P (last))
4184 {
4185 last = PREV_INSN (last);
4186 /* There was no CALL_INSN? */
4187 gcc_assert (last != before_call);
4188 }
4189
4190 emit_barrier_after (last);
4191 }
4192
4193 /* Now restore inhibit_defer_pop to its actual original value. */
4194 OK_DEFER_POP;
4195
4196 pop_temp_slots ();
4197
4198 /* Copy the value to the right place. */
4199 if (outmode != VOIDmode && retval)
4200 {
4201 if (mem_value)
4202 {
4203 if (value == 0)
4204 value = mem_value;
4205 if (value != mem_value)
4206 emit_move_insn (value, mem_value);
4207 }
4208 else if (GET_CODE (valreg) == PARALLEL)
4209 {
4210 if (value == 0)
4211 value = gen_reg_rtx (outmode);
4212 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4213 }
4214 else
4215 {
4216 /* Convert to the proper mode if a promotion has been active. */
4217 if (GET_MODE (valreg) != outmode)
4218 {
4219 int unsignedp = TYPE_UNSIGNED (tfom);
4220
4221 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4222 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4223 == GET_MODE (valreg));
4224 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4225 }
4226
4227 if (value != 0)
4228 emit_move_insn (value, valreg);
4229 else
4230 value = valreg;
4231 }
4232 }
4233
4234 if (ACCUMULATE_OUTGOING_ARGS)
4235 {
4236 #ifdef REG_PARM_STACK_SPACE
4237 if (save_area)
4238 restore_fixed_argument_area (save_area, argblock,
4239 high_to_save, low_to_save);
4240 #endif
4241
4242 /* If we saved any argument areas, restore them. */
4243 for (count = 0; count < nargs; count++)
4244 if (argvec[count].save_area)
4245 {
4246 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4247 rtx adr = plus_constant (argblock,
4248 argvec[count].locate.offset.constant);
4249 rtx stack_area = gen_rtx_MEM (save_mode,
4250 memory_address (save_mode, adr));
4251
4252 if (save_mode == BLKmode)
4253 emit_block_move (stack_area,
4254 validize_mem (argvec[count].save_area),
4255 GEN_INT (argvec[count].locate.size.constant),
4256 BLOCK_OP_CALL_PARM);
4257 else
4258 emit_move_insn (stack_area, argvec[count].save_area);
4259 }
4260
4261 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4262 stack_usage_map = initial_stack_usage_map;
4263 }
4264
4265 free (stack_usage_map_buf);
4266
4267 return value;
4268
4269 }
4270 \f
4271 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4273 for a value of mode OUTMODE,
4274 with NARGS different arguments, passed as alternating rtx values
4275 and machine_modes to convert them to.
4276
4277 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4278 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
4279 other types of library calls. */
4280
4281 void
4282 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4283 enum machine_mode outmode, int nargs, ...)
4284 {
4285 va_list p;
4286
4287 va_start (p, nargs);
4288 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4289 va_end (p);
4290 }
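/* Usage sketch (hypothetical; "__clear_cache" and the rtxes BEGIN and
   END stand in for whatever a real caller would pass):

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__clear_cache"),
                        LCT_NORMAL, VOIDmode, 2, begin, Pmode, end, Pmode);

   i.e. the SYMBOL_REF of the routine, the libcall class, the return
   mode, the argument count, and then each argument rtx followed by the
   mode it should be converted to. */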
4291 \f
4292 /* Like emit_library_call except that an extra argument, VALUE,
4293 comes second and says where to store the result.
4294 (If VALUE is zero, this function chooses a convenient way
4295 to return the value.)
4296
4297 This function returns an rtx for where the value is to be found.
4298 If VALUE is nonzero, VALUE is returned. */
4299
4300 rtx
4301 emit_library_call_value (rtx orgfun, rtx value,
4302 enum libcall_type fn_type,
4303 enum machine_mode outmode, int nargs, ...)
4304 {
4305 rtx result;
4306 va_list p;
4307
4308 va_start (p, nargs);
4309 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4310 nargs, p);
4311 va_end (p);
4312
4313 return result;
4314 }
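/* Usage sketch (hypothetical; "__mulsi3" and the operand rtxes OP0 and
   OP1 are illustrative stand-ins):

     rtx prod = emit_library_call_value (gen_rtx_SYMBOL_REF (Pmode, "__mulsi3"),
                                         NULL_RTX, LCT_CONST, SImode, 2,
                                         op0, SImode, op1, SImode);

   Passing NULL_RTX for VALUE lets this function choose where the result
   lives; the returned rtx is where the SImode product can be found. */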
4315 \f
4316 /* Store a single argument for a function call
4317 into the register or memory area where it must be passed.
4318 *ARG describes the argument value and where to pass it.
4319
4320 ARGBLOCK is the address of the stack-block for all the arguments,
4321 or 0 on a machine where arguments are pushed individually.
4322
4323 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4324 so must be careful about how the stack is used.
4325
4326 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4327 argument stack. It is used, when ACCUMULATE_OUTGOING_ARGS is set, to indicate
4328 that we need not worry about saving and restoring the stack.
4329
4330 FNDECL is the declaration of the function we are calling.
4331
4332 Return nonzero if this arg should cause sibcall failure,
4333 zero otherwise. */
4334
4335 static int
4336 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4337 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4338 {
4339 tree pval = arg->tree_value;
4340 rtx reg = 0;
4341 int partial = 0;
4342 int used = 0;
4343 int i, lower_bound = 0, upper_bound = 0;
4344 int sibcall_failure = 0;
4345
4346 if (TREE_CODE (pval) == ERROR_MARK)
4347 return 1;
4348
4349 /* Push a new temporary level for any temporaries we make for
4350 this argument. */
4351 push_temp_slots ();
4352
4353 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4354 {
4355 /* If this is being stored into a pre-allocated, fixed-size stack area,
4356 save any previous data at that location. */
4357 if (argblock && ! variable_size && arg->stack)
4358 {
4359 #ifdef ARGS_GROW_DOWNWARD
4360 /* stack_slot is negative, but we want to index stack_usage_map
4361 with positive values. */
4362 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4363 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4364 else
4365 upper_bound = 0;
4366
4367 lower_bound = upper_bound - arg->locate.size.constant;
4368 #else
4369 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4370 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4371 else
4372 lower_bound = 0;
4373
4374 upper_bound = lower_bound + arg->locate.size.constant;
4375 #endif
4376
4377 i = lower_bound;
4378 /* Don't worry about things in the fixed argument area;
4379 it has already been saved. */
4380 if (i < reg_parm_stack_space)
4381 i = reg_parm_stack_space;
4382 while (i < upper_bound && stack_usage_map[i] == 0)
4383 i++;
4384
4385 if (i < upper_bound)
4386 {
4387 /* We need to make a save area. */
4388 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4389 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4390 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4391 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4392
4393 if (save_mode == BLKmode)
4394 {
4395 tree ot = TREE_TYPE (arg->tree_value);
4396 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4397 | TYPE_QUAL_CONST));
4398
4399 arg->save_area = assign_temp (nt, 0, 1, 1);
4400 preserve_temp_slots (arg->save_area);
4401 emit_block_move (validize_mem (arg->save_area), stack_area,
4402 GEN_INT (arg->locate.size.constant),
4403 BLOCK_OP_CALL_PARM);
4404 }
4405 else
4406 {
4407 arg->save_area = gen_reg_rtx (save_mode);
4408 emit_move_insn (arg->save_area, stack_area);
4409 }
4410 }
4411 }
4412 }
4413
4414 /* If this isn't going to be placed on both the stack and in registers,
4415 set up the register and number of words. */
4416 if (! arg->pass_on_stack)
4417 {
4418 if (flags & ECF_SIBCALL)
4419 reg = arg->tail_call_reg;
4420 else
4421 reg = arg->reg;
4422 partial = arg->partial;
4423 }
4424
4425 /* An argument being passed entirely in a register should never
4426 reach this function. */
4427 gcc_assert (reg == 0 || partial != 0);
4428
4429 /* If this arg needs special alignment, don't load the registers
4430 here. */
4431 if (arg->n_aligned_regs != 0)
4432 reg = 0;
4433
4434 /* If this is being passed partially in a register, we can't evaluate
4435 it directly into its stack slot. Otherwise, we can. */
4436 if (arg->value == 0)
4437 {
4438 /* stack_arg_under_construction is nonzero if a function argument is
4439 being evaluated directly into the outgoing argument list and
4440 expand_call must take special action to preserve the argument list
4441 if it is called recursively.
4442
4443 For scalar function arguments stack_usage_map is sufficient to
4444 determine which stack slots must be saved and restored. Scalar
4445 arguments in general have pass_on_stack == 0.
4446
4447 If this argument is initialized by a function which takes the
4448 address of the argument (a C++ constructor or a C function
4449 returning a BLKmode structure), then stack_usage_map is
4450 insufficient and expand_call must push the stack around the
4451 function call. Such arguments have pass_on_stack == 1.
4452
4453 Note that it is always safe to set stack_arg_under_construction,
4454 but this generates suboptimal code if set when not needed. */
4455
4456 if (arg->pass_on_stack)
4457 stack_arg_under_construction++;
4458
4459 arg->value = expand_expr (pval,
4460 (partial
4461 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4462 ? NULL_RTX : arg->stack,
4463 VOIDmode, EXPAND_STACK_PARM);
4464
4465 /* If the mode doesn't agree (because we promoted the object, or
4466 for any other reason), convert it. */
4467
4468 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4469 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4470 arg->value, arg->unsignedp);
4471
4472 if (arg->pass_on_stack)
4473 stack_arg_under_construction--;
4474 }
4475
4476 /* Check for overlap with already clobbered argument area. */
4477 if ((flags & ECF_SIBCALL)
4478 && MEM_P (arg->value)
4479 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4480 arg->locate.size.constant))
4481 sibcall_failure = 1;
4482
4483 /* Don't allow anything left on stack from computation
4484 of argument to alloca. */
4485 if (flags & ECF_MAY_BE_ALLOCA)
4486 do_pending_stack_adjust ();
4487
4488 if (arg->value == arg->stack)
4489 /* If the value is already in the stack slot, we are done. */
4490 ;
4491 else if (arg->mode != BLKmode)
4492 {
4493 int size;
4494 unsigned int parm_align;
4495
4496 /* Argument is a scalar, not entirely passed in registers.
4497 (If part is passed in registers, arg->partial says how much
4498 and emit_push_insn will take care of putting it there.)
4499
4500 Push it, and if its size is less than the
4501 amount of space allocated to it,
4502 also bump stack pointer by the additional space.
4503 Note that in C the default argument promotions
4504 will prevent such mismatches. */
4505
4506 size = GET_MODE_SIZE (arg->mode);
4507 /* Compute how much space the push instruction will push.
4508 On many machines, pushing a byte will advance the stack
4509 pointer by a halfword. */
4510 #ifdef PUSH_ROUNDING
4511 size = PUSH_ROUNDING (size);
4512 #endif
4513 used = size;
4514
4515 /* Compute how much space the argument should get:
4516 round up to a multiple of the alignment for arguments. */
4517 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4518 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4519 / (PARM_BOUNDARY / BITS_PER_UNIT))
4520 * (PARM_BOUNDARY / BITS_PER_UNIT));
4521
4522 /* Compute the alignment of the pushed argument. */
4523 parm_align = arg->locate.boundary;
4524 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4525 {
4526 int pad = used - size;
4527 if (pad)
4528 {
4529 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4530 parm_align = MIN (parm_align, pad_align);
4531 }
4532 }
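/* Worked example (illustrative numbers only): if the value itself is
   size == 2 bytes but occupies used == 8 bytes of slot, pad == 6 and
   pad & -pad == 2 isolates its lowest set bit, so the data is only
   known to start on a 2-byte (16-bit) boundary within the
   downward-padded slot and parm_align is capped accordingly. */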
4533
4534 /* This isn't already where we want it on the stack, so put it there.
4535 This can either be done with push or copy insns. */
4536 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4537 parm_align, partial, reg, used - size, argblock,
4538 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4539 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4540
4541 /* Unless this is a partially-in-register argument, the argument is now
4542 in the stack. */
4543 if (partial == 0)
4544 arg->value = arg->stack;
4545 }
4546 else
4547 {
4548 /* BLKmode, at least partly to be pushed. */
4549
4550 unsigned int parm_align;
4551 int excess;
4552 rtx size_rtx;
4553
4554 /* Pushing a nonscalar.
4555 If part is passed in registers, PARTIAL says how much
4556 and emit_push_insn will take care of putting it there. */
4557
4558 /* Round its size up to a multiple
4559 of the allocation unit for arguments. */
4560
4561 if (arg->locate.size.var != 0)
4562 {
4563 excess = 0;
4564 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4565 }
4566 else
4567 {
4568 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4569 for BLKmode is careful to avoid it. */
4570 excess = (arg->locate.size.constant
4571 - int_size_in_bytes (TREE_TYPE (pval))
4572 + partial);
4573 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4574 NULL_RTX, TYPE_MODE (sizetype),
4575 EXPAND_NORMAL);
4576 }
4577
4578 parm_align = arg->locate.boundary;
4579
4580 /* When an argument is padded down, the block is aligned to
4581 PARM_BOUNDARY, but the actual argument isn't. */
4582 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4583 {
4584 if (arg->locate.size.var)
4585 parm_align = BITS_PER_UNIT;
4586 else if (excess)
4587 {
4588 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4589 parm_align = MIN (parm_align, excess_align);
4590 }
4591 }
4592
4593 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4594 {
4595 /* emit_push_insn might not work properly if arg->value and
4596 argblock + arg->locate.offset areas overlap. */
4597 rtx x = arg->value;
4598 int i = 0;
4599
4600 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4601 || (GET_CODE (XEXP (x, 0)) == PLUS
4602 && XEXP (XEXP (x, 0), 0) ==
4603 crtl->args.internal_arg_pointer
4604 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4605 {
4606 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
4607 i = INTVAL (XEXP (XEXP (x, 0), 1));
4608
4609 /* expand_call should ensure this. */
4610 gcc_assert (!arg->locate.offset.var
4611 && arg->locate.size.var == 0
4612 && CONST_INT_P (size_rtx));
4613
4614 if (arg->locate.offset.constant > i)
4615 {
4616 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4617 sibcall_failure = 1;
4618 }
4619 else if (arg->locate.offset.constant < i)
4620 {
4621 /* Use arg->locate.size.constant instead of size_rtx
4622 because we only care about the part of the argument
4623 on the stack. */
4624 if (i < (arg->locate.offset.constant
4625 + arg->locate.size.constant))
4626 sibcall_failure = 1;
4627 }
4628 else
4629 {
4630 /* Even though they appear to be at the same location,
4631 if part of the outgoing argument is in registers,
4632 they aren't really at the same location. Check for
4633 this by making sure that the incoming size is the
4634 same as the outgoing size. */
4635 if (arg->locate.size.constant != INTVAL (size_rtx))
4636 sibcall_failure = 1;
4637 }
4638 }
4639 }
4640
4641 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4642 parm_align, partial, reg, excess, argblock,
4643 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4644 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4645
4646 /* Unless this is a partially-in-register argument, the argument is now
4647 in the stack.
4648
4649 ??? Unlike the case above, in which we want the actual
4650 address of the data, so that we can load it directly into a
4651 register, here we want the address of the stack slot, so that
4652 it's properly aligned for word-by-word copying or something
4653 like that. It's not clear that this is always correct. */
4654 if (partial == 0)
4655 arg->value = arg->stack_slot;
4656 }
4657
4658 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4659 {
4660 tree type = TREE_TYPE (arg->tree_value);
4661 arg->parallel_value
4662 = emit_group_load_into_temps (arg->reg, arg->value, type,
4663 int_size_in_bytes (type));
4664 }
4665
4666 /* Mark all slots this store used. */
4667 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4668 && argblock && ! variable_size && arg->stack)
4669 for (i = lower_bound; i < upper_bound; i++)
4670 stack_usage_map[i] = 1;
4671
4672 /* Once we have pushed something, pops can't safely
4673 be deferred during the rest of the arguments. */
4674 NO_DEFER_POP;
4675
4676 /* Free any temporary slots made in processing this argument. Show
4677 that we might have taken the address of something and pushed that
4678 as an operand. */
4679 preserve_temp_slots (NULL_RTX);
4680 free_temp_slots ();
4681 pop_temp_slots ();
4682
4683 return sibcall_failure;
4684 }
4685
4686 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4687
4688 bool
4689 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4690 const_tree type)
4691 {
4692 if (!type)
4693 return false;
4694
4695 /* If the type has variable size... */
4696 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4697 return true;
4698
4699 /* If the type is marked as addressable (it is required
4700 to be constructed into the stack)... */
4701 if (TREE_ADDRESSABLE (type))
4702 return true;
4703
4704 return false;
4705 }
4706
4707 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4708 takes trailing padding of a structure into account. */
4709 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4710
4711 bool
4712 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
4713 {
4714 if (!type)
4715 return false;
4716
4717 /* If the type has variable size... */
4718 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4719 return true;
4720
4721 /* If the type is marked as addressable (it is required
4722 to be constructed into the stack)... */
4723 if (TREE_ADDRESSABLE (type))
4724 return true;
4725
4726 /* If the padding and mode of the type is such that a copy into
4727 a register would put it into the wrong part of the register. */
4728 if (mode == BLKmode
4729 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4730 && (FUNCTION_ARG_PADDING (mode, type)
4731 == (BYTES_BIG_ENDIAN ? upward : downward)))
4732 return true;
4733
4734 return false;
4735 }
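/* Illustration (hypothetical target parameters): with PARM_BOUNDARY == 32
   and BYTES_BIG_ENDIAN, a 5-byte BLKmode structure whose padding
   direction is upward satisfies the final test above (5 % 4 != 0), so
   copying it into a register would leave its bytes in the wrong part of
   the word; it must be passed on the stack instead. */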