/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use the parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;

static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int, tree,
                                             tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      int);
static tree fix_unsafe_tree (tree);
static bool shift_returned_value (tree, rtx *);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
\f
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  funexp = protect_from_queue (funexp, 0);

  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      static_chain_value = convert_memory_address (Pmode, static_chain_value);
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (REG_P (static_chain_rtx))
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
        (VOIDmode,
         gen_rtx_USE (VOIDmode,
                      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
         call_fusage);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));
  else
    {
      int rn = lookup_stmt_eh_region (fntree);

      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
         throw, which we already took care of.  */
      if (rn > 0)
        REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
                                                   REG_NOTES (call_insn));
      note_current_region_may_contain_throw ();
    }

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
                                               REG_NOTES (call_insn));
  if (ecf_flags & ECF_ALWAYS_RETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
                                               REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
                                                 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN | ECF_LONGJMP))
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}

/* Determine if the function identified by FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly, set LONGJMP if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_LONGJMP;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_LONGJMP;
    }

  return flags;
}

/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true when EXP contains an alloca call.  */
bool
alloca_call_p (tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
          == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                              0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
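
/* For illustration only (not part of the original source): alloca_call_p
   matches a CALL_EXPR whose callee is the address of a FUNCTION_DECL
   recognized by special_function_p, i.e. a tree of the shape

     (call_expr
       (addr_expr (function_decl "alloca"))
       (tree_list ...args...))

   An indirect call through a pointer that merely happens to hold the
   address of alloca is deliberately not matched; see the "called by
   name" assumption documented in special_function_p above.  */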

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (tree exp)
{
  int flags = 0;
  tree type = exp;

  if (DECL_P (exp))
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
      type = TREE_TYPE (exp);

      if (i)
        {
          if (i->pure_function)
            flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
          if (i->const_function)
            flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
        }

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
        flags |= ECF_PURE | ECF_LIBCALL_BLOCK;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
        flags |= ECF_LIBCALL_BLOCK | ECF_CONST;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
    }

  return flags;
}

/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (TREE_OPERAND (t, 0));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
                                         VOIDmode, 0);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();

            /* ANSI doesn't require a sequence point here,
               but PCC has one, so this will avoid some problems.  */
            emit_queue ();
          }

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        if ((! (REG_P (args[i].value)
                || (GET_CODE (args[i].value) == SUBREG
                    && REG_P (SUBREG_REG (args[i].value)))))
            && args[i].mode != BLKmode
            && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
            && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                || preserve_subexpressions_p ()))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}
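
/* Illustrative sketch with assumed numbers (not from the original source):
   with reg_parm_stack_space == 4 and a stack_usage_map of { 1, 1, 0, 0 },
   the scan above finds low == 0, then backs high down from 4 to 1, so
   bytes 0..1 of the fixed register-parameter area are saved.  Since low
   satisfies the alignment test, save_mode stays a 2-byte integer mode and
   SAVE_AREA is a pseudo; restore_fixed_argument_area below copies it back
   after the call.  */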

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
        int endian_correction = 0;

        args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
        args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here, but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register, later passes know that the first AND used to
               zero out the bitfield being set in the register is
               unnecessary.  The store of 0 will be deleted, as will at
               least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word);
          }
      }
}
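
/* Illustrative numbers, not from the original source: for a 3-byte
   struct on a 32-bit target where the block is padded downward,
   bytes == 3 gives endian_correction == 32 - 3 * 8 == 8, so the 24
   payload bits are stored at bit offset 8 rather than 0, skipping the
   empty byte of padding exactly as the comment above describes.  */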

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree node for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   argument that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree actparms, tree fndecl,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many words are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far, TYPE_MODE (type),
                             type, argpos < n_named_args))
        {
          /* If we're compiling a thunk, pass through invisible
             references instead of making a copy.  */
          if (call_from_thunk_p
              || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
                                              type, argpos < n_named_args)
                  /* If it's in a register, we must make a copy of it too.  */
                  /* ??? Is this a sufficient test?  Is there a better one? */
                  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
                       && REG_P (DECL_RTL (args[i].tree_value)))
                  && ! TREE_ADDRESSABLE (type))
              )
            {
              /* C++ uses a TARGET_EXPR to indicate that we want to make a
                 new object from the argument.  If we are passing by
                 invisible reference, the callee will do that for us, so we
                 can strip off the TARGET_EXPR.  This is not always safe,
                 but it is safe in the only case where this is a useful
                 optimization; namely, when the argument is a plain object.
                 In that case, the frontend is just asking the backend to
                 make a bitwise copy of the argument.  */

              if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
                  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
                args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

              /* We can't use sibcalls if a callee-copied argument is stored
                 in the current function's frame.  */
              if (!call_from_thunk_p
                  && (!DECL_P (args[i].tree_value)
                      || !TREE_STATIC (args[i].tree_value)))
                *may_tailcall = false;

              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           args[i].tree_value);
              type = build_pointer_type (type);
            }
          else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
            {
              /* In the V3 C++ ABI, parameters are destroyed in the caller.
                 We implement this by passing the address of the temporary
                 rather than expanding it into another allocated slot.  */
              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           args[i].tree_value);
              type = build_pointer_type (type);
              *may_tailcall = false;
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (TREE_VALUE (p));

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0);
              *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           make_tree (type, copy));
              type = build_pointer_type (type);
              *may_tailcall = false;
            }
        }

      mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
        mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

      if (args[i].reg)
        args[i].partial
          = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
                                        argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          if (stack_pointer_delta & (preferred_stack_boundary - 1))
            abort ();
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          args_size->var
            = size_binop (MINUS_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));
#endif
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
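
/* Worked example with assumed numbers (not from the original source):
   args_size->constant == 20, stack_pointer_delta == 8 and a 16-byte
   preferred boundary give ((20 + 8 + 15) / 16) * 16 - 8 == 32 - 8 == 24,
   i.e. the argument block is padded to 24 bytes so the stack pointer ends
   up at 8 + 24 == 32, a multiple of the boundary.  The rounding is done
   on the sum with STACK_POINTER_DELTA because it is the final stack
   pointer value, not the block size alone, that must be aligned.  */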

/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */
  if ((flags & ECF_LIBCALL_BLOCK) == 0)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      enum machine_mode mode;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
        abort ();

      args[i].value
        = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

      /* ANSI doesn't require a sequence point here,
         but PCC has one, so this will avoid some problems.  */
      emit_queue ();

      args[i].initial_value = args[i].value
        = protect_from_queue (args[i].value, 0);

      mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
      if (mode != args[i].mode)
        {
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);
#if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                            args[i].unsignedp);
            }
#endif
        }
    }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}
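
/* Illustrative numbers, not from the original source: with a 24-byte
   argument block of which a 16-byte BLKmode CALL_EXPR argument would
   need a temporary copy, copy_to_evaluate_size * 2 == 32 >= 24, so the
   block is preallocated and that argument can be evaluated directly
   into its final stack slot instead of being copied there from a
   temporary.  */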

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack && args[i].reg != 0)
            continue;

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack = gen_rtx_MEM (args[i].mode, addr);
          set_mem_align (args[i].stack, PARM_BOUNDARY);
          set_mem_attributes (args[i].stack,
                              TREE_TYPE (args[i].tree_value), 1);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
          set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
          set_mem_attributes (args[i].stack_slot,
                              TREE_TYPE (args[i].tree_value), 1);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}

/* Given FNDECL and ADDR, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (! TREE_USED (fndecl))
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
      emit_queue ();
    }
  return funexp;
}

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx before_arg = get_last_insn ();
          /* Set to non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 1 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure with no fields.  */
          nregs = -1;
          if (partial)
            nregs = partial;
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            {
              tree type = TREE_TYPE (args[i].tree_value);
              emit_group_load (reg, args[i].value, type,
                               int_size_in_bytes (type));
            }

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb.  E.g. a QImode value and we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                    build_int_2 (shift, 0), reg, 1);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (args[i].value);

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward
#else
                  && BYTES_BIG_ENDIAN
#endif
                  )
                {
                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
                                                        : LSHIFT_EXPR;

                  emit_move_insn (x, tem);
                  x = expand_shift (dir, word_mode, x,
                                    build_int_2 (shift, 0), ri, 1);
                  if (x != ri)
                    emit_move_insn (ri, x);
                }
              else
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
          else
            use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
        }
    }
}
1586
1587 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1588 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1589 bytes, then we would need to push some additional bytes to pad the
1590 arguments. So, we compute an adjust to the stack pointer for an
1591 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1592 bytes. Then, when the arguments are pushed the stack will be perfectly
1593 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1594 be popped after the call. Returns the adjustment. */
1595
1596 static int
1597 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1598 struct args_size *args_size,
1599 int preferred_unit_stack_boundary)
1600 {
1601 /* The number of bytes to pop so that the stack will be
1602 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1603 HOST_WIDE_INT adjustment;
1604 /* The alignment of the stack after the arguments are pushed, if we
1605 just pushed the arguments without adjust the stack here. */
1606 HOST_WIDE_INT unadjusted_alignment;
1607
1608 unadjusted_alignment
1609 = ((stack_pointer_delta + unadjusted_args_size)
1610 % preferred_unit_stack_boundary);
1611
1612 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1613 as possible -- leaving just enough left to cancel out the
1614 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1615 PENDING_STACK_ADJUST is non-negative, and congruent to
1616 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1617
1618 /* Begin by trying to pop all the bytes. */
1619 unadjusted_alignment
1620 = (unadjusted_alignment
1621 - (pending_stack_adjust % preferred_unit_stack_boundary));
1622 adjustment = pending_stack_adjust;
1623 /* Push enough additional bytes that the stack will be aligned
1624 after the arguments are pushed. */
1625 if (preferred_unit_stack_boundary > 1)
1626 {
1627 if (unadjusted_alignment > 0)
1628 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1629 else
1630 adjustment += unadjusted_alignment;
1631 }
1632
1633 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1634 bytes after the call. The right number is the entire
1635 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1636 by the arguments in the first place. */
1637 args_size->constant
1638 = pending_stack_adjust - adjustment + unadjusted_args_size;
1639
1640 return adjustment;
1641 }
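
/* Worked example for the computation above (illustrative figures):
with a 16-byte preferred boundary, stack_pointer_delta == 0,
pending_stack_adjust == 20 and UNADJUSTED_ARGS_SIZE == 12, the
initial misalignment is 12, reduced by 20 % 16 to 8, so the
adjustment is 20 - (16 - 8) = 12. Popping 12 bytes leaves the stack
under-aligned by exactly the 12 bytes of arguments still to be
pushed, and ARGS_SIZE->CONSTANT becomes 20 - 12 + 12 = 20 bytes to
pop after the call. */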
1642
1643 /* Scan expression X to see whether it dereferences any argument slots
1644 that we have already clobbered with tail call arguments (as noted in
1645 the stored_args_map bitmap).
1646 Return nonzero if X dereferences such an argument slot,
1647 zero otherwise. */
1648
1649 static int
1650 check_sibcall_argument_overlap_1 (rtx x)
1651 {
1652 RTX_CODE code;
1653 int i, j;
1654 unsigned int k;
1655 const char *fmt;
1656
1657 if (x == NULL_RTX)
1658 return 0;
1659
1660 code = GET_CODE (x);
1661
1662 if (code == MEM)
1663 {
1664 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1665 i = 0;
1666 else if (GET_CODE (XEXP (x, 0)) == PLUS
1667 && XEXP (XEXP (x, 0), 0) ==
1668 current_function_internal_arg_pointer
1669 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1670 i = INTVAL (XEXP (XEXP (x, 0), 1));
1671 else
1672 return 1;
1673
1674 #ifdef ARGS_GROW_DOWNWARD
1675 i = -i - GET_MODE_SIZE (GET_MODE (x));
1676 #endif
1677
1678 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1679 if (i + k < stored_args_map->n_bits
1680 && TEST_BIT (stored_args_map, i + k))
1681 return 1;
1682
1683 return 0;
1684 }
1685
1686 /* Scan all subexpressions. */
1687 fmt = GET_RTX_FORMAT (code);
1688 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1689 {
1690 if (*fmt == 'e')
1691 {
1692 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1693 return 1;
1694 }
1695 else if (*fmt == 'E')
1696 {
1697 for (j = 0; j < XVECLEN (x, i); j++)
1698 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1699 return 1;
1700 }
1701 }
1702 return 0;
1703 }
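
/* For instance (illustrative, assuming ARGS_GROW_DOWNWARD is not
defined): a load expressed as (mem:SI (plus (arg_pointer)
(const_int 8))) overlaps whenever any of bits 8..11 is set in
stored_args_map, i.e. whenever a tail call argument has already
been stored over one of those four bytes. */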
1704
1705 /* Scan the sequence after INSN to see whether it dereferences any
1706 argument slots we already clobbered with tail call arguments (as
1707 noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP is
1708 nonzero, afterwards add the stack slots for ARG to stored_args_map
1709 (when ARG is passed in a register, MARK_STORED_ARGS_MAP should be 0).
1710 Return nonzero if the sequence dereferences such slots, zero otherwise. */
1711
1712 static int
1713 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1714 {
1715 int low, high;
1716
1717 if (insn == NULL_RTX)
1718 insn = get_insns ();
1719 else
1720 insn = NEXT_INSN (insn);
1721
1722 for (; insn; insn = NEXT_INSN (insn))
1723 if (INSN_P (insn)
1724 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1725 break;
1726
1727 if (mark_stored_args_map)
1728 {
1729 #ifdef ARGS_GROW_DOWNWARD
1730 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1731 #else
1732 low = arg->locate.slot_offset.constant;
1733 #endif
1734
1735 for (high = low + arg->locate.size.constant; low < high; low++)
1736 SET_BIT (stored_args_map, low);
1737 }
1738 return insn != NULL_RTX;
1739 }
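
/* Continuing the example above (illustrative): after storing an
argument whose slot_offset is 8 and whose size is 4, bits 8..11 of
stored_args_map are set, so any later argument computation that
reads those bytes will force sibcall_failure. */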
1740
1741 static tree
1742 fix_unsafe_tree (tree t)
1743 {
1744 switch (unsafe_for_reeval (t))
1745 {
1746 case 0: /* Safe. */
1747 break;
1748
1749 case 1: /* Mildly unsafe. */
1750 t = unsave_expr (t);
1751 break;
1752
1753 case 2: /* Wildly unsafe. */
1754 {
1755 tree var = build_decl (VAR_DECL, NULL_TREE,
1756 TREE_TYPE (t));
1757 SET_DECL_RTL (var,
1758 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
1759 t = var;
1760 }
1761 break;
1762
1763 default:
1764 abort ();
1765 }
1766 return t;
1767 }
1768
1769
1770 /* If function value *VALUE was returned at the most significant end of a
1771 register, shift it towards the least significant end and convert it to
1772 TYPE's mode. Return true and update *VALUE if some action was needed.
1773
1774 TYPE is the type of the function's return value, which is known not
1775 to have mode BLKmode. */
1776
1777 static bool
1778 shift_returned_value (tree type, rtx *value)
1779 {
1780 if (targetm.calls.return_in_msb (type))
1781 {
1782 HOST_WIDE_INT shift;
1783
1784 shift = (GET_MODE_BITSIZE (GET_MODE (*value))
1785 - BITS_PER_UNIT * int_size_in_bytes (type));
1786 if (shift > 0)
1787 {
1788 /* Shift the value into the low part of the register. */
1789 *value = expand_binop (GET_MODE (*value), lshr_optab, *value,
1790 GEN_INT (shift), 0, 1, OPTAB_WIDEN);
1791
1792 /* Truncate it to the type's mode, or its integer equivalent.
1793 This is subject to TRULY_NOOP_TRUNCATION. */
1794 *value = convert_to_mode (int_mode_for_mode (TYPE_MODE (type)),
1795 *value, 0);
1796
1797 /* Now convert it to the final form. */
1798 *value = gen_lowpart (TYPE_MODE (type), *value);
1799 return true;
1800 }
1801 }
1802 return false;
1803 }
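
/* Worked example (hypothetical ABI): a 16-bit value returned in the
most significant half of a 32-bit register gives shift = 32 - 16 = 16;
the logical right shift moves it into the low half, after which
it is converted to the type's own 16-bit mode. */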
1804
1805 /* Remove all REG_EQUIV notes found in the insn chain. */
1806
1807 static void
1808 purge_reg_equiv_notes (void)
1809 {
1810 rtx insn;
1811
1812 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1813 {
1814 while (1)
1815 {
1816 rtx note = find_reg_note (insn, REG_EQUIV, 0);
1817 if (note)
1818 {
1819 /* Remove the note and keep looking at the notes for
1820 this insn. */
1821 remove_note (insn, note);
1822 continue;
1823 }
1824 break;
1825 }
1826 }
1827 }
1828
1829 /* Clear RTX_UNCHANGING_P flag of incoming argument MEMs. */
1830
1831 static void
1832 purge_mem_unchanging_flag (rtx x)
1833 {
1834 RTX_CODE code;
1835 int i, j;
1836 const char *fmt;
1837
1838 if (x == NULL_RTX)
1839 return;
1840
1841 code = GET_CODE (x);
1842
1843 if (code == MEM)
1844 {
1845 if (RTX_UNCHANGING_P (x)
1846 && (XEXP (x, 0) == current_function_internal_arg_pointer
1847 || (GET_CODE (XEXP (x, 0)) == PLUS
1848 && XEXP (XEXP (x, 0), 0) ==
1849 current_function_internal_arg_pointer
1850 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
1851 RTX_UNCHANGING_P (x) = 0;
1852 return;
1853 }
1854
1855 /* Scan all subexpressions. */
1856 fmt = GET_RTX_FORMAT (code);
1857 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1858 {
1859 if (*fmt == 'e')
1860 purge_mem_unchanging_flag (XEXP (x, i));
1861 else if (*fmt == 'E')
1862 for (j = 0; j < XVECLEN (x, i); j++)
1863 purge_mem_unchanging_flag (XVECEXP (x, i, j));
1864 }
1865 }
1866
1867
1868 /* Generate all the code for a function call
1869 and return an rtx for its value.
1870 Store the value in TARGET (specified as an rtx) if convenient.
1871 If the value is stored in TARGET then TARGET is returned.
1872 If IGNORE is nonzero, then we ignore the value of the function call. */
1873
1874 rtx
1875 expand_call (tree exp, rtx target, int ignore)
1876 {
1877 /* Nonzero if we are currently expanding a call. */
1878 static int currently_expanding_call = 0;
1879
1880 /* List of actual parameters. */
1881 tree actparms = TREE_OPERAND (exp, 1);
1882 /* RTX for the function to be called. */
1883 rtx funexp;
1884 /* Sequence of insns to perform a normal "call". */
1885 rtx normal_call_insns = NULL_RTX;
1886 /* Sequence of insns to perform a tail "call". */
1887 rtx tail_call_insns = NULL_RTX;
1888 /* Data type of the function. */
1889 tree funtype;
1890 tree type_arg_types;
1891 /* Declaration of the function being called,
1892 or 0 if the function is computed (not known by name). */
1893 tree fndecl = 0;
1894 /* The type of the function being called. */
1895 tree fntype;
1896 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1897 int pass;
1898
1899 /* Register in which non-BLKmode value will be returned,
1900 or 0 if no value or if value is BLKmode. */
1901 rtx valreg;
1902 /* Address where we should return a BLKmode value;
1903 0 if value not BLKmode. */
1904 rtx structure_value_addr = 0;
1905 /* Nonzero if that address is being passed by treating it as
1906 an extra, implicit first parameter. Otherwise,
1907 it is passed by being copied directly into struct_value_rtx. */
1908 int structure_value_addr_parm = 0;
1909 /* Size of aggregate value wanted, or zero if none wanted
1910 or if we are using the non-reentrant PCC calling convention
1911 or expecting the value in registers. */
1912 HOST_WIDE_INT struct_value_size = 0;
1913 /* Nonzero if called function returns an aggregate in memory PCC style,
1914 by returning the address of where to find it. */
1915 int pcc_struct_value = 0;
1916 rtx struct_value = 0;
1917
1918 /* Number of actual parameters in this call, including struct value addr. */
1919 int num_actuals;
1920 /* Number of named args. Args after this are anonymous ones
1921 and they must all go on the stack. */
1922 int n_named_args;
1923
1924 /* Vector of information about each argument.
1925 Arguments are numbered in the order they will be pushed,
1926 not the order they are written. */
1927 struct arg_data *args;
1928
1929 /* Total size in bytes of all the stack-parms scanned so far. */
1930 struct args_size args_size;
1931 struct args_size adjusted_args_size;
1932 /* Size of arguments before any adjustments (such as rounding). */
1933 int unadjusted_args_size;
1934 /* Data on reg parms scanned so far. */
1935 CUMULATIVE_ARGS args_so_far;
1936 /* Nonzero if a reg parm has been scanned. */
1937 int reg_parm_seen;
1939
1940 /* Nonzero if we must avoid push-insns in the args for this call.
1941 If stack space is allocated for register parameters, but not by the
1942 caller, then it is preallocated in the fixed part of the stack frame.
1943 So the entire argument block must then be preallocated (i.e., we
1944 ignore PUSH_ROUNDING in that case). */
1945
1946 int must_preallocate = !PUSH_ARGS;
1947
1948 /* Size of the stack reserved for parameter registers. */
1949 int reg_parm_stack_space = 0;
1950
1951 /* Address of space preallocated for stack parms
1952 (on machines that lack push insns), or 0 if space not preallocated. */
1953 rtx argblock = 0;
1954
1955 /* Mask of ECF_ flags. */
1956 int flags = 0;
1957 #ifdef REG_PARM_STACK_SPACE
1958 /* Define the boundary of the register parm stack space that needs to be
1959 saved, if any. */
1960 int low_to_save, high_to_save;
1961 rtx save_area = 0; /* Place that it is saved */
1962 #endif
1963
1964 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1965 char *initial_stack_usage_map = stack_usage_map;
1966
1967 int old_stack_allocated;
1968
1969 /* State variables to track stack modifications. */
1970 rtx old_stack_level = 0;
1971 int old_stack_arg_under_construction = 0;
1972 int old_pending_adj = 0;
1973 int old_inhibit_defer_pop = inhibit_defer_pop;
1974
1975 /* Some stack pointer alterations we make are performed via
1976 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1977 which we then also need to save/restore along the way. */
1978 int old_stack_pointer_delta = 0;
1979
1980 rtx call_fusage;
1981 tree p = TREE_OPERAND (exp, 0);
1982 tree addr = TREE_OPERAND (exp, 0);
1983 int i;
1984 /* The alignment of the stack, in bits. */
1985 HOST_WIDE_INT preferred_stack_boundary;
1986 /* The alignment of the stack, in bytes. */
1987 HOST_WIDE_INT preferred_unit_stack_boundary;
1988 /* The static chain value to use for this call. */
1989 rtx static_chain_value;
1990 /* See if this is a "nothrow" function call. */
1991 if (TREE_NOTHROW (exp))
1992 flags |= ECF_NOTHROW;
1993
1994 /* See if we can find a DECL-node for the actual function, and get the
1995 function attributes (flags) from the function decl or type node. */
1996 fndecl = get_callee_fndecl (exp);
1997 if (fndecl)
1998 {
1999 fntype = TREE_TYPE (fndecl);
2000 flags |= flags_from_decl_or_type (fndecl);
2001 }
2002 else
2003 {
2004 fntype = TREE_TYPE (TREE_TYPE (p));
2005 flags |= flags_from_decl_or_type (fntype);
2006 }
2007
2008 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2009
2010 /* Warn if this value is an aggregate type,
2011 regardless of which calling convention we are using for it. */
2012 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2013 warning ("function call has aggregate value");
2014
2015 /* If the result of a pure or const function call is ignored (or void),
2016 and none of its arguments are volatile, we can avoid expanding the
2017 call and just evaluate the arguments for side-effects. */
2018 if ((flags & (ECF_CONST | ECF_PURE))
2019 && (ignore || target == const0_rtx
2020 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2021 {
2022 bool volatilep = false;
2023 tree arg;
2024
2025 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2026 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
2027 {
2028 volatilep = true;
2029 break;
2030 }
2031
2032 if (! volatilep)
2033 {
2034 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2035 expand_expr (TREE_VALUE (arg), const0_rtx,
2036 VOIDmode, EXPAND_NORMAL);
2037 return const0_rtx;
2038 }
2039 }
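
/* For example (illustrative declaration): given
int sq (int) __attribute__ ((const));
a statement such as `sq (i++);', whose result is unused, reduces to
evaluating `i++' for its side effect and yields const0_rtx. */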
2040
2041 #ifdef REG_PARM_STACK_SPACE
2042 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2043 #endif
2044
2045 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2046 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2047 must_preallocate = 1;
2048 #endif
2049
2050 /* Set up a place to return a structure. */
2051
2052 /* Cater to broken compilers. */
2053 if (aggregate_value_p (exp, fndecl))
2054 {
2055 /* This call returns a big structure. */
2056 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2057
2058 #ifdef PCC_STATIC_STRUCT_RETURN
2059 {
2060 pcc_struct_value = 1;
2061 }
2062 #else /* not PCC_STATIC_STRUCT_RETURN */
2063 {
2064 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2065
2066 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2067 {
2068 /* The structure value address arg is already in actparms.
2069 Pull it out. It might be nice to just leave it there, but
2070 we need to set structure_value_addr. */
2071 tree return_arg = TREE_VALUE (actparms);
2072 actparms = TREE_CHAIN (actparms);
2073 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2074 VOIDmode, EXPAND_NORMAL);
2075 }
2076 else if (target && MEM_P (target))
2077 structure_value_addr = XEXP (target, 0);
2078 else
2079 {
2080 /* For variable-sized objects, we must be called with a target
2081 specified. If we were to allocate space on the stack here,
2082 we would have no way of knowing when to free it. */
2083 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2084
2085 mark_temp_addr_taken (d);
2086 structure_value_addr = XEXP (d, 0);
2087 target = 0;
2088 }
2089 }
2090 #endif /* not PCC_STATIC_STRUCT_RETURN */
2091 }
2092
2093 /* Figure out the amount to which the stack should be aligned. */
2094 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2095 if (fndecl)
2096 {
2097 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2098 if (i && i->preferred_incoming_stack_boundary)
2099 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2100 }
2101
2102 /* Operand 0 is a pointer-to-function; get the type of the function. */
2103 funtype = TREE_TYPE (addr);
2104 if (! POINTER_TYPE_P (funtype))
2105 abort ();
2106 funtype = TREE_TYPE (funtype);
2107
2108 /* Munge the tree to split complex arguments into their imaginary
2109 and real parts. */
2110 if (targetm.calls.split_complex_arg)
2111 {
2112 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2113 actparms = split_complex_values (actparms);
2114 }
2115 else
2116 type_arg_types = TYPE_ARG_TYPES (funtype);
2117
2118 if (flags & ECF_MAY_BE_ALLOCA)
2119 current_function_calls_alloca = 1;
2120
2121 /* If struct_value_rtx is 0, it means pass the address
2122 as if it were an extra parameter. */
2123 if (structure_value_addr && struct_value == 0)
2124 {
2125 /* If structure_value_addr is a REG other than
2126 virtual_outgoing_args_rtx, we can always use it. If it
2127 is not a REG, we must always copy it into a register.
2128 If it is virtual_outgoing_args_rtx, we must copy it to another
2129 register in some cases. */
2130 rtx temp = (!REG_P (structure_value_addr)
2131 || (ACCUMULATE_OUTGOING_ARGS
2132 && stack_arg_under_construction
2133 && structure_value_addr == virtual_outgoing_args_rtx)
2134 ? copy_addr_to_reg (convert_memory_address
2135 (Pmode, structure_value_addr))
2136 : structure_value_addr);
2137
2138 actparms
2139 = tree_cons (error_mark_node,
2140 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2141 temp),
2142 actparms);
2143 structure_value_addr_parm = 1;
2144 }
2145
2146 /* Count the arguments and set NUM_ACTUALS. */
2147 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2148 num_actuals++;
2149
2150 /* Compute number of named args.
2151 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2152
2153 if (type_arg_types != 0)
2154 n_named_args
2155 = (list_length (type_arg_types)
2156 /* Count the struct value address, if it is passed as a parm. */
2157 + structure_value_addr_parm);
2158 else
2159 /* If we know nothing, treat all args as named. */
2160 n_named_args = num_actuals;
2161
2162 /* Start updating where the next arg would go.
2163
2164 On some machines (such as the PA) indirect calls have a different
2165 calling convention than normal calls. The fourth argument in
2166 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2167 or not. */
2168 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2169
2170 /* Now possibly adjust the number of named args.
2171 Normally, don't include the last named arg if anonymous args follow.
2172 We do include the last named arg if
2173 targetm.calls.strict_argument_naming() returns nonzero.
2174 (If no anonymous args follow, the result of list_length is actually
2175 one too large. This is harmless.)
2176
2177 If targetm.calls.pretend_outgoing_varargs_named() returns
2178 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2179 this machine will be able to place unnamed args that were passed
2180 in registers into the stack. So treat all args as named. This
2181 allows the insns emitting for a specific argument list to be
2182 independent of the function declaration.
2183
2184 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2185 we do not have any reliable way to pass unnamed args in
2186 registers, so we must force them into memory. */
2187
2188 if (type_arg_types != 0
2189 && targetm.calls.strict_argument_naming (&args_so_far))
2190 ;
2191 else if (type_arg_types != 0
2192 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2193 /* Don't include the last named arg. */
2194 --n_named_args;
2195 else
2196 /* Treat all args as named. */
2197 n_named_args = num_actuals;
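
/* Example (illustrative): for `int f (int, ...)' called with three
actual arguments, list_length (type_arg_types) is 1, so N_NAMED_ARGS
starts at 1; it stays 1 under strict argument naming, drops to 0
when the target cannot pretend outgoing varargs are named, and
becomes 3 (all args treated as named) otherwise. */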
2198
2199 /* Make a vector to hold all the information about each arg. */
2200 args = alloca (num_actuals * sizeof (struct arg_data));
2201 memset (args, 0, num_actuals * sizeof (struct arg_data));
2202
2203 /* Build up entries in the ARGS array, compute the size of the
2204 arguments into ARGS_SIZE, etc. */
2205 initialize_argument_information (num_actuals, args, &args_size,
2206 n_named_args, actparms, fndecl,
2207 &args_so_far, reg_parm_stack_space,
2208 &old_stack_level, &old_pending_adj,
2209 &must_preallocate, &flags,
2210 &try_tail_call, CALL_FROM_THUNK_P (exp));
2211
2212 if (args_size.var)
2213 {
2214 /* If this function requires a variable-sized argument list, don't
2215 try to make a cse'able block for this call. We may be able to
2216 do this eventually, but it is too complicated to keep track of
2217 what insns go in the cse'able block and which don't. */
2218
2219 flags &= ~ECF_LIBCALL_BLOCK;
2220 must_preallocate = 1;
2221 }
2222
2223 /* Now make final decision about preallocating stack space. */
2224 must_preallocate = finalize_must_preallocate (must_preallocate,
2225 num_actuals, args,
2226 &args_size);
2227
2228 /* If the structure value address will reference the stack pointer, we
2229 must stabilize it. We don't need to do this if we know that we are
2230 not going to adjust the stack pointer in processing this call. */
2231
2232 if (structure_value_addr
2233 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2234 || reg_mentioned_p (virtual_outgoing_args_rtx,
2235 structure_value_addr))
2236 && (args_size.var
2237 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2238 structure_value_addr = copy_to_reg (structure_value_addr);
2239
2240 /* Tail calls can make things harder to debug, and we've traditionally
2241 pushed these optimizations into -O2. Don't try if we're already
2242 expanding a call, as that means we're an argument. Don't try if
2243 there are cleanups, as we know there's code to follow the call.
2244
2245 If rtx_equal_function_value_matters is false, that means we've
2246 finished with regular parsing. Which means that some of the
2247 machinery we use to generate tail-calls is no longer in place.
2248 This is most often true of sjlj-exceptions, which we couldn't
2249 tail-call to anyway.
2250
2251 If current_nesting_level () == 0, we're being called after
2252 the function body has been expanded. This can happen when
2253 setting up trampolines in expand_function_end. */
2254 if (currently_expanding_call++ != 0
2255 || !flag_optimize_sibling_calls
2256 || !rtx_equal_function_value_matters
2257 || current_nesting_level () == 0
2258 || args_size.var
2259 || lookup_stmt_eh_region (exp) >= 0)
2260 try_tail_call = 0;
2261
2262 /* Other reasons the tail call optimization may fail. */
2263 if (
2264 #ifdef HAVE_sibcall_epilogue
2265 !HAVE_sibcall_epilogue
2266 #else
2267 1
2268 #endif
2269 || !try_tail_call
2270 /* Doing sibling call optimization needs some work, since
2271 structure_value_addr can be allocated on the stack.
2272 It does not seem worth the effort since few optimizable
2273 sibling calls will return a structure. */
2274 || structure_value_addr != NULL_RTX
2275 /* Check whether the target is able to optimize the call
2276 into a sibcall. */
2277 || !targetm.function_ok_for_sibcall (fndecl, exp)
2278 /* Functions that do not return exactly once may not be sibcall
2279 optimized. */
2280 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2281 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2282 /* If the called function is nested in the current one, it might access
2283 some of the caller's arguments, but could clobber them beforehand if
2284 the argument areas are shared. */
2285 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2286 /* If this function requires more stack slots than the current
2287 function, we cannot change it into a sibling call. */
2288 || args_size.constant > current_function_args_size
2289 /* If the callee pops its own arguments, then it must pop exactly
2290 the same number of arguments as the current function. */
2291 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2292 != RETURN_POPS_ARGS (current_function_decl,
2293 TREE_TYPE (current_function_decl),
2294 current_function_args_size))
2295 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2296 try_tail_call = 0;
2297
2298 if (try_tail_call)
2299 {
2300 int end, inc;
2301 actparms = NULL_TREE;
2302 /* Ok, we're going to give the tail call the old college try.
2303 This means we're going to evaluate the function arguments
2304 up to three times. There are two degrees of badness we can
2305 encounter, those that can be unsaved and those that can't.
2306 (See unsafe_for_reeval commentary for details.)
2307
2308 Generate a new argument list. Pass safe arguments through
2309 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2310 For hard badness, evaluate them now and put their resulting
2311 rtx in a temporary VAR_DECL.
2312
2313 initialize_argument_information has ordered the array for the
2314 order to be pushed, and we must remember this when reconstructing
2315 the original argument order. */
2316
2317 if (PUSH_ARGS_REVERSED)
2318 {
2319 inc = 1;
2320 i = 0;
2321 end = num_actuals;
2322 }
2323 else
2324 {
2325 inc = -1;
2326 i = num_actuals - 1;
2327 end = -1;
2328 }
2329
2330 for (; i != end; i += inc)
2331 {
2332 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2333 }
2334 /* Do the same for the function address if it is an expression. */
2335 if (!fndecl)
2336 addr = fix_unsafe_tree (addr);
2337 }
2338
2339
2340 /* Ensure current function's preferred stack boundary is at least
2341 what we need. We don't have to increase alignment for recursive
2342 functions. */
2343 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2344 && fndecl != current_function_decl)
2345 cfun->preferred_stack_boundary = preferred_stack_boundary;
2346 if (fndecl == current_function_decl)
2347 cfun->recursive_call_emit = true;
2348
2349 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2350
2351 /* We want to make two insn chains; one for a sibling call, the other
2352 for a normal call. We will select one of the two chains after
2353 initial RTL generation is complete. */
2354 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2355 {
2356 int sibcall_failure = 0;
2357 /* We want to emit any pending stack adjustments before the tail
2358 recursion "call". That way we know any adjustment after the tail
2359 recursion call can be ignored if we indeed use the tail
2360 call expansion. */
2361 int save_pending_stack_adjust = 0;
2362 int save_stack_pointer_delta = 0;
2363 rtx insns;
2364 rtx before_call, next_arg_reg;
2365
2366 if (pass == 0)
2367 {
2368 /* Emit any queued insns now; otherwise they would end up in
2369 only one of the alternates. */
2370 emit_queue ();
2371
2372 /* State variables we need to save and restore between
2373 iterations. */
2374 save_pending_stack_adjust = pending_stack_adjust;
2375 save_stack_pointer_delta = stack_pointer_delta;
2376 }
2377 if (pass)
2378 flags &= ~ECF_SIBCALL;
2379 else
2380 flags |= ECF_SIBCALL;
2381
2382 /* Other state variables that we must reinitialize each time
2383 through the loop (that are not initialized by the loop itself). */
2384 argblock = 0;
2385 call_fusage = 0;
2386
2387 /* Start a new sequence for the normal call case.
2388
2389 From this point on, if the sibling call fails, we want to set
2390 sibcall_failure instead of continuing the loop. */
2391 start_sequence ();
2392
2393 if (pass == 0)
2394 {
2395 /* We know at this point that there are not currently any
2396 pending cleanups. If, however, in the process of evaluating
2397 the arguments we were to create some, we'll need to be
2398 able to get rid of them. */
2399 expand_start_target_temps ();
2400 }
2401
2402 /* Don't let pending stack adjusts add up to too much.
2403 Also, do all pending adjustments now if there is any chance
2404 this might be a call to alloca or if we are expanding a sibling
2405 call sequence or if we are calling a function that is to return
2406 with stack pointer depressed. */
2407 if (pending_stack_adjust >= 32
2408 || (pending_stack_adjust > 0
2409 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2410 || pass == 0)
2411 do_pending_stack_adjust ();
2412
2413 /* When calling a const function, we must pop the stack args right away,
2414 so that the pop is deleted or moved with the call. */
2415 if (pass && (flags & ECF_LIBCALL_BLOCK))
2416 NO_DEFER_POP;
2417
2418 /* Precompute any arguments as needed. */
2419 if (pass)
2420 precompute_arguments (flags, num_actuals, args);
2421
2422 /* Now we are about to start emitting insns that can be deleted
2423 if a libcall is deleted. */
2424 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2425 start_sequence ();
2426
2427 adjusted_args_size = args_size;
2428 /* Compute the actual size of the argument block required. The variable
2429 and constant sizes must be combined, the size may have to be rounded,
2430 and there may be a minimum required size. When generating a sibcall
2431 pattern, do not round up, since we'll be re-using whatever space our
2432 caller provided. */
2433 unadjusted_args_size
2434 = compute_argument_block_size (reg_parm_stack_space,
2435 &adjusted_args_size,
2436 (pass == 0 ? 0
2437 : preferred_stack_boundary));
2438
2439 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2440
2441 /* The argument block when performing a sibling call is the
2442 incoming argument block. */
2443 if (pass == 0)
2444 {
2445 argblock = virtual_incoming_args_rtx;
2446 argblock
2447 #ifdef STACK_GROWS_DOWNWARD
2448 = plus_constant (argblock, current_function_pretend_args_size);
2449 #else
2450 = plus_constant (argblock, -current_function_pretend_args_size);
2451 #endif
2452 stored_args_map = sbitmap_alloc (args_size.constant);
2453 sbitmap_zero (stored_args_map);
2454 }
2455
2456 /* If we have no actual push instructions, or shouldn't use them,
2457 make space for all args right now. */
2458 else if (adjusted_args_size.var != 0)
2459 {
2460 if (old_stack_level == 0)
2461 {
2462 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2463 old_stack_pointer_delta = stack_pointer_delta;
2464 old_pending_adj = pending_stack_adjust;
2465 pending_stack_adjust = 0;
2466 /* stack_arg_under_construction says whether a stack arg is
2467 being constructed at the old stack level. Pushing the stack
2468 gets a clean outgoing argument block. */
2469 old_stack_arg_under_construction = stack_arg_under_construction;
2470 stack_arg_under_construction = 0;
2471 }
2472 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2473 }
2474 else
2475 {
2476 /* Note that we must go through the motions of allocating an argument
2477 block even if the size is zero because we may be storing args
2478 in the area reserved for register arguments, which may be part of
2479 the stack frame. */
2480
2481 int needed = adjusted_args_size.constant;
2482
2483 /* Store the maximum argument space used. It will be pushed by
2484 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2485 checking). */
2486
2487 if (needed > current_function_outgoing_args_size)
2488 current_function_outgoing_args_size = needed;
2489
2490 if (must_preallocate)
2491 {
2492 if (ACCUMULATE_OUTGOING_ARGS)
2493 {
2494 /* Since the stack pointer will never be pushed, it is
2495 possible for the evaluation of a parm to clobber
2496 something we have already written to the stack.
2497 Since most function calls on RISC machines do not use
2498 the stack, this is uncommon, but must work correctly.
2499
2500 Therefore, we save any area of the stack that was already
2501 written and that we are using. Here we set up to do this
2502 by making a new stack usage map from the old one. The
2503 actual save will be done by store_one_arg.
2504
2505 Another approach might be to try to reorder the argument
2506 evaluations to avoid this conflicting stack usage. */
2507
2508 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2509 /* Since we will be writing into the entire argument area,
2510 the map must be allocated for its entire size, not just
2511 the part that is the responsibility of the caller. */
2512 needed += reg_parm_stack_space;
2513 #endif
2514
2515 #ifdef ARGS_GROW_DOWNWARD
2516 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2517 needed + 1);
2518 #else
2519 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2520 needed);
2521 #endif
2522 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2523
2524 if (initial_highest_arg_in_use)
2525 memcpy (stack_usage_map, initial_stack_usage_map,
2526 initial_highest_arg_in_use);
2527
2528 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2529 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2530 (highest_outgoing_arg_in_use
2531 - initial_highest_arg_in_use));
2532 needed = 0;
2533
2534 /* The address of the outgoing argument list must not be
2535 copied to a register here, because argblock would be left
2536 pointing to the wrong place after the call to
2537 allocate_dynamic_stack_space below. */
2538
2539 argblock = virtual_outgoing_args_rtx;
2540 }
2541 else
2542 {
2543 if (inhibit_defer_pop == 0)
2544 {
2545 /* Try to reuse some or all of the pending_stack_adjust
2546 to get this space. */
2547 needed
2548 = (combine_pending_stack_adjustment_and_call
2549 (unadjusted_args_size,
2550 &adjusted_args_size,
2551 preferred_unit_stack_boundary));
2552
2553 /* combine_pending_stack_adjustment_and_call computes
2554 an adjustment before the arguments are allocated.
2555 Account for them and see whether or not the stack
2556 needs to go up or down. */
2557 needed = unadjusted_args_size - needed;
2558
2559 if (needed < 0)
2560 {
2561 /* We're releasing stack space. */
2562 /* ??? We can avoid any adjustment at all if we're
2563 already aligned. FIXME. */
2564 pending_stack_adjust = -needed;
2565 do_pending_stack_adjust ();
2566 needed = 0;
2567 }
2568 else
2569 /* We need to allocate space. We'll do that in
2570 push_block below. */
2571 pending_stack_adjust = 0;
2572 }
2573
2574 /* Special-case this because the overhead of `push_block' in
2575 this case is non-trivial. */
2576 if (needed == 0)
2577 argblock = virtual_outgoing_args_rtx;
2578 else
2579 {
2580 argblock = push_block (GEN_INT (needed), 0, 0);
2581 #ifdef ARGS_GROW_DOWNWARD
2582 argblock = plus_constant (argblock, needed);
2583 #endif
2584 }
2585
2586 /* We only really need to call `copy_to_reg' in the case
2587 where push insns are going to be used to pass ARGBLOCK
2588 to a function call in ARGS. In that case, the stack
2589 pointer changes value from the allocation point to the
2590 call point, and hence the value of
2591 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2592 as well always do it. */
2593 argblock = copy_to_reg (argblock);
2594 }
2595 }
2596 }
2597
2598 if (ACCUMULATE_OUTGOING_ARGS)
2599 {
2600 /* The save/restore code in store_one_arg handles all
2601 cases except one: a constructor call (including a C
2602 function returning a BLKmode struct) to initialize
2603 an argument. */
2604 if (stack_arg_under_construction)
2605 {
2606 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2607 rtx push_size = GEN_INT (reg_parm_stack_space
2608 + adjusted_args_size.constant);
2609 #else
2610 rtx push_size = GEN_INT (adjusted_args_size.constant);
2611 #endif
2612 if (old_stack_level == 0)
2613 {
2614 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2615 NULL_RTX);
2616 old_stack_pointer_delta = stack_pointer_delta;
2617 old_pending_adj = pending_stack_adjust;
2618 pending_stack_adjust = 0;
2619 /* stack_arg_under_construction says whether a stack
2620 arg is being constructed at the old stack level.
2621 Pushing the stack gets a clean outgoing argument
2622 block. */
2623 old_stack_arg_under_construction
2624 = stack_arg_under_construction;
2625 stack_arg_under_construction = 0;
2626 /* Make a new map for the new argument list. */
2627 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2628 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2629 highest_outgoing_arg_in_use = 0;
2630 }
2631 allocate_dynamic_stack_space (push_size, NULL_RTX,
2632 BITS_PER_UNIT);
2633 }
2634
2635 /* If argument evaluation might modify the stack pointer,
2636 copy the address of the argument list to a register. */
2637 for (i = 0; i < num_actuals; i++)
2638 if (args[i].pass_on_stack)
2639 {
2640 argblock = copy_addr_to_reg (argblock);
2641 break;
2642 }
2643 }
2644
2645 compute_argument_addresses (args, argblock, num_actuals);
2646
2647 /* If we push args individually in reverse order, perform stack alignment
2648 before the first push (the last arg). */
2649 if (PUSH_ARGS_REVERSED && argblock == 0
2650 && adjusted_args_size.constant != unadjusted_args_size)
2651 {
2652 /* When the stack adjustment is pending, we get better code
2653 by combining the adjustments. */
2654 if (pending_stack_adjust
2655 && ! (flags & ECF_LIBCALL_BLOCK)
2656 && ! inhibit_defer_pop)
2657 {
2658 pending_stack_adjust
2659 = (combine_pending_stack_adjustment_and_call
2660 (unadjusted_args_size,
2661 &adjusted_args_size,
2662 preferred_unit_stack_boundary));
2663 do_pending_stack_adjust ();
2664 }
2665 else if (argblock == 0)
2666 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2667 - unadjusted_args_size));
2668 }
2669 /* Now that the stack is properly aligned, pops can't safely
2670 be deferred during the evaluation of the arguments. */
2671 NO_DEFER_POP;
2672
2673 funexp = rtx_for_function_call (fndecl, addr);
2674
2675 /* Figure out the register where the value, if any, will come back. */
2676 valreg = 0;
2677 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2678 && ! structure_value_addr)
2679 {
2680 if (pcc_struct_value)
2681 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2682 fndecl, (pass == 0));
2683 else
2684 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2685 }
2686
2687 /* Precompute all register parameters. It isn't safe to compute anything
2688 once we have started filling any specific hard regs. */
2689 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2690
2691 if (TREE_OPERAND (exp, 2))
2692 static_chain_value = expand_expr (TREE_OPERAND (exp, 2),
2693 NULL_RTX, VOIDmode, 0);
2694 else
2695 static_chain_value = 0;
2696
2697 #ifdef REG_PARM_STACK_SPACE
2698 /* Save the fixed argument area if it's part of the caller's frame and
2699 is clobbered by argument setup for this call. */
2700 if (ACCUMULATE_OUTGOING_ARGS && pass)
2701 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2702 &low_to_save, &high_to_save);
2703 #endif
2704
2705 /* Now store (and compute if necessary) all non-register parms.
2706 These come before register parms, since they can require block-moves,
2707 which could clobber the registers used for register parms.
2708 Parms which have partial registers are not stored here,
2709 but we do preallocate space here if they want that. */
2710
2711 for (i = 0; i < num_actuals; i++)
2712 if (args[i].reg == 0 || args[i].pass_on_stack)
2713 {
2714 rtx before_arg = get_last_insn ();
2715
2716 if (store_one_arg (&args[i], argblock, flags,
2717 adjusted_args_size.var != 0,
2718 reg_parm_stack_space)
2719 || (pass == 0
2720 && check_sibcall_argument_overlap (before_arg,
2721 &args[i], 1)))
2722 sibcall_failure = 1;
2723
2724 if (flags & ECF_CONST
2725 && args[i].stack
2726 && args[i].value == args[i].stack)
2727 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2728 gen_rtx_USE (VOIDmode,
2729 args[i].value),
2730 call_fusage);
2731 }
2732
2733 /* If we have a parm that is passed in registers but not in memory
2734 and whose alignment does not permit a direct copy into registers,
2735 make a group of pseudos that correspond to each register that we
2736 will later fill. */
2737 if (STRICT_ALIGNMENT)
2738 store_unaligned_arguments_into_pseudos (args, num_actuals);
2739
2740 /* Now store any partially-in-registers parm.
2741 This is the last place a block-move can happen. */
2742 if (reg_parm_seen)
2743 for (i = 0; i < num_actuals; i++)
2744 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2745 {
2746 rtx before_arg = get_last_insn ();
2747
2748 if (store_one_arg (&args[i], argblock, flags,
2749 adjusted_args_size.var != 0,
2750 reg_parm_stack_space)
2751 || (pass == 0
2752 && check_sibcall_argument_overlap (before_arg,
2753 &args[i], 1)))
2754 sibcall_failure = 1;
2755 }
2756
2757 /* If we pushed args in forward order, perform stack alignment
2758 after pushing the last arg. */
2759 if (!PUSH_ARGS_REVERSED && argblock == 0)
2760 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2761 - unadjusted_args_size));
2762
2763 /* If register arguments require space on the stack and stack space
2764 was not preallocated, allocate stack space here for arguments
2765 passed in registers. */
2766 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2767 if (!ACCUMULATE_OUTGOING_ARGS
2768 && must_preallocate == 0 && reg_parm_stack_space > 0)
2769 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2770 #endif
2771
2772 /* Pass the function the address in which to return a
2773 structure value. */
2774 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2775 {
2776 structure_value_addr
2777 = convert_memory_address (Pmode, structure_value_addr);
2778 emit_move_insn (struct_value,
2779 force_reg (Pmode,
2780 force_operand (structure_value_addr,
2781 NULL_RTX)));
2782
2783 if (REG_P (struct_value))
2784 use_reg (&call_fusage, struct_value);
2785 }
2786
2787 funexp = prepare_call_address (funexp, static_chain_value,
2788 &call_fusage, reg_parm_seen, pass == 0);
2789
2790 load_register_parameters (args, num_actuals, &call_fusage, flags,
2791 pass == 0, &sibcall_failure);
2792
2793 /* Perform postincrements before actually calling the function. */
2794 emit_queue ();
2795
2796 /* Save a pointer to the last insn before the call, so that we can
2797 later safely search backwards to find the CALL_INSN. */
2798 before_call = get_last_insn ();
2799
2800 /* Set up next argument register. For sibling calls on machines
2801 with register windows this should be the incoming register. */
2802 #ifdef FUNCTION_INCOMING_ARG
2803 if (pass == 0)
2804 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2805 void_type_node, 1);
2806 else
2807 #endif
2808 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2809 void_type_node, 1);
2810
2811 /* All arguments and registers used for the call must be set up by
2812 now! */
2813
2814 /* Stack must be properly aligned now. */
2815 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
2816 abort ();
2817
2818 /* Generate the actual call instruction. */
2819 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2820 adjusted_args_size.constant, struct_value_size,
2821 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2822 flags, & args_so_far);
2823
2824 /* If call is cse'able, make appropriate pair of reg-notes around it.
2825 Test valreg so we don't crash; may safely ignore `const'
2826 if return type is void. Disable for PARALLEL return values, because
2827 we have no way to move such values into a pseudo register. */
2828 if (pass && (flags & ECF_LIBCALL_BLOCK))
2829 {
2830 rtx insns;
2831 rtx insn;
2832 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2833
2834 insns = get_insns ();
2835
2836 /* Expansion of block moves may have introduced a loop that must
2837 not appear inside a libcall block. */
2838 for (insn = insns; insn; insn = NEXT_INSN (insn))
2839 if (JUMP_P (insn))
2840 failed = true;
2841
2842 if (failed)
2843 {
2844 end_sequence ();
2845 emit_insn (insns);
2846 }
2847 else
2848 {
2849 rtx note = 0;
2850 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2851
2852 /* Mark the return value as a pointer if needed. */
2853 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2854 mark_reg_pointer (temp,
2855 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2856
2857 end_sequence ();
2858 if (flag_unsafe_math_optimizations
2859 && fndecl
2860 && DECL_BUILT_IN (fndecl)
2861 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2862 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2863 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2864 note = gen_rtx_fmt_e (SQRT,
2865 GET_MODE (temp),
2866 args[0].initial_value);
2867 else
2868 {
2869 /* Construct an "equal form" for the value which
2870 mentions all the arguments in order as well as
2871 the function name. */
2872 for (i = 0; i < num_actuals; i++)
2873 note = gen_rtx_EXPR_LIST (VOIDmode,
2874 args[i].initial_value, note);
2875 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2876
2877 if (flags & ECF_PURE)
2878 note = gen_rtx_EXPR_LIST (VOIDmode,
2879 gen_rtx_USE (VOIDmode,
2880 gen_rtx_MEM (BLKmode,
2881 gen_rtx_SCRATCH (VOIDmode))),
2882 note);
2883 }
2884 emit_libcall_block (insns, temp, valreg, note);
2885
2886 valreg = temp;
2887 }
2888 }
2889 else if (pass && (flags & ECF_MALLOC))
2890 {
2891 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2892 rtx last, insns;
2893
2894 /* The return value from a malloc-like function is a pointer. */
2895 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2896 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2897
2898 emit_move_insn (temp, valreg);
2899
2900 /* The return value from a malloc-like function cannot alias
2901 anything else. */
2902 last = get_last_insn ();
2903 REG_NOTES (last) =
2904 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2905
2906 /* Write out the sequence. */
2907 insns = get_insns ();
2908 end_sequence ();
2909 emit_insn (insns);
2910 valreg = temp;
2911 }
2912
2913 /* For calls to `setjmp', etc., inform flow.c it should complain
2914 if nonvolatile values are live. For functions that cannot return,
2915 inform flow that control does not fall through. */
2916
2917 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
2918 {
2919 /* The barrier must be emitted
2920 immediately after the CALL_INSN. Some ports emit more
2921 than just a CALL_INSN above, so we must search for it here. */
2922
2923 rtx last = get_last_insn ();
2924 while (!CALL_P (last))
2925 {
2926 last = PREV_INSN (last);
2927 /* There was no CALL_INSN? */
2928 if (last == before_call)
2929 abort ();
2930 }
2931
2932 emit_barrier_after (last);
2933
2934 /* Stack adjustments after a noreturn call are dead code.
2935 However when NO_DEFER_POP is in effect, we must preserve
2936 stack_pointer_delta. */
2937 if (inhibit_defer_pop == 0)
2938 {
2939 stack_pointer_delta = old_stack_allocated;
2940 pending_stack_adjust = 0;
2941 }
2942 }
2943
2944 if (flags & ECF_LONGJMP)
2945 current_function_calls_longjmp = 1;
2946
2947 /* If value type not void, return an rtx for the value. */
2948
2949 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2950 || ignore)
2951 target = const0_rtx;
2952 else if (structure_value_addr)
2953 {
2954 if (target == 0 || !MEM_P (target))
2955 {
2956 target
2957 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2958 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2959 structure_value_addr));
2960 set_mem_attributes (target, exp, 1);
2961 }
2962 }
2963 else if (pcc_struct_value)
2964 {
2965 /* This is the special C++ case where we need to
2966 know what the true target was. We take care to
2967 never use this value more than once in one expression. */
2968 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2969 copy_to_reg (valreg));
2970 set_mem_attributes (target, exp, 1);
2971 }
2972 /* Handle calls that return values in multiple non-contiguous locations.
2973 The Irix 6 ABI has examples of this. */
2974 else if (GET_CODE (valreg) == PARALLEL)
2975 {
2976 if (target == 0)
2977 {
2978 /* This will only be assigned once, so it can be readonly. */
2979 tree nt = build_qualified_type (TREE_TYPE (exp),
2980 (TYPE_QUALS (TREE_TYPE (exp))
2981 | TYPE_QUAL_CONST));
2982
2983 target = assign_temp (nt, 0, 1, 1);
2984 preserve_temp_slots (target);
2985 }
2986
2987 if (! rtx_equal_p (target, valreg))
2988 emit_group_store (target, valreg, TREE_TYPE (exp),
2989 int_size_in_bytes (TREE_TYPE (exp)));
2990
2991 /* We cannot support sibling calls for this case. */
2992 sibcall_failure = 1;
2993 }
2994 else if (target
2995 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2996 && GET_MODE (target) == GET_MODE (valreg))
2997 {
2998 /* TARGET and VALREG cannot be equal at this point because the
2999 latter would not have REG_FUNCTION_VALUE_P true, while the
3000 former would if it were referring to the same register.
3001
3002 If they refer to the same register, this move will be a no-op,
3003 except when function inlining is being done. */
3004 emit_move_insn (target, valreg);
3005
3006 /* If we are setting a MEM, this code must be executed. Since it is
3007 emitted after the call insn, sibcall optimization cannot be
3008 performed in that case. */
3009 if (MEM_P (target))
3010 sibcall_failure = 1;
3011 }
3012 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3013 {
3014 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3015
3016 /* We cannot support sibling calls for this case. */
3017 sibcall_failure = 1;
3018 }
3019 else
3020 {
3021 if (shift_returned_value (TREE_TYPE (exp), &valreg))
3022 sibcall_failure = 1;
3023
3024 target = copy_to_reg (valreg);
3025 }
3026
3027 if (targetm.calls.promote_function_return(funtype))
3028 {
3029 /* If we promoted this return value, make the proper SUBREG. TARGET
3030 might be const0_rtx here, so be careful. */
3031 if (REG_P (target)
3032 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3033 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3034 {
3035 tree type = TREE_TYPE (exp);
3036 int unsignedp = TYPE_UNSIGNED (type);
3037 int offset = 0;
3038
3039 /* If we don't promote as expected, something is wrong. */
3040 if (GET_MODE (target)
3041 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3042 abort ();
3043
3044 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3045 && GET_MODE_SIZE (GET_MODE (target))
3046 > GET_MODE_SIZE (TYPE_MODE (type)))
3047 {
3048 offset = GET_MODE_SIZE (GET_MODE (target))
3049 - GET_MODE_SIZE (TYPE_MODE (type));
3050 if (! BYTES_BIG_ENDIAN)
3051 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3052 else if (! WORDS_BIG_ENDIAN)
3053 offset %= UNITS_PER_WORD;
3054 }
3055 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3056 SUBREG_PROMOTED_VAR_P (target) = 1;
3057 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3058 }
3059 }
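
/* Worked example for the offset computation above (hypothetical
64-bit big-endian target): an SImode value promoted into a DImode
return register gives offset = 8 - 4 = 4, so the SUBREG selects the
low-order half of the register, where the promoted value lives. */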
3060
3061 /* If size of args is variable or this was a constructor call for a stack
3062 argument, restore saved stack-pointer value. */
3063
3064 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3065 {
3066 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3067 stack_pointer_delta = old_stack_pointer_delta;
3068 pending_stack_adjust = old_pending_adj;
3069 stack_arg_under_construction = old_stack_arg_under_construction;
3070 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3071 stack_usage_map = initial_stack_usage_map;
3072 sibcall_failure = 1;
3073 }
3074 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3075 {
3076 #ifdef REG_PARM_STACK_SPACE
3077 if (save_area)
3078 restore_fixed_argument_area (save_area, argblock,
3079 high_to_save, low_to_save);
3080 #endif
3081
3082 /* If we saved any argument areas, restore them. */
3083 for (i = 0; i < num_actuals; i++)
3084 if (args[i].save_area)
3085 {
3086 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3087 rtx stack_area
3088 = gen_rtx_MEM (save_mode,
3089 memory_address (save_mode,
3090 XEXP (args[i].stack_slot, 0)));
3091
3092 if (save_mode != BLKmode)
3093 emit_move_insn (stack_area, args[i].save_area);
3094 else
3095 emit_block_move (stack_area, args[i].save_area,
3096 GEN_INT (args[i].locate.size.constant),
3097 BLOCK_OP_CALL_PARM);
3098 }
3099
3100 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3101 stack_usage_map = initial_stack_usage_map;
3102 }
3103
3104 /* If this was alloca, record the new stack level for nonlocal gotos.
3105 Check for the handler slots since we might not have a save area
3106 for non-local gotos. */
3107
3108 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3109 update_nonlocal_goto_save_area ();
3110
3111 /* Free up storage we no longer need. */
3112 for (i = 0; i < num_actuals; ++i)
3113 if (args[i].aligned_regs)
3114 free (args[i].aligned_regs);
3115
3116 if (pass == 0)
3117 {
3118 /* Undo the fake expand_start_target_temps we did earlier. If
3119 there had been any cleanups created, we've already set
3120 sibcall_failure. */
3121 expand_end_target_temps ();
3122 }
3123
3124 /* If this function is returning into a memory location marked as
3125 readonly, it means it is initializing that location. We normally treat
3126 functions as not clobbering such locations, so we need to specify that
3127 this one does. We do this by adding the appropriate CLOBBER to the
3128 CALL_INSN function usage list. This cannot be done by emitting a
3129 standalone CLOBBER after the call because the latter would be ignored
3130 by at least the delay slot scheduling pass. We do this now instead of
3131 adding to call_fusage before the call to emit_call_1 because TARGET
3132 may be modified in the meantime. */
3133 if (structure_value_addr != 0 && target != 0
3134 && MEM_P (target) && RTX_UNCHANGING_P (target))
3135 add_function_usage_to
3136 (last_call_insn (),
3137 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
3138 NULL_RTX));
3139
3140 insns = get_insns ();
3141 end_sequence ();
3142
3143 if (pass == 0)
3144 {
3145 tail_call_insns = insns;
3146
3147 /* Restore the pending stack adjustment now that we have
3148 finished generating the sibling call sequence. */
3149
3150 pending_stack_adjust = save_pending_stack_adjust;
3151 stack_pointer_delta = save_stack_pointer_delta;
3152
3153 /* Prepare arg structure for next iteration. */
3154 for (i = 0; i < num_actuals; i++)
3155 {
3156 args[i].value = 0;
3157 args[i].aligned_regs = 0;
3158 args[i].stack = 0;
3159 }
3160
3161 sbitmap_free (stored_args_map);
3162 }
3163 else
3164 {
3165 normal_call_insns = insns;
3166
3167 /* Verify that we've deallocated all the stack we used. */
3168 if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
3169 && old_stack_allocated != stack_pointer_delta
3170 - pending_stack_adjust)
3171 abort ();
3172 }
3173
3174 /* If something prevents making this a sibling call,
3175 zero out the sequence. */
3176 if (sibcall_failure)
3177 tail_call_insns = NULL_RTX;
3178 else
3179 break;
3180 }
3181
3182 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3183 arguments too, as the argument area is now clobbered by the call. */
3184 if (tail_call_insns)
3185 {
3186 emit_insn (tail_call_insns);
3187 cfun->tail_call_emit = true;
3188 }
3189 else
3190 emit_insn (normal_call_insns);
3191
3192 currently_expanding_call--;
3193
3194 /* If this function returns with the stack pointer depressed, ensure
3195 this block saves and restores the stack pointer, show it was
3196 changed, and adjust for any outgoing arg space. */
3197 if (flags & ECF_SP_DEPRESSED)
3198 {
3199 clear_pending_stack_adjust ();
3200 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3201 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3202 }
3203
3204 return target;
3205 }
3206
3207 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3208 this function's incoming arguments.
3209
3210 At the start of RTL generation we know the only REG_EQUIV notes
3211 in the rtl chain are those for incoming arguments, so we can safely
3212 flush any REG_EQUIV note.
3213
3214 This is (slight) overkill. We could keep track of the highest
3215 argument we clobber and be more selective in removing notes, but it
3216 does not seem to be worth the effort. */
3217 void
3218 fixup_tail_calls (void)
3219 {
3220 rtx insn;
3221 tree arg;
3222
3223 purge_reg_equiv_notes ();
3224
3225 /* A sibling call sequence also may invalidate the RTX_UNCHANGING_P
3226 flag of some incoming arguments' MEM RTLs, because it can write into
3227 those slots. We clear all those bits now.
3228
3229 This is (slight) overkill; we could keep track of which arguments
3230 we actually write into. */
3231 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3232 {
3233 if (INSN_P (insn))
3234 purge_mem_unchanging_flag (PATTERN (insn));
3235 }
3236
3237 /* Similarly, invalidate RTX_UNCHANGING_P for any incoming
3238 arguments passed in registers. */
3239 for (arg = DECL_ARGUMENTS (current_function_decl);
3240 arg;
3241 arg = TREE_CHAIN (arg))
3242 {
3243 if (REG_P (DECL_RTL (arg)))
3244 RTX_UNCHANGING_P (DECL_RTL (arg)) = false;
3245 }
3246 }
3247
3248 /* Traverse an argument list in VALUES and expand all complex
3249 arguments into their components. */
3250 tree
3251 split_complex_values (tree values)
3252 {
3253 tree p;
3254
3255 /* Before allocating memory, check for the common case of no complex. */
3256 for (p = values; p; p = TREE_CHAIN (p))
3257 {
3258 tree type = TREE_TYPE (TREE_VALUE (p));
3259 if (type && TREE_CODE (type) == COMPLEX_TYPE
3260 && targetm.calls.split_complex_arg (type))
3261 goto found;
3262 }
3263 return values;
3264
3265 found:
3266 values = copy_list (values);
3267
3268 for (p = values; p; p = TREE_CHAIN (p))
3269 {
3270 tree complex_value = TREE_VALUE (p);
3271 tree complex_type;
3272
3273 complex_type = TREE_TYPE (complex_value);
3274 if (!complex_type)
3275 continue;
3276
3277 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3278 && targetm.calls.split_complex_arg (complex_type))
3279 {
3280 tree subtype;
3281 tree real, imag, next;
3282
3283 subtype = TREE_TYPE (complex_type);
3284 complex_value = save_expr (complex_value);
3285 real = build1 (REALPART_EXPR, subtype, complex_value);
3286 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3287
3288 TREE_VALUE (p) = real;
3289 next = TREE_CHAIN (p);
3290 imag = build_tree_list (NULL_TREE, imag);
3291 TREE_CHAIN (p) = imag;
3292 TREE_CHAIN (imag) = next;
3293
3294 /* Skip the newly created node. */
3295 p = TREE_CHAIN (p);
3296 }
3297 }
3298
3299 return values;
3300 }
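
/* For instance (illustrative): if the target splits complex double,
an argument list entry C of that type becomes the pair
(REALPART_EXPR <C'>, IMAGPART_EXPR <C'>), where C' is C wrapped in
a SAVE_EXPR so the operand is evaluated only once. */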
3301
3302 /* Traverse a list of TYPES and expand all complex types into their
3303 components. */
3304 tree
3305 split_complex_types (tree types)
3306 {
3307 tree p;
3308
3309 /* Before allocating memory, check for the common case of no complex. */
3310 for (p = types; p; p = TREE_CHAIN (p))
3311 {
3312 tree type = TREE_VALUE (p);
3313 if (TREE_CODE (type) == COMPLEX_TYPE
3314 && targetm.calls.split_complex_arg (type))
3315 goto found;
3316 }
3317 return types;
3318
3319 found:
3320 types = copy_list (types);
3321
3322 for (p = types; p; p = TREE_CHAIN (p))
3323 {
3324 tree complex_type = TREE_VALUE (p);
3325
3326 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3327 && targetm.calls.split_complex_arg (complex_type))
3328 {
3329 tree next, imag;
3330
3331 /* Rewrite complex type with component type. */
3332 TREE_VALUE (p) = TREE_TYPE (complex_type);
3333 next = TREE_CHAIN (p);
3334
3335 /* Add another component type for the imaginary part. */
3336 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3337 TREE_CHAIN (p) = imag;
3338 TREE_CHAIN (imag) = next;
3339
3340 /* Skip the newly created node. */
3341 p = TREE_CHAIN (p);
3342 }
3343 }
3344
3345 return types;
3346 }
3347 \f
3348 /* Output a library call to function FUN (a SYMBOL_REF rtx).
   The RETVAL parameter specifies whether the return value needs to be
   saved; the other parameters are documented in emit_library_call below.  */
3351
3352 static rtx
3353 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3354 enum libcall_type fn_type,
3355 enum machine_mode outmode, int nargs, va_list p)
3356 {
3357 /* Total size in bytes of all the stack-parms scanned so far. */
3358 struct args_size args_size;
3359 /* Size of arguments before any adjustments (such as rounding). */
3360 struct args_size original_args_size;
3361 int argnum;
3362 rtx fun;
3363 int inc;
3364 int count;
3365 rtx argblock = 0;
3366 CUMULATIVE_ARGS args_so_far;
3367 struct arg
3368 {
3369 rtx value;
3370 enum machine_mode mode;
3371 rtx reg;
3372 int partial;
3373 struct locate_and_pad_arg_data locate;
3374 rtx save_area;
3375 };
3376 struct arg *argvec;
3377 int old_inhibit_defer_pop = inhibit_defer_pop;
3378 rtx call_fusage = 0;
3379 rtx mem_value = 0;
3380 rtx valreg;
3381 int pcc_struct_value = 0;
3382 int struct_value_size = 0;
3383 int flags;
3384 int reg_parm_stack_space = 0;
3385 int needed;
3386 rtx before_call;
3387 tree tfom; /* type_for_mode (outmode, 0) */
3388
3389 #ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
3392 int low_to_save, high_to_save;
3393 rtx save_area = 0; /* Place that it is saved. */
3394 #endif
3395
  /* Record the initial extent of the outgoing argument area in use, so
     that it can be restored when the call has been expanded.  */
3397 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3398 char *initial_stack_usage_map = stack_usage_map;
3399
3400 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3401
3402 #ifdef REG_PARM_STACK_SPACE
3403 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3404 #endif
3405
  /* By default, library functions cannot throw.  */
3407 flags = ECF_NOTHROW;
3408
3409 switch (fn_type)
3410 {
3411 case LCT_NORMAL:
3412 break;
3413 case LCT_CONST:
3414 flags |= ECF_CONST;
3415 break;
3416 case LCT_PURE:
3417 flags |= ECF_PURE;
3418 break;
3419 case LCT_CONST_MAKE_BLOCK:
3420 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3421 break;
3422 case LCT_PURE_MAKE_BLOCK:
3423 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3424 break;
3425 case LCT_NORETURN:
3426 flags |= ECF_NORETURN;
3427 break;
3428 case LCT_THROW:
3429 flags = ECF_NORETURN;
3430 break;
3431 case LCT_ALWAYS_RETURN:
3432 flags = ECF_ALWAYS_RETURN;
3433 break;
3434 case LCT_RETURNS_TWICE:
3435 flags = ECF_RETURNS_TWICE;
3436 break;
3437 }
3438 fun = orgfun;
3439
3440 /* Ensure current function's preferred stack boundary is at least
3441 what we need. */
3442 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3443 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3444
3445 /* If this kind of value comes back in memory,
3446 decide where in memory it should come back. */
3447 if (outmode != VOIDmode)
3448 {
3449 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3450 if (aggregate_value_p (tfom, 0))
3451 {
3452 #ifdef PCC_STATIC_STRUCT_RETURN
3453 rtx pointer_reg
3454 = hard_function_value (build_pointer_type (tfom), 0, 0);
3455 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3456 pcc_struct_value = 1;
3457 if (value == 0)
3458 value = gen_reg_rtx (outmode);
3459 #else /* not PCC_STATIC_STRUCT_RETURN */
3460 struct_value_size = GET_MODE_SIZE (outmode);
3461 if (value != 0 && MEM_P (value))
3462 mem_value = value;
3463 else
3464 mem_value = assign_temp (tfom, 0, 1, 1);
3465 #endif
3466 /* This call returns a big structure. */
3467 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3468 }
3469 }
3470 else
3471 tfom = void_type_node;
3472
3473 /* ??? Unfinished: must pass the memory address as an argument. */
3474
3475 /* Copy all the libcall-arguments out of the varargs data
3476 and into a vector ARGVEC.
3477
3478 Compute how to pass each argument. We only support a very small subset
3479 of the full argument passing conventions to limit complexity here since
3480 library functions shouldn't have many args. */
3481
3482 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3483 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3484
3485 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3486 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3487 #else
3488 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3489 #endif
3490
3491 args_size.constant = 0;
3492 args_size.var = 0;
3493
3494 count = 0;
3495
3496 /* Now we are about to start emitting insns that can be deleted
3497 if a libcall is deleted. */
3498 if (flags & ECF_LIBCALL_BLOCK)
3499 start_sequence ();
3500
3501 push_temp_slots ();
3502
3503 /* If there's a structure value address to be passed,
3504 either pass it in the special place, or pass it as an extra argument. */
3505 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3506 {
3507 rtx addr = XEXP (mem_value, 0);
3508 nargs++;
3509
3510 /* Make sure it is a reasonable operand for a move or push insn. */
3511 if (!REG_P (addr) && !MEM_P (addr)
3512 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3513 addr = force_operand (addr, NULL_RTX);
3514
3515 argvec[count].value = addr;
3516 argvec[count].mode = Pmode;
3517 argvec[count].partial = 0;
3518
3519 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3520 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3521 abort ();
3522
3523 locate_and_pad_parm (Pmode, NULL_TREE,
3524 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3525 1,
3526 #else
3527 argvec[count].reg != 0,
3528 #endif
3529 0, NULL_TREE, &args_size, &argvec[count].locate);
3530
3531 if (argvec[count].reg == 0 || argvec[count].partial != 0
3532 || reg_parm_stack_space > 0)
3533 args_size.constant += argvec[count].locate.size.constant;
3534
3535 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3536
3537 count++;
3538 }
3539
3540 for (; count < nargs; count++)
3541 {
3542 rtx val = va_arg (p, rtx);
3543 enum machine_mode mode = va_arg (p, enum machine_mode);
3544
3545 /* We cannot convert the arg value to the mode the library wants here;
3546 must do it earlier where we know the signedness of the arg. */
3547 if (mode == BLKmode
3548 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3549 abort ();
3550
3551 /* There's no need to call protect_from_queue, because
3552 either emit_move_insn or emit_push_insn will do that. */
3553
3554 /* Make sure it is a reasonable operand for a move or push insn. */
3555 if (!REG_P (val) && !MEM_P (val)
3556 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3557 val = force_operand (val, NULL_RTX);
3558
3559 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3560 {
3561 rtx slot;
3562 int must_copy = ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3563 NULL_TREE, 1);
3564
	  /* If this was a CONST function, it is now PURE since it now
	     reads memory.  Note this must be checked before the flags
	     are cleared just below, or the test could never succeed.  */
	  if (flags & ECF_CONST)
	    {
	      flags &= ~ECF_CONST;
	      flags |= ECF_PURE;
	    }

	  /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
	     functions, so we have to pretend this isn't such a function.  */
	  if (flags & ECF_LIBCALL_BLOCK)
	    {
	      rtx insns = get_insns ();
	      end_sequence ();
	      emit_insn (insns);
	    }
	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3582
	  if (MEM_P (val) && ! must_copy)
3584 slot = val;
3585 else if (must_copy)
3586 {
3587 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3588 0, 1, 1);
3589 emit_move_insn (slot, val);
3590 }
3591 else
3592 {
3593 tree type = lang_hooks.types.type_for_mode (mode, 0);
3594
3595 slot
3596 = gen_rtx_MEM (mode,
3597 expand_expr (build1 (ADDR_EXPR,
3598 build_pointer_type (type),
3599 make_tree (type, val)),
3600 NULL_RTX, VOIDmode, 0));
3601 }
3602
3603 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3604 gen_rtx_USE (VOIDmode, slot),
3605 call_fusage);
3606 if (must_copy)
3607 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3608 gen_rtx_CLOBBER (VOIDmode,
3609 slot),
3610 call_fusage);
3611
3612 mode = Pmode;
3613 val = force_operand (XEXP (slot, 0), NULL_RTX);
3614 }
3615
3616 argvec[count].value = val;
3617 argvec[count].mode = mode;
3618
3619 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3620
3621 argvec[count].partial
3622 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3623
3624 locate_and_pad_parm (mode, NULL_TREE,
3625 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3626 1,
3627 #else
3628 argvec[count].reg != 0,
3629 #endif
3630 argvec[count].partial,
3631 NULL_TREE, &args_size, &argvec[count].locate);
3632
3633 if (argvec[count].locate.size.var)
3634 abort ();
3635
3636 if (argvec[count].reg == 0 || argvec[count].partial != 0
3637 || reg_parm_stack_space > 0)
3638 args_size.constant += argvec[count].locate.size.constant;
3639
3640 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3641 }
3642
3643 /* If this machine requires an external definition for library
3644 functions, write one out. */
3645 assemble_external_libcall (fun);
3646
3647 original_args_size = args_size;
3648 args_size.constant = (((args_size.constant
3649 + stack_pointer_delta
3650 + STACK_BYTES - 1)
3651 / STACK_BYTES
3652 * STACK_BYTES)
3653 - stack_pointer_delta);
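  /* A worked example of the rounding above, with assumed numbers:
     STACK_BYTES == 16, stack_pointer_delta == 8 and an incoming
     args_size.constant == 20 give

	(20 + 8 + 15) / 16 * 16 - 8 == 32 - 8 == 24

     so pushing 24 bytes of arguments leaves the total stack adjustment
     (24 + 8 == 32) aligned to the 16-byte boundary.  */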
3654
3655 args_size.constant = MAX (args_size.constant,
3656 reg_parm_stack_space);
3657
3658 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3659 args_size.constant -= reg_parm_stack_space;
3660 #endif
3661
3662 if (args_size.constant > current_function_outgoing_args_size)
3663 current_function_outgoing_args_size = args_size.constant;
3664
3665 if (ACCUMULATE_OUTGOING_ARGS)
3666 {
3667 /* Since the stack pointer will never be pushed, it is possible for
3668 the evaluation of a parm to clobber something we have already
3669 written to the stack. Since most function calls on RISC machines
3670 do not use the stack, this is uncommon, but must work correctly.
3671
3672 Therefore, we save any area of the stack that was already written
3673 and that we are using. Here we set up to do this by making a new
3674 stack usage map from the old one.
3675
3676 Another approach might be to try to reorder the argument
3677 evaluations to avoid this conflicting stack usage. */
3678
3679 needed = args_size.constant;
3680
3681 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3682 /* Since we will be writing into the entire argument area, the
3683 map must be allocated for its entire size, not just the part that
3684 is the responsibility of the caller. */
3685 needed += reg_parm_stack_space;
3686 #endif
3687
3688 #ifdef ARGS_GROW_DOWNWARD
3689 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3690 needed + 1);
3691 #else
3692 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3693 needed);
3694 #endif
3695 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3696
3697 if (initial_highest_arg_in_use)
3698 memcpy (stack_usage_map, initial_stack_usage_map,
3699 initial_highest_arg_in_use);
3700
3701 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3702 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3703 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3704 needed = 0;
3705
3706 /* We must be careful to use virtual regs before they're instantiated,
3707 and real regs afterwards. Loop optimization, for example, can create
3708 new libcalls after we've instantiated the virtual regs, and if we
3709 use virtuals anyway, they won't match the rtl patterns. */
3710
3711 if (virtuals_instantiated)
3712 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3713 else
3714 argblock = virtual_outgoing_args_rtx;
3715 }
3716 else
3717 {
3718 if (!PUSH_ARGS)
3719 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3720 }
3721
3722 /* If we push args individually in reverse order, perform stack alignment
3723 before the first push (the last arg). */
3724 if (argblock == 0 && PUSH_ARGS_REVERSED)
3725 anti_adjust_stack (GEN_INT (args_size.constant
3726 - original_args_size.constant));
3727
3728 if (PUSH_ARGS_REVERSED)
3729 {
3730 inc = -1;
3731 argnum = nargs - 1;
3732 }
3733 else
3734 {
3735 inc = 1;
3736 argnum = 0;
3737 }
3738
3739 #ifdef REG_PARM_STACK_SPACE
3740 if (ACCUMULATE_OUTGOING_ARGS)
3741 {
3742 /* The argument list is the property of the called routine and it
3743 may clobber it. If the fixed area has been used for previous
3744 parameters, we must save and restore it. */
3745 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3746 &low_to_save, &high_to_save);
3747 }
3748 #endif
3749
3750 /* Push the args that need to be pushed. */
3751
3752 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3753 are to be pushed. */
3754 for (count = 0; count < nargs; count++, argnum += inc)
3755 {
3756 enum machine_mode mode = argvec[argnum].mode;
3757 rtx val = argvec[argnum].value;
3758 rtx reg = argvec[argnum].reg;
3759 int partial = argvec[argnum].partial;
3760 int lower_bound = 0, upper_bound = 0, i;
3761
3762 if (! (reg != 0 && partial == 0))
3763 {
3764 if (ACCUMULATE_OUTGOING_ARGS)
3765 {
3766 /* If this is being stored into a pre-allocated, fixed-size,
3767 stack area, save any previous data at that location. */
3768
3769 #ifdef ARGS_GROW_DOWNWARD
	      /* The argument offsets are negative here, but we want to
		 index stack_usage_map with positive values.  */
3772 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3773 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3774 #else
3775 lower_bound = argvec[argnum].locate.offset.constant;
3776 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3777 #endif
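	      /* E.g., with assumed numbers: a 4-byte argument slot at
		 offset -12 under ARGS_GROW_DOWNWARD gives upper_bound == 13
		 and lower_bound == 9, so bytes 9..12 of stack_usage_map are
		 checked below.  */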
3778
3779 i = lower_bound;
3780 /* Don't worry about things in the fixed argument area;
3781 it has already been saved. */
3782 if (i < reg_parm_stack_space)
3783 i = reg_parm_stack_space;
3784 while (i < upper_bound && stack_usage_map[i] == 0)
3785 i++;
3786
3787 if (i < upper_bound)
3788 {
3789 /* We need to make a save area. */
3790 unsigned int size
3791 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3792 enum machine_mode save_mode
3793 = mode_for_size (size, MODE_INT, 1);
3794 rtx adr
3795 = plus_constant (argblock,
3796 argvec[argnum].locate.offset.constant);
3797 rtx stack_area
3798 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3799
3800 if (save_mode == BLKmode)
3801 {
3802 argvec[argnum].save_area
3803 = assign_stack_temp (BLKmode,
3804 argvec[argnum].locate.size.constant,
3805 0);
3806
3807 emit_block_move (validize_mem (argvec[argnum].save_area),
3808 stack_area,
3809 GEN_INT (argvec[argnum].locate.size.constant),
3810 BLOCK_OP_CALL_PARM);
3811 }
3812 else
3813 {
3814 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3815
3816 emit_move_insn (argvec[argnum].save_area, stack_area);
3817 }
3818 }
3819 }
3820
3821 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3822 partial, reg, 0, argblock,
3823 GEN_INT (argvec[argnum].locate.offset.constant),
3824 reg_parm_stack_space,
3825 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3826
3827 /* Now mark the segment we just used. */
3828 if (ACCUMULATE_OUTGOING_ARGS)
3829 for (i = lower_bound; i < upper_bound; i++)
3830 stack_usage_map[i] = 1;
3831
3832 NO_DEFER_POP;
3833 }
3834 }
3835
3836 /* If we pushed args in forward order, perform stack alignment
3837 after pushing the last arg. */
3838 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3839 anti_adjust_stack (GEN_INT (args_size.constant
3840 - original_args_size.constant));
3841
3842 if (PUSH_ARGS_REVERSED)
3843 argnum = nargs - 1;
3844 else
3845 argnum = 0;
3846
3847 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3848
3849 /* Now load any reg parms into their regs. */
3850
3851 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3852 are to be pushed. */
3853 for (count = 0; count < nargs; count++, argnum += inc)
3854 {
3855 enum machine_mode mode = argvec[argnum].mode;
3856 rtx val = argvec[argnum].value;
3857 rtx reg = argvec[argnum].reg;
3858 int partial = argvec[argnum].partial;
3859
3860 /* Handle calls that pass values in multiple non-contiguous
3861 locations. The PA64 has examples of this for library calls. */
3862 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3863 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3864 else if (reg != 0 && partial == 0)
3865 emit_move_insn (reg, val);
3866
3867 NO_DEFER_POP;
3868 }
3869
3870 /* Any regs containing parms remain in use through the call. */
3871 for (count = 0; count < nargs; count++)
3872 {
3873 rtx reg = argvec[count].reg;
3874 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3875 use_group_regs (&call_fusage, reg);
3876 else if (reg != 0)
3877 use_reg (&call_fusage, reg);
3878 }
3879
3880 /* Pass the function the address in which to return a structure value. */
3881 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3882 {
3883 emit_move_insn (struct_value,
3884 force_reg (Pmode,
3885 force_operand (XEXP (mem_value, 0),
3886 NULL_RTX)));
3887 if (REG_P (struct_value))
3888 use_reg (&call_fusage, struct_value);
3889 }
3890
3891 /* Don't allow popping to be deferred, since then
3892 cse'ing of library calls could delete a call and leave the pop. */
3893 NO_DEFER_POP;
3894 valreg = (mem_value == 0 && outmode != VOIDmode
3895 ? hard_libcall_value (outmode) : NULL_RTX);
3896
3897 /* Stack must be properly aligned now. */
3898 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
3899 abort ();
3900
3901 before_call = get_last_insn ();
3902
3903 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3904 will set inhibit_defer_pop to that value. */
3905 /* The return type is needed to decide how many bytes the function pops.
3906 Signedness plays no role in that, so for simplicity, we pretend it's
3907 always signed. We also assume that the list of arguments passed has
3908 no impact, so we pretend it is unknown. */
3909
3910 emit_call_1 (fun, NULL,
3911 get_identifier (XSTR (orgfun, 0)),
3912 build_function_type (tfom, NULL_TREE),
3913 original_args_size.constant, args_size.constant,
3914 struct_value_size,
3915 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3916 valreg,
3917 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3918
3919 /* For calls to `setjmp', etc., inform flow.c it should complain
3920 if nonvolatile values are live. For functions that cannot return,
3921 inform flow that control does not fall through. */
3922
3923 if (flags & (ECF_NORETURN | ECF_LONGJMP))
3924 {
3925 /* The barrier note must be emitted
3926 immediately after the CALL_INSN. Some ports emit more than
3927 just a CALL_INSN above, so we must search for it here. */
3928
3929 rtx last = get_last_insn ();
3930 while (!CALL_P (last))
3931 {
3932 last = PREV_INSN (last);
3933 /* There was no CALL_INSN? */
3934 if (last == before_call)
3935 abort ();
3936 }
3937
3938 emit_barrier_after (last);
3939 }
3940
3941 /* Now restore inhibit_defer_pop to its actual original value. */
3942 OK_DEFER_POP;
3943
  /* If the call is cse'able, make the appropriate pair of reg-notes around
     it.  Test valreg so we don't crash; we may safely ignore `const'
     if the return type is void.  A PARALLEL return value is first copied
     into a single pseudo register, since the libcall-block machinery
     cannot handle a PARALLEL directly.  */
3948 if (flags & ECF_LIBCALL_BLOCK)
3949 {
3950 rtx insns;
3951
3952 if (valreg == 0)
3953 {
3954 insns = get_insns ();
3955 end_sequence ();
3956 emit_insn (insns);
3957 }
3958 else
3959 {
3960 rtx note = 0;
3961 rtx temp;
3962 int i;
3963
3964 if (GET_CODE (valreg) == PARALLEL)
3965 {
3966 temp = gen_reg_rtx (outmode);
3967 emit_group_store (temp, valreg, NULL_TREE,
3968 GET_MODE_SIZE (outmode));
3969 valreg = temp;
3970 }
3971
3972 temp = gen_reg_rtx (GET_MODE (valreg));
3973
3974 /* Construct an "equal form" for the value which mentions all the
3975 arguments in order as well as the function name. */
3976 for (i = 0; i < nargs; i++)
3977 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3978 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
3979
3980 insns = get_insns ();
3981 end_sequence ();
3982
3983 if (flags & ECF_PURE)
3984 note = gen_rtx_EXPR_LIST (VOIDmode,
3985 gen_rtx_USE (VOIDmode,
3986 gen_rtx_MEM (BLKmode,
3987 gen_rtx_SCRATCH (VOIDmode))),
3988 note);
3989
3990 emit_libcall_block (insns, temp, valreg, note);
3991
3992 valreg = temp;
3993 }
3994 }
3995 pop_temp_slots ();
3996
3997 /* Copy the value to the right place. */
3998 if (outmode != VOIDmode && retval)
3999 {
4000 if (mem_value)
4001 {
4002 if (value == 0)
4003 value = mem_value;
4004 if (value != mem_value)
4005 emit_move_insn (value, mem_value);
4006 }
4007 else if (GET_CODE (valreg) == PARALLEL)
4008 {
4009 if (value == 0)
4010 value = gen_reg_rtx (outmode);
4011 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4012 }
4013 else if (value != 0)
4014 emit_move_insn (value, valreg);
4015 else
4016 value = valreg;
4017 }
4018
4019 if (ACCUMULATE_OUTGOING_ARGS)
4020 {
4021 #ifdef REG_PARM_STACK_SPACE
4022 if (save_area)
4023 restore_fixed_argument_area (save_area, argblock,
4024 high_to_save, low_to_save);
4025 #endif
4026
4027 /* If we saved any argument areas, restore them. */
4028 for (count = 0; count < nargs; count++)
4029 if (argvec[count].save_area)
4030 {
4031 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4032 rtx adr = plus_constant (argblock,
4033 argvec[count].locate.offset.constant);
4034 rtx stack_area = gen_rtx_MEM (save_mode,
4035 memory_address (save_mode, adr));
4036
4037 if (save_mode == BLKmode)
4038 emit_block_move (stack_area,
4039 validize_mem (argvec[count].save_area),
4040 GEN_INT (argvec[count].locate.size.constant),
4041 BLOCK_OP_CALL_PARM);
4042 else
4043 emit_move_insn (stack_area, argvec[count].save_area);
4044 }
4045
4046 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4047 stack_usage_map = initial_stack_usage_map;
4048 }
4049
4050 return value;
4051
4052 }
4053 \f
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue
   already.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
   calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
   which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
   REG_LIBCALL/REG_RETVAL notes with an extra (use (mem (scratch))),
   or another LCT_ value for other types of library calls.  */
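/* A minimal usage sketch (the operands are hypothetical; memcpy_libfunc
   is from libfuncs.h):

	emit_library_call (memcpy_libfunc, LCT_NORMAL, VOIDmode, 3,
			   dst_addr, Pmode,
			   src_addr, Pmode,
			   size_rtx, TYPE_MODE (sizetype));

   where DST_ADDR, SRC_ADDR and SIZE_RTX are rtx values already valid in
   the mode named after each of them.  */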
4067
4068 void
4069 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4070 enum machine_mode outmode, int nargs, ...)
4071 {
4072 va_list p;
4073
4074 va_start (p, nargs);
4075 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4076 va_end (p);
4077 }
4078 \f
4079 /* Like emit_library_call except that an extra argument, VALUE,
4080 comes second and says where to store the result.
4081 (If VALUE is zero, this function chooses a convenient way
   to return the value.)
4083
4084 This function returns an rtx for where the value is to be found.
4085 If VALUE is nonzero, VALUE is returned. */
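/* A minimal usage sketch (LIBFUNC and OP0 are hypothetical; LIBFUNC
   stands for some SYMBOL_REF, e.g. an optab libcall):

	rtx result
	  = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
				     SFmode, 1, op0, SFmode);

   RESULT is then the rtx (a hard register, or VALUE if it was nonzero)
   holding the SFmode return value.  */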
4086
4087 rtx
4088 emit_library_call_value (rtx orgfun, rtx value,
4089 enum libcall_type fn_type,
4090 enum machine_mode outmode, int nargs, ...)
4091 {
4092 rtx result;
4093 va_list p;
4094
4095 va_start (p, nargs);
4096 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4097 nargs, p);
4098 va_end (p);
4099
4100 return result;
4101 }
4102 \f
4103 /* Store a single argument for a function call
4104 into the register or memory area where it must be passed.
4105 *ARG describes the argument value and where to pass it.
4106
4107 ARGBLOCK is the address of the stack-block for all the arguments,
4108 or 0 on a machine where arguments are pushed individually.
4109
   The ECF_MAY_BE_ALLOCA bit in FLAGS says this could be a call to
   `alloca', so we must be careful about how the stack is used.
4112
4113 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4114 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4115 that we need not worry about saving and restoring the stack.
4116
   REG_PARM_STACK_SPACE is the size of the area of the stack reserved
   for arguments passed in registers, if any.
4118
4119 Return nonzero if this arg should cause sibcall failure,
4120 zero otherwise. */
4121
4122 static int
4123 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4124 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4125 {
4126 tree pval = arg->tree_value;
4127 rtx reg = 0;
4128 int partial = 0;
4129 int used = 0;
4130 int i, lower_bound = 0, upper_bound = 0;
4131 int sibcall_failure = 0;
4132
4133 if (TREE_CODE (pval) == ERROR_MARK)
4134 return 1;
4135
4136 /* Push a new temporary level for any temporaries we make for
4137 this argument. */
4138 push_temp_slots ();
4139
4140 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4141 {
4142 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4143 save any previous data at that location. */
4144 if (argblock && ! variable_size && arg->stack)
4145 {
4146 #ifdef ARGS_GROW_DOWNWARD
4147 /* stack_slot is negative, but we want to index stack_usage_map
4148 with positive values. */
4149 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4150 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4151 else
4152 upper_bound = 0;
4153
4154 lower_bound = upper_bound - arg->locate.size.constant;
4155 #else
4156 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4157 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4158 else
4159 lower_bound = 0;
4160
4161 upper_bound = lower_bound + arg->locate.size.constant;
4162 #endif
4163
4164 i = lower_bound;
4165 /* Don't worry about things in the fixed argument area;
4166 it has already been saved. */
4167 if (i < reg_parm_stack_space)
4168 i = reg_parm_stack_space;
4169 while (i < upper_bound && stack_usage_map[i] == 0)
4170 i++;
4171
4172 if (i < upper_bound)
4173 {
4174 /* We need to make a save area. */
4175 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4176 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4177 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4178 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4179
4180 if (save_mode == BLKmode)
4181 {
4182 tree ot = TREE_TYPE (arg->tree_value);
4183 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4184 | TYPE_QUAL_CONST));
4185
4186 arg->save_area = assign_temp (nt, 0, 1, 1);
4187 preserve_temp_slots (arg->save_area);
4188 emit_block_move (validize_mem (arg->save_area), stack_area,
4189 expr_size (arg->tree_value),
4190 BLOCK_OP_CALL_PARM);
4191 }
4192 else
4193 {
4194 arg->save_area = gen_reg_rtx (save_mode);
4195 emit_move_insn (arg->save_area, stack_area);
4196 }
4197 }
4198 }
4199 }
4200
4201 /* If this isn't going to be placed on both the stack and in registers,
4202 set up the register and number of words. */
4203 if (! arg->pass_on_stack)
4204 {
4205 if (flags & ECF_SIBCALL)
4206 reg = arg->tail_call_reg;
4207 else
4208 reg = arg->reg;
4209 partial = arg->partial;
4210 }
4211
4212 if (reg != 0 && partial == 0)
4213 /* Being passed entirely in a register. We shouldn't be called in
4214 this case. */
4215 abort ();
4216
4217 /* If this arg needs special alignment, don't load the registers
4218 here. */
4219 if (arg->n_aligned_regs != 0)
4220 reg = 0;
4221
4222 /* If this is being passed partially in a register, we can't evaluate
4223 it directly into its stack slot. Otherwise, we can. */
4224 if (arg->value == 0)
4225 {
4226 /* stack_arg_under_construction is nonzero if a function argument is
4227 being evaluated directly into the outgoing argument list and
4228 expand_call must take special action to preserve the argument list
4229 if it is called recursively.
4230
4231 For scalar function arguments stack_usage_map is sufficient to
4232 determine which stack slots must be saved and restored. Scalar
4233 arguments in general have pass_on_stack == 0.
4234
4235 If this argument is initialized by a function which takes the
4236 address of the argument (a C++ constructor or a C function
4237 returning a BLKmode structure), then stack_usage_map is
4238 insufficient and expand_call must push the stack around the
4239 function call. Such arguments have pass_on_stack == 1.
4240
4241 Note that it is always safe to set stack_arg_under_construction,
4242 but this generates suboptimal code if set when not needed. */
4243
4244 if (arg->pass_on_stack)
4245 stack_arg_under_construction++;
4246
4247 arg->value = expand_expr (pval,
4248 (partial
4249 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4250 ? NULL_RTX : arg->stack,
4251 VOIDmode, EXPAND_STACK_PARM);
4252
      /* If the mode does not agree (because we are promoting the object,
	 or for any other reason), convert it now.  */
4255
4256 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4257 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4258 arg->value, arg->unsignedp);
4259
4260 if (arg->pass_on_stack)
4261 stack_arg_under_construction--;
4262 }
4263
4264 /* Don't allow anything left on stack from computation
4265 of argument to alloca. */
4266 if (flags & ECF_MAY_BE_ALLOCA)
4267 do_pending_stack_adjust ();
4268
4269 if (arg->value == arg->stack)
4270 /* If the value is already in the stack slot, we are done. */
4271 ;
4272 else if (arg->mode != BLKmode)
4273 {
4274 int size;
4275
4276 /* Argument is a scalar, not entirely passed in registers.
4277 (If part is passed in registers, arg->partial says how much
4278 and emit_push_insn will take care of putting it there.)
4279
4280 Push it, and if its size is less than the
4281 amount of space allocated to it,
4282 also bump stack pointer by the additional space.
4283 Note that in C the default argument promotions
4284 will prevent such mismatches. */
4285
4286 size = GET_MODE_SIZE (arg->mode);
4287 /* Compute how much space the push instruction will push.
4288 On many machines, pushing a byte will advance the stack
4289 pointer by a halfword. */
4290 #ifdef PUSH_ROUNDING
4291 size = PUSH_ROUNDING (size);
4292 #endif
4293 used = size;
4294
4295 /* Compute how much space the argument should get:
4296 round up to a multiple of the alignment for arguments. */
4297 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4298 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4299 / (PARM_BOUNDARY / BITS_PER_UNIT))
4300 * (PARM_BOUNDARY / BITS_PER_UNIT));
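      /* For instance (assumed numbers): with PARM_BOUNDARY == 32, a
	 1-byte argument whose push size is 1 gets USED == 4, so the
	 excess 3 bytes of padding are also pushed.  */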
4301
4302 /* This isn't already where we want it on the stack, so put it there.
4303 This can either be done with push or copy insns. */
4304 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4305 PARM_BOUNDARY, partial, reg, used - size, argblock,
4306 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4307 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4308
4309 /* Unless this is a partially-in-register argument, the argument is now
4310 in the stack. */
4311 if (partial == 0)
4312 arg->value = arg->stack;
4313 }
4314 else
4315 {
4316 /* BLKmode, at least partly to be pushed. */
4317
4318 unsigned int parm_align;
4319 int excess;
4320 rtx size_rtx;
4321
4322 /* Pushing a nonscalar.
4323 If part is passed in registers, PARTIAL says how much
4324 and emit_push_insn will take care of putting it there. */
4325
4326 /* Round its size up to a multiple
4327 of the allocation unit for arguments. */
4328
4329 if (arg->locate.size.var != 0)
4330 {
4331 excess = 0;
4332 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4333 }
4334 else
4335 {
4336 /* PUSH_ROUNDING has no effect on us, because
4337 emit_push_insn for BLKmode is careful to avoid it. */
4338 if (reg && GET_CODE (reg) == PARALLEL)
4339 {
4340 /* Use the size of the elt to compute excess. */
4341 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
4342 excess = (arg->locate.size.constant
4343 - int_size_in_bytes (TREE_TYPE (pval))
4344 + partial * GET_MODE_SIZE (GET_MODE (elt)));
4345 }
4346 else
4347 excess = (arg->locate.size.constant
4348 - int_size_in_bytes (TREE_TYPE (pval))
4349 + partial * UNITS_PER_WORD);
4350 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4351 NULL_RTX, TYPE_MODE (sizetype), 0);
4352 }
4353
4354 /* Some types will require stricter alignment, which will be
4355 provided for elsewhere in argument layout. */
4356 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4357
4358 /* When an argument is padded down, the block is aligned to
4359 PARM_BOUNDARY, but the actual argument isn't. */
4360 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4361 {
4362 if (arg->locate.size.var)
4363 parm_align = BITS_PER_UNIT;
4364 else if (excess)
4365 {
4366 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4367 parm_align = MIN (parm_align, excess_align);
4368 }
4369 }
4370
4371 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4372 {
4373 /* emit_push_insn might not work properly if arg->value and
4374 argblock + arg->locate.offset areas overlap. */
4375 rtx x = arg->value;
4376 int i = 0;
4377
4378 if (XEXP (x, 0) == current_function_internal_arg_pointer
4379 || (GET_CODE (XEXP (x, 0)) == PLUS
4380 && XEXP (XEXP (x, 0), 0) ==
4381 current_function_internal_arg_pointer
4382 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4383 {
4384 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4385 i = INTVAL (XEXP (XEXP (x, 0), 1));
4386
4387 /* expand_call should ensure this. */
4388 if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
4389 abort ();
4390
4391 if (arg->locate.offset.constant > i)
4392 {
4393 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4394 sibcall_failure = 1;
4395 }
4396 else if (arg->locate.offset.constant < i)
4397 {
4398 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4399 sibcall_failure = 1;
4400 }
4401 }
4402 }
4403
4404 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4405 parm_align, partial, reg, excess, argblock,
4406 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4407 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4408
4409 /* Unless this is a partially-in-register argument, the argument is now
4410 in the stack.
4411
4412 ??? Unlike the case above, in which we want the actual
4413 address of the data, so that we can load it directly into a
4414 register, here we want the address of the stack slot, so that
4415 it's properly aligned for word-by-word copying or something
4416 like that. It's not clear that this is always correct. */
4417 if (partial == 0)
4418 arg->value = arg->stack_slot;
4419 }
4420
4421 /* Mark all slots this store used. */
4422 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4423 && argblock && ! variable_size && arg->stack)
4424 for (i = lower_bound; i < upper_bound; i++)
4425 stack_usage_map[i] = 1;
4426
4427 /* Once we have pushed something, pops can't safely
4428 be deferred during the rest of the arguments. */
4429 NO_DEFER_POP;
4430
4431 /* ANSI doesn't require a sequence point here,
4432 but PCC has one, so this will avoid some problems. */
4433 emit_queue ();
4434
4435 /* Free any temporary slots made in processing this argument. Show
4436 that we might have taken the address of something and pushed that
4437 as an operand. */
4438 preserve_temp_slots (NULL_RTX);
4439 free_temp_slots ();
4440 pop_temp_slots ();
4441
4442 return sibcall_failure;
4443 }
4444
4445 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4446
4447 bool
4448 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4449 tree type)
4450 {
4451 if (!type)
4452 return false;
4453
4454 /* If the type has variable size... */
4455 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4456 return true;
4457
4458 /* If the type is marked as addressable (it is required
4459 to be constructed into the stack)... */
4460 if (TREE_ADDRESSABLE (type))
4461 return true;
4462
4463 return false;
4464 }
4465
4466 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4467 takes trailing padding of a structure into account. */
4468 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
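/* For instance (a hedged example, assuming a big-endian target with
   PARM_BOUNDARY == 32): a 6-byte BLKmode structure pads upward, so a
   copy into a register pair would leave the final two bytes in the
   wrong part of the second register; must_pass_in_stack_var_size_or_pad
   therefore returns true for it.  */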
4469
4470 bool
4471 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
4472 {
4473 if (!type)
4474 return false;
4475
4476 /* If the type has variable size... */
4477 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4478 return true;
4479
4480 /* If the type is marked as addressable (it is required
4481 to be constructed into the stack)... */
4482 if (TREE_ADDRESSABLE (type))
4483 return true;
4484
4485 /* If the padding and mode of the type is such that a copy into
4486 a register would put it into the wrong part of the register. */
4487 if (mode == BLKmode
4488 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4489 && (FUNCTION_ARG_PADDING (mode, type)
4490 == (BYTES_BIG_ENDIAN ? upward : downward)))
4491 return true;
4492
4493 return false;
4494 }