[gcc.git] / gcc / calls.c
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "optabs.h"
31 #include "libfuncs.h"
32 #include "function.h"
33 #include "regs.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "tm_p.h"
37 #include "timevar.h"
38 #include "sbitmap.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "cgraph.h"
42 #include "except.h"
43
44 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
45 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
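
/* Editorial sketch, not part of the original calls.c: a worked example of
   rounding a byte count up to STACK_BYTES.  Assuming a preferred stack
   boundary of 128 bits and 8-bit units, STACK_BYTES is 16, so a 20-byte
   argument block rounds up to 32 bytes.  The helper name is hypothetical.  */
static int ATTRIBUTE_UNUSED
example_round_up_to_stack_bytes (int bytes)
{
  /* 20 -> 32 when STACK_BYTES == 16.  */
  return ((bytes + STACK_BYTES - 1) / STACK_BYTES) * STACK_BYTES;
}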
46
47 /* Data structure and subroutines used within expand_call. */
48
49 struct arg_data
50 {
51 /* Tree node for this argument. */
52 tree tree_value;
53 /* Mode for value; TYPE_MODE unless promoted. */
54 enum machine_mode mode;
55 /* Current RTL value for argument, or 0 if it isn't precomputed. */
56 rtx value;
57 /* Initially-computed RTL value for argument; only for const functions. */
58 rtx initial_value;
59 /* Register to pass this argument in, 0 if passed on stack, or a
60 PARALLEL if the arg is to be copied into multiple non-contiguous
61 registers. */
62 rtx reg;
63 /* Register to pass this argument in when generating tail call sequence.
64 This is not the same register as for normal calls on machines with
65 register windows. */
66 rtx tail_call_reg;
67 /* If REG was promoted from the actual mode of the argument expression,
68 indicates whether the promotion is sign- or zero-extended. */
69 int unsignedp;
70 /* Number of registers to use. 0 means put the whole arg in registers.
71 Also 0 if not passed in registers. */
72 int partial;
73 /* Nonzero if argument must be passed on stack.
74 Note that some arguments may be passed on the stack
75 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
76 pass_on_stack identifies arguments that *cannot* go in registers. */
77 int pass_on_stack;
78 /* Some fields packaged up for locate_and_pad_parm. */
79 struct locate_and_pad_arg_data locate;
80 /* Location on the stack at which parameter should be stored. The store
81 has already been done if STACK == VALUE. */
82 rtx stack;
83 /* Location on the stack of the start of this argument slot. This can
84 differ from STACK if this arg pads downward. This location is known
85 to be aligned to FUNCTION_ARG_BOUNDARY. */
86 rtx stack_slot;
87 /* Place that this stack area has been saved, if needed. */
88 rtx save_area;
89 /* If an argument's alignment does not permit direct copying into registers,
90 copy in smaller-sized pieces into pseudos. These are stored in a
91 block pointed to by this field. The next field says how many
92 word-sized pseudos we made. */
93 rtx *aligned_regs;
94 int n_aligned_regs;
95 };
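
/* Editorial sketch, not part of the original calls.c: how the fields above
   are typically consulted.  ARGS and NUM_ACTUALS are assumed to be the
   argument array and its length as built by initialize_argument_information;
   the helper name is hypothetical.  An argument goes (at least partly) in
   registers when REG is nonzero, and PASS_ON_STACK marks arguments that must
   nevertheless be computed on the stack.  */
static int ATTRIBUTE_UNUSED
example_count_register_args (struct arg_data *args, int num_actuals)
{
  int i, n = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      n++;

  return n;
}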
96
97 /* A vector of one char per byte of stack space. A byte is nonzero if
98 the corresponding stack location has been used.
99 This vector is used to prevent a function call within an argument from
100 clobbering any stack already set up. */
101 static char *stack_usage_map;
102
103 /* Size of STACK_USAGE_MAP. */
104 static int highest_outgoing_arg_in_use;
105
106 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
107 stack location's tail call argument has already been stored into the stack.
108 This bitmap is used to prevent sibling call optimization if the function
109 tries to use its parent's incoming argument slots when they have already
110 been overwritten with tail call arguments. */
111 static sbitmap stored_args_map;
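
/* Editorial sketch, not part of the original calls.c: how the two maps above
   are indexed.  Both are indexed by byte offset into the argument area.
   LOW and HIGH are hypothetical byte offsets, and the helper name is
   hypothetical; the guards mirror the checks used later in this file.  */
static int ATTRIBUTE_UNUSED
example_arg_bytes_in_use (int low, int high)
{
  int i;

  /* A nonzero char in stack_usage_map means the byte is already occupied
     by an outgoing argument.  */
  for (i = low; i < high && i < highest_outgoing_arg_in_use; i++)
    if (stack_usage_map[i] != 0)
      return 1;

  /* A set bit in stored_args_map means a tail call argument has already
     been stored over the corresponding incoming argument byte.  */
  for (i = low; i < high; i++)
    if (i < (int) stored_args_map->n_bits && TEST_BIT (stored_args_map, i))
      return 1;

  return 0;
}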
112
113 /* stack_arg_under_construction is nonzero when an argument may be
114 initialized with a constructor call (including a C function that
115 returns a BLKmode struct) and expand_call must take special action
116 to make sure the object being constructed does not overlap the
117 argument list for the constructor call. */
118 int stack_arg_under_construction;
119
120 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
121 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
122 CUMULATIVE_ARGS *);
123 static void precompute_register_parameters (int, struct arg_data *, int *);
124 static int store_one_arg (struct arg_data *, rtx, int, int, int);
125 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
126 static int finalize_must_preallocate (int, int, struct arg_data *,
127 struct args_size *);
128 static void precompute_arguments (int, int, struct arg_data *);
129 static int compute_argument_block_size (int, struct args_size *, int);
130 static void initialize_argument_information (int, struct arg_data *,
131 struct args_size *, int, tree,
132 tree, CUMULATIVE_ARGS *, int,
133 rtx *, int *, int *, int *,
134 bool *, bool);
135 static void compute_argument_addresses (struct arg_data *, rtx, int);
136 static rtx rtx_for_function_call (tree, tree);
137 static void load_register_parameters (struct arg_data *, int, rtx *, int,
138 int, int *);
139 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
140 enum machine_mode, int, va_list);
141 static int special_function_p (tree, int);
142 static int check_sibcall_argument_overlap_1 (rtx);
143 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
144
145 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
146 int);
147 static tree fix_unsafe_tree (tree);
148 static bool shift_returned_value (tree, rtx *);
149
150 #ifdef REG_PARM_STACK_SPACE
151 static rtx save_fixed_argument_area (int, rtx, int *, int *);
152 static void restore_fixed_argument_area (rtx, rtx, int, int);
153 #endif
154 \f
155 /* Force FUNEXP into a form suitable for the address of a CALL,
156 and return that as an rtx. Also load the static chain register
157 if FNDECL is a nested function.
158
159 CALL_FUSAGE points to a variable holding the prospective
160 CALL_INSN_FUNCTION_USAGE information. */
161
162 rtx
163 prepare_call_address (rtx funexp, rtx static_chain_value,
164 rtx *call_fusage, int reg_parm_seen, int sibcallp)
165 {
166 funexp = protect_from_queue (funexp, 0);
167
168 /* Make a valid memory address and copy constants through pseudo-regs,
169 but not for a constant address if -fno-function-cse. */
170 if (GET_CODE (funexp) != SYMBOL_REF)
171 /* If we are using registers for parameters, force the
172 function address into a register now. */
173 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
174 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
175 : memory_address (FUNCTION_MODE, funexp));
176 else if (! sibcallp)
177 {
178 #ifndef NO_FUNCTION_CSE
179 if (optimize && ! flag_no_function_cse)
180 funexp = force_reg (Pmode, funexp);
181 #endif
182 }
183
184 if (static_chain_value != 0)
185 {
186 emit_move_insn (static_chain_rtx, static_chain_value);
187
188 if (REG_P (static_chain_rtx))
189 use_reg (call_fusage, static_chain_rtx);
190 }
191
192 return funexp;
193 }
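
/* Editorial sketch, not part of the original calls.c: a typical use of
   prepare_call_address for an ordinary (non-sibling) call with no static
   chain.  FUNEXP and REG_PARM_SEEN are assumed to come from the caller,
   and the helper name is hypothetical.  */
static rtx ATTRIBUTE_UNUSED
example_prepare_plain_call (rtx funexp, int reg_parm_seen)
{
  rtx call_fusage = NULL_RTX;

  /* Validate the address and, when profitable, copy it into a register.  */
  return prepare_call_address (funexp, NULL_RTX, &call_fusage,
			       reg_parm_seen, 0);
}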
194
195 /* Generate instructions to call function FUNEXP,
196 and optionally pop the results.
197 The CALL_INSN is the first insn generated.
198
199 FNDECL is the declaration node of the function. This is given to the
200 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
201
202 FUNTYPE is the data type of the function. This is given to the macro
203 RETURN_POPS_ARGS to determine whether this function pops its own args.
204 We used to allow an identifier for library functions, but that doesn't
205 work when the return type is an aggregate type and the calling convention
206 says that the pointer to this aggregate is to be popped by the callee.
207
208 STACK_SIZE is the number of bytes of arguments on the stack,
209 ROUNDED_STACK_SIZE is that number rounded up to
210 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
211 both to put into the call insn and to generate explicit popping
212 code if necessary.
213
214 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
215 It is zero if this call doesn't want a structure value.
216
217 NEXT_ARG_REG is the rtx that results from executing
218 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
219 just after all the args have had their registers assigned.
220 This could be whatever you like, but normally it is the first
221 arg-register beyond those used for args in this call,
222 or 0 if all the arg-registers are used in this call.
223 It is passed on to `gen_call' so you can put this info in the call insn.
224
225 VALREG is a hard register in which a value is returned,
226 or 0 if the call does not return a value.
227
228 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
229 the args to this call were processed.
230 We restore `inhibit_defer_pop' to that value.
231
232 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
233 denote registers used by the called function. */
234
235 static void
236 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
237 tree funtype ATTRIBUTE_UNUSED,
238 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
239 HOST_WIDE_INT rounded_stack_size,
240 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
241 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
242 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
243 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
244 {
245 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
246 rtx call_insn;
247 int already_popped = 0;
248 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
249 #if defined (HAVE_call) && defined (HAVE_call_value)
250 rtx struct_value_size_rtx;
251 struct_value_size_rtx = GEN_INT (struct_value_size);
252 #endif
253
254 #ifdef CALL_POPS_ARGS
255 n_popped += CALL_POPS_ARGS (* args_so_far);
256 #endif
257
258 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
259 and we don't want to load it into a register as an optimization,
260 because prepare_call_address already did it if it should be done. */
261 if (GET_CODE (funexp) != SYMBOL_REF)
262 funexp = memory_address (FUNCTION_MODE, funexp);
263
264 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
265 if ((ecf_flags & ECF_SIBCALL)
266 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
267 && (n_popped > 0 || stack_size == 0))
268 {
269 rtx n_pop = GEN_INT (n_popped);
270 rtx pat;
271
272 /* If this subroutine pops its own args, record that in the call insn
273 if possible, for the sake of frame pointer elimination. */
274
275 if (valreg)
276 pat = GEN_SIBCALL_VALUE_POP (valreg,
277 gen_rtx_MEM (FUNCTION_MODE, funexp),
278 rounded_stack_size_rtx, next_arg_reg,
279 n_pop);
280 else
281 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
282 rounded_stack_size_rtx, next_arg_reg, n_pop);
283
284 emit_call_insn (pat);
285 already_popped = 1;
286 }
287 else
288 #endif
289
290 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
291 /* If the target has "call" or "call_value" insns, then prefer them
292 if no arguments are actually popped. If the target does not have
293 "call" or "call_value" insns, then we must use the popping versions
294 even if the call has no arguments to pop. */
295 #if defined (HAVE_call) && defined (HAVE_call_value)
296 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
297 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
298 #else
299 if (HAVE_call_pop && HAVE_call_value_pop)
300 #endif
301 {
302 rtx n_pop = GEN_INT (n_popped);
303 rtx pat;
304
305 /* If this subroutine pops its own args, record that in the call insn
306 if possible, for the sake of frame pointer elimination. */
307
308 if (valreg)
309 pat = GEN_CALL_VALUE_POP (valreg,
310 gen_rtx_MEM (FUNCTION_MODE, funexp),
311 rounded_stack_size_rtx, next_arg_reg, n_pop);
312 else
313 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
314 rounded_stack_size_rtx, next_arg_reg, n_pop);
315
316 emit_call_insn (pat);
317 already_popped = 1;
318 }
319 else
320 #endif
321
322 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
323 if ((ecf_flags & ECF_SIBCALL)
324 && HAVE_sibcall && HAVE_sibcall_value)
325 {
326 if (valreg)
327 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
328 gen_rtx_MEM (FUNCTION_MODE, funexp),
329 rounded_stack_size_rtx,
330 next_arg_reg, NULL_RTX));
331 else
332 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
333 rounded_stack_size_rtx, next_arg_reg,
334 struct_value_size_rtx));
335 }
336 else
337 #endif
338
339 #if defined (HAVE_call) && defined (HAVE_call_value)
340 if (HAVE_call && HAVE_call_value)
341 {
342 if (valreg)
343 emit_call_insn (GEN_CALL_VALUE (valreg,
344 gen_rtx_MEM (FUNCTION_MODE, funexp),
345 rounded_stack_size_rtx, next_arg_reg,
346 NULL_RTX));
347 else
348 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
349 rounded_stack_size_rtx, next_arg_reg,
350 struct_value_size_rtx));
351 }
352 else
353 #endif
354 abort ();
355
356 /* Find the call we just emitted. */
357 call_insn = last_call_insn ();
358
359 /* Mark memory as used for "pure" function call. */
360 if (ecf_flags & ECF_PURE)
361 call_fusage
362 = gen_rtx_EXPR_LIST
363 (VOIDmode,
364 gen_rtx_USE (VOIDmode,
365 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
366 call_fusage);
367
368 /* Put the register usage information there. */
369 add_function_usage_to (call_insn, call_fusage);
370
371 /* If this is a const call, then set the insn's unchanging bit. */
372 if (ecf_flags & (ECF_CONST | ECF_PURE))
373 CONST_OR_PURE_CALL_P (call_insn) = 1;
374
375 /* If this call can't throw, attach a REG_EH_REGION reg note to that
376 effect. */
377 if (ecf_flags & ECF_NOTHROW)
378 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
379 REG_NOTES (call_insn));
380 else
381 {
382 int rn = lookup_stmt_eh_region (fntree);
383
384 /* If rn < 0, then either (1) tree-ssa is not being used or (2) the call
385 cannot throw, which we already took care of. */
386 if (rn > 0)
387 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
388 REG_NOTES (call_insn));
389 note_current_region_may_contain_throw ();
390 }
391
392 if (ecf_flags & ECF_NORETURN)
393 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
394 REG_NOTES (call_insn));
395 if (ecf_flags & ECF_ALWAYS_RETURN)
396 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
397 REG_NOTES (call_insn));
398
399 if (ecf_flags & ECF_RETURNS_TWICE)
400 {
401 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
402 REG_NOTES (call_insn));
403 current_function_calls_setjmp = 1;
404 }
405
406 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
407
408 /* Restore this now, so that we do defer pops for this call's args
409 if the context of the call as a whole permits. */
410 inhibit_defer_pop = old_inhibit_defer_pop;
411
412 if (n_popped > 0)
413 {
414 if (!already_popped)
415 CALL_INSN_FUNCTION_USAGE (call_insn)
416 = gen_rtx_EXPR_LIST (VOIDmode,
417 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
418 CALL_INSN_FUNCTION_USAGE (call_insn));
419 rounded_stack_size -= n_popped;
420 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
421 stack_pointer_delta -= n_popped;
422 }
423
424 if (!ACCUMULATE_OUTGOING_ARGS)
425 {
426 /* If returning from the subroutine does not automatically pop the args,
427 we need an instruction to pop them sooner or later.
428 Perhaps do it now; perhaps just record how much space to pop later.
429
430 If returning from the subroutine does pop the args, indicate that the
431 stack pointer will be changed. */
432
433 if (rounded_stack_size != 0)
434 {
435 if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN | ECF_LONGJMP))
436 /* Just pretend we did the pop. */
437 stack_pointer_delta -= rounded_stack_size;
438 else if (flag_defer_pop && inhibit_defer_pop == 0
439 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
440 pending_stack_adjust += rounded_stack_size;
441 else
442 adjust_stack (rounded_stack_size_rtx);
443 }
444 }
445 /* When we accumulate outgoing args, we must avoid any stack manipulations.
446 Restore the stack pointer to its original value now. Usually
447 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
448 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
449 popping variants of functions exist as well.
450
451 ??? We may optimize similar to defer_pop above, but it is
452 probably not worthwhile.
453
454 ??? It will be worthwhile to enable combine_stack_adjustments even for
455 such machines. */
456 else if (n_popped)
457 anti_adjust_stack (GEN_INT (n_popped));
458 }
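
/* Editorial sketch, not part of the original calls.c: the stack bookkeeping
   at the end of emit_call_1 in numbers.  If the call pushed 24 bytes of
   arguments (ROUNDED_STACK_SIZE) and the callee itself pops 8 (N_POPPED),
   the caller still has 16 bytes to pop, either immediately or via
   pending_stack_adjust.  The helper name is hypothetical.  */
static HOST_WIDE_INT ATTRIBUTE_UNUSED
example_caller_pop_bytes (HOST_WIDE_INT rounded_stack_size,
			  HOST_WIDE_INT n_popped)
{
  /* 24 - 8 == 16 in the example above.  */
  return rounded_stack_size - n_popped;
}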
459
460 /* Determine if the function identified by FNDECL is one with
461 special properties we wish to know about.
462
463 For example, if the function might return more than one time (setjmp), then
464 set RETURNS_TWICE to a nonzero value.
465
466 Similarly set LONGJMP if the function is in the longjmp family.
467
468 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
469 space from the stack such as alloca. */
470
471 static int
472 special_function_p (tree fndecl, int flags)
473 {
474 if (fndecl && DECL_NAME (fndecl)
475 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
476 /* Exclude functions not at the file scope, or not `extern',
477 since they are not the magic functions we would otherwise
478 think they are.
479 FIXME: this should be handled with attributes, not with this
480 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
481 because you can declare fork() inside a function if you
482 wish. */
483 && (DECL_CONTEXT (fndecl) == NULL_TREE
484 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
485 && TREE_PUBLIC (fndecl))
486 {
487 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
488 const char *tname = name;
489
490 /* We assume that alloca will always be called by name. It
491 makes no sense to pass it as a pointer-to-function to
492 anything that does not understand its behavior. */
493 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
494 && name[0] == 'a'
495 && ! strcmp (name, "alloca"))
496 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
497 && name[0] == '_'
498 && ! strcmp (name, "__builtin_alloca"))))
499 flags |= ECF_MAY_BE_ALLOCA;
500
501 /* Disregard prefix _, __ or __x. */
502 if (name[0] == '_')
503 {
504 if (name[1] == '_' && name[2] == 'x')
505 tname += 3;
506 else if (name[1] == '_')
507 tname += 2;
508 else
509 tname += 1;
510 }
511
512 if (tname[0] == 's')
513 {
514 if ((tname[1] == 'e'
515 && (! strcmp (tname, "setjmp")
516 || ! strcmp (tname, "setjmp_syscall")))
517 || (tname[1] == 'i'
518 && ! strcmp (tname, "sigsetjmp"))
519 || (tname[1] == 'a'
520 && ! strcmp (tname, "savectx")))
521 flags |= ECF_RETURNS_TWICE;
522
523 if (tname[1] == 'i'
524 && ! strcmp (tname, "siglongjmp"))
525 flags |= ECF_LONGJMP;
526 }
527 else if ((tname[0] == 'q' && tname[1] == 's'
528 && ! strcmp (tname, "qsetjmp"))
529 || (tname[0] == 'v' && tname[1] == 'f'
530 && ! strcmp (tname, "vfork")))
531 flags |= ECF_RETURNS_TWICE;
532
533 else if (tname[0] == 'l' && tname[1] == 'o'
534 && ! strcmp (tname, "longjmp"))
535 flags |= ECF_LONGJMP;
536 }
537
538 return flags;
539 }
540
541 /* Return nonzero when FNDECL represents a function that may return more
542 than once (setjmp and friends). */
542
543 int
544 setjmp_call_p (tree fndecl)
545 {
546 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
547 }
548
549 /* Return true when EXP contains an alloca call. */
550 bool
551 alloca_call_p (tree exp)
552 {
553 if (TREE_CODE (exp) == CALL_EXPR
554 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
555 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
556 == FUNCTION_DECL)
557 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
558 0) & ECF_MAY_BE_ALLOCA))
559 return true;
560 return false;
561 }
562
563 /* Detect flags (function attributes) from the function decl or type node. */
564
565 int
566 flags_from_decl_or_type (tree exp)
567 {
568 int flags = 0;
569 tree type = exp;
570
571 if (DECL_P (exp))
572 {
573 struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
574 type = TREE_TYPE (exp);
575
576 if (i)
577 {
578 if (i->pure_function)
579 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
580 if (i->const_function)
581 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
582 }
583
584 /* The function exp may have the `malloc' attribute. */
585 if (DECL_IS_MALLOC (exp))
586 flags |= ECF_MALLOC;
587
588 /* The function exp may have the `pure' attribute. */
589 if (DECL_IS_PURE (exp))
590 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
591
592 if (TREE_NOTHROW (exp))
593 flags |= ECF_NOTHROW;
594
595 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
596 flags |= ECF_LIBCALL_BLOCK | ECF_CONST;
597
598 flags = special_function_p (exp, flags);
599 }
600 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
601 flags |= ECF_CONST;
602
603 if (TREE_THIS_VOLATILE (exp))
604 flags |= ECF_NORETURN;
605
606 /* Mark if the function returns with the stack pointer depressed. We
607 cannot consider it pure or constant in that case. */
608 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
609 {
610 flags |= ECF_SP_DEPRESSED;
611 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
612 }
613
614 return flags;
615 }
616
617 /* Detect flags from a CALL_EXPR. */
618
619 int
620 call_expr_flags (tree t)
621 {
622 int flags;
623 tree decl = get_callee_fndecl (t);
624
625 if (decl)
626 flags = flags_from_decl_or_type (decl);
627 else
628 {
629 t = TREE_TYPE (TREE_OPERAND (t, 0));
630 if (t && TREE_CODE (t) == POINTER_TYPE)
631 flags = flags_from_decl_or_type (TREE_TYPE (t));
632 else
633 flags = 0;
634 }
635
636 return flags;
637 }
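
/* Editorial sketch, not part of the original calls.c: querying the ECF_*
   flags of a CALL_EXPR with call_expr_flags.  EXP is assumed to be a
   CALL_EXPR node, and the helper name is hypothetical.  */
static bool ATTRIBUTE_UNUSED
example_call_cannot_throw_p (tree exp)
{
  return (call_expr_flags (exp) & ECF_NOTHROW) != 0;
}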
638
639 /* Precompute all register parameters as described by ARGS, storing values
640 into fields within the ARGS array.
641
642 NUM_ACTUALS indicates the total number of elements in the ARGS array.
643
644 Set REG_PARM_SEEN if we encounter a register parameter. */
645
646 static void
647 precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
648 {
649 int i;
650
651 *reg_parm_seen = 0;
652
653 for (i = 0; i < num_actuals; i++)
654 if (args[i].reg != 0 && ! args[i].pass_on_stack)
655 {
656 *reg_parm_seen = 1;
657
658 if (args[i].value == 0)
659 {
660 push_temp_slots ();
661 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
662 VOIDmode, 0);
663 preserve_temp_slots (args[i].value);
664 pop_temp_slots ();
665
666 /* ANSI doesn't require a sequence point here,
667 but PCC has one, so this will avoid some problems. */
668 emit_queue ();
669 }
670
671 /* If the value is a non-legitimate constant, force it into a
672 pseudo now. TLS symbols sometimes need a call to resolve. */
673 if (CONSTANT_P (args[i].value)
674 && !LEGITIMATE_CONSTANT_P (args[i].value))
675 args[i].value = force_reg (args[i].mode, args[i].value);
676
677 /* If we are to promote the function arg to a wider mode,
678 do it now. */
679
680 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
681 args[i].value
682 = convert_modes (args[i].mode,
683 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
684 args[i].value, args[i].unsignedp);
685
686 /* If the value is expensive, and we are inside an appropriately
687 short loop, put the value into a pseudo and then put the pseudo
688 into the hard reg.
689
690 For small register classes, also do this if this call uses
691 register parameters. This is to avoid reload conflicts while
692 loading the parameter registers. */
693
694 if ((! (REG_P (args[i].value)
695 || (GET_CODE (args[i].value) == SUBREG
696 && REG_P (SUBREG_REG (args[i].value)))))
697 && args[i].mode != BLKmode
698 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
699 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
700 || preserve_subexpressions_p ()))
701 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
702 }
703 }
704
705 #ifdef REG_PARM_STACK_SPACE
706
707 /* The argument list is the property of the called routine and it
708 may clobber it. If the fixed area has been used for previous
709 parameters, we must save and restore it. */
710
711 static rtx
712 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
713 {
714 int low;
715 int high;
716
717 /* Compute the boundary of the area that needs to be saved, if any. */
718 high = reg_parm_stack_space;
719 #ifdef ARGS_GROW_DOWNWARD
720 high += 1;
721 #endif
722 if (high > highest_outgoing_arg_in_use)
723 high = highest_outgoing_arg_in_use;
724
725 for (low = 0; low < high; low++)
726 if (stack_usage_map[low] != 0)
727 {
728 int num_to_save;
729 enum machine_mode save_mode;
730 int delta;
731 rtx stack_area;
732 rtx save_area;
733
734 while (stack_usage_map[--high] == 0)
735 ;
736
737 *low_to_save = low;
738 *high_to_save = high;
739
740 num_to_save = high - low + 1;
741 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
742
743 /* If we don't have the required alignment, we must do this
744 in BLKmode. */
745 if ((low & (MIN (GET_MODE_SIZE (save_mode),
746 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
747 save_mode = BLKmode;
748
749 #ifdef ARGS_GROW_DOWNWARD
750 delta = -high;
751 #else
752 delta = low;
753 #endif
754 stack_area = gen_rtx_MEM (save_mode,
755 memory_address (save_mode,
756 plus_constant (argblock,
757 delta)));
758
759 set_mem_align (stack_area, PARM_BOUNDARY);
760 if (save_mode == BLKmode)
761 {
762 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
763 emit_block_move (validize_mem (save_area), stack_area,
764 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
765 }
766 else
767 {
768 save_area = gen_reg_rtx (save_mode);
769 emit_move_insn (save_area, stack_area);
770 }
771
772 return save_area;
773 }
774
775 return NULL_RTX;
776 }
777
778 static void
779 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
780 {
781 enum machine_mode save_mode = GET_MODE (save_area);
782 int delta;
783 rtx stack_area;
784
785 #ifdef ARGS_GROW_DOWNWARD
786 delta = -high_to_save;
787 #else
788 delta = low_to_save;
789 #endif
790 stack_area = gen_rtx_MEM (save_mode,
791 memory_address (save_mode,
792 plus_constant (argblock, delta)));
793 set_mem_align (stack_area, PARM_BOUNDARY);
794
795 if (save_mode != BLKmode)
796 emit_move_insn (stack_area, save_area);
797 else
798 emit_block_move (stack_area, validize_mem (save_area),
799 GEN_INT (high_to_save - low_to_save + 1),
800 BLOCK_OP_CALL_PARM);
801 }
802 #endif /* REG_PARM_STACK_SPACE */
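
/* Editorial sketch, not part of the original calls.c: how the two helpers
   above are meant to be paired around a nested call that might clobber the
   fixed register-parameter area.  ARGBLOCK and REG_PARM_STACK_SPACE are
   assumed to be the values expand_call has in hand; the nested call itself
   is elided, and the helper name is hypothetical.  */
#ifdef REG_PARM_STACK_SPACE
static void ATTRIBUTE_UNUSED
example_protect_fixed_area (rtx argblock, int reg_parm_stack_space)
{
  int low_to_save, high_to_save;
  rtx save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					    &low_to_save, &high_to_save);

  /* ... emit the call that may clobber the fixed area here ...  */

  if (save_area)
    restore_fixed_argument_area (save_area, argblock,
				 high_to_save, low_to_save);
}
#endif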
803
804 /* For any elements in ARGS that refer to parameters to be passed in
805 registers, but not in memory, and whose alignment does not permit a
806 direct copy into registers, copy the values into a group of pseudos
807 which we will later copy into the appropriate hard registers.
808
809 Pseudos for each unaligned argument will be stored into the array
810 args[argnum].aligned_regs. The caller is responsible for deallocating
811 the aligned_regs array if it is nonzero. */
812
813 static void
814 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
815 {
816 int i, j;
817
818 for (i = 0; i < num_actuals; i++)
819 if (args[i].reg != 0 && ! args[i].pass_on_stack
820 && args[i].mode == BLKmode
821 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
822 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
823 {
824 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
825 int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
826 int endian_correction = 0;
827
828 args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
829 args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
830
831 /* Structures smaller than a word are normally aligned to the
832 least significant byte. On a BYTES_BIG_ENDIAN machine,
833 this means we must skip the empty high order bytes when
834 calculating the bit offset. */
835 if (bytes < UNITS_PER_WORD
836 #ifdef BLOCK_REG_PADDING
837 && (BLOCK_REG_PADDING (args[i].mode,
838 TREE_TYPE (args[i].tree_value), 1)
839 == downward)
840 #else
841 && BYTES_BIG_ENDIAN
842 #endif
843 )
844 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
845
846 for (j = 0; j < args[i].n_aligned_regs; j++)
847 {
848 rtx reg = gen_reg_rtx (word_mode);
849 rtx word = operand_subword_force (args[i].value, j, BLKmode);
850 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
851
852 args[i].aligned_regs[j] = reg;
853 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
854 word_mode, word_mode, BITS_PER_WORD);
855
856 /* There is no need to restrict this code to loading items
857 in TYPE_ALIGN sized hunks. The bitfield instructions can
858 load up entire word sized registers efficiently.
859
860 ??? This may not be needed anymore.
861 We used to emit a clobber here but that doesn't let later
862 passes optimize the instructions we emit. By storing 0 into
863 the register, later passes know the first AND to zero out the
864 bitfield being set in the register is unnecessary. The store
865 of 0 will be deleted as will at least the first AND. */
866
867 emit_move_insn (reg, const0_rtx);
868
869 bytes -= bitsize / BITS_PER_UNIT;
870 store_bit_field (reg, bitsize, endian_correction, word_mode,
871 word, BITS_PER_WORD);
872 }
873 }
874 }
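
/* Editorial sketch, not part of the original calls.c: the endian-correction
   arithmetic above in numbers.  For a 3-byte struct on a 32-bit big-endian
   target, BITS_PER_WORD - 3 * BITS_PER_UNIT == 32 - 24 == 8, so the value
   is stored 8 bits up from the least significant end of the word.  The
   helper name is hypothetical.  */
static int ATTRIBUTE_UNUSED
example_endian_correction (int bytes)
{
  return bytes < UNITS_PER_WORD ? BITS_PER_WORD - bytes * BITS_PER_UNIT : 0;
}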
875
876 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
877 ACTPARMS.
878
879 NUM_ACTUALS is the total number of parameters.
880
881 N_NAMED_ARGS is the total number of named arguments.
882
883 FNDECL is the tree node for the target of this call (if known).
884
885 ARGS_SO_FAR holds state needed by the target to know where to place
886 the next argument.
887
888 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
889 for arguments which are passed in registers.
890
891 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
892 and may be modified by this routine.
893
894 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
895 flags which may be modified by this routine.
896
897 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
898 that requires allocation of stack space.
899
900 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
901 the thunked-to function. */
902
903 static void
904 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
905 struct arg_data *args,
906 struct args_size *args_size,
907 int n_named_args ATTRIBUTE_UNUSED,
908 tree actparms, tree fndecl,
909 CUMULATIVE_ARGS *args_so_far,
910 int reg_parm_stack_space,
911 rtx *old_stack_level, int *old_pending_adj,
912 int *must_preallocate, int *ecf_flags,
913 bool *may_tailcall, bool call_from_thunk_p)
914 {
915 /* 1 if scanning parms front to back, -1 if scanning back to front. */
916 int inc;
917
918 /* Count arg position in order args appear. */
919 int argpos;
920
921 int i;
922 tree p;
923
924 args_size->constant = 0;
925 args_size->var = 0;
926
927 /* In this loop, we consider args in the order they are written.
928 We fill up ARGS from the front or from the back if necessary
929 so that in any case the first arg to be pushed ends up at the front. */
930
931 if (PUSH_ARGS_REVERSED)
932 {
933 i = num_actuals - 1, inc = -1;
934 /* In this case, must reverse order of args
935 so that we compute and push the last arg first. */
936 }
937 else
938 {
939 i = 0, inc = 1;
940 }
941
942 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
943 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
944 {
945 tree type = TREE_TYPE (TREE_VALUE (p));
946 int unsignedp;
947 enum machine_mode mode;
948
949 args[i].tree_value = TREE_VALUE (p);
950
951 /* Replace erroneous argument with constant zero. */
952 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
953 args[i].tree_value = integer_zero_node, type = integer_type_node;
954
955 /* If TYPE is a transparent union, pass things the way we would
956 pass the first field of the union. We have already verified that
957 the modes are the same. */
958 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
959 type = TREE_TYPE (TYPE_FIELDS (type));
960
961 /* Decide where to pass this arg.
962
963 args[i].reg is nonzero if all or part is passed in registers.
964
965 args[i].partial is nonzero if part but not all is passed in registers,
966 and the exact value says how many words are passed in registers.
967
968 args[i].pass_on_stack is nonzero if the argument must at least be
969 computed on the stack. It may then be loaded back into registers
970 if args[i].reg is nonzero.
971
972 These decisions are driven by the FUNCTION_... macros and must agree
973 with those made by function.c. */
974
975 /* See if this argument should be passed by invisible reference. */
976 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
977 || TREE_ADDRESSABLE (type)
978 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
979 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
980 type, argpos < n_named_args)
981 #endif
982 )
983 {
984 /* If we're compiling a thunk, pass through invisible
985 references instead of making a copy. */
986 if (call_from_thunk_p
987 #ifdef FUNCTION_ARG_CALLEE_COPIES
988 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
989 type, argpos < n_named_args)
990 /* If it's in a register, we must make a copy of it too. */
991 /* ??? Is this a sufficient test? Is there a better one? */
992 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
993 && REG_P (DECL_RTL (args[i].tree_value)))
994 && ! TREE_ADDRESSABLE (type))
995 #endif
996 )
997 {
998 /* C++ uses a TARGET_EXPR to indicate that we want to make a
999 new object from the argument. If we are passing by
1000 invisible reference, the callee will do that for us, so we
1001 can strip off the TARGET_EXPR. This is not always safe,
1002 but it is safe in the only case where this is a useful
1003 optimization; namely, when the argument is a plain object.
1004 In that case, the frontend is just asking the backend to
1005 make a bitwise copy of the argument. */
1006
1007 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1008 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1009 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1010 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1011
1012 /* We can't use sibcalls if a callee-copied argument is stored
1013 in the current function's frame. */
1014 if (!call_from_thunk_p
1015 && (!DECL_P (args[i].tree_value)
1016 || !TREE_STATIC (args[i].tree_value)))
1017 *may_tailcall = false;
1018
1019 args[i].tree_value = build1 (ADDR_EXPR,
1020 build_pointer_type (type),
1021 args[i].tree_value);
1022 type = build_pointer_type (type);
1023 }
1024 else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
1025 {
1026 /* In the V3 C++ ABI, parameters are destroyed in the caller.
1027 We implement this by passing the address of the temporary
1028 rather than expanding it into another allocated slot. */
1029 args[i].tree_value = build1 (ADDR_EXPR,
1030 build_pointer_type (type),
1031 args[i].tree_value);
1032 type = build_pointer_type (type);
1033 *may_tailcall = false;
1034 }
1035 else
1036 {
1037 /* We make a copy of the object and pass the address to the
1038 function being called. */
1039 rtx copy;
1040
1041 if (!COMPLETE_TYPE_P (type)
1042 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1043 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1044 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1045 STACK_CHECK_MAX_VAR_SIZE))))
1046 {
1047 /* This is a variable-sized object. Make space on the stack
1048 for it. */
1049 rtx size_rtx = expr_size (TREE_VALUE (p));
1050
1051 if (*old_stack_level == 0)
1052 {
1053 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1054 *old_pending_adj = pending_stack_adjust;
1055 pending_stack_adjust = 0;
1056 }
1057
1058 copy = gen_rtx_MEM (BLKmode,
1059 allocate_dynamic_stack_space
1060 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1061 set_mem_attributes (copy, type, 1);
1062 }
1063 else
1064 copy = assign_temp (type, 0, 1, 0);
1065
1066 store_expr (args[i].tree_value, copy, 0);
1067 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1068
1069 args[i].tree_value = build1 (ADDR_EXPR,
1070 build_pointer_type (type),
1071 make_tree (type, copy));
1072 type = build_pointer_type (type);
1073 *may_tailcall = false;
1074 }
1075 }
1076
1077 mode = TYPE_MODE (type);
1078 unsignedp = TYPE_UNSIGNED (type);
1079
1080 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1081 mode = promote_mode (type, mode, &unsignedp, 1);
1082
1083 args[i].unsignedp = unsignedp;
1084 args[i].mode = mode;
1085
1086 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1087 argpos < n_named_args);
1088 #ifdef FUNCTION_INCOMING_ARG
1089 /* If this is a sibling call and the machine has register windows, the
1090 register window has to be unwound before calling the routine, so
1091 arguments have to go into the incoming registers. */
1092 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1093 argpos < n_named_args);
1094 #else
1095 args[i].tail_call_reg = args[i].reg;
1096 #endif
1097
1098 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1099 if (args[i].reg)
1100 args[i].partial
1101 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1102 argpos < n_named_args);
1103 #endif
1104
1105 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1106
1107 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1108 it means that we are to pass this arg in the register(s) designated
1109 by the PARALLEL, but also to pass it in the stack. */
1110 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1111 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1112 args[i].pass_on_stack = 1;
1113
1114 /* If this is an addressable type, we must preallocate the stack
1115 since we must evaluate the object into its final location.
1116
1117 If this is to be passed in both registers and the stack, it is simpler
1118 to preallocate. */
1119 if (TREE_ADDRESSABLE (type)
1120 || (args[i].pass_on_stack && args[i].reg != 0))
1121 *must_preallocate = 1;
1122
1123 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1124 we cannot consider this function call constant. */
1125 if (TREE_ADDRESSABLE (type))
1126 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1127
1128 /* Compute the stack-size of this argument. */
1129 if (args[i].reg == 0 || args[i].partial != 0
1130 || reg_parm_stack_space > 0
1131 || args[i].pass_on_stack)
1132 locate_and_pad_parm (mode, type,
1133 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1134 1,
1135 #else
1136 args[i].reg != 0,
1137 #endif
1138 args[i].pass_on_stack ? 0 : args[i].partial,
1139 fndecl, args_size, &args[i].locate);
1140 #ifdef BLOCK_REG_PADDING
1141 else
1142 /* The argument is passed entirely in registers. See at which
1143 end it should be padded. */
1144 args[i].locate.where_pad =
1145 BLOCK_REG_PADDING (mode, type,
1146 int_size_in_bytes (type) <= UNITS_PER_WORD);
1147 #endif
1148
1149 /* Update ARGS_SIZE, the total stack space for args so far. */
1150
1151 args_size->constant += args[i].locate.size.constant;
1152 if (args[i].locate.size.var)
1153 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1154
1155 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1156 have been used, etc. */
1157
1158 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1159 argpos < n_named_args);
1160 }
1161 }
1162
1163 /* Update ARGS_SIZE to contain the total size for the argument block.
1164 Return the original constant component of the argument block's size.
1165
1166 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1167 for arguments passed in registers. */
1168
1169 static int
1170 compute_argument_block_size (int reg_parm_stack_space,
1171 struct args_size *args_size,
1172 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1173 {
1174 int unadjusted_args_size = args_size->constant;
1175
1176 /* For accumulate outgoing args mode we don't need to align, since the frame
1177 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1178 backends from generating misaligned frame sizes. */
1179 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1180 preferred_stack_boundary = STACK_BOUNDARY;
1181
1182 /* Compute the actual size of the argument block required. The variable
1183 and constant sizes must be combined, the size may have to be rounded,
1184 and there may be a minimum required size. */
1185
1186 if (args_size->var)
1187 {
1188 args_size->var = ARGS_SIZE_TREE (*args_size);
1189 args_size->constant = 0;
1190
1191 preferred_stack_boundary /= BITS_PER_UNIT;
1192 if (preferred_stack_boundary > 1)
1193 {
1194 /* We don't handle this case yet. To handle it correctly we have
1195 to add the delta, round and subtract the delta.
1196 Currently no machine description requires this support. */
1197 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1198 abort ();
1199 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1200 }
1201
1202 if (reg_parm_stack_space > 0)
1203 {
1204 args_size->var
1205 = size_binop (MAX_EXPR, args_size->var,
1206 ssize_int (reg_parm_stack_space));
1207
1208 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1209 /* The area corresponding to register parameters is not to be counted
1210 in the size of the block we need, so make the adjustment. */
1211 args_size->var
1212 = size_binop (MINUS_EXPR, args_size->var,
1213 ssize_int (reg_parm_stack_space));
1214 #endif
1215 }
1216 }
1217 else
1218 {
1219 preferred_stack_boundary /= BITS_PER_UNIT;
1220 if (preferred_stack_boundary < 1)
1221 preferred_stack_boundary = 1;
1222 args_size->constant = (((args_size->constant
1223 + stack_pointer_delta
1224 + preferred_stack_boundary - 1)
1225 / preferred_stack_boundary
1226 * preferred_stack_boundary)
1227 - stack_pointer_delta);
1228
1229 args_size->constant = MAX (args_size->constant,
1230 reg_parm_stack_space);
1231
1232 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1233 args_size->constant -= reg_parm_stack_space;
1234 #endif
1235 }
1236 return unadjusted_args_size;
1237 }
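
/* Editorial sketch, not part of the original calls.c: the constant-size
   rounding above in numbers.  With a 16-byte preferred boundary, 20 bytes
   of arguments and a stack_pointer_delta of 4, the block becomes
   ((20 + 4 + 15) / 16) * 16 - 4 == 28 bytes, so that 28 + 4 is again a
   multiple of 16 once the arguments are pushed.  The helper name and its
   parameters are hypothetical.  */
static int ATTRIBUTE_UNUSED
example_round_arg_block (int constant, int delta, int boundary_bytes)
{
  return ((constant + delta + boundary_bytes - 1)
	  / boundary_bytes * boundary_bytes) - delta;
}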
1238
1239 /* Precompute parameters as needed for a function call.
1240
1241 FLAGS is mask of ECF_* constants.
1242
1243 NUM_ACTUALS is the number of arguments.
1244
1245 ARGS is an array containing information for each argument; this
1246 routine fills in the INITIAL_VALUE and VALUE fields for each
1247 precomputed argument. */
1248
1249 static void
1250 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1251 {
1252 int i;
1253
1254 /* If this is a libcall, then precompute all arguments so that we do not
1255 get extraneous instructions emitted as part of the libcall sequence. */
1256 if ((flags & ECF_LIBCALL_BLOCK) == 0)
1257 return;
1258
1259 for (i = 0; i < num_actuals; i++)
1260 {
1261 enum machine_mode mode;
1262
1263 /* If this is an addressable type, we cannot pre-evaluate it. */
1264 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1265 abort ();
1266
1267 args[i].value
1268 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1269
1270 /* ANSI doesn't require a sequence point here,
1271 but PCC has one, so this will avoid some problems. */
1272 emit_queue ();
1273
1274 args[i].initial_value = args[i].value
1275 = protect_from_queue (args[i].value, 0);
1276
1277 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1278 if (mode != args[i].mode)
1279 {
1280 args[i].value
1281 = convert_modes (args[i].mode, mode,
1282 args[i].value, args[i].unsignedp);
1283 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1284 /* CSE will replace this only if it contains args[i].value
1285 pseudo, so convert it down to the declared mode using
1286 a SUBREG. */
1287 if (REG_P (args[i].value)
1288 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1289 {
1290 args[i].initial_value
1291 = gen_lowpart_SUBREG (mode, args[i].value);
1292 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1293 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1294 args[i].unsignedp);
1295 }
1296 #endif
1297 }
1298 }
1299 }
1300
1301 /* Given the current state of MUST_PREALLOCATE and information about
1302 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1303 compute and return the final value for MUST_PREALLOCATE. */
1304
1305 static int
1306 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1307 {
1308 /* See if we have or want to preallocate stack space.
1309
1310 If we would have to push a partially-in-regs parm
1311 before other stack parms, preallocate stack space instead.
1312
1313 If the size of some parm is not a multiple of the required stack
1314 alignment, we must preallocate.
1315
1316 If the total size of arguments that would otherwise create a copy in
1317 a temporary (such as a CALL) is more than half the total argument list
1318 size, preallocation is faster.
1319
1320 Another reason to preallocate is if we have a machine (like the m88k)
1321 where stack alignment is required to be maintained between every
1322 pair of insns, not just when the call is made. However, we assume here
1323 that such machines either do not have push insns (and hence preallocation
1324 would occur anyway) or the problem is taken care of with
1325 PUSH_ROUNDING. */
1326
1327 if (! must_preallocate)
1328 {
1329 int partial_seen = 0;
1330 int copy_to_evaluate_size = 0;
1331 int i;
1332
1333 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1334 {
1335 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1336 partial_seen = 1;
1337 else if (partial_seen && args[i].reg == 0)
1338 must_preallocate = 1;
1339
1340 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1341 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1342 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1343 || TREE_CODE (args[i].tree_value) == COND_EXPR
1344 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1345 copy_to_evaluate_size
1346 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1347 }
1348
1349 if (copy_to_evaluate_size * 2 >= args_size->constant
1350 && args_size->constant > 0)
1351 must_preallocate = 1;
1352 }
1353 return must_preallocate;
1354 }
1355
1356 /* If we preallocated stack space, compute the address of each argument
1357 and store it into the ARGS array.
1358
1359 We need not ensure it is a valid memory address here; it will be
1360 validized when it is used.
1361
1362 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1363
1364 static void
1365 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1366 {
1367 if (argblock)
1368 {
1369 rtx arg_reg = argblock;
1370 int i, arg_offset = 0;
1371
1372 if (GET_CODE (argblock) == PLUS)
1373 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1374
1375 for (i = 0; i < num_actuals; i++)
1376 {
1377 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1378 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1379 rtx addr;
1380
1381 /* Skip this parm if it will not be passed on the stack. */
1382 if (! args[i].pass_on_stack && args[i].reg != 0)
1383 continue;
1384
1385 if (GET_CODE (offset) == CONST_INT)
1386 addr = plus_constant (arg_reg, INTVAL (offset));
1387 else
1388 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1389
1390 addr = plus_constant (addr, arg_offset);
1391 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1392 set_mem_align (args[i].stack, PARM_BOUNDARY);
1393 set_mem_attributes (args[i].stack,
1394 TREE_TYPE (args[i].tree_value), 1);
1395
1396 if (GET_CODE (slot_offset) == CONST_INT)
1397 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1398 else
1399 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1400
1401 addr = plus_constant (addr, arg_offset);
1402 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1403 set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
1404 set_mem_attributes (args[i].stack_slot,
1405 TREE_TYPE (args[i].tree_value), 1);
1406
1407 /* Function incoming arguments may overlap with sibling call
1408 outgoing arguments and we cannot allow reordering of reads
1409 from function arguments with stores to outgoing arguments
1410 of sibling calls. */
1411 set_mem_alias_set (args[i].stack, 0);
1412 set_mem_alias_set (args[i].stack_slot, 0);
1413 }
1414 }
1415 }
1416
1417 /* Given FNDECL and ADDR, return an rtx suitable for use as a target address
1418 in a call instruction.
1419
1420 FNDECL is the tree node for the target function. For an indirect call
1421 FNDECL will be NULL_TREE.
1422
1423 ADDR is the operand 0 of CALL_EXPR for this call. */
1424
1425 static rtx
1426 rtx_for_function_call (tree fndecl, tree addr)
1427 {
1428 rtx funexp;
1429
1430 /* Get the function to call, in the form of RTL. */
1431 if (fndecl)
1432 {
1433 /* If this is the first use of the function, see if we need to
1434 make an external definition for it. */
1435 if (! TREE_USED (fndecl))
1436 {
1437 assemble_external (fndecl);
1438 TREE_USED (fndecl) = 1;
1439 }
1440
1441 /* Get a SYMBOL_REF rtx for the function address. */
1442 funexp = XEXP (DECL_RTL (fndecl), 0);
1443 }
1444 else
1445 /* Generate an rtx (probably a pseudo-register) for the address. */
1446 {
1447 push_temp_slots ();
1448 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1449 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1450 emit_queue ();
1451 }
1452 return funexp;
1453 }
1454
1455 /* Do the register loads required for any wholly-register parms or any
1456 parms which are passed both on the stack and in a register. Their
1457 expressions were already evaluated.
1458
1459 Mark all register-parms as living through the call, putting these USE
1460 insns in the CALL_INSN_FUNCTION_USAGE field.
1461
1462 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1463 checking, setting *SIBCALL_FAILURE if appropriate. */
1464
1465 static void
1466 load_register_parameters (struct arg_data *args, int num_actuals,
1467 rtx *call_fusage, int flags, int is_sibcall,
1468 int *sibcall_failure)
1469 {
1470 int i, j;
1471
1472 for (i = 0; i < num_actuals; i++)
1473 {
1474 rtx reg = ((flags & ECF_SIBCALL)
1475 ? args[i].tail_call_reg : args[i].reg);
1476 if (reg)
1477 {
1478 int partial = args[i].partial;
1479 int nregs;
1480 int size = 0;
1481 rtx before_arg = get_last_insn ();
1482 /* Set to non-negative if we must move a word at a time, even if just
1483 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1484 we just use a normal move insn. This value can be zero if the
1485 argument is a zero size structure with no fields. */
1486 nregs = -1;
1487 if (partial)
1488 nregs = partial;
1489 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1490 {
1491 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1492 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1493 }
1494 else
1495 size = GET_MODE_SIZE (args[i].mode);
1496
1497 /* Handle calls that pass values in multiple non-contiguous
1498 locations. The Irix 6 ABI has examples of this. */
1499
1500 if (GET_CODE (reg) == PARALLEL)
1501 {
1502 tree type = TREE_TYPE (args[i].tree_value);
1503 emit_group_load (reg, args[i].value, type,
1504 int_size_in_bytes (type));
1505 }
1506
1507 /* If simple case, just do move. If normal partial, store_one_arg
1508 has already loaded the register for us. In all other cases,
1509 load the register(s) from memory. */
1510
1511 else if (nregs == -1)
1512 {
1513 emit_move_insn (reg, args[i].value);
1514 #ifdef BLOCK_REG_PADDING
1515 /* Handle case where we have a value that needs shifting
1516 up to the msb, e.g. a QImode value and we're padding
1517 upward on a BYTES_BIG_ENDIAN machine. */
1518 if (size < UNITS_PER_WORD
1519 && (args[i].locate.where_pad
1520 == (BYTES_BIG_ENDIAN ? upward : downward)))
1521 {
1522 rtx x;
1523 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1524
1525 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1526 report the whole reg as used. Strictly speaking, the
1527 call only uses SIZE bytes at the msb end, but it doesn't
1528 seem worth generating rtl to say that. */
1529 reg = gen_rtx_REG (word_mode, REGNO (reg));
1530 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1531 build_int_2 (shift, 0), reg, 1);
1532 if (x != reg)
1533 emit_move_insn (reg, x);
1534 }
1535 #endif
1536 }
1537
1538 /* If we have pre-computed the values to put in the registers in
1539 the case of non-aligned structures, copy them in now. */
1540
1541 else if (args[i].n_aligned_regs != 0)
1542 for (j = 0; j < args[i].n_aligned_regs; j++)
1543 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1544 args[i].aligned_regs[j]);
1545
1546 else if (partial == 0 || args[i].pass_on_stack)
1547 {
1548 rtx mem = validize_mem (args[i].value);
1549
1550 /* Handle a BLKmode that needs shifting. */
1551 if (nregs == 1 && size < UNITS_PER_WORD
1552 #ifdef BLOCK_REG_PADDING
1553 && args[i].locate.where_pad == downward
1554 #else
1555 && BYTES_BIG_ENDIAN
1556 #endif
1557 )
1558 {
1559 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1560 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1561 rtx x = gen_reg_rtx (word_mode);
1562 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1563 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1564 : LSHIFT_EXPR;
1565
1566 emit_move_insn (x, tem);
1567 x = expand_shift (dir, word_mode, x,
1568 build_int_2 (shift, 0), ri, 1);
1569 if (x != ri)
1570 emit_move_insn (ri, x);
1571 }
1572 else
1573 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1574 }
1575
1576 /* When a parameter is a block, and perhaps in other cases, it is
1577 possible that it did a load from an argument slot that was
1578 already clobbered. */
1579 if (is_sibcall
1580 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1581 *sibcall_failure = 1;
1582
1583 /* Handle calls that pass values in multiple non-contiguous
1584 locations. The Irix 6 ABI has examples of this. */
1585 if (GET_CODE (reg) == PARALLEL)
1586 use_group_regs (call_fusage, reg);
1587 else if (nregs == -1)
1588 use_reg (call_fusage, reg);
1589 else
1590 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1591 }
1592 }
1593 }
1594
1595 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1596 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1597 bytes, then we would need to push some additional bytes to pad the
1598 arguments. So, we compute an adjustment to the stack pointer for an
1599 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1600 bytes. Then, when the arguments are pushed the stack will be perfectly
1601 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1602 be popped after the call. Returns the adjustment. */
1603
1604 static int
1605 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1606 struct args_size *args_size,
1607 int preferred_unit_stack_boundary)
1608 {
1609 /* The number of bytes to pop so that the stack will be
1610 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1611 HOST_WIDE_INT adjustment;
1612 /* The alignment of the stack after the arguments are pushed, if we
1613 just pushed the arguments without adjusting the stack here. */
1614 HOST_WIDE_INT unadjusted_alignment;
1615
1616 unadjusted_alignment
1617 = ((stack_pointer_delta + unadjusted_args_size)
1618 % preferred_unit_stack_boundary);
1619
1620 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1621 as possible -- leaving just enough left to cancel out the
1622 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1623 PENDING_STACK_ADJUST is non-negative, and congruent to
1624 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1625
1626 /* Begin by trying to pop all the bytes. */
1627 unadjusted_alignment
1628 = (unadjusted_alignment
1629 - (pending_stack_adjust % preferred_unit_stack_boundary));
1630 adjustment = pending_stack_adjust;
1631 /* Push enough additional bytes that the stack will be aligned
1632 after the arguments are pushed. */
1633 if (preferred_unit_stack_boundary > 1)
1634 {
1635 if (unadjusted_alignment > 0)
1636 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1637 else
1638 adjustment += unadjusted_alignment;
1639 }
1640
1641 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1642 bytes after the call. The right number is the entire
1643 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1644 by the arguments in the first place. */
1645 args_size->constant
1646 = pending_stack_adjust - adjustment + unadjusted_args_size;
1647
1648 return adjustment;
1649 }
1650
1651 /* Scan expression X to see whether it dereferences any argument slot
1652 that has already been clobbered by tail call arguments (as noted in
1653 the stored_args_map bitmap).
1654 Return nonzero if X dereferences such an argument slot,
1655 zero otherwise. */
1656
1657 static int
1658 check_sibcall_argument_overlap_1 (rtx x)
1659 {
1660 RTX_CODE code;
1661 int i, j;
1662 unsigned int k;
1663 const char *fmt;
1664
1665 if (x == NULL_RTX)
1666 return 0;
1667
1668 code = GET_CODE (x);
1669
1670 if (code == MEM)
1671 {
1672 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1673 i = 0;
1674 else if (GET_CODE (XEXP (x, 0)) == PLUS
1675 && XEXP (XEXP (x, 0), 0) ==
1676 current_function_internal_arg_pointer
1677 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1678 i = INTVAL (XEXP (XEXP (x, 0), 1));
1679 else
1680 return 0;
1681
1682 #ifdef ARGS_GROW_DOWNWARD
1683 i = -i - GET_MODE_SIZE (GET_MODE (x));
1684 #endif
1685
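/* At this point I is the byte offset of the access within the incoming
   argument area; check every byte the access covers against the bitmap
   of bytes already overwritten by outgoing arguments.  */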
1686 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1687 if (i + k < stored_args_map->n_bits
1688 && TEST_BIT (stored_args_map, i + k))
1689 return 1;
1690
1691 return 0;
1692 }
1693
1694 /* Scan all subexpressions. */
1695 fmt = GET_RTX_FORMAT (code);
1696 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1697 {
1698 if (*fmt == 'e')
1699 {
1700 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1701 return 1;
1702 }
1703 else if (*fmt == 'E')
1704 {
1705 for (j = 0; j < XVECLEN (x, i); j++)
1706 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1707 return 1;
1708 }
1709 }
1710 return 0;
1711 }
1712
1713 /* Scan the sequence after INSN to see whether it dereferences any argument
1714 slot that has already been clobbered by tail call arguments (as noted in
1715 the stored_args_map bitmap). If MARK_STORED_ARGS_MAP is nonzero, add ARG's
1716 stack slots to the stored_args_map bitmap afterwards (MARK_STORED_ARGS_MAP
1717 should be 0 when ARG is passed in a register). Return nonzero if the
1718 sequence after INSN dereferences such an argument slot, zero otherwise. */
1719
1720 static int
1721 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1722 {
1723 int low, high;
1724
1725 if (insn == NULL_RTX)
1726 insn = get_insns ();
1727 else
1728 insn = NEXT_INSN (insn);
1729
1730 for (; insn; insn = NEXT_INSN (insn))
1731 if (INSN_P (insn)
1732 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1733 break;
1734
1735 if (mark_stored_args_map)
1736 {
1737 #ifdef ARGS_GROW_DOWNWARD
1738 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1739 #else
1740 low = arg->locate.slot_offset.constant;
1741 #endif
1742
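/* Mark every byte of ARG's stack slot as stored.  */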
1743 for (high = low + arg->locate.size.constant; low < high; low++)
1744 SET_BIT (stored_args_map, low);
1745 }
1746 return insn != NULL_RTX;
1747 }
1748
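/* Return a version of T that is safe to evaluate more than once while
   building the tail call argument list: safe trees are returned
   unchanged, mildly unsafe trees are wrapped in an UNSAVE_EXPR, and
   wildly unsafe trees are evaluated immediately into a temporary
   VAR_DECL whose DECL_RTL holds the result.  */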
1749 static tree
1750 fix_unsafe_tree (tree t)
1751 {
1752 switch (unsafe_for_reeval (t))
1753 {
1754 case 0: /* Safe. */
1755 break;
1756
1757 case 1: /* Mildly unsafe. */
1758 t = unsave_expr (t);
1759 break;
1760
1761 case 2: /* Wildly unsafe. */
1762 {
1763 tree var = build_decl (VAR_DECL, NULL_TREE,
1764 TREE_TYPE (t));
1765 SET_DECL_RTL (var,
1766 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
1767 t = var;
1768 }
1769 break;
1770
1771 default:
1772 abort ();
1773 }
1774 return t;
1775 }
1776
1777
1778 /* If function value *VALUE was returned at the most significant end of a
1779 register, shift it towards the least significant end and convert it to
1780 TYPE's mode. Return true and update *VALUE if some action was needed.
1781
1782 TYPE is the type of the function's return value, which is known not
1783 to have mode BLKmode. */
1784
1785 static bool
1786 shift_returned_value (tree type, rtx *value)
1787 {
1788 if (targetm.calls.return_in_msb (type))
1789 {
1790 HOST_WIDE_INT shift;
1791
1792 shift = (GET_MODE_BITSIZE (GET_MODE (*value))
1793 - BITS_PER_UNIT * int_size_in_bytes (type));
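/* The value sits in the most significant end of the register; a
   logical right shift of SHIFT bits moves it to the least significant
   end (for example, 24 bits for a 1-byte value returned in a 4-byte
   register).  */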
1794 if (shift > 0)
1795 {
1796 /* Shift the value into the low part of the register. */
1797 *value = expand_binop (GET_MODE (*value), lshr_optab, *value,
1798 GEN_INT (shift), 0, 1, OPTAB_WIDEN);
1799
1800 /* Truncate it to the type's mode, or its integer equivalent.
1801 This is subject to TRULY_NOOP_TRUNCATION. */
1802 *value = convert_to_mode (int_mode_for_mode (TYPE_MODE (type)),
1803 *value, 0);
1804
1805 /* Now convert it to the final form. */
1806 *value = gen_lowpart (TYPE_MODE (type), *value);
1807 return true;
1808 }
1809 }
1810 return false;
1811 }
1812
1813 /* Remove all REG_EQUIV notes found in the insn chain. */
1814
1815 static void
1816 purge_reg_equiv_notes (void)
1817 {
1818 rtx insn;
1819
1820 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1821 {
1822 while (1)
1823 {
1824 rtx note = find_reg_note (insn, REG_EQUIV, 0);
1825 if (note)
1826 {
1827 /* Remove the note and keep looking at the notes for
1828 this insn. */
1829 remove_note (insn, note);
1830 continue;
1831 }
1832 break;
1833 }
1834 }
1835 }
1836
1837 /* Clear RTX_UNCHANGING_P flag of incoming argument MEMs. */
1838
1839 static void
1840 purge_mem_unchanging_flag (rtx x)
1841 {
1842 RTX_CODE code;
1843 int i, j;
1844 const char *fmt;
1845
1846 if (x == NULL_RTX)
1847 return;
1848
1849 code = GET_CODE (x);
1850
1851 if (code == MEM)
1852 {
1853 if (RTX_UNCHANGING_P (x)
1854 && (XEXP (x, 0) == current_function_internal_arg_pointer
1855 || (GET_CODE (XEXP (x, 0)) == PLUS
1856 && XEXP (XEXP (x, 0), 0) ==
1857 current_function_internal_arg_pointer
1858 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
1859 RTX_UNCHANGING_P (x) = 0;
1860 return;
1861 }
1862
1863 /* Scan all subexpressions. */
1864 fmt = GET_RTX_FORMAT (code);
1865 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1866 {
1867 if (*fmt == 'e')
1868 purge_mem_unchanging_flag (XEXP (x, i));
1869 else if (*fmt == 'E')
1870 for (j = 0; j < XVECLEN (x, i); j++)
1871 purge_mem_unchanging_flag (XVECEXP (x, i, j));
1872 }
1873 }
1874
1875
1876 /* Generate all the code for a function call
1877 and return an rtx for its value.
1878 Store the value in TARGET (specified as an rtx) if convenient.
1879 If the value is stored in TARGET then TARGET is returned.
1880 If IGNORE is nonzero, then we ignore the value of the function call. */
1881
1882 rtx
1883 expand_call (tree exp, rtx target, int ignore)
1884 {
1885 /* Nonzero if we are currently expanding a call. */
1886 static int currently_expanding_call = 0;
1887
1888 /* List of actual parameters. */
1889 tree actparms = TREE_OPERAND (exp, 1);
1890 /* RTX for the function to be called. */
1891 rtx funexp;
1892 /* Sequence of insns to perform a normal "call". */
1893 rtx normal_call_insns = NULL_RTX;
1894 /* Sequence of insns to perform a tail "call". */
1895 rtx tail_call_insns = NULL_RTX;
1896 /* Data type of the function. */
1897 tree funtype;
1898 tree type_arg_types;
1899 /* Declaration of the function being called,
1900 or 0 if the function is computed (not known by name). */
1901 tree fndecl = 0;
1902 /* The type of the function being called. */
1903 tree fntype;
1904 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1905 int pass;
1906
1907 /* Register in which non-BLKmode value will be returned,
1908 or 0 if no value or if value is BLKmode. */
1909 rtx valreg;
1910 /* Address where we should return a BLKmode value;
1911 0 if value not BLKmode. */
1912 rtx structure_value_addr = 0;
1913 /* Nonzero if that address is being passed by treating it as
1914 an extra, implicit first parameter. Otherwise,
1915 it is passed by being copied directly into struct_value_rtx. */
1916 int structure_value_addr_parm = 0;
1917 /* Size of aggregate value wanted, or zero if none wanted
1918 or if we are using the non-reentrant PCC calling convention
1919 or expecting the value in registers. */
1920 HOST_WIDE_INT struct_value_size = 0;
1921 /* Nonzero if called function returns an aggregate in memory PCC style,
1922 by returning the address of where to find it. */
1923 int pcc_struct_value = 0;
1924 rtx struct_value = 0;
1925
1926 /* Number of actual parameters in this call, including struct value addr. */
1927 int num_actuals;
1928 /* Number of named args. Args after this are anonymous ones
1929 and they must all go on the stack. */
1930 int n_named_args;
1931
1932 /* Vector of information about each argument.
1933 Arguments are numbered in the order they will be pushed,
1934 not the order they are written. */
1935 struct arg_data *args;
1936
1937 /* Total size in bytes of all the stack-parms scanned so far. */
1938 struct args_size args_size;
1939 struct args_size adjusted_args_size;
1940 /* Size of arguments before any adjustments (such as rounding). */
1941 int unadjusted_args_size;
1942 /* Data on reg parms scanned so far. */
1943 CUMULATIVE_ARGS args_so_far;
1944 /* Nonzero if a reg parm has been scanned. */
1945 int reg_parm_seen;
1946 /* Nonzero if this is an indirect function call. */
1947
1948 /* Nonzero if we must avoid push-insns in the args for this call.
1949 If stack space is allocated for register parameters, but not by the
1950 caller, then it is preallocated in the fixed part of the stack frame.
1951 So the entire argument block must then be preallocated (i.e., we
1952 ignore PUSH_ROUNDING in that case). */
1953
1954 int must_preallocate = !PUSH_ARGS;
1955
1956 /* Size of the stack reserved for parameter registers. */
1957 int reg_parm_stack_space = 0;
1958
1959 /* Address of space preallocated for stack parms
1960 (on machines that lack push insns), or 0 if space not preallocated. */
1961 rtx argblock = 0;
1962
1963 /* Mask of ECF_ flags. */
1964 int flags = 0;
1965 #ifdef REG_PARM_STACK_SPACE
1966 /* Define the boundary of the register parm stack space that needs to be
1967 saved, if any. */
1968 int low_to_save, high_to_save;
1969 rtx save_area = 0; /* Place that it is saved */
1970 #endif
1971
1972 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1973 char *initial_stack_usage_map = stack_usage_map;
1974
1975 int old_stack_allocated;
1976
1977 /* State variables to track stack modifications. */
1978 rtx old_stack_level = 0;
1979 int old_stack_arg_under_construction = 0;
1980 int old_pending_adj = 0;
1981 int old_inhibit_defer_pop = inhibit_defer_pop;
1982
1983 /* Some stack pointer alterations we make are performed via
1984 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1985 which we then also need to save/restore along the way. */
1986 int old_stack_pointer_delta = 0;
1987
1988 rtx call_fusage;
1989 tree p = TREE_OPERAND (exp, 0);
1990 tree addr = TREE_OPERAND (exp, 0);
1991 int i;
1992 /* The alignment of the stack, in bits. */
1993 HOST_WIDE_INT preferred_stack_boundary;
1994 /* The alignment of the stack, in bytes. */
1995 HOST_WIDE_INT preferred_unit_stack_boundary;
1996 /* The static chain value to use for this call. */
1997 rtx static_chain_value;
1998 /* See if this is a "nothrow" function call. */
1999 if (TREE_NOTHROW (exp))
2000 flags |= ECF_NOTHROW;
2001
2002 /* See if we can find a DECL-node for the actual function, and get the
2003 function attributes (flags) from the function decl or type node. */
2004 fndecl = get_callee_fndecl (exp);
2005 if (fndecl)
2006 {
2007 fntype = TREE_TYPE (fndecl);
2008 flags |= flags_from_decl_or_type (fndecl);
2009 }
2010 else
2011 {
2012 fntype = TREE_TYPE (TREE_TYPE (p));
2013 flags |= flags_from_decl_or_type (fntype);
2014 }
2015
2016 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2017
2018 /* Warn if this value is an aggregate type,
2019 regardless of which calling convention we are using for it. */
2020 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2021 warning ("function call has aggregate value");
2022
2023 /* If the result of a pure or const function call is ignored (or void),
2024 and none of its arguments are volatile, we can avoid expanding the
2025 call and just evaluate the arguments for side-effects. */
2026 if ((flags & (ECF_CONST | ECF_PURE))
2027 && (ignore || target == const0_rtx
2028 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2029 {
2030 bool volatilep = false;
2031 tree arg;
2032
2033 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2034 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
2035 {
2036 volatilep = true;
2037 break;
2038 }
2039
2040 if (! volatilep)
2041 {
2042 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2043 expand_expr (TREE_VALUE (arg), const0_rtx,
2044 VOIDmode, EXPAND_NORMAL);
2045 return const0_rtx;
2046 }
2047 }
2048
2049 #ifdef REG_PARM_STACK_SPACE
2050 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2051 #endif
2052
2053 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2054 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2055 must_preallocate = 1;
2056 #endif
2057
2058 /* Set up a place to return a structure. */
2059
2060 /* Cater to broken compilers. */
2061 if (aggregate_value_p (exp, fndecl))
2062 {
2063 /* This call returns a big structure. */
2064 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2065
2066 #ifdef PCC_STATIC_STRUCT_RETURN
2067 {
2068 pcc_struct_value = 1;
2069 }
2070 #else /* not PCC_STATIC_STRUCT_RETURN */
2071 {
2072 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2073
2074 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2075 {
2076 /* The structure value address arg is already in actparms.
2077 Pull it out. It might be nice to just leave it there, but
2078 we need to set structure_value_addr. */
2079 tree return_arg = TREE_VALUE (actparms);
2080 actparms = TREE_CHAIN (actparms);
2081 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2082 VOIDmode, EXPAND_NORMAL);
2083 }
2084 else if (target && MEM_P (target))
2085 structure_value_addr = XEXP (target, 0);
2086 else
2087 {
2088 /* For variable-sized objects, we must be called with a target
2089 specified. If we were to allocate space on the stack here,
2090 we would have no way of knowing when to free it. */
2091 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2092
2093 mark_temp_addr_taken (d);
2094 structure_value_addr = XEXP (d, 0);
2095 target = 0;
2096 }
2097 }
2098 #endif /* not PCC_STATIC_STRUCT_RETURN */
2099 }
2100
2101 /* Figure out the amount to which the stack should be aligned. */
2102 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2103 if (fndecl)
2104 {
2105 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2106 if (i && i->preferred_incoming_stack_boundary)
2107 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2108 }
2109
2110 /* Operand 0 is a pointer-to-function; get the type of the function. */
2111 funtype = TREE_TYPE (addr);
2112 if (! POINTER_TYPE_P (funtype))
2113 abort ();
2114 funtype = TREE_TYPE (funtype);
2115
2116 /* Munge the tree to split complex arguments into their imaginary
2117 and real parts. */
2118 if (targetm.calls.split_complex_arg)
2119 {
2120 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2121 actparms = split_complex_values (actparms);
2122 }
2123 else
2124 type_arg_types = TYPE_ARG_TYPES (funtype);
2125
2126 if (flags & ECF_MAY_BE_ALLOCA)
2127 current_function_calls_alloca = 1;
2128
2129 /* If struct_value_rtx is 0, it means pass the address
2130 as if it were an extra parameter. */
2131 if (structure_value_addr && struct_value == 0)
2132 {
2133 /* If structure_value_addr is a REG other than
2134 virtual_outgoing_args_rtx, we can always use it. If it
2135 is not a REG, we must always copy it into a register.
2136 If it is virtual_outgoing_args_rtx, we must copy it to another
2137 register in some cases. */
2138 rtx temp = (!REG_P (structure_value_addr)
2139 || (ACCUMULATE_OUTGOING_ARGS
2140 && stack_arg_under_construction
2141 && structure_value_addr == virtual_outgoing_args_rtx)
2142 ? copy_addr_to_reg (convert_memory_address
2143 (Pmode, structure_value_addr))
2144 : structure_value_addr);
2145
2146 actparms
2147 = tree_cons (error_mark_node,
2148 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2149 temp),
2150 actparms);
2151 structure_value_addr_parm = 1;
2152 }
2153
2154 /* Count the arguments and set NUM_ACTUALS. */
2155 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2156 num_actuals++;
2157
2158 /* Compute number of named args.
2159 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2160
2161 if (type_arg_types != 0)
2162 n_named_args
2163 = (list_length (type_arg_types)
2164 /* Count the struct value address, if it is passed as a parm. */
2165 + structure_value_addr_parm);
2166 else
2167 /* If we know nothing, treat all args as named. */
2168 n_named_args = num_actuals;
2169
2170 /* Start updating where the next arg would go.
2171
2172 On some machines (such as the PA) indirect calls have a different
2173 calling convention than normal calls. The fourth argument in
2174 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2175 or not. */
2176 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2177
2178 /* Now possibly adjust the number of named args.
2179 Normally, don't include the last named arg if anonymous args follow.
2180 We do include the last named arg if
2181 targetm.calls.strict_argument_naming() returns nonzero.
2182 (If no anonymous args follow, the result of list_length is actually
2183 one too large. This is harmless.)
2184
2185 If targetm.calls.pretend_outgoing_varargs_named() returns
2186 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2187 this machine will be able to place unnamed args that were passed
2188 in registers into the stack. So treat all args as named. This
2189 allows the insns emitted for a specific argument list to be
2190 independent of the function declaration.
2191
2192 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2193 we do not have any reliable way to pass unnamed args in
2194 registers, so we must force them into memory. */
2195
2196 if (type_arg_types != 0
2197 && targetm.calls.strict_argument_naming (&args_so_far))
2198 ;
2199 else if (type_arg_types != 0
2200 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2201 /* Don't include the last named arg. */
2202 --n_named_args;
2203 else
2204 /* Treat all args as named. */
2205 n_named_args = num_actuals;
2206
2207 /* Make a vector to hold all the information about each arg. */
2208 args = alloca (num_actuals * sizeof (struct arg_data));
2209 memset (args, 0, num_actuals * sizeof (struct arg_data));
2210
2211 /* Build up entries in the ARGS array, compute the size of the
2212 arguments into ARGS_SIZE, etc. */
2213 initialize_argument_information (num_actuals, args, &args_size,
2214 n_named_args, actparms, fndecl,
2215 &args_so_far, reg_parm_stack_space,
2216 &old_stack_level, &old_pending_adj,
2217 &must_preallocate, &flags,
2218 &try_tail_call, CALL_FROM_THUNK_P (exp));
2219
2220 if (args_size.var)
2221 {
2222 /* If this function requires a variable-sized argument list, don't
2223 try to make a cse'able block for this call. We may be able to
2224 do this eventually, but it is too complicated to keep track of
2225 what insns go in the cse'able block and which don't. */
2226
2227 flags &= ~ECF_LIBCALL_BLOCK;
2228 must_preallocate = 1;
2229 }
2230
2231 /* Now make final decision about preallocating stack space. */
2232 must_preallocate = finalize_must_preallocate (must_preallocate,
2233 num_actuals, args,
2234 &args_size);
2235
2236 /* If the structure value address will reference the stack pointer, we
2237 must stabilize it. We don't need to do this if we know that we are
2238 not going to adjust the stack pointer in processing this call. */
2239
2240 if (structure_value_addr
2241 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2242 || reg_mentioned_p (virtual_outgoing_args_rtx,
2243 structure_value_addr))
2244 && (args_size.var
2245 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2246 structure_value_addr = copy_to_reg (structure_value_addr);
2247
2248 /* Tail calls can make things harder to debug, and we've traditionally
2249 pushed these optimizations into -O2. Don't try if we're already
2250 expanding a call, as that means we're an argument. Don't try if
2251 there are cleanups, as we know there's code to follow the call.
2252
2253 If rtx_equal_function_value_matters is false, that means we've
2254 finished with regular parsing. Which means that some of the
2255 machinery we use to generate tail-calls is no longer in place.
2256 This is most often true of sjlj-exceptions, which we couldn't
2257 tail-call to anyway.
2258
2259 If current_nesting_level () == 0, we're being called after
2260 the function body has been expanded. This can happen when
2261 setting up trampolines in expand_function_end. */
2262 if (currently_expanding_call++ != 0
2263 || !flag_optimize_sibling_calls
2264 || !rtx_equal_function_value_matters
2265 || current_nesting_level () == 0
2266 || args_size.var
2267 || lookup_stmt_eh_region (exp) >= 0)
2268 try_tail_call = 0;
2269
2270 /* Remaining reasons why tail call optimization must fail. */
2271 if (
2272 #ifdef HAVE_sibcall_epilogue
2273 !HAVE_sibcall_epilogue
2274 #else
2275 1
2276 #endif
2277 || !try_tail_call
2278 /* Doing sibling call optimization needs some work, since
2279 structure_value_addr can be allocated on the stack.
2280 It does not seem worth the effort since few optimizable
2281 sibling calls will return a structure. */
2282 || structure_value_addr != NULL_RTX
2283 /* Check whether the target is able to optimize the call
2284 into a sibcall. */
2285 || !targetm.function_ok_for_sibcall (fndecl, exp)
2286 /* Functions that do not return exactly once may not be sibcall
2287 optimized. */
2288 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2289 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2290 /* If the called function is nested in the current one, it might access
2291 some of the caller's arguments, but could clobber them beforehand if
2292 the argument areas are shared. */
2293 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2294 /* If this function requires more stack slots than the current
2295 function, we cannot change it into a sibling call. */
2296 || args_size.constant > current_function_args_size
2297 /* If the callee pops its own arguments, then it must pop exactly
2298 the same number of arguments as the current function. */
2299 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2300 != RETURN_POPS_ARGS (current_function_decl,
2301 TREE_TYPE (current_function_decl),
2302 current_function_args_size))
2303 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2304 try_tail_call = 0;
2305
2306 if (try_tail_call)
2307 {
2308 int end, inc;
2309 actparms = NULL_TREE;
2310 /* Ok, we're going to give the tail call the old college try.
2311 This means we're going to evaluate the function arguments
2312 up to three times. There are two degrees of badness we can
2313 encounter, those that can be unsaved and those that can't.
2314 (See unsafe_for_reeval commentary for details.)
2315
2316 Generate a new argument list. Pass safe arguments through
2317 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2318 For hard badness, evaluate them now and put their resulting
2319 rtx in a temporary VAR_DECL.
2320
2321 initialize_argument_information has ordered the array for the
2322 order to be pushed, and we must remember this when reconstructing
2323 the original argument order. */
2324
2325 if (PUSH_ARGS_REVERSED)
2326 {
2327 inc = 1;
2328 i = 0;
2329 end = num_actuals;
2330 }
2331 else
2332 {
2333 inc = -1;
2334 i = num_actuals - 1;
2335 end = -1;
2336 }
2337
2338 for (; i != end; i += inc)
2339 {
2340 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2341 }
2342 /* Do the same for the function address if it is an expression. */
2343 if (!fndecl)
2344 addr = fix_unsafe_tree (addr);
2345 }
2346
2347
2348 /* Ensure current function's preferred stack boundary is at least
2349 what we need. We don't have to increase alignment for recursive
2350 functions. */
2351 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2352 && fndecl != current_function_decl)
2353 cfun->preferred_stack_boundary = preferred_stack_boundary;
2354 if (fndecl == current_function_decl)
2355 cfun->recursive_call_emit = true;
2356
2357 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2358
2359 /* We want to make two insn chains; one for a sibling call, the other
2360 for a normal call. We will select one of the two chains after
2361 initial RTL generation is complete. */
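/* Pass 0 builds the sibling-call sequence (tried only when TRY_TAIL_CALL
   is set); pass 1 builds the normal call sequence. If anything sets
   SIBCALL_FAILURE during pass 0, the sibling-call sequence is discarded
   below and the normal sequence is used instead.  */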
2362 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2363 {
2364 int sibcall_failure = 0;
2365 /* We want to emit any pending stack adjustments before the tail
2366 recursion "call". That way we know any adjustment after the tail
2367 recursion call can be ignored if we indeed use the tail
2368 call expansion. */
2369 int save_pending_stack_adjust = 0;
2370 int save_stack_pointer_delta = 0;
2371 rtx insns;
2372 rtx before_call, next_arg_reg;
2373
2374 if (pass == 0)
2375 {
2376 /* Emit any queued insns now; otherwise they would end up in
2377 only one of the alternates. */
2378 emit_queue ();
2379
2380 /* State variables we need to save and restore between
2381 iterations. */
2382 save_pending_stack_adjust = pending_stack_adjust;
2383 save_stack_pointer_delta = stack_pointer_delta;
2384 }
2385 if (pass)
2386 flags &= ~ECF_SIBCALL;
2387 else
2388 flags |= ECF_SIBCALL;
2389
2390 /* Other state variables that we must reinitialize each time
2391 through the loop (that are not initialized by the loop itself). */
2392 argblock = 0;
2393 call_fusage = 0;
2394
2395 /* Start a new sequence for the normal call case.
2396
2397 From this point on, if the sibling call fails, we want to set
2398 sibcall_failure instead of continuing the loop. */
2399 start_sequence ();
2400
2401 if (pass == 0)
2402 {
2403 /* We know at this point that there are not currently any
2404 pending cleanups. If, however, in the process of evaluating
2405 the arguments we were to create some, we'll need to be
2406 able to get rid of them. */
2407 expand_start_target_temps ();
2408 }
2409
2410 /* Don't let pending stack adjusts add up to too much.
2411 Also, do all pending adjustments now if there is any chance
2412 this might be a call to alloca or if we are expanding a sibling
2413 call sequence or if we are calling a function that is to return
2414 with stack pointer depressed. */
2415 if (pending_stack_adjust >= 32
2416 || (pending_stack_adjust > 0
2417 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2418 || pass == 0)
2419 do_pending_stack_adjust ();
2420
2421 /* When calling a const function, we must pop the stack args right away,
2422 so that the pop is deleted or moved with the call. */
2423 if (pass && (flags & ECF_LIBCALL_BLOCK))
2424 NO_DEFER_POP;
2425
2426 /* Precompute any arguments as needed. */
2427 if (pass)
2428 precompute_arguments (flags, num_actuals, args);
2429
2430 /* Now we are about to start emitting insns that can be deleted
2431 if a libcall is deleted. */
2432 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2433 start_sequence ();
2434
2435 adjusted_args_size = args_size;
2436 /* Compute the actual size of the argument block required. The variable
2437 and constant sizes must be combined, the size may have to be rounded,
2438 and there may be a minimum required size. When generating a sibcall
2439 pattern, do not round up, since we'll be re-using whatever space our
2440 caller provided. */
2441 unadjusted_args_size
2442 = compute_argument_block_size (reg_parm_stack_space,
2443 &adjusted_args_size,
2444 (pass == 0 ? 0
2445 : preferred_stack_boundary));
2446
2447 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2448
2449 /* The argument block when performing a sibling call is the
2450 incoming argument block. */
2451 if (pass == 0)
2452 {
2453 argblock = virtual_incoming_args_rtx;
2454 argblock
2455 #ifdef STACK_GROWS_DOWNWARD
2456 = plus_constant (argblock, current_function_pretend_args_size);
2457 #else
2458 = plus_constant (argblock, -current_function_pretend_args_size);
2459 #endif
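/* stored_args_map records which bytes of the incoming argument area
   have already been overwritten by outgoing arguments; it is consulted
   and updated by check_sibcall_argument_overlap.  */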
2460 stored_args_map = sbitmap_alloc (args_size.constant);
2461 sbitmap_zero (stored_args_map);
2462 }
2463
2464 /* If we have no actual push instructions, or shouldn't use them,
2465 make space for all args right now. */
2466 else if (adjusted_args_size.var != 0)
2467 {
2468 if (old_stack_level == 0)
2469 {
2470 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2471 old_stack_pointer_delta = stack_pointer_delta;
2472 old_pending_adj = pending_stack_adjust;
2473 pending_stack_adjust = 0;
2474 /* stack_arg_under_construction says whether a stack arg is
2475 being constructed at the old stack level. Pushing the stack
2476 gets a clean outgoing argument block. */
2477 old_stack_arg_under_construction = stack_arg_under_construction;
2478 stack_arg_under_construction = 0;
2479 }
2480 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2481 }
2482 else
2483 {
2484 /* Note that we must go through the motions of allocating an argument
2485 block even if the size is zero because we may be storing args
2486 in the area reserved for register arguments, which may be part of
2487 the stack frame. */
2488
2489 int needed = adjusted_args_size.constant;
2490
2491 /* Store the maximum argument space used. It will be pushed by
2492 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2493 checking). */
2494
2495 if (needed > current_function_outgoing_args_size)
2496 current_function_outgoing_args_size = needed;
2497
2498 if (must_preallocate)
2499 {
2500 if (ACCUMULATE_OUTGOING_ARGS)
2501 {
2502 /* Since the stack pointer will never be pushed, it is
2503 possible for the evaluation of a parm to clobber
2504 something we have already written to the stack.
2505 Since most function calls on RISC machines do not use
2506 the stack, this is uncommon, but must work correctly.
2507
2508 Therefore, we save any area of the stack that was already
2509 written and that we are using. Here we set up to do this
2510 by making a new stack usage map from the old one. The
2511 actual save will be done by store_one_arg.
2512
2513 Another approach might be to try to reorder the argument
2514 evaluations to avoid this conflicting stack usage. */
2515
2516 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2517 /* Since we will be writing into the entire argument area,
2518 the map must be allocated for its entire size, not just
2519 the part that is the responsibility of the caller. */
2520 needed += reg_parm_stack_space;
2521 #endif
2522
2523 #ifdef ARGS_GROW_DOWNWARD
2524 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2525 needed + 1);
2526 #else
2527 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2528 needed);
2529 #endif
2530 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2531
2532 if (initial_highest_arg_in_use)
2533 memcpy (stack_usage_map, initial_stack_usage_map,
2534 initial_highest_arg_in_use);
2535
2536 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2537 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2538 (highest_outgoing_arg_in_use
2539 - initial_highest_arg_in_use));
2540 needed = 0;
2541
2542 /* The address of the outgoing argument list must not be
2543 copied to a register here, because argblock would be left
2544 pointing to the wrong place after the call to
2545 allocate_dynamic_stack_space below. */
2546
2547 argblock = virtual_outgoing_args_rtx;
2548 }
2549 else
2550 {
2551 if (inhibit_defer_pop == 0)
2552 {
2553 /* Try to reuse some or all of the pending_stack_adjust
2554 to get this space. */
2555 needed
2556 = (combine_pending_stack_adjustment_and_call
2557 (unadjusted_args_size,
2558 &adjusted_args_size,
2559 preferred_unit_stack_boundary));
2560
2561 /* combine_pending_stack_adjustment_and_call computes
2562 an adjustment before the arguments are allocated.
2563 Account for them and see whether or not the stack
2564 needs to go up or down. */
2565 needed = unadjusted_args_size - needed;
2566
2567 if (needed < 0)
2568 {
2569 /* We're releasing stack space. */
2570 /* ??? We can avoid any adjustment at all if we're
2571 already aligned. FIXME. */
2572 pending_stack_adjust = -needed;
2573 do_pending_stack_adjust ();
2574 needed = 0;
2575 }
2576 else
2577 /* We need to allocate space. We'll do that in
2578 push_block below. */
2579 pending_stack_adjust = 0;
2580 }
2581
2582 /* Special case this because overhead of `push_block' in
2583 this case is non-trivial. */
2584 if (needed == 0)
2585 argblock = virtual_outgoing_args_rtx;
2586 else
2587 {
2588 argblock = push_block (GEN_INT (needed), 0, 0);
2589 #ifdef ARGS_GROW_DOWNWARD
2590 argblock = plus_constant (argblock, needed);
2591 #endif
2592 }
2593
2594 /* We only really need to call `copy_to_reg' in the case
2595 where push insns are going to be used to pass ARGBLOCK
2596 to a function call in ARGS. In that case, the stack
2597 pointer changes value from the allocation point to the
2598 call point, and hence the value of
2599 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2600 as well always do it. */
2601 argblock = copy_to_reg (argblock);
2602 }
2603 }
2604 }
2605
2606 if (ACCUMULATE_OUTGOING_ARGS)
2607 {
2608 /* The save/restore code in store_one_arg handles all
2609 cases except one: a constructor call (including a C
2610 function returning a BLKmode struct) to initialize
2611 an argument. */
2612 if (stack_arg_under_construction)
2613 {
2614 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2615 rtx push_size = GEN_INT (reg_parm_stack_space
2616 + adjusted_args_size.constant);
2617 #else
2618 rtx push_size = GEN_INT (adjusted_args_size.constant);
2619 #endif
2620 if (old_stack_level == 0)
2621 {
2622 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2623 NULL_RTX);
2624 old_stack_pointer_delta = stack_pointer_delta;
2625 old_pending_adj = pending_stack_adjust;
2626 pending_stack_adjust = 0;
2627 /* stack_arg_under_construction says whether a stack
2628 arg is being constructed at the old stack level.
2629 Pushing the stack gets a clean outgoing argument
2630 block. */
2631 old_stack_arg_under_construction
2632 = stack_arg_under_construction;
2633 stack_arg_under_construction = 0;
2634 /* Make a new map for the new argument list. */
2635 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2636 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2637 highest_outgoing_arg_in_use = 0;
2638 }
2639 allocate_dynamic_stack_space (push_size, NULL_RTX,
2640 BITS_PER_UNIT);
2641 }
2642
2643 /* If argument evaluation might modify the stack pointer,
2644 copy the address of the argument list to a register. */
2645 for (i = 0; i < num_actuals; i++)
2646 if (args[i].pass_on_stack)
2647 {
2648 argblock = copy_addr_to_reg (argblock);
2649 break;
2650 }
2651 }
2652
2653 compute_argument_addresses (args, argblock, num_actuals);
2654
2655 /* If we push args individually in reverse order, perform stack alignment
2656 before the first push (the last arg). */
2657 if (PUSH_ARGS_REVERSED && argblock == 0
2658 && adjusted_args_size.constant != unadjusted_args_size)
2659 {
2660 /* When the stack adjustment is pending, we get better code
2661 by combining the adjustments. */
2662 if (pending_stack_adjust
2663 && ! (flags & ECF_LIBCALL_BLOCK)
2664 && ! inhibit_defer_pop)
2665 {
2666 pending_stack_adjust
2667 = (combine_pending_stack_adjustment_and_call
2668 (unadjusted_args_size,
2669 &adjusted_args_size,
2670 preferred_unit_stack_boundary));
2671 do_pending_stack_adjust ();
2672 }
2673 else if (argblock == 0)
2674 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2675 - unadjusted_args_size));
2676 }
2677 /* Now that the stack is properly aligned, pops can't safely
2678 be deferred during the evaluation of the arguments. */
2679 NO_DEFER_POP;
2680
2681 funexp = rtx_for_function_call (fndecl, addr);
2682
2683 /* Figure out the register where the value, if any, will come back. */
2684 valreg = 0;
2685 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2686 && ! structure_value_addr)
2687 {
2688 if (pcc_struct_value)
2689 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2690 fndecl, (pass == 0));
2691 else
2692 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2693 }
2694
2695 /* Precompute all register parameters. It isn't safe to compute anything
2696 once we have started filling any specific hard regs. */
2697 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2698
2699 if (TREE_OPERAND (exp, 2))
2700 static_chain_value = expand_expr (TREE_OPERAND (exp, 2),
2701 NULL_RTX, VOIDmode, 0);
2702 else
2703 static_chain_value = 0;
2704
2705 #ifdef REG_PARM_STACK_SPACE
2706 /* Save the fixed argument area if it's part of the caller's frame and
2707 is clobbered by argument setup for this call. */
2708 if (ACCUMULATE_OUTGOING_ARGS && pass)
2709 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2710 &low_to_save, &high_to_save);
2711 #endif
2712
2713 /* Now store (and compute if necessary) all non-register parms.
2714 These come before register parms, since they can require block-moves,
2715 which could clobber the registers used for register parms.
2716 Parms which have partial registers are not stored here,
2717 but we do preallocate space here if they want that. */
2718
2719 for (i = 0; i < num_actuals; i++)
2720 if (args[i].reg == 0 || args[i].pass_on_stack)
2721 {
2722 rtx before_arg = get_last_insn ();
2723
2724 if (store_one_arg (&args[i], argblock, flags,
2725 adjusted_args_size.var != 0,
2726 reg_parm_stack_space)
2727 || (pass == 0
2728 && check_sibcall_argument_overlap (before_arg,
2729 &args[i], 1)))
2730 sibcall_failure = 1;
2731
2732 if (flags & ECF_CONST
2733 && args[i].stack
2734 && args[i].value == args[i].stack)
2735 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2736 gen_rtx_USE (VOIDmode,
2737 args[i].value),
2738 call_fusage);
2739 }
2740
2741 /* If we have a parm that is passed in registers but not in memory
2742 and whose alignment does not permit a direct copy into registers,
2743 make a group of pseudos that correspond to each register that we
2744 will later fill. */
2745 if (STRICT_ALIGNMENT)
2746 store_unaligned_arguments_into_pseudos (args, num_actuals);
2747
2748 /* Now store any partially-in-registers parm.
2749 This is the last place a block-move can happen. */
2750 if (reg_parm_seen)
2751 for (i = 0; i < num_actuals; i++)
2752 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2753 {
2754 rtx before_arg = get_last_insn ();
2755
2756 if (store_one_arg (&args[i], argblock, flags,
2757 adjusted_args_size.var != 0,
2758 reg_parm_stack_space)
2759 || (pass == 0
2760 && check_sibcall_argument_overlap (before_arg,
2761 &args[i], 1)))
2762 sibcall_failure = 1;
2763 }
2764
2765 /* If we pushed args in forward order, perform stack alignment
2766 after pushing the last arg. */
2767 if (!PUSH_ARGS_REVERSED && argblock == 0)
2768 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2769 - unadjusted_args_size));
2770
2771 /* If register arguments require space on the stack and stack space
2772 was not preallocated, allocate stack space here for arguments
2773 passed in registers. */
2774 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2775 if (!ACCUMULATE_OUTGOING_ARGS
2776 && must_preallocate == 0 && reg_parm_stack_space > 0)
2777 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2778 #endif
2779
2780 /* Pass the function the address in which to return a
2781 structure value. */
2782 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2783 {
2784 structure_value_addr
2785 = convert_memory_address (Pmode, structure_value_addr);
2786 emit_move_insn (struct_value,
2787 force_reg (Pmode,
2788 force_operand (structure_value_addr,
2789 NULL_RTX)));
2790
2791 if (REG_P (struct_value))
2792 use_reg (&call_fusage, struct_value);
2793 }
2794
2795 funexp = prepare_call_address (funexp, static_chain_value,
2796 &call_fusage, reg_parm_seen, pass == 0);
2797
2798 load_register_parameters (args, num_actuals, &call_fusage, flags,
2799 pass == 0, &sibcall_failure);
2800
2801 /* Perform postincrements before actually calling the function. */
2802 emit_queue ();
2803
2804 /* Save a pointer to the last insn before the call, so that we can
2805 later safely search backwards to find the CALL_INSN. */
2806 before_call = get_last_insn ();
2807
2808 /* Set up next argument register. For sibling calls on machines
2809 with register windows this should be the incoming register. */
2810 #ifdef FUNCTION_INCOMING_ARG
2811 if (pass == 0)
2812 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2813 void_type_node, 1);
2814 else
2815 #endif
2816 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2817 void_type_node, 1);
2818
2819 /* All arguments and registers used for the call must be set up by
2820 now! */
2821
2822 /* Stack must be properly aligned now. */
2823 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
2824 abort ();
2825
2826 /* Generate the actual call instruction. */
2827 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2828 adjusted_args_size.constant, struct_value_size,
2829 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2830 flags, & args_so_far);
2831
2832 /* If call is cse'able, make appropriate pair of reg-notes around it.
2833 Test valreg so we don't crash; may safely ignore `const'
2834 if return type is void. Disable for PARALLEL return values, because
2835 we have no way to move such values into a pseudo register. */
2836 if (pass && (flags & ECF_LIBCALL_BLOCK))
2837 {
2838 rtx insns;
2839 rtx insn;
2840 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2841
2842 insns = get_insns ();
2843
2844 /* Expansion of block moves possibly introduced a loop that may
2845 not appear inside libcall block. */
2846 for (insn = insns; insn; insn = NEXT_INSN (insn))
2847 if (GET_CODE (insn) == JUMP_INSN)
2848 failed = true;
2849
2850 if (failed)
2851 {
2852 end_sequence ();
2853 emit_insn (insns);
2854 }
2855 else
2856 {
2857 rtx note = 0;
2858 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2859
2860 /* Mark the return value as a pointer if needed. */
2861 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2862 mark_reg_pointer (temp,
2863 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2864
2865 end_sequence ();
2866 if (flag_unsafe_math_optimizations
2867 && fndecl
2868 && DECL_BUILT_IN (fndecl)
2869 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2870 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2871 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2872 note = gen_rtx_fmt_e (SQRT,
2873 GET_MODE (temp),
2874 args[0].initial_value);
2875 else
2876 {
2877 /* Construct an "equal form" for the value which
2878 mentions all the arguments in order as well as
2879 the function name. */
2880 for (i = 0; i < num_actuals; i++)
2881 note = gen_rtx_EXPR_LIST (VOIDmode,
2882 args[i].initial_value, note);
2883 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2884
2885 if (flags & ECF_PURE)
2886 note = gen_rtx_EXPR_LIST (VOIDmode,
2887 gen_rtx_USE (VOIDmode,
2888 gen_rtx_MEM (BLKmode,
2889 gen_rtx_SCRATCH (VOIDmode))),
2890 note);
2891 }
2892 emit_libcall_block (insns, temp, valreg, note);
2893
2894 valreg = temp;
2895 }
2896 }
2897 else if (pass && (flags & ECF_MALLOC))
2898 {
2899 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2900 rtx last, insns;
2901
2902 /* The return value from a malloc-like function is a pointer. */
2903 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2904 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2905
2906 emit_move_insn (temp, valreg);
2907
2908 /* The return value from a malloc-like function can not alias
2909 anything else. */
2910 last = get_last_insn ();
2911 REG_NOTES (last) =
2912 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2913
2914 /* Write out the sequence. */
2915 insns = get_insns ();
2916 end_sequence ();
2917 emit_insn (insns);
2918 valreg = temp;
2919 }
2920
2921 /* For calls to `setjmp', etc., inform flow.c it should complain
2922 if nonvolatile values are live. For functions that cannot return,
2923 inform flow that control does not fall through. */
2924
2925 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
2926 {
2927 /* The barrier must be emitted
2928 immediately after the CALL_INSN. Some ports emit more
2929 than just a CALL_INSN above, so we must search for it here. */
2930
2931 rtx last = get_last_insn ();
2932 while (GET_CODE (last) != CALL_INSN)
2933 {
2934 last = PREV_INSN (last);
2935 /* There was no CALL_INSN? */
2936 if (last == before_call)
2937 abort ();
2938 }
2939
2940 emit_barrier_after (last);
2941
2942 /* Stack adjustments after a noreturn call are dead code.
2943 However when NO_DEFER_POP is in effect, we must preserve
2944 stack_pointer_delta. */
2945 if (inhibit_defer_pop == 0)
2946 {
2947 stack_pointer_delta = old_stack_allocated;
2948 pending_stack_adjust = 0;
2949 }
2950 }
2951
2952 if (flags & ECF_LONGJMP)
2953 current_function_calls_longjmp = 1;
2954
2955 /* If value type not void, return an rtx for the value. */
2956
2957 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2958 || ignore)
2959 target = const0_rtx;
2960 else if (structure_value_addr)
2961 {
2962 if (target == 0 || !MEM_P (target))
2963 {
2964 target
2965 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2966 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2967 structure_value_addr));
2968 set_mem_attributes (target, exp, 1);
2969 }
2970 }
2971 else if (pcc_struct_value)
2972 {
2973 /* This is the special C++ case where we need to
2974 know what the true target was. We take care to
2975 never use this value more than once in one expression. */
2976 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2977 copy_to_reg (valreg));
2978 set_mem_attributes (target, exp, 1);
2979 }
2980 /* Handle calls that return values in multiple non-contiguous locations.
2981 The Irix 6 ABI has examples of this. */
2982 else if (GET_CODE (valreg) == PARALLEL)
2983 {
2984 if (target == 0)
2985 {
2986 /* This will only be assigned once, so it can be readonly. */
2987 tree nt = build_qualified_type (TREE_TYPE (exp),
2988 (TYPE_QUALS (TREE_TYPE (exp))
2989 | TYPE_QUAL_CONST));
2990
2991 target = assign_temp (nt, 0, 1, 1);
2992 preserve_temp_slots (target);
2993 }
2994
2995 if (! rtx_equal_p (target, valreg))
2996 emit_group_store (target, valreg, TREE_TYPE (exp),
2997 int_size_in_bytes (TREE_TYPE (exp)));
2998
2999 /* We can not support sibling calls for this case. */
3000 sibcall_failure = 1;
3001 }
3002 else if (target
3003 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3004 && GET_MODE (target) == GET_MODE (valreg))
3005 {
3006 /* TARGET and VALREG cannot be equal at this point because the
3007 latter would not have REG_FUNCTION_VALUE_P true, while the
3008 former would if it were referring to the same register.
3009
3010 If they refer to the same register, this move will be a no-op,
3011 except when function inlining is being done. */
3012 emit_move_insn (target, valreg);
3013
3014 /* If we are setting a MEM, this code must be executed. Since it is
3015 emitted after the call insn, sibcall optimization cannot be
3016 performed in that case. */
3017 if (MEM_P (target))
3018 sibcall_failure = 1;
3019 }
3020 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3021 {
3022 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3023
3024 /* We can not support sibling calls for this case. */
3025 sibcall_failure = 1;
3026 }
3027 else
3028 {
3029 if (shift_returned_value (TREE_TYPE (exp), &valreg))
3030 sibcall_failure = 1;
3031
3032 target = copy_to_reg (valreg);
3033 }
3034
3035 if (targetm.calls.promote_function_return(funtype))
3036 {
3037 /* If we promoted this return value, make the proper SUBREG. TARGET
3038 might be const0_rtx here, so be careful. */
3039 if (REG_P (target)
3040 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3041 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3042 {
3043 tree type = TREE_TYPE (exp);
3044 int unsignedp = TYPE_UNSIGNED (type);
3045 int offset = 0;
3046
3047 /* If we don't promote as expected, something is wrong. */
3048 if (GET_MODE (target)
3049 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3050 abort ();
3051
3052 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3053 && GET_MODE_SIZE (GET_MODE (target))
3054 > GET_MODE_SIZE (TYPE_MODE (type)))
3055 {
3056 offset = GET_MODE_SIZE (GET_MODE (target))
3057 - GET_MODE_SIZE (TYPE_MODE (type));
3058 if (! BYTES_BIG_ENDIAN)
3059 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3060 else if (! WORDS_BIG_ENDIAN)
3061 offset %= UNITS_PER_WORD;
3062 }
3063 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3064 SUBREG_PROMOTED_VAR_P (target) = 1;
3065 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3066 }
3067 }
3068
3069 /* If size of args is variable or this was a constructor call for a stack
3070 argument, restore saved stack-pointer value. */
3071
3072 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3073 {
3074 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3075 stack_pointer_delta = old_stack_pointer_delta;
3076 pending_stack_adjust = old_pending_adj;
3077 stack_arg_under_construction = old_stack_arg_under_construction;
3078 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3079 stack_usage_map = initial_stack_usage_map;
3080 sibcall_failure = 1;
3081 }
3082 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3083 {
3084 #ifdef REG_PARM_STACK_SPACE
3085 if (save_area)
3086 restore_fixed_argument_area (save_area, argblock,
3087 high_to_save, low_to_save);
3088 #endif
3089
3090 /* If we saved any argument areas, restore them. */
3091 for (i = 0; i < num_actuals; i++)
3092 if (args[i].save_area)
3093 {
3094 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3095 rtx stack_area
3096 = gen_rtx_MEM (save_mode,
3097 memory_address (save_mode,
3098 XEXP (args[i].stack_slot, 0)));
3099
3100 if (save_mode != BLKmode)
3101 emit_move_insn (stack_area, args[i].save_area);
3102 else
3103 emit_block_move (stack_area, args[i].save_area,
3104 GEN_INT (args[i].locate.size.constant),
3105 BLOCK_OP_CALL_PARM);
3106 }
3107
3108 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3109 stack_usage_map = initial_stack_usage_map;
3110 }
3111
3112 /* If this was alloca, record the new stack level for nonlocal gotos.
3113 Check for the handler slots since we might not have a save area
3114 for non-local gotos. */
3115
3116 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3117 update_nonlocal_goto_save_area ();
3118
3119 /* Free up storage we no longer need. */
3120 for (i = 0; i < num_actuals; ++i)
3121 if (args[i].aligned_regs)
3122 free (args[i].aligned_regs);
3123
3124 if (pass == 0)
3125 {
3126 /* Undo the fake expand_start_target_temps we did earlier. If
3127 there had been any cleanups created, we've already set
3128 sibcall_failure. */
3129 expand_end_target_temps ();
3130 }
3131
3132 /* If this function is returning into a memory location marked as
3133 readonly, it means it is initializing that location. We normally treat
3134 functions as not clobbering such locations, so we need to specify that
3135 this one does. We do this by adding the appropriate CLOBBER to the
3136 CALL_INSN function usage list. This cannot be done by emitting a
3137 standalone CLOBBER after the call because the latter would be ignored
3138 by at least the delay slot scheduling pass. We do this now instead of
3139 adding to call_fusage before the call to emit_call_1 because TARGET
3140 may be modified in the meantime. */
3141 if (structure_value_addr != 0 && target != 0
3142 && MEM_P (target) && RTX_UNCHANGING_P (target))
3143 add_function_usage_to
3144 (last_call_insn (),
3145 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
3146 NULL_RTX));
3147
3148 insns = get_insns ();
3149 end_sequence ();
3150
3151 if (pass == 0)
3152 {
3153 tail_call_insns = insns;
3154
3155 /* Restore the pending stack adjustment now that we have
3156 finished generating the sibling call sequence. */
3157
3158 pending_stack_adjust = save_pending_stack_adjust;
3159 stack_pointer_delta = save_stack_pointer_delta;
3160
3161 /* Prepare arg structure for next iteration. */
3162 for (i = 0; i < num_actuals; i++)
3163 {
3164 args[i].value = 0;
3165 args[i].aligned_regs = 0;
3166 args[i].stack = 0;
3167 }
3168
3169 sbitmap_free (stored_args_map);
3170 }
3171 else
3172 {
3173 normal_call_insns = insns;
3174
3175 /* Verify that we've deallocated all the stack we used. */
3176 if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
3177 && old_stack_allocated != stack_pointer_delta
3178 - pending_stack_adjust)
3179 abort ();
3180 }
3181
3182 /* If something prevents making this a sibling call,
3183 zero out the sequence. */
3184 if (sibcall_failure)
3185 tail_call_insns = NULL_RTX;
3186 else
3187 break;
3188 }
3189
3190 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3191 arguments too, as argument area is now clobbered by the call. */
3192 if (tail_call_insns)
3193 {
3194 emit_insn (tail_call_insns);
3195 cfun->tail_call_emit = true;
3196 }
3197 else
3198 emit_insn (normal_call_insns);
3199
3200 currently_expanding_call--;
3201
3202 /* If this function returns with the stack pointer depressed, ensure
3203 this block saves and restores the stack pointer, show it was
3204 changed, and adjust for any outgoing arg space. */
3205 if (flags & ECF_SP_DEPRESSED)
3206 {
3207 clear_pending_stack_adjust ();
3208 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3209 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3210 }
3211
3212 return target;
3213 }
3214
3215 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3216 this function's incoming arguments.
3217
3218 At the start of RTL generation we know the only REG_EQUIV notes
3219 in the rtl chain are those for incoming arguments, so we can safely
3220 flush any REG_EQUIV note.
3221
3222 This is (slight) overkill. We could keep track of the highest
3223 argument we clobber and be more selective in removing notes, but it
3224 does not seem to be worth the effort. */
3225 void
3226 fixup_tail_calls (void)
3227 {
3228 rtx insn;
3229 tree arg;
3230
3231 purge_reg_equiv_notes ();
3232
3233 /* A sibling call sequence may also invalidate the RTX_UNCHANGING_P
3234 flag of some incoming argument MEM RTLs, because it can write into
3235 those slots. We clear all those bits now.
3236
3237 This is (slight) overkill, we could keep track of which arguments
3238 we actually write into. */
3239 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3240 {
3241 if (INSN_P (insn))
3242 purge_mem_unchanging_flag (PATTERN (insn));
3243 }
3244
3245 /* Similarly, invalidate RTX_UNCHANGING_P for any incoming
3246 arguments passed in registers. */
3247 for (arg = DECL_ARGUMENTS (current_function_decl);
3248 arg;
3249 arg = TREE_CHAIN (arg))
3250 {
3251 if (REG_P (DECL_RTL (arg)))
3252 RTX_UNCHANGING_P (DECL_RTL (arg)) = false;
3253 }
3254 }
3255
3256 /* Traverse an argument list in VALUES and expand all complex
3257 arguments into their components. */
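/* For instance, on a target that splits complex arguments, a single
   actual argument of type complex double becomes two arguments: its
   REALPART_EXPR followed by its IMAGPART_EXPR, both of type double.  */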
3258 tree
3259 split_complex_values (tree values)
3260 {
3261 tree p;
3262
3263 /* Before allocating memory, check for the common case of no complex. */
3264 for (p = values; p; p = TREE_CHAIN (p))
3265 {
3266 tree type = TREE_TYPE (TREE_VALUE (p));
3267 if (type && TREE_CODE (type) == COMPLEX_TYPE
3268 && targetm.calls.split_complex_arg (type))
3269 goto found;
3270 }
3271 return values;
3272
3273 found:
3274 values = copy_list (values);
3275
3276 for (p = values; p; p = TREE_CHAIN (p))
3277 {
3278 tree complex_value = TREE_VALUE (p);
3279 tree complex_type;
3280
3281 complex_type = TREE_TYPE (complex_value);
3282 if (!complex_type)
3283 continue;
3284
3285 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3286 && targetm.calls.split_complex_arg (complex_type))
3287 {
3288 tree subtype;
3289 tree real, imag, next;
3290
3291 subtype = TREE_TYPE (complex_type);
3292 complex_value = save_expr (complex_value);
3293 real = build1 (REALPART_EXPR, subtype, complex_value);
3294 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3295
3296 TREE_VALUE (p) = real;
3297 next = TREE_CHAIN (p);
3298 imag = build_tree_list (NULL_TREE, imag);
3299 TREE_CHAIN (p) = imag;
3300 TREE_CHAIN (imag) = next;
3301
3302 /* Skip the newly created node. */
3303 p = TREE_CHAIN (p);
3304 }
3305 }
3306
3307 return values;
3308 }
3309
3310 /* Traverse a list of TYPES and expand all complex types into their
3311 components. */
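/* For instance, on a target that splits complex arguments, a type list
   (complex double, int) becomes (double, double, int).  */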
3312 tree
3313 split_complex_types (tree types)
3314 {
3315 tree p;
3316
3317 /* Before allocating memory, check for the common case of no complex types. */
3318 for (p = types; p; p = TREE_CHAIN (p))
3319 {
3320 tree type = TREE_VALUE (p);
3321 if (TREE_CODE (type) == COMPLEX_TYPE
3322 && targetm.calls.split_complex_arg (type))
3323 goto found;
3324 }
3325 return types;
3326
3327 found:
3328 types = copy_list (types);
3329
3330 for (p = types; p; p = TREE_CHAIN (p))
3331 {
3332 tree complex_type = TREE_VALUE (p);
3333
3334 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3335 && targetm.calls.split_complex_arg (complex_type))
3336 {
3337 tree next, imag;
3338
3339 /* Rewrite complex type with component type. */
3340 TREE_VALUE (p) = TREE_TYPE (complex_type);
3341 next = TREE_CHAIN (p);
3342
3343 /* Add another component type for the imaginary part. */
3344 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3345 TREE_CHAIN (p) = imag;
3346 TREE_CHAIN (imag) = next;
3347
3348 /* Skip the newly created node. */
3349 p = TREE_CHAIN (p);
3350 }
3351 }
3352
3353 return types;
3354 }
3355 \f
3356 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3357 The RETVAL parameter specifies whether the return value needs to be saved;
3358 the other parameters are documented in the emit_library_call function below. */
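/* In outline, this function: decides whether the value comes back in
   memory or in a register, lays out each argument with
   locate_and_pad_parm, saves any stack area about to be overwritten
   when ACCUMULATE_OUTGOING_ARGS, pushes or copies the arguments into
   place, emits the call via emit_call_1, wraps the insns in a
   REG_LIBCALL/REG_RETVAL block when ECF_LIBCALL_BLOCK is set and there
   is a return register, and finally copies the result to VALUE and
   restores any saved stack areas. */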
3359
3360 static rtx
3361 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3362 enum libcall_type fn_type,
3363 enum machine_mode outmode, int nargs, va_list p)
3364 {
3365 /* Total size in bytes of all the stack-parms scanned so far. */
3366 struct args_size args_size;
3367 /* Size of arguments before any adjustments (such as rounding). */
3368 struct args_size original_args_size;
3369 int argnum;
3370 rtx fun;
3371 int inc;
3372 int count;
3373 rtx argblock = 0;
3374 CUMULATIVE_ARGS args_so_far;
3375 struct arg
3376 {
3377 rtx value;
3378 enum machine_mode mode;
3379 rtx reg;
3380 int partial;
3381 struct locate_and_pad_arg_data locate;
3382 rtx save_area;
3383 };
3384 struct arg *argvec;
3385 int old_inhibit_defer_pop = inhibit_defer_pop;
3386 rtx call_fusage = 0;
3387 rtx mem_value = 0;
3388 rtx valreg;
3389 int pcc_struct_value = 0;
3390 int struct_value_size = 0;
3391 int flags;
3392 int reg_parm_stack_space = 0;
3393 int needed;
3394 rtx before_call;
3395 tree tfom; /* type_for_mode (outmode, 0) */
3396
3397 #ifdef REG_PARM_STACK_SPACE
3398 /* Define the boundary of the register parm stack space that needs to be
3399 saved, if any. */
3400 int low_to_save, high_to_save;
3401 rtx save_area = 0; /* Place that it is saved. */
3402 #endif
3403
3404 /* Record the current stack usage so it can be restored after the call. */
3405 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3406 char *initial_stack_usage_map = stack_usage_map;
3407
3408 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3409
3410 #ifdef REG_PARM_STACK_SPACE
3411 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3412 #endif
3413
3414 /* By default, library functions cannot throw. */
3415 flags = ECF_NOTHROW;
3416
3417 switch (fn_type)
3418 {
3419 case LCT_NORMAL:
3420 break;
3421 case LCT_CONST:
3422 flags |= ECF_CONST;
3423 break;
3424 case LCT_PURE:
3425 flags |= ECF_PURE;
3426 break;
3427 case LCT_CONST_MAKE_BLOCK:
3428 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3429 break;
3430 case LCT_PURE_MAKE_BLOCK:
3431 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3432 break;
3433 case LCT_NORETURN:
3434 flags |= ECF_NORETURN;
3435 break;
3436 case LCT_THROW:
3437 flags = ECF_NORETURN;
3438 break;
3439 case LCT_ALWAYS_RETURN:
3440 flags = ECF_ALWAYS_RETURN;
3441 break;
3442 case LCT_RETURNS_TWICE:
3443 flags = ECF_RETURNS_TWICE;
3444 break;
3445 }
3446 fun = orgfun;
3447
3448 /* Ensure current function's preferred stack boundary is at least
3449 what we need. */
3450 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3451 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3452
3453 /* If this kind of value comes back in memory,
3454 decide where in memory it should come back. */
3455 if (outmode != VOIDmode)
3456 {
3457 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3458 if (aggregate_value_p (tfom, 0))
3459 {
3460 #ifdef PCC_STATIC_STRUCT_RETURN
3461 rtx pointer_reg
3462 = hard_function_value (build_pointer_type (tfom), 0, 0);
3463 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3464 pcc_struct_value = 1;
3465 if (value == 0)
3466 value = gen_reg_rtx (outmode);
3467 #else /* not PCC_STATIC_STRUCT_RETURN */
3468 struct_value_size = GET_MODE_SIZE (outmode);
3469 if (value != 0 && MEM_P (value))
3470 mem_value = value;
3471 else
3472 mem_value = assign_temp (tfom, 0, 1, 1);
3473 #endif
3474 /* This call returns a big structure. */
3475 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3476 }
3477 }
3478 else
3479 tfom = void_type_node;
3480
3481 /* ??? Unfinished: must pass the memory address as an argument. */
3482
3483 /* Copy all the libcall arguments out of the varargs data
3484 and into a vector ARGVEC.
3485
3486 Compute how to pass each argument. We only support a very small subset
3487 of the full argument passing conventions to limit complexity here since
3488 library functions shouldn't have many args. */
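/* The restrictions enforced below: each argument must already have the
   requested mode (BLKmode and mode mismatches abort), the hidden
   struct-return pointer may not be passed partially in registers, and
   variable-sized argument slots are not supported. */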
3489
3490 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3491 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3492
3493 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3494 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3495 #else
3496 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3497 #endif
3498
3499 args_size.constant = 0;
3500 args_size.var = 0;
3501
3502 count = 0;
3503
3504 /* Now we are about to start emitting insns that can be deleted
3505 if a libcall is deleted. */
3506 if (flags & ECF_LIBCALL_BLOCK)
3507 start_sequence ();
3508
3509 push_temp_slots ();
3510
3511 /* If there's a structure value address to be passed,
3512 either pass it in the special place, or pass it as an extra argument. */
3513 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3514 {
3515 rtx addr = XEXP (mem_value, 0);
3516 nargs++;
3517
3518 /* Make sure it is a reasonable operand for a move or push insn. */
3519 if (!REG_P (addr) && !MEM_P (addr)
3520 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3521 addr = force_operand (addr, NULL_RTX);
3522
3523 argvec[count].value = addr;
3524 argvec[count].mode = Pmode;
3525 argvec[count].partial = 0;
3526
3527 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3528 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3529 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3530 abort ();
3531 #endif
3532
3533 locate_and_pad_parm (Pmode, NULL_TREE,
3534 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3535 1,
3536 #else
3537 argvec[count].reg != 0,
3538 #endif
3539 0, NULL_TREE, &args_size, &argvec[count].locate);
3540
3541 if (argvec[count].reg == 0 || argvec[count].partial != 0
3542 || reg_parm_stack_space > 0)
3543 args_size.constant += argvec[count].locate.size.constant;
3544
3545 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3546
3547 count++;
3548 }
3549
3550 for (; count < nargs; count++)
3551 {
3552 rtx val = va_arg (p, rtx);
3553 enum machine_mode mode = va_arg (p, enum machine_mode);
3554
3555 /* We cannot convert the arg value to the mode the library wants here;
3556 must do it earlier where we know the signedness of the arg. */
3557 if (mode == BLKmode
3558 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3559 abort ();
3560
3561 /* There's no need to call protect_from_queue, because
3562 either emit_move_insn or emit_push_insn will do that. */
3563
3564 /* Make sure it is a reasonable operand for a move or push insn. */
3565 if (!REG_P (val) && !MEM_P (val)
3566 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3567 val = force_operand (val, NULL_RTX);
3568
3569 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3570 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3571 {
3572 rtx slot;
3573 int must_copy = 1
3574 #ifdef FUNCTION_ARG_CALLEE_COPIES
3575 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3576 NULL_TREE, 1)
3577 #endif
3578 ;
3579
3580 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3581 functions, so we have to pretend this isn't such a function. */
3582 if (flags & ECF_LIBCALL_BLOCK)
3583 {
3584 rtx insns = get_insns ();
3585 end_sequence ();
3586 emit_insn (insns);
3587 }
3588 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3589
3590 /* If this was a CONST function, it is now PURE since
3591 it now reads memory. */
3592 if (flags & ECF_CONST)
3593 {
3594 flags &= ~ECF_CONST;
3595 flags |= ECF_PURE;
3596 }
3597
3598 if (MEM_P (val) && ! must_copy)
3599 slot = val;
3600 else if (must_copy)
3601 {
3602 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3603 0, 1, 1);
3604 emit_move_insn (slot, val);
3605 }
3606 else
3607 {
3608 tree type = lang_hooks.types.type_for_mode (mode, 0);
3609
3610 slot
3611 = gen_rtx_MEM (mode,
3612 expand_expr (build1 (ADDR_EXPR,
3613 build_pointer_type (type),
3614 make_tree (type, val)),
3615 NULL_RTX, VOIDmode, 0));
3616 }
3617
3618 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3619 gen_rtx_USE (VOIDmode, slot),
3620 call_fusage);
3621 if (must_copy)
3622 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3623 gen_rtx_CLOBBER (VOIDmode,
3624 slot),
3625 call_fusage);
3626
3627 mode = Pmode;
3628 val = force_operand (XEXP (slot, 0), NULL_RTX);
3629 }
3630 #endif
3631
3632 argvec[count].value = val;
3633 argvec[count].mode = mode;
3634
3635 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3636
3637 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3638 argvec[count].partial
3639 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3640 #else
3641 argvec[count].partial = 0;
3642 #endif
3643
3644 locate_and_pad_parm (mode, NULL_TREE,
3645 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3646 1,
3647 #else
3648 argvec[count].reg != 0,
3649 #endif
3650 argvec[count].partial,
3651 NULL_TREE, &args_size, &argvec[count].locate);
3652
3653 if (argvec[count].locate.size.var)
3654 abort ();
3655
3656 if (argvec[count].reg == 0 || argvec[count].partial != 0
3657 || reg_parm_stack_space > 0)
3658 args_size.constant += argvec[count].locate.size.constant;
3659
3660 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3661 }
3662
3663 /* If this machine requires an external definition for library
3664 functions, write one out. */
3665 assemble_external_libcall (fun);
3666
3667 original_args_size = args_size;
3668 args_size.constant = (((args_size.constant
3669 + stack_pointer_delta
3670 + STACK_BYTES - 1)
3671 / STACK_BYTES
3672 * STACK_BYTES)
3673 - stack_pointer_delta);
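/* E.g. with STACK_BYTES == 16, 20 bytes of arguments and a
   stack_pointer_delta of 8: (20 + 8 + 15) / 16 * 16 - 8 = 24, so after
   pushing, the total adjustment of 8 + 24 = 32 keeps the stack pointer
   aligned to the preferred boundary. */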
3674
3675 args_size.constant = MAX (args_size.constant,
3676 reg_parm_stack_space);
3677
3678 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3679 args_size.constant -= reg_parm_stack_space;
3680 #endif
3681
3682 if (args_size.constant > current_function_outgoing_args_size)
3683 current_function_outgoing_args_size = args_size.constant;
3684
3685 if (ACCUMULATE_OUTGOING_ARGS)
3686 {
3687 /* Since the stack pointer will never be pushed, it is possible for
3688 the evaluation of a parm to clobber something we have already
3689 written to the stack. Since most function calls on RISC machines
3690 do not use the stack, this is uncommon, but must work correctly.
3691
3692 Therefore, we save any area of the stack that was already written
3693 and that we are using. Here we set up to do this by making a new
3694 stack usage map from the old one.
3695
3696 Another approach might be to try to reorder the argument
3697 evaluations to avoid this conflicting stack usage. */
3698
3699 needed = args_size.constant;
3700
3701 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3702 /* Since we will be writing into the entire argument area, the
3703 map must be allocated for its entire size, not just the part that
3704 is the responsibility of the caller. */
3705 needed += reg_parm_stack_space;
3706 #endif
3707
3708 #ifdef ARGS_GROW_DOWNWARD
3709 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3710 needed + 1);
3711 #else
3712 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3713 needed);
3714 #endif
3715 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3716
3717 if (initial_highest_arg_in_use)
3718 memcpy (stack_usage_map, initial_stack_usage_map,
3719 initial_highest_arg_in_use);
3720
3721 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3722 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3723 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3724 needed = 0;
3725
3726 /* We must be careful to use virtual regs before they're instantiated,
3727 and real regs afterwards. Loop optimization, for example, can create
3728 new libcalls after we've instantiated the virtual regs, and if we
3729 use virtuals anyway, they won't match the rtl patterns. */
3730
3731 if (virtuals_instantiated)
3732 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3733 else
3734 argblock = virtual_outgoing_args_rtx;
3735 }
3736 else
3737 {
3738 if (!PUSH_ARGS)
3739 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3740 }
3741
3742 /* If we push args individually in reverse order, perform stack alignment
3743 before the first push (the last arg). */
3744 if (argblock == 0 && PUSH_ARGS_REVERSED)
3745 anti_adjust_stack (GEN_INT (args_size.constant
3746 - original_args_size.constant));
3747
3748 if (PUSH_ARGS_REVERSED)
3749 {
3750 inc = -1;
3751 argnum = nargs - 1;
3752 }
3753 else
3754 {
3755 inc = 1;
3756 argnum = 0;
3757 }
3758
3759 #ifdef REG_PARM_STACK_SPACE
3760 if (ACCUMULATE_OUTGOING_ARGS)
3761 {
3762 /* The argument list is the property of the called routine and it
3763 may clobber it. If the fixed area has been used for previous
3764 parameters, we must save and restore it. */
3765 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3766 &low_to_save, &high_to_save);
3767 }
3768 #endif
3769
3770 /* Push the args that need to be pushed. */
3771
3772 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3773 are to be pushed. */
3774 for (count = 0; count < nargs; count++, argnum += inc)
3775 {
3776 enum machine_mode mode = argvec[argnum].mode;
3777 rtx val = argvec[argnum].value;
3778 rtx reg = argvec[argnum].reg;
3779 int partial = argvec[argnum].partial;
3780 int lower_bound = 0, upper_bound = 0, i;
3781
3782 if (! (reg != 0 && partial == 0))
3783 {
3784 if (ACCUMULATE_OUTGOING_ARGS)
3785 {
3786 /* If this is being stored into a pre-allocated, fixed-size
3787 stack area, save any previous data at that location. */
3788
3789 #ifdef ARGS_GROW_DOWNWARD
3790 /* stack_slot is negative, but we want to index stack_usage_map
3791 with positive values. */
3792 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3793 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3794 #else
3795 lower_bound = argvec[argnum].locate.offset.constant;
3796 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3797 #endif
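/* E.g. when args grow downward, an argument at offset -12 of size 8
   occupies addresses -12 .. -5, which map to stack_usage_map indices
   5 .. 12, i.e. lower_bound == 5 and upper_bound == 13. */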
3798
3799 i = lower_bound;
3800 /* Don't worry about things in the fixed argument area;
3801 it has already been saved. */
3802 if (i < reg_parm_stack_space)
3803 i = reg_parm_stack_space;
3804 while (i < upper_bound && stack_usage_map[i] == 0)
3805 i++;
3806
3807 if (i < upper_bound)
3808 {
3809 /* We need to make a save area. */
3810 unsigned int size
3811 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3812 enum machine_mode save_mode
3813 = mode_for_size (size, MODE_INT, 1);
3814 rtx adr
3815 = plus_constant (argblock,
3816 argvec[argnum].locate.offset.constant);
3817 rtx stack_area
3818 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3819
3820 if (save_mode == BLKmode)
3821 {
3822 argvec[argnum].save_area
3823 = assign_stack_temp (BLKmode,
3824 argvec[argnum].locate.size.constant,
3825 0);
3826
3827 emit_block_move (validize_mem (argvec[argnum].save_area),
3828 stack_area,
3829 GEN_INT (argvec[argnum].locate.size.constant),
3830 BLOCK_OP_CALL_PARM);
3831 }
3832 else
3833 {
3834 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3835
3836 emit_move_insn (argvec[argnum].save_area, stack_area);
3837 }
3838 }
3839 }
3840
3841 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3842 partial, reg, 0, argblock,
3843 GEN_INT (argvec[argnum].locate.offset.constant),
3844 reg_parm_stack_space,
3845 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3846
3847 /* Now mark the segment we just used. */
3848 if (ACCUMULATE_OUTGOING_ARGS)
3849 for (i = lower_bound; i < upper_bound; i++)
3850 stack_usage_map[i] = 1;
3851
3852 NO_DEFER_POP;
3853 }
3854 }
3855
3856 /* If we pushed args in forward order, perform stack alignment
3857 after pushing the last arg. */
3858 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3859 anti_adjust_stack (GEN_INT (args_size.constant
3860 - original_args_size.constant));
3861
3862 if (PUSH_ARGS_REVERSED)
3863 argnum = nargs - 1;
3864 else
3865 argnum = 0;
3866
3867 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3868
3869 /* Now load any reg parms into their regs. */
3870
3871 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3872 are to be pushed. */
3873 for (count = 0; count < nargs; count++, argnum += inc)
3874 {
3875 enum machine_mode mode = argvec[argnum].mode;
3876 rtx val = argvec[argnum].value;
3877 rtx reg = argvec[argnum].reg;
3878 int partial = argvec[argnum].partial;
3879
3880 /* Handle calls that pass values in multiple non-contiguous
3881 locations. The PA64 has examples of this for library calls. */
3882 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3883 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3884 else if (reg != 0 && partial == 0)
3885 emit_move_insn (reg, val);
3886
3887 NO_DEFER_POP;
3888 }
3889
3890 /* Any regs containing parms remain in use through the call. */
3891 for (count = 0; count < nargs; count++)
3892 {
3893 rtx reg = argvec[count].reg;
3894 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3895 use_group_regs (&call_fusage, reg);
3896 else if (reg != 0)
3897 use_reg (&call_fusage, reg);
3898 }
3899
3900 /* Pass the function the address in which to return a structure value. */
3901 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3902 {
3903 emit_move_insn (struct_value,
3904 force_reg (Pmode,
3905 force_operand (XEXP (mem_value, 0),
3906 NULL_RTX)));
3907 if (REG_P (struct_value))
3908 use_reg (&call_fusage, struct_value);
3909 }
3910
3911 /* Don't allow popping to be deferred, since then
3912 cse'ing of library calls could delete a call and leave the pop. */
3913 NO_DEFER_POP;
3914 valreg = (mem_value == 0 && outmode != VOIDmode
3915 ? hard_libcall_value (outmode) : NULL_RTX);
3916
3917 /* Stack must be properly aligned now. */
3918 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
3919 abort ();
3920
3921 before_call = get_last_insn ();
3922
3923 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3924 will set inhibit_defer_pop to that value. */
3925 /* The return type is needed to decide how many bytes the function pops.
3926 Signedness plays no role in that, so for simplicity, we pretend it's
3927 always signed. We also assume that the list of arguments passed has
3928 no impact, so we pretend it is unknown. */
3929
3930 emit_call_1 (fun, NULL,
3931 get_identifier (XSTR (orgfun, 0)),
3932 build_function_type (tfom, NULL_TREE),
3933 original_args_size.constant, args_size.constant,
3934 struct_value_size,
3935 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3936 valreg,
3937 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3938
3939 /* For calls to `setjmp', etc., inform flow.c it should complain
3940 if nonvolatile values are live. For functions that cannot return,
3941 inform flow that control does not fall through. */
3942
3943 if (flags & (ECF_NORETURN | ECF_LONGJMP))
3944 {
3945 /* The barrier note must be emitted
3946 immediately after the CALL_INSN. Some ports emit more than
3947 just a CALL_INSN above, so we must search for it here. */
3948
3949 rtx last = get_last_insn ();
3950 while (GET_CODE (last) != CALL_INSN)
3951 {
3952 last = PREV_INSN (last);
3953 /* There was no CALL_INSN? */
3954 if (last == before_call)
3955 abort ();
3956 }
3957
3958 emit_barrier_after (last);
3959 }
3960
3961 /* Now restore inhibit_defer_pop to its actual original value. */
3962 OK_DEFER_POP;
3963
3964 /* If the call is cse'able, make an appropriate pair of reg-notes around it.
3965 Test valreg so we don't crash; we may safely ignore `const'
3966 if the return type is void. Disable for PARALLEL return values, because
3967 we have no way to move such values into a pseudo register. */
3968 if (flags & ECF_LIBCALL_BLOCK)
3969 {
3970 rtx insns;
3971
3972 if (valreg == 0)
3973 {
3974 insns = get_insns ();
3975 end_sequence ();
3976 emit_insn (insns);
3977 }
3978 else
3979 {
3980 rtx note = 0;
3981 rtx temp;
3982 int i;
3983
3984 if (GET_CODE (valreg) == PARALLEL)
3985 {
3986 temp = gen_reg_rtx (outmode);
3987 emit_group_store (temp, valreg, NULL_TREE,
3988 GET_MODE_SIZE (outmode));
3989 valreg = temp;
3990 }
3991
3992 temp = gen_reg_rtx (GET_MODE (valreg));
3993
3994 /* Construct an "equal form" for the value which mentions all the
3995 arguments in order as well as the function name. */
3996 for (i = 0; i < nargs; i++)
3997 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3998 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
3999
4000 insns = get_insns ();
4001 end_sequence ();
4002
4003 if (flags & ECF_PURE)
4004 note = gen_rtx_EXPR_LIST (VOIDmode,
4005 gen_rtx_USE (VOIDmode,
4006 gen_rtx_MEM (BLKmode,
4007 gen_rtx_SCRATCH (VOIDmode))),
4008 note);
4009
4010 emit_libcall_block (insns, temp, valreg, note);
4011
4012 valreg = temp;
4013 }
4014 }
4015 pop_temp_slots ();
4016
4017 /* Copy the value to the right place. */
4018 if (outmode != VOIDmode && retval)
4019 {
4020 if (mem_value)
4021 {
4022 if (value == 0)
4023 value = mem_value;
4024 if (value != mem_value)
4025 emit_move_insn (value, mem_value);
4026 }
4027 else if (GET_CODE (valreg) == PARALLEL)
4028 {
4029 if (value == 0)
4030 value = gen_reg_rtx (outmode);
4031 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4032 }
4033 else if (value != 0)
4034 emit_move_insn (value, valreg);
4035 else
4036 value = valreg;
4037 }
4038
4039 if (ACCUMULATE_OUTGOING_ARGS)
4040 {
4041 #ifdef REG_PARM_STACK_SPACE
4042 if (save_area)
4043 restore_fixed_argument_area (save_area, argblock,
4044 high_to_save, low_to_save);
4045 #endif
4046
4047 /* If we saved any argument areas, restore them. */
4048 for (count = 0; count < nargs; count++)
4049 if (argvec[count].save_area)
4050 {
4051 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4052 rtx adr = plus_constant (argblock,
4053 argvec[count].locate.offset.constant);
4054 rtx stack_area = gen_rtx_MEM (save_mode,
4055 memory_address (save_mode, adr));
4056
4057 if (save_mode == BLKmode)
4058 emit_block_move (stack_area,
4059 validize_mem (argvec[count].save_area),
4060 GEN_INT (argvec[count].locate.size.constant),
4061 BLOCK_OP_CALL_PARM);
4062 else
4063 emit_move_insn (stack_area, argvec[count].save_area);
4064 }
4065
4066 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4067 stack_usage_map = initial_stack_usage_map;
4068 }
4069
4070 return value;
4071
4072 }
4073 \f
4074 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4076 for a value of mode OUTMODE,
4077 with NARGS different arguments, passed as alternating rtx values
4078 and machine_modes to convert them to.
4079 The rtx values should have been passed through protect_from_queue already.
4080
4081 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4082 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4083 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4084 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
4085 REG_LIBCALL/REG_RETVAL notes with an extra (use (mem (scratch))) note,
4086 or another LCT_ value for other types of library calls. */
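/* A typical use (illustrative only; the function name and operands
   below are hypothetical) passes NARGS (rtx, mode) pairs after the
   fixed arguments:

   emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__example_fn"),
   LCT_NORMAL, VOIDmode, 2,
   op0, SImode,
   op1, DImode); */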
4087
4088 void
4089 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4090 enum machine_mode outmode, int nargs, ...)
4091 {
4092 va_list p;
4093
4094 va_start (p, nargs);
4095 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4096 va_end (p);
4097 }
4098 \f
4099 /* Like emit_library_call except that an extra argument, VALUE,
4100 comes second and says where to store the result.
4101 (If VALUE is zero, this function chooses a convenient way
4102 to return the value.)
4103
4104 This function returns an rtx for where the value is to be found.
4105 If VALUE is nonzero, VALUE is returned. */
4106
4107 rtx
4108 emit_library_call_value (rtx orgfun, rtx value,
4109 enum libcall_type fn_type,
4110 enum machine_mode outmode, int nargs, ...)
4111 {
4112 rtx result;
4113 va_list p;
4114
4115 va_start (p, nargs);
4116 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4117 nargs, p);
4118 va_end (p);
4119
4120 return result;
4121 }
4122 \f
4123 /* Store a single argument for a function call
4124 into the register or memory area where it must be passed.
4125 *ARG describes the argument value and where to pass it.
4126
4127 ARGBLOCK is the address of the stack-block for all the arguments,
4128 or 0 on a machine where arguments are pushed individually.
4129
4130 MAY_BE_ALLOCA nonzero says this could be a call to `alloca',
4131 so we must be careful about how the stack is used.
4132
4133 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4134 argument stack. When ACCUMULATE_OUTGOING_ARGS is set, this indicates
4135 that we need not worry about saving and restoring the stack.
4136
4137 FNDECL is the declaration of the function we are calling.
4138
4139 Return nonzero if this arg should cause sibcall failure,
4140 zero otherwise. */
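/* Roughly: the argument value is computed (unless precomputed), then
   either pushed as a scalar with emit_push_insn or pushed as a BLKmode
   block, with any stack area it overwrites saved first when
   ACCUMULATE_OUTGOING_ARGS, and the used stack_usage_map slots marked
   afterwards. */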
4141
4142 static int
4143 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4144 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4145 {
4146 tree pval = arg->tree_value;
4147 rtx reg = 0;
4148 int partial = 0;
4149 int used = 0;
4150 int i, lower_bound = 0, upper_bound = 0;
4151 int sibcall_failure = 0;
4152
4153 if (TREE_CODE (pval) == ERROR_MARK)
4154 return 1;
4155
4156 /* Push a new temporary level for any temporaries we make for
4157 this argument. */
4158 push_temp_slots ();
4159
4160 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4161 {
4162 /* If this is being stored into a pre-allocated, fixed-size stack area,
4163 save any previous data at that location. */
4164 if (argblock && ! variable_size && arg->stack)
4165 {
4166 #ifdef ARGS_GROW_DOWNWARD
4167 /* stack_slot is negative, but we want to index stack_usage_map
4168 with positive values. */
4169 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4170 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4171 else
4172 upper_bound = 0;
4173
4174 lower_bound = upper_bound - arg->locate.size.constant;
4175 #else
4176 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4177 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4178 else
4179 lower_bound = 0;
4180
4181 upper_bound = lower_bound + arg->locate.size.constant;
4182 #endif
4183
4184 i = lower_bound;
4185 /* Don't worry about things in the fixed argument area;
4186 it has already been saved. */
4187 if (i < reg_parm_stack_space)
4188 i = reg_parm_stack_space;
4189 while (i < upper_bound && stack_usage_map[i] == 0)
4190 i++;
4191
4192 if (i < upper_bound)
4193 {
4194 /* We need to make a save area. */
4195 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4196 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4197 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4198 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4199
4200 if (save_mode == BLKmode)
4201 {
4202 tree ot = TREE_TYPE (arg->tree_value);
4203 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4204 | TYPE_QUAL_CONST));
4205
4206 arg->save_area = assign_temp (nt, 0, 1, 1);
4207 preserve_temp_slots (arg->save_area);
4208 emit_block_move (validize_mem (arg->save_area), stack_area,
4209 expr_size (arg->tree_value),
4210 BLOCK_OP_CALL_PARM);
4211 }
4212 else
4213 {
4214 arg->save_area = gen_reg_rtx (save_mode);
4215 emit_move_insn (arg->save_area, stack_area);
4216 }
4217 }
4218 }
4219 }
4220
4221 /* If this isn't going to be placed on both the stack and in registers,
4222 set up the register and number of words. */
4223 if (! arg->pass_on_stack)
4224 {
4225 if (flags & ECF_SIBCALL)
4226 reg = arg->tail_call_reg;
4227 else
4228 reg = arg->reg;
4229 partial = arg->partial;
4230 }
4231
4232 if (reg != 0 && partial == 0)
4233 /* Being passed entirely in a register. We shouldn't be called in
4234 this case. */
4235 abort ();
4236
4237 /* If this arg needs special alignment, don't load the registers
4238 here. */
4239 if (arg->n_aligned_regs != 0)
4240 reg = 0;
4241
4242 /* If this is being passed partially in a register, we can't evaluate
4243 it directly into its stack slot. Otherwise, we can. */
4244 if (arg->value == 0)
4245 {
4246 /* stack_arg_under_construction is nonzero if a function argument is
4247 being evaluated directly into the outgoing argument list and
4248 expand_call must take special action to preserve the argument list
4249 if it is called recursively.
4250
4251 For scalar function arguments stack_usage_map is sufficient to
4252 determine which stack slots must be saved and restored. Scalar
4253 arguments in general have pass_on_stack == 0.
4254
4255 If this argument is initialized by a function which takes the
4256 address of the argument (a C++ constructor or a C function
4257 returning a BLKmode structure), then stack_usage_map is
4258 insufficient and expand_call must push the stack around the
4259 function call. Such arguments have pass_on_stack == 1.
4260
4261 Note that it is always safe to set stack_arg_under_construction,
4262 but this generates suboptimal code if set when not needed. */
4263
4264 if (arg->pass_on_stack)
4265 stack_arg_under_construction++;
4266
4267 arg->value = expand_expr (pval,
4268 (partial
4269 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4270 ? NULL_RTX : arg->stack,
4271 VOIDmode, EXPAND_STACK_PARM);
4272
4273 /* If the mode doesn't agree (because we are promoting the object,
4274 or for any other reason), convert to the argument's mode. */
4275
4276 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4277 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4278 arg->value, arg->unsignedp);
4279
4280 if (arg->pass_on_stack)
4281 stack_arg_under_construction--;
4282 }
4283
4284 /* Don't allow anything left on stack from computation
4285 of argument to alloca. */
4286 if (flags & ECF_MAY_BE_ALLOCA)
4287 do_pending_stack_adjust ();
4288
4289 if (arg->value == arg->stack)
4290 /* If the value is already in the stack slot, we are done. */
4291 ;
4292 else if (arg->mode != BLKmode)
4293 {
4294 int size;
4295
4296 /* Argument is a scalar, not entirely passed in registers.
4297 (If part is passed in registers, arg->partial says how much
4298 and emit_push_insn will take care of putting it there.)
4299
4300 Push it, and if its size is less than the
4301 amount of space allocated to it,
4302 also bump stack pointer by the additional space.
4303 Note that in C the default argument promotions
4304 will prevent such mismatches. */
4305
4306 size = GET_MODE_SIZE (arg->mode);
4307 /* Compute how much space the push instruction will push.
4308 On many machines, pushing a byte will advance the stack
4309 pointer by a halfword. */
4310 #ifdef PUSH_ROUNDING
4311 size = PUSH_ROUNDING (size);
4312 #endif
4313 used = size;
4314
4315 /* Compute how much space the argument should get:
4316 round up to a multiple of the alignment for arguments. */
4317 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4318 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4319 / (PARM_BOUNDARY / BITS_PER_UNIT))
4320 * (PARM_BOUNDARY / BITS_PER_UNIT));
4321
4322 /* This isn't already where we want it on the stack, so put it there.
4323 This can either be done with push or copy insns. */
4324 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4325 PARM_BOUNDARY, partial, reg, used - size, argblock,
4326 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4327 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4328
4329 /* Unless this is a partially-in-register argument, the argument is now
4330 in the stack. */
4331 if (partial == 0)
4332 arg->value = arg->stack;
4333 }
4334 else
4335 {
4336 /* BLKmode, at least partly to be pushed. */
4337
4338 unsigned int parm_align;
4339 int excess;
4340 rtx size_rtx;
4341
4342 /* Pushing a nonscalar.
4343 If part is passed in registers, PARTIAL says how much
4344 and emit_push_insn will take care of putting it there. */
4345
4346 /* Round its size up to a multiple
4347 of the allocation unit for arguments. */
4348
4349 if (arg->locate.size.var != 0)
4350 {
4351 excess = 0;
4352 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4353 }
4354 else
4355 {
4356 /* PUSH_ROUNDING has no effect on us, because
4357 emit_push_insn for BLKmode is careful to avoid it. */
4358 if (reg && GET_CODE (reg) == PARALLEL)
4359 {
4360 /* Use the size of the elt to compute excess. */
4361 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
4362 excess = (arg->locate.size.constant
4363 - int_size_in_bytes (TREE_TYPE (pval))
4364 + partial * GET_MODE_SIZE (GET_MODE (elt)));
4365 }
4366 else
4367 excess = (arg->locate.size.constant
4368 - int_size_in_bytes (TREE_TYPE (pval))
4369 + partial * UNITS_PER_WORD);
4370 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4371 NULL_RTX, TYPE_MODE (sizetype), 0);
4372 }
4373
4374 /* Some types will require stricter alignment, which will be
4375 provided for elsewhere in argument layout. */
4376 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4377
4378 /* When an argument is padded down, the block is aligned to
4379 PARM_BOUNDARY, but the actual argument isn't. */
4380 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4381 {
4382 if (arg->locate.size.var)
4383 parm_align = BITS_PER_UNIT;
4384 else if (excess)
4385 {
4386 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4387 parm_align = MIN (parm_align, excess_align);
4388 }
4389 }
4390
4391 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4392 {
4393 /* emit_push_insn might not work properly if arg->value and
4394 argblock + arg->locate.offset areas overlap. */
4395 rtx x = arg->value;
4396 int i = 0;
4397
4398 if (XEXP (x, 0) == current_function_internal_arg_pointer
4399 || (GET_CODE (XEXP (x, 0)) == PLUS
4400 && XEXP (XEXP (x, 0), 0) ==
4401 current_function_internal_arg_pointer
4402 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4403 {
4404 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4405 i = INTVAL (XEXP (XEXP (x, 0), 1));
4406
4407 /* expand_call should ensure this. */
4408 if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
4409 abort ();
4410
4411 if (arg->locate.offset.constant > i)
4412 {
4413 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4414 sibcall_failure = 1;
4415 }
4416 else if (arg->locate.offset.constant < i)
4417 {
4418 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4419 sibcall_failure = 1;
4420 }
4421 }
4422 }
4423
4424 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4425 parm_align, partial, reg, excess, argblock,
4426 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4427 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4428
4429 /* Unless this is a partially-in-register argument, the argument is now
4430 in the stack.
4431
4432 ??? Unlike the case above, in which we want the actual
4433 address of the data, so that we can load it directly into a
4434 register, here we want the address of the stack slot, so that
4435 it's properly aligned for word-by-word copying or something
4436 like that. It's not clear that this is always correct. */
4437 if (partial == 0)
4438 arg->value = arg->stack_slot;
4439 }
4440
4441 /* Mark all slots this store used. */
4442 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4443 && argblock && ! variable_size && arg->stack)
4444 for (i = lower_bound; i < upper_bound; i++)
4445 stack_usage_map[i] = 1;
4446
4447 /* Once we have pushed something, pops can't safely
4448 be deferred during the rest of the arguments. */
4449 NO_DEFER_POP;
4450
4451 /* ANSI doesn't require a sequence point here,
4452 but PCC has one, so this will avoid some problems. */
4453 emit_queue ();
4454
4455 /* Free any temporary slots made in processing this argument. Show
4456 that we might have taken the address of something and pushed that
4457 as an operand. */
4458 preserve_temp_slots (NULL_RTX);
4459 free_temp_slots ();
4460 pop_temp_slots ();
4461
4462 return sibcall_failure;
4463 }
4464
4465 /* Nonzero if we do not know how to pass TYPE solely in registers.
4466 We cannot do so in the following cases:
4467
4468 - if the type has variable size
4469 - if the type is marked as addressable (it is required to be constructed
4470 into the stack)
4471 - if the padding and mode of the type is such that a copy into a register
4472 would put it into the wrong part of the register.
4473
4474 Which padding can't be supported depends on the byte endianness.
4475
4476 A value in a register is implicitly padded at the most significant end.
4477 On a big-endian machine, that is the lower end in memory.
4478 So a value padded in memory at the upper end can't go in a register.
4479 For a little-endian machine, the reverse is true. */
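/* For instance, on a 32-bit big-endian target a 3-byte BLKmode
   aggregate whose padding direction is upward would end up in the
   wrong part of a register, so it is forced onto the stack by the
   BLKmode check below. */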
4480
4481 bool
4482 default_must_pass_in_stack (enum machine_mode mode, tree type)
4483 {
4484 if (!type)
4485 return false;
4486
4487 /* If the type has variable size... */
4488 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4489 return true;
4490
4491 /* If the type is marked as addressable (it is required
4492 to be constructed into the stack)... */
4493 if (TREE_ADDRESSABLE (type))
4494 return true;
4495
4496 /* If the padding and mode of the type is such that a copy into
4497 a register would put it into the wrong part of the register. */
4498 if (mode == BLKmode
4499 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4500 && (FUNCTION_ARG_PADDING (mode, type)
4501 == (BYTES_BIG_ENDIAN ? upward : downward)))
4502 return true;
4503
4504 return false;
4505 }