1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "expr.h"
28 #include "libfuncs.h"
29 #include "function.h"
30 #include "regs.h"
31 #include "toplev.h"
32 #include "output.h"
33 #include "tm_p.h"
34 #include "timevar.h"
35 #include "sbitmap.h"
36 #include "langhooks.h"
37
38 #if !defined FUNCTION_OK_FOR_SIBCALL
39 #define FUNCTION_OK_FOR_SIBCALL(DECL) 1
40 #endif
41
42 /* Decide whether a function's arguments should be processed
43 from first to last or from last to first.
44
45 They should if the stack and args grow in opposite directions, but
46 only if we have push insns. */
47
48 #ifdef PUSH_ROUNDING
49
50 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
51 #define PUSH_ARGS_REVERSED PUSH_ARGS
52 #endif
53
54 #endif
55
56 #ifndef PUSH_ARGS_REVERSED
57 #define PUSH_ARGS_REVERSED 0
58 #endif
59
60 #ifndef STACK_POINTER_OFFSET
61 #define STACK_POINTER_OFFSET 0
62 #endif
63
64 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
65 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
66
67 /* Data structure and subroutines used within expand_call. */
68
69 struct arg_data
70 {
71 /* Tree node for this argument. */
72 tree tree_value;
73 /* Mode for value; TYPE_MODE unless promoted. */
74 enum machine_mode mode;
75 /* Current RTL value for argument, or 0 if it isn't precomputed. */
76 rtx value;
77 /* Initially-computed RTL value for argument; only for const functions. */
78 rtx initial_value;
79 /* Register to pass this argument in, 0 if passed on stack, or a
80 PARALLEL if the arg is to be copied into multiple non-contiguous
81 registers. */
82 rtx reg;
83 /* Register to pass this argument in when generating tail call sequence.
84 This is not the same register as for normal calls on machines with
85 register windows. */
86 rtx tail_call_reg;
87 /* If REG was promoted from the actual mode of the argument expression,
88 indicates whether the promotion is sign- or zero-extended. */
89 int unsignedp;
90 /* Number of registers to use. 0 means put the whole arg in registers.
91 Also 0 if not passed in registers. */
92 int partial;
93 /* Non-zero if argument must be passed on stack.
94 Note that some arguments may be passed on the stack
95 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
96 pass_on_stack identifies arguments that *cannot* go in registers. */
97 int pass_on_stack;
98 /* Offset of this argument from beginning of stack-args. */
99 struct args_size offset;
100 /* Similar, but offset to the start of the stack slot. Different from
101 OFFSET if this arg pads downward. */
102 struct args_size slot_offset;
103 /* Size of this argument on the stack, rounded up for any padding it gets;
104 parts of the argument passed in registers do not count.
105 If REG_PARM_STACK_SPACE is defined, then register parms
106 are counted here as well. */
107 struct args_size size;
108 /* Location on the stack at which parameter should be stored. The store
109 has already been done if STACK == VALUE. */
110 rtx stack;
111 /* Location on the stack of the start of this argument slot. This can
112 differ from STACK if this arg pads downward. This location is known
113 to be aligned to FUNCTION_ARG_BOUNDARY. */
114 rtx stack_slot;
115 /* Place that this stack area has been saved, if needed. */
116 rtx save_area;
117 /* If an argument's alignment does not permit direct copying into registers,
118 copy in smaller-sized pieces into pseudos. These are stored in a
119 block pointed to by this field. The next field says how many
120 word-sized pseudos we made. */
121 rtx *aligned_regs;
122 int n_aligned_regs;
123 /* The amount that the stack pointer needs to be adjusted to
124 force alignment for the next argument. */
125 struct args_size alignment_pad;
126 };
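
/* A minimal illustrative sketch (the `sketch_' name is hypothetical, not
   part of GCC): most helpers below scan an arg_data array with a loop of
   this shape, selecting the arguments passed at least partly in
   registers.  */
static int
sketch_count_register_args (args, num_actuals)
     struct arg_data *args;
     int num_actuals;
{
  int i, n = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      n++;
  return n;
}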
127
128 /* A vector of one char per byte of stack space. A byte is non-zero if
129 the corresponding stack location has been used.
130 This vector is used to prevent a function call within an argument from
131 clobbering any stack already set up. */
132 static char *stack_usage_map;
133
134 /* Size of STACK_USAGE_MAP. */
135 static int highest_outgoing_arg_in_use;
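
/* A minimal illustrative sketch (the `sketch_' name is hypothetical, not
   part of GCC): how a range of the map is typically consulted before
   reusing stack space; compare the overlap check in store_one_arg
   further below.  */
static int
sketch_stack_region_in_use (lower, upper)
     int lower;
     int upper;
{
  int i;

  for (i = lower; i < upper && i < highest_outgoing_arg_in_use; i++)
    if (stack_usage_map[i] != 0)
      return 1;		/* would clobber an argument already stored */
  return 0;
}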
136
137 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
138 stack location's tail call argument has already been stored into the stack.
139 This bitmap is used to prevent sibling call optimization if the function tries
140 to use its parent's incoming argument slots when they have already been
141 overwritten with tail call arguments. */
142 static sbitmap stored_args_map;
143
144 /* stack_arg_under_construction is nonzero when an argument may be
145 initialized with a constructor call (including a C function that
146 returns a BLKmode struct) and expand_call must take special action
147 to make sure the object being constructed does not overlap the
148 argument list for the constructor call. */
149 int stack_arg_under_construction;
150
151 static int calls_function PARAMS ((tree, int));
152 static int calls_function_1 PARAMS ((tree, int));
153
154 /* Nonzero if this is a call to a `const' function. */
155 #define ECF_CONST 1
156 /* Nonzero if this is a call to a `volatile' function. */
157 #define ECF_NORETURN 2
158 /* Nonzero if this is a call to malloc or a related function. */
159 #define ECF_MALLOC 4
160 /* Nonzero if it is plausible that this is a call to alloca. */
161 #define ECF_MAY_BE_ALLOCA 8
162 /* Nonzero if this is a call to a function that won't throw an exception. */
163 #define ECF_NOTHROW 16
164 /* Nonzero if this is a call to setjmp or a related function. */
165 #define ECF_RETURNS_TWICE 32
166 /* Nonzero if this is a call to `longjmp'. */
167 #define ECF_LONGJMP 64
168 /* Nonzero if this is a syscall that makes a new process in the image of
169 the current one. */
170 #define ECF_FORK_OR_EXEC 128
171 #define ECF_SIBCALL 256
172 /* Nonzero if this is a call to a "pure" function (like a const function,
173 but it may read memory). */
174 #define ECF_PURE 512
175 /* Nonzero if this is a call to a function that returns with the stack
176 pointer depressed. */
177 #define ECF_SP_DEPRESSED 1024
178 /* Nonzero if this call is known to always return. */
179 #define ECF_ALWAYS_RETURN 2048
180 /* Create libcall block around the call. */
181 #define ECF_LIBCALL_BLOCK 4096
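
/* A minimal illustrative sketch (the `sketch_' name is hypothetical, not
   part of GCC): the ECF_* values are distinct bits, so a call's
   properties combine with `|' and test with `&'.  For example, a call
   that is const and cannot throw: */
static int
sketch_const_nothrow_flags ()
{
  int flags = ECF_CONST | ECF_NOTHROW;

  /* Either ECF_CONST or ECF_PURE means the call has no side effects,
     so it may get a libcall block (compare flags_from_decl_or_type).  */
  if (flags & (ECF_CONST | ECF_PURE))
    flags |= ECF_LIBCALL_BLOCK;
  return flags;
}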
182
183 static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
184 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
185 rtx, int, rtx, int,
186 CUMULATIVE_ARGS *));
187 static void precompute_register_parameters PARAMS ((int,
188 struct arg_data *,
189 int *));
190 static int store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
191 int));
192 static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
193 int));
194 static int finalize_must_preallocate PARAMS ((int, int,
195 struct arg_data *,
196 struct args_size *));
197 static void precompute_arguments PARAMS ((int, int,
198 struct arg_data *));
199 static int compute_argument_block_size PARAMS ((int,
200 struct args_size *,
201 int));
202 static void initialize_argument_information PARAMS ((int,
203 struct arg_data *,
204 struct args_size *,
205 int, tree, tree,
206 CUMULATIVE_ARGS *,
207 int, rtx *, int *,
208 int *, int *));
209 static void compute_argument_addresses PARAMS ((struct arg_data *,
210 rtx, int));
211 static rtx rtx_for_function_call PARAMS ((tree, tree));
212 static void load_register_parameters PARAMS ((struct arg_data *,
213 int, rtx *, int));
214 static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx,
215 enum libcall_type,
216 enum machine_mode,
217 int, va_list));
218 static int special_function_p PARAMS ((tree, int));
219 static int flags_from_decl_or_type PARAMS ((tree));
220 static rtx try_to_integrate PARAMS ((tree, tree, rtx,
221 int, tree, rtx));
222 static int check_sibcall_argument_overlap_1 PARAMS ((rtx));
223 static int check_sibcall_argument_overlap PARAMS ((rtx, struct arg_data *));
224
225 static int combine_pending_stack_adjustment_and_call
226 PARAMS ((int, struct args_size *, int));
227
228 #ifdef REG_PARM_STACK_SPACE
229 static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
230 static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
231 #endif
232 \f
233 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
234 `alloca'.
235
236 If WHICH is 0, return 1 if EXP contains a call to any function.
237 Actually, we only need to return 1 if evaluating EXP would require pushing
238 arguments on the stack, but that is too difficult to compute, so we just
239 assume any function call might require the stack. */
240
241 static tree calls_function_save_exprs;
242
243 static int
244 calls_function (exp, which)
245 tree exp;
246 int which;
247 {
248 int val;
249
250 calls_function_save_exprs = 0;
251 val = calls_function_1 (exp, which);
252 calls_function_save_exprs = 0;
253 return val;
254 }
255
256 /* Recursive function to do the work of the above function. */
257
258 static int
259 calls_function_1 (exp, which)
260 tree exp;
261 int which;
262 {
263 int i;
264 enum tree_code code = TREE_CODE (exp);
265 int class = TREE_CODE_CLASS (code);
266 int length = first_rtl_op (code);
267
268 /* If this code is language-specific, we don't know what it will do. */
269 if ((int) code >= NUM_TREE_CODES)
270 return 1;
271
272 switch (code)
273 {
274 case CALL_EXPR:
275 if (which == 0)
276 return 1;
277 else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
278 == FUNCTION_TYPE)
279 && (TYPE_RETURNS_STACK_DEPRESSED
280 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
281 return 1;
282 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
283 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
284 == FUNCTION_DECL)
285 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
286 0)
287 & ECF_MAY_BE_ALLOCA))
288 return 1;
289
290 break;
291
292 case CONSTRUCTOR:
293 {
294 tree tem;
295
296 for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
297 if (calls_function_1 (TREE_VALUE (tem), which))
298 return 1;
299 }
300
301 return 0;
302
303 case SAVE_EXPR:
304 if (SAVE_EXPR_RTL (exp) != 0)
305 return 0;
306 if (value_member (exp, calls_function_save_exprs))
307 return 0;
308 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
309 calls_function_save_exprs);
310 return (TREE_OPERAND (exp, 0) != 0
311 && calls_function_1 (TREE_OPERAND (exp, 0), which));
312
313 case BLOCK:
314 {
315 tree local;
316 tree subblock;
317
318 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
319 if (DECL_INITIAL (local) != 0
320 && calls_function_1 (DECL_INITIAL (local), which))
321 return 1;
322
323 for (subblock = BLOCK_SUBBLOCKS (exp);
324 subblock;
325 subblock = TREE_CHAIN (subblock))
326 if (calls_function_1 (subblock, which))
327 return 1;
328 }
329 return 0;
330
331 case TREE_LIST:
332 for (; exp != 0; exp = TREE_CHAIN (exp))
333 if (calls_function_1 (TREE_VALUE (exp), which))
334 return 1;
335 return 0;
336
337 default:
338 break;
339 }
340
341 /* Only expressions, references, and blocks can contain calls. */
342 if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
343 return 0;
344
345 for (i = 0; i < length; i++)
346 if (TREE_OPERAND (exp, i) != 0
347 && calls_function_1 (TREE_OPERAND (exp, i), which))
348 return 1;
349
350 return 0;
351 }
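
/* A minimal illustrative sketch (the `sketch_' name and out-parameters
   are hypothetical, not part of GCC): how these predicates are consumed.
   WHICH == 0 asks "does EXP contain any call at all?"; WHICH == 1 asks
   "might EXP call alloca?".  */
static void
sketch_classify_arg (arg, any_call, may_alloca)
     tree arg;
     int *any_call;
     int *may_alloca;
{
  *any_call = calls_function (arg, 0);	/* may push stack arguments */
  *may_alloca = calls_function (arg, 1);	/* may move the stack pointer */
}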
352 \f
353 /* Force FUNEXP into a form suitable for the address of a CALL,
354 and return that as an rtx. Also load the static chain register
355 if FNDECL is a nested function.
356
357 CALL_FUSAGE points to a variable holding the prospective
358 CALL_INSN_FUNCTION_USAGE information. */
359
360 rtx
361 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
362 rtx funexp;
363 tree fndecl;
364 rtx *call_fusage;
365 int reg_parm_seen;
366 int sibcallp;
367 {
368 rtx static_chain_value = 0;
369
370 funexp = protect_from_queue (funexp, 0);
371
372 if (fndecl != 0)
373 /* Get possible static chain value for nested function in C. */
374 static_chain_value = lookup_static_chain (fndecl);
375
376 /* Make a valid memory address and copy constants thru pseudo-regs,
377 but not for a constant address if -fno-function-cse. */
378 if (GET_CODE (funexp) != SYMBOL_REF)
379 /* If we are using registers for parameters, force the
380 function address into a register now. */
381 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
382 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
383 : memory_address (FUNCTION_MODE, funexp));
384 else if (! sibcallp)
385 {
386 #ifndef NO_FUNCTION_CSE
387 if (optimize && ! flag_no_function_cse)
388 #ifdef NO_RECURSIVE_FUNCTION_CSE
389 if (fndecl != current_function_decl)
390 #endif
391 funexp = force_reg (Pmode, funexp);
392 #endif
393 }
394
395 if (static_chain_value != 0)
396 {
397 emit_move_insn (static_chain_rtx, static_chain_value);
398
399 if (GET_CODE (static_chain_rtx) == REG)
400 use_reg (call_fusage, static_chain_rtx);
401 }
402
403 return funexp;
404 }
405
406 /* Generate instructions to call function FUNEXP,
407 and optionally pop the results.
408 The CALL_INSN is the first insn generated.
409
410 FNDECL is the declaration node of the function. This is given to the
411 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
412
413 FUNTYPE is the data type of the function. This is given to the macro
414 RETURN_POPS_ARGS to determine whether this function pops its own args.
415 We used to allow an identifier for library functions, but that doesn't
416 work when the return type is an aggregate type and the calling convention
417 says that the pointer to this aggregate is to be popped by the callee.
418
419 STACK_SIZE is the number of bytes of arguments on the stack,
420 ROUNDED_STACK_SIZE is that number rounded up to
421 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
422 both to put into the call insn and to generate explicit popping
423 code if necessary.
424
425 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
426 It is zero if this call doesn't want a structure value.
427
428 NEXT_ARG_REG is the rtx that results from executing
429 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
430 just after all the args have had their registers assigned.
431 This could be whatever you like, but normally it is the first
432 arg-register beyond those used for args in this call,
433 or 0 if all the arg-registers are used in this call.
434 It is passed on to `gen_call' so you can put this info in the call insn.
435
436 VALREG is a hard register in which a value is returned,
437 or 0 if the call does not return a value.
438
439 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
440 the args to this call were processed.
441 We restore `inhibit_defer_pop' to that value.
442
443 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
444 denote registers used by the called function. */
445
446 static void
447 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
448 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
449 call_fusage, ecf_flags, args_so_far)
450 rtx funexp;
451 tree fndecl ATTRIBUTE_UNUSED;
452 tree funtype ATTRIBUTE_UNUSED;
453 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
454 HOST_WIDE_INT rounded_stack_size;
455 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
456 rtx next_arg_reg ATTRIBUTE_UNUSED;
457 rtx valreg;
458 int old_inhibit_defer_pop;
459 rtx call_fusage;
460 int ecf_flags;
461 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED;
462 {
463 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
464 rtx call_insn;
465 int already_popped = 0;
466 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
467 #if defined (HAVE_call) && defined (HAVE_call_value)
468 rtx struct_value_size_rtx;
469 struct_value_size_rtx = GEN_INT (struct_value_size);
470 #endif
471
472 #ifdef CALL_POPS_ARGS
473 n_popped += CALL_POPS_ARGS (* args_so_far);
474 #endif
475
476 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
477 and we don't want to load it into a register as an optimization,
478 because prepare_call_address already did it if it should be done. */
479 if (GET_CODE (funexp) != SYMBOL_REF)
480 funexp = memory_address (FUNCTION_MODE, funexp);
481
482 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
483 if ((ecf_flags & ECF_SIBCALL)
484 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
485 && (n_popped > 0 || stack_size == 0))
486 {
487 rtx n_pop = GEN_INT (n_popped);
488 rtx pat;
489
490 /* If this subroutine pops its own args, record that in the call insn
491 if possible, for the sake of frame pointer elimination. */
492
493 if (valreg)
494 pat = GEN_SIBCALL_VALUE_POP (valreg,
495 gen_rtx_MEM (FUNCTION_MODE, funexp),
496 rounded_stack_size_rtx, next_arg_reg,
497 n_pop);
498 else
499 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
500 rounded_stack_size_rtx, next_arg_reg, n_pop);
501
502 emit_call_insn (pat);
503 already_popped = 1;
504 }
505 else
506 #endif
507
508 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
509 /* If the target has "call" or "call_value" insns, then prefer them
510 if no arguments are actually popped. If the target does not have
511 "call" or "call_value" insns, then we must use the popping versions
512 even if the call has no arguments to pop. */
513 #if defined (HAVE_call) && defined (HAVE_call_value)
514 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
515 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
516 #else
517 if (HAVE_call_pop && HAVE_call_value_pop)
518 #endif
519 {
520 rtx n_pop = GEN_INT (n_popped);
521 rtx pat;
522
523 /* If this subroutine pops its own args, record that in the call insn
524 if possible, for the sake of frame pointer elimination. */
525
526 if (valreg)
527 pat = GEN_CALL_VALUE_POP (valreg,
528 gen_rtx_MEM (FUNCTION_MODE, funexp),
529 rounded_stack_size_rtx, next_arg_reg, n_pop);
530 else
531 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
532 rounded_stack_size_rtx, next_arg_reg, n_pop);
533
534 emit_call_insn (pat);
535 already_popped = 1;
536 }
537 else
538 #endif
539
540 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
541 if ((ecf_flags & ECF_SIBCALL)
542 && HAVE_sibcall && HAVE_sibcall_value)
543 {
544 if (valreg)
545 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
546 gen_rtx_MEM (FUNCTION_MODE, funexp),
547 rounded_stack_size_rtx,
548 next_arg_reg, NULL_RTX));
549 else
550 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
551 rounded_stack_size_rtx, next_arg_reg,
552 struct_value_size_rtx));
553 }
554 else
555 #endif
556
557 #if defined (HAVE_call) && defined (HAVE_call_value)
558 if (HAVE_call && HAVE_call_value)
559 {
560 if (valreg)
561 emit_call_insn (GEN_CALL_VALUE (valreg,
562 gen_rtx_MEM (FUNCTION_MODE, funexp),
563 rounded_stack_size_rtx, next_arg_reg,
564 NULL_RTX));
565 else
566 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
567 rounded_stack_size_rtx, next_arg_reg,
568 struct_value_size_rtx));
569 }
570 else
571 #endif
572 abort ();
573
574 /* Find the CALL insn we just emitted. */
575 for (call_insn = get_last_insn ();
576 call_insn && GET_CODE (call_insn) != CALL_INSN;
577 call_insn = PREV_INSN (call_insn))
578 ;
579
580 if (! call_insn)
581 abort ();
582
583 /* Mark memory as used for "pure" function call. */
584 if (ecf_flags & ECF_PURE)
585 call_fusage
586 = gen_rtx_EXPR_LIST
587 (VOIDmode,
588 gen_rtx_USE (VOIDmode,
589 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
590 call_fusage);
591
592 /* Put the register usage information on the CALL. If there is already
593 some usage information, put ours at the end. */
594 if (CALL_INSN_FUNCTION_USAGE (call_insn))
595 {
596 rtx link;
597
598 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
599 link = XEXP (link, 1))
600 ;
601
602 XEXP (link, 1) = call_fusage;
603 }
604 else
605 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
606
607 /* If this is a const call, then set the insn's unchanging bit. */
608 if (ecf_flags & (ECF_CONST | ECF_PURE))
609 CONST_OR_PURE_CALL_P (call_insn) = 1;
610
611 /* If this call can't throw, attach a REG_EH_REGION reg note to that
612 effect. */
613 if (ecf_flags & ECF_NOTHROW)
614 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
615 REG_NOTES (call_insn));
616
617 if (ecf_flags & ECF_NORETURN)
618 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
619 REG_NOTES (call_insn));
620 if (ecf_flags & ECF_ALWAYS_RETURN)
621 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
622 REG_NOTES (call_insn));
623
624 if (ecf_flags & ECF_RETURNS_TWICE)
625 {
626 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
627 REG_NOTES (call_insn));
628 current_function_calls_setjmp = 1;
629 }
630
631 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
632
633 /* Restore this now, so that we do defer pops for this call's args
634 if the context of the call as a whole permits. */
635 inhibit_defer_pop = old_inhibit_defer_pop;
636
637 if (n_popped > 0)
638 {
639 if (!already_popped)
640 CALL_INSN_FUNCTION_USAGE (call_insn)
641 = gen_rtx_EXPR_LIST (VOIDmode,
642 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
643 CALL_INSN_FUNCTION_USAGE (call_insn));
644 rounded_stack_size -= n_popped;
645 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
646 stack_pointer_delta -= n_popped;
647 }
648
649 if (!ACCUMULATE_OUTGOING_ARGS)
650 {
651 /* If returning from the subroutine does not automatically pop the args,
652 we need an instruction to pop them sooner or later.
653 Perhaps do it now; perhaps just record how much space to pop later.
654
655 If returning from the subroutine does pop the args, indicate that the
656 stack pointer will be changed. */
657
658 if (rounded_stack_size != 0)
659 {
660 if (ecf_flags & ECF_SP_DEPRESSED)
661 /* Just pretend we did the pop. */
662 stack_pointer_delta -= rounded_stack_size;
663 else if (flag_defer_pop && inhibit_defer_pop == 0
664 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
665 pending_stack_adjust += rounded_stack_size;
666 else
667 adjust_stack (rounded_stack_size_rtx);
668 }
669 }
670 /* When we accumulate outgoing args, we must avoid any stack manipulations.
671 Restore the stack pointer to its original value now. Usually
672 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
673 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
674 popping variants of functions exist as well.
675
676 ??? We may optimize similar to defer_pop above, but it is
677 probably not worthwhile.
678
679 ??? It will be worthwhile to enable combine_stack_adjustments even for
680 such machines. */
681 else if (n_popped)
682 anti_adjust_stack (GEN_INT (n_popped));
683 }
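
/* A minimal illustrative sketch (the `sketch_' name and parameters are
   local stand-ins for the file's globals pending_stack_adjust and
   stack_pointer_delta, not part of GCC): what the tail of emit_call_1
   does with the argument space.  N_POPPED bytes are popped by the callee
   itself; any remaining space is either popped now or deferred so that
   several calls' pops can be combined.  */
static void
sketch_pop_args (rounded_stack_size, n_popped, defer_ok,
		 pending_adjust, sp_delta)
     int rounded_stack_size;
     int n_popped;
     int defer_ok;
     int *pending_adjust;
     int *sp_delta;
{
  *sp_delta -= n_popped;	/* the callee already popped these bytes */
  rounded_stack_size -= n_popped;

  if (rounded_stack_size != 0)
    {
      if (defer_ok)
	*pending_adjust += rounded_stack_size;	/* pop later, combined */
      else
	*sp_delta -= rounded_stack_size;	/* emit the pop now */
    }
}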
684
685 /* Determine if the function identified by FNDECL is one with
686 special properties we wish to know about.
687
688 For example, if the function might return more than once (setjmp), then
689 set RETURNS_TWICE to a nonzero value.
690
691 Similarly, set LONGJMP if the function is in the longjmp family.
692
693 Set MALLOC for any of the standard memory allocation functions which
694 allocate from the heap.
695
696 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
697 space from the stack such as alloca. */
698
699 static int
700 special_function_p (fndecl, flags)
701 tree fndecl;
702 int flags;
703 {
704 if (! (flags & ECF_MALLOC)
705 && fndecl && DECL_NAME (fndecl)
706 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
707 /* Exclude functions not at the file scope, or not `extern',
708 since they are not the magic functions we would otherwise
709 think they are. */
710 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
711 {
712 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
713 const char *tname = name;
714
715 /* We assume that alloca will always be called by name. It
716 makes no sense to pass it as a pointer-to-function to
717 anything that does not understand its behavior. */
718 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
719 && name[0] == 'a'
720 && ! strcmp (name, "alloca"))
721 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
722 && name[0] == '_'
723 && ! strcmp (name, "__builtin_alloca"))))
724 flags |= ECF_MAY_BE_ALLOCA;
725
726 /* Disregard prefix _, __ or __x. */
727 if (name[0] == '_')
728 {
729 if (name[1] == '_' && name[2] == 'x')
730 tname += 3;
731 else if (name[1] == '_')
732 tname += 2;
733 else
734 tname += 1;
735 }
736
737 if (tname[0] == 's')
738 {
739 if ((tname[1] == 'e'
740 && (! strcmp (tname, "setjmp")
741 || ! strcmp (tname, "setjmp_syscall")))
742 || (tname[1] == 'i'
743 && ! strcmp (tname, "sigsetjmp"))
744 || (tname[1] == 'a'
745 && ! strcmp (tname, "savectx")))
746 flags |= ECF_RETURNS_TWICE;
747
748 if (tname[1] == 'i'
749 && ! strcmp (tname, "siglongjmp"))
750 flags |= ECF_LONGJMP;
751 }
752 else if ((tname[0] == 'q' && tname[1] == 's'
753 && ! strcmp (tname, "qsetjmp"))
754 || (tname[0] == 'v' && tname[1] == 'f'
755 && ! strcmp (tname, "vfork")))
756 flags |= ECF_RETURNS_TWICE;
757
758 else if (tname[0] == 'l' && tname[1] == 'o'
759 && ! strcmp (tname, "longjmp"))
760 flags |= ECF_LONGJMP;
761
762 else if ((tname[0] == 'f' && tname[1] == 'o'
763 && ! strcmp (tname, "fork"))
764 /* Linux specific: __clone. Check NAME to insist on the
765 leading underscores, to avoid polluting the ISO / POSIX
766 namespace. */
767 || (name[0] == '_' && name[1] == '_'
768 && ! strcmp (tname, "clone"))
769 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
770 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
771 && (tname[5] == '\0'
772 || ((tname[5] == 'p' || tname[5] == 'e')
773 && tname[6] == '\0'))))
774 flags |= ECF_FORK_OR_EXEC;
775
776 /* Do not add any more malloc-like functions to this list,
777 instead mark them as malloc functions using the malloc attribute.
778 Note, realloc is not suitable for attribute malloc since
779 it may return the same address across multiple calls.
780 C++ operator new is not suitable because it is not required
781 to return a unique pointer; indeed, the standard placement new
782 just returns its argument. */
783 else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
784 && (! strcmp (tname, "malloc")
785 || ! strcmp (tname, "calloc")
786 || ! strcmp (tname, "strdup")))
787 flags |= ECF_MALLOC;
788 }
789 return flags;
790 }
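
/* A minimal illustrative sketch (the `sketch_' name is hypothetical, not
   part of GCC): the prefix rule above, as a standalone helper.
   "_setjmp", "__setjmp" and "__xsetjmp" all reduce to "setjmp" before
   the name comparisons are made.  */
static const char *
sketch_strip_name_prefix (name)
     const char *name;
{
  /* Drop a leading `_', `__' or `__x'.  */
  if (name[0] != '_')
    return name;
  if (name[1] == '_' && name[2] == 'x')
    return name + 3;
  if (name[1] == '_')
    return name + 2;
  return name + 1;
}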
791
792 /* Return nonzero when FNDECL represents a call to setjmp or a related function that may return more than once. */
793
794 int
795 setjmp_call_p (fndecl)
796 tree fndecl;
797 {
798 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
799 }
800
801 /* Detect flags (function attributes) from the function decl or type node. */
802
803 static int
804 flags_from_decl_or_type (exp)
805 tree exp;
806 {
807 int flags = 0;
808 tree type = exp;
809 /* ??? We can't set IS_MALLOC for function types? */
810 if (DECL_P (exp))
811 {
812 type = TREE_TYPE (exp);
813
814 /* The function exp may have the `malloc' attribute. */
815 if (DECL_P (exp) && DECL_IS_MALLOC (exp))
816 flags |= ECF_MALLOC;
817
818 /* The function exp may have the `pure' attribute. */
819 if (DECL_P (exp) && DECL_IS_PURE (exp))
820 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
821
822 if (TREE_NOTHROW (exp))
823 flags |= ECF_NOTHROW;
824 }
825
826 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
827 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
828
829 if (TREE_THIS_VOLATILE (exp))
830 flags |= ECF_NORETURN;
831
832 /* Mark if the function returns with the stack pointer depressed. We
833 cannot consider it pure or constant in that case. */
834 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
835 {
836 flags |= ECF_SP_DEPRESSED;
837 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
838 }
839
840 return flags;
841 }
842
843 /* Precompute all register parameters as described by ARGS, storing values
844 into fields within the ARGS array.
845
846 NUM_ACTUALS indicates the total number of elements in the ARGS array.
847
848 Set REG_PARM_SEEN if we encounter a register parameter. */
849
850 static void
851 precompute_register_parameters (num_actuals, args, reg_parm_seen)
852 int num_actuals;
853 struct arg_data *args;
854 int *reg_parm_seen;
855 {
856 int i;
857
858 *reg_parm_seen = 0;
859
860 for (i = 0; i < num_actuals; i++)
861 if (args[i].reg != 0 && ! args[i].pass_on_stack)
862 {
863 *reg_parm_seen = 1;
864
865 if (args[i].value == 0)
866 {
867 push_temp_slots ();
868 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
869 VOIDmode, 0);
870 preserve_temp_slots (args[i].value);
871 pop_temp_slots ();
872
873 /* ANSI doesn't require a sequence point here,
874 but PCC has one, so this will avoid some problems. */
875 emit_queue ();
876 }
877
878 /* If we are to promote the function arg to a wider mode,
879 do it now. */
880
881 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
882 args[i].value
883 = convert_modes (args[i].mode,
884 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
885 args[i].value, args[i].unsignedp);
886
887 /* If the value is expensive, and we are inside an appropriately
888 short loop, put the value into a pseudo and then put the pseudo
889 into the hard reg.
890
891 For small register classes, also do this if this call uses
892 register parameters. This is to avoid reload conflicts while
893 loading the parameter registers. */
894
895 if ((! (GET_CODE (args[i].value) == REG
896 || (GET_CODE (args[i].value) == SUBREG
897 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
898 && args[i].mode != BLKmode
899 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
900 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
901 || preserve_subexpressions_p ()))
902 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
903 }
904 }
905
906 #ifdef REG_PARM_STACK_SPACE
907
908 /* The argument list is the property of the called routine and it
909 may clobber it. If the fixed area has been used for previous
910 parameters, we must save and restore it. */
911
912 static rtx
913 save_fixed_argument_area (reg_parm_stack_space, argblock,
914 low_to_save, high_to_save)
915 int reg_parm_stack_space;
916 rtx argblock;
917 int *low_to_save;
918 int *high_to_save;
919 {
920 int i;
921 rtx save_area = NULL_RTX;
922
923 /* Compute the boundary of the area that needs to be saved, if any. */
924 #ifdef ARGS_GROW_DOWNWARD
925 for (i = 0; i < reg_parm_stack_space + 1; i++)
926 #else
927 for (i = 0; i < reg_parm_stack_space; i++)
928 #endif
929 {
930 if (i >= highest_outgoing_arg_in_use
931 || stack_usage_map[i] == 0)
932 continue;
933
934 if (*low_to_save == -1)
935 *low_to_save = i;
936
937 *high_to_save = i;
938 }
939
940 if (*low_to_save >= 0)
941 {
942 int num_to_save = *high_to_save - *low_to_save + 1;
943 enum machine_mode save_mode
944 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
945 rtx stack_area;
946
947 /* If we don't have the required alignment, we must do this in BLKmode. */
948 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
949 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
950 save_mode = BLKmode;
951
952 #ifdef ARGS_GROW_DOWNWARD
953 stack_area
954 = gen_rtx_MEM (save_mode,
955 memory_address (save_mode,
956 plus_constant (argblock,
957 - *high_to_save)));
958 #else
959 stack_area = gen_rtx_MEM (save_mode,
960 memory_address (save_mode,
961 plus_constant (argblock,
962 *low_to_save)));
963 #endif
964
965 set_mem_align (stack_area, PARM_BOUNDARY);
966 if (save_mode == BLKmode)
967 {
968 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
969 /* Cannot use emit_block_move here because it may be implemented as a
970 library call, which would reenter this code and cause deadly
971 infinite recursion. */
972 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
973 PARM_BOUNDARY);
974 }
975 else
976 {
977 save_area = gen_reg_rtx (save_mode);
978 emit_move_insn (save_area, stack_area);
979 }
980 }
981
982 return save_area;
983 }
984
985 static void
986 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
987 rtx save_area;
988 rtx argblock;
989 int high_to_save;
990 int low_to_save;
991 {
992 enum machine_mode save_mode = GET_MODE (save_area);
993 #ifdef ARGS_GROW_DOWNWARD
994 rtx stack_area
995 = gen_rtx_MEM (save_mode,
996 memory_address (save_mode,
997 plus_constant (argblock,
998 - high_to_save)));
999 #else
1000 rtx stack_area
1001 = gen_rtx_MEM (save_mode,
1002 memory_address (save_mode,
1003 plus_constant (argblock,
1004 low_to_save)));
1005 #endif
1006
1007 if (save_mode != BLKmode)
1008 emit_move_insn (stack_area, save_area);
1009 else
1010 /* Cannot use emit_block_move here because it may be implemented as a
1011 library call, which would reenter this code and cause deadly
1012 infinite recursion. */
1013 move_by_pieces (stack_area, validize_mem (save_area),
1014 high_to_save - low_to_save + 1, PARM_BOUNDARY);
1015 }
1016 #endif /* REG_PARM_STACK_SPACE */
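
/* A minimal illustrative sketch (the `sketch_' name is hypothetical, not
   part of GCC): the scan in save_fixed_argument_area, isolated.  It
   finds the lowest and highest in-use bytes of the register-parm area;
   LOW is expected to start at -1, as the real caller initializes it.  */
static void
sketch_find_save_range (limit, low, high)
     int limit;
     int *low;
     int *high;
{
  int i;

  for (i = 0; i < limit; i++)
    {
      if (i >= highest_outgoing_arg_in_use || stack_usage_map[i] == 0)
	continue;
      if (*low == -1)
	*low = i;
      *high = i;
    }
}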
1017
1018 /* If any elements in ARGS refer to parameters that are to be passed in
1019 registers, but not in memory, and whose alignment does not permit a
1020 direct copy into registers, copy the values into a group of pseudos
1021 which we will later copy into the appropriate hard registers.
1022
1023 Pseudos for each unaligned argument will be stored into the array
1024 args[argnum].aligned_regs. The caller is responsible for deallocating
1025 the aligned_regs array if it is nonzero. */
1026
1027 static void
1028 store_unaligned_arguments_into_pseudos (args, num_actuals)
1029 struct arg_data *args;
1030 int num_actuals;
1031 {
1032 int i, j;
1033
1034 for (i = 0; i < num_actuals; i++)
1035 if (args[i].reg != 0 && ! args[i].pass_on_stack
1036 && args[i].mode == BLKmode
1037 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1038 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1039 {
1040 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1041 int big_endian_correction = 0;
1042
1043 args[i].n_aligned_regs
1044 = args[i].partial ? args[i].partial
1045 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1046
1047 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
1048 * args[i].n_aligned_regs);
1049
1050 /* Structures smaller than a word are aligned to the least
1051 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1052 this means we must skip the empty high order bytes when
1053 calculating the bit offset. */
1054 if (BYTES_BIG_ENDIAN
1055 && !FUNCTION_ARG_REG_LITTLE_ENDIAN
1056 && bytes < UNITS_PER_WORD)
1057 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
1058
1059 for (j = 0; j < args[i].n_aligned_regs; j++)
1060 {
1061 rtx reg = gen_reg_rtx (word_mode);
1062 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1063 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1064
1065 args[i].aligned_regs[j] = reg;
1066
1067 /* There is no need to restrict this code to loading items
1068 in TYPE_ALIGN sized hunks. The bitfield instructions can
1069 load up entire word sized registers efficiently.
1070
1071 ??? This may not be needed anymore.
1072 We used to emit a clobber here but that doesn't let later
1073 passes optimize the instructions we emit. By storing 0 into
1074 the register later passes know the first AND to zero out the
1075 bitfield being set in the register is unnecessary. The store
1076 of 0 will be deleted as will at least the first AND. */
1077
1078 emit_move_insn (reg, const0_rtx);
1079
1080 bytes -= bitsize / BITS_PER_UNIT;
1081 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
1082 extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1083 word_mode, word_mode,
1084 BITS_PER_WORD),
1085 BITS_PER_WORD);
1086 }
1087 }
1088 }
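
/* A minimal illustrative sketch (the `sketch_' name is hypothetical, not
   part of GCC): the big_endian_correction arithmetic above, isolated.
   A structure smaller than a word is right-aligned within it, so on a
   big-endian target the empty bytes are the high-order ones and the
   bitfield store must skip them.  E.g. 3 bytes in a 32-bit word:
   32 - 3 * 8 == 8 bits skipped.  */
static int
sketch_big_endian_correction (bytes)
     int bytes;
{
  if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
    return BITS_PER_WORD - bytes * BITS_PER_UNIT;
  return 0;
}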
1089
1090 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1091 ACTPARMS.
1092
1093 NUM_ACTUALS is the total number of parameters.
1094
1095 N_NAMED_ARGS is the total number of named arguments.
1096
1097 FNDECL is the tree node for the target of this call (if known).
1098
1099 ARGS_SO_FAR holds state needed by the target to know where to place
1100 the next argument.
1101
1102 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1103 for arguments which are passed in registers.
1104
1105 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1106 and may be modified by this routine.
1107
1108 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1109 flags which may be modified by this routine. */
1110
1111 static void
1112 initialize_argument_information (num_actuals, args, args_size, n_named_args,
1113 actparms, fndecl, args_so_far,
1114 reg_parm_stack_space, old_stack_level,
1115 old_pending_adj, must_preallocate,
1116 ecf_flags)
1117 int num_actuals ATTRIBUTE_UNUSED;
1118 struct arg_data *args;
1119 struct args_size *args_size;
1120 int n_named_args ATTRIBUTE_UNUSED;
1121 tree actparms;
1122 tree fndecl;
1123 CUMULATIVE_ARGS *args_so_far;
1124 int reg_parm_stack_space;
1125 rtx *old_stack_level;
1126 int *old_pending_adj;
1127 int *must_preallocate;
1128 int *ecf_flags;
1129 {
1130 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1131 int inc;
1132
1133 /* Count arg position in order args appear. */
1134 int argpos;
1135
1136 struct args_size alignment_pad;
1137 int i;
1138 tree p;
1139
1140 args_size->constant = 0;
1141 args_size->var = 0;
1142
1143 /* In this loop, we consider args in the order they are written.
1144 We fill up ARGS from the front or from the back if necessary
1145 so that in any case the first arg to be pushed ends up at the front. */
1146
1147 if (PUSH_ARGS_REVERSED)
1148 {
1149 i = num_actuals - 1, inc = -1;
1150 /* In this case, must reverse order of args
1151 so that we compute and push the last arg first. */
1152 }
1153 else
1154 {
1155 i = 0, inc = 1;
1156 }
1157
1158 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1159 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1160 {
1161 tree type = TREE_TYPE (TREE_VALUE (p));
1162 int unsignedp;
1163 enum machine_mode mode;
1164
1165 args[i].tree_value = TREE_VALUE (p);
1166
1167 /* Replace erroneous argument with constant zero. */
1168 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1169 args[i].tree_value = integer_zero_node, type = integer_type_node;
1170
1171 /* If TYPE is a transparent union, pass things the way we would
1172 pass the first field of the union. We have already verified that
1173 the modes are the same. */
1174 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1175 type = TREE_TYPE (TYPE_FIELDS (type));
1176
1177 /* Decide where to pass this arg.
1178
1179 args[i].reg is nonzero if all or part is passed in registers.
1180
1181 args[i].partial is nonzero if part but not all is passed in registers,
1182 and the exact value says how many words are passed in registers.
1183
1184 args[i].pass_on_stack is nonzero if the argument must at least be
1185 computed on the stack. It may then be loaded back into registers
1186 if args[i].reg is nonzero.
1187
1188 These decisions are driven by the FUNCTION_... macros and must agree
1189 with those made by function.c. */
1190
1191 /* See if this argument should be passed by invisible reference. */
1192 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1193 && contains_placeholder_p (TYPE_SIZE (type)))
1194 || TREE_ADDRESSABLE (type)
1195 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1196 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1197 type, argpos < n_named_args)
1198 #endif
1199 )
1200 {
1201 /* If we're compiling a thunk, pass through invisible
1202 references instead of making a copy. */
1203 if (current_function_is_thunk
1204 #ifdef FUNCTION_ARG_CALLEE_COPIES
1205 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1206 type, argpos < n_named_args)
1207 /* If it's in a register, we must make a copy of it too. */
1208 /* ??? Is this a sufficient test? Is there a better one? */
1209 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1210 && REG_P (DECL_RTL (args[i].tree_value)))
1211 && ! TREE_ADDRESSABLE (type))
1212 #endif
1213 )
1214 {
1215 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1216 new object from the argument. If we are passing by
1217 invisible reference, the callee will do that for us, so we
1218 can strip off the TARGET_EXPR. This is not always safe,
1219 but it is safe in the only case where this is a useful
1220 optimization; namely, when the argument is a plain object.
1221 In that case, the frontend is just asking the backend to
1222 make a bitwise copy of the argument. */
1223
1224 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1225 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1226 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1227 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1228
1229 args[i].tree_value = build1 (ADDR_EXPR,
1230 build_pointer_type (type),
1231 args[i].tree_value);
1232 type = build_pointer_type (type);
1233 }
1234 else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
1235 {
1236 /* In the V3 C++ ABI, parameters are destroyed in the caller.
1237 We implement this by passing the address of the temporary
1238 rather than expanding it into another allocated slot. */
1239 args[i].tree_value = build1 (ADDR_EXPR,
1240 build_pointer_type (type),
1241 args[i].tree_value);
1242 type = build_pointer_type (type);
1243 }
1244 else
1245 {
1246 /* We make a copy of the object and pass the address to the
1247 function being called. */
1248 rtx copy;
1249
1250 if (!COMPLETE_TYPE_P (type)
1251 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1252 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1253 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1254 STACK_CHECK_MAX_VAR_SIZE))))
1255 {
1256 /* This is a variable-sized object. Make space on the stack
1257 for it. */
1258 rtx size_rtx = expr_size (TREE_VALUE (p));
1259
1260 if (*old_stack_level == 0)
1261 {
1262 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1263 *old_pending_adj = pending_stack_adjust;
1264 pending_stack_adjust = 0;
1265 }
1266
1267 copy = gen_rtx_MEM (BLKmode,
1268 allocate_dynamic_stack_space
1269 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1270 set_mem_attributes (copy, type, 1);
1271 }
1272 else
1273 copy = assign_temp (type, 0, 1, 0);
1274
1275 store_expr (args[i].tree_value, copy, 0);
1276 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1277
1278 args[i].tree_value = build1 (ADDR_EXPR,
1279 build_pointer_type (type),
1280 make_tree (type, copy));
1281 type = build_pointer_type (type);
1282 }
1283 }
1284
1285 mode = TYPE_MODE (type);
1286 unsignedp = TREE_UNSIGNED (type);
1287
1288 #ifdef PROMOTE_FUNCTION_ARGS
1289 mode = promote_mode (type, mode, &unsignedp, 1);
1290 #endif
1291
1292 args[i].unsignedp = unsignedp;
1293 args[i].mode = mode;
1294
1295 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1296 argpos < n_named_args);
1297 #ifdef FUNCTION_INCOMING_ARG
1298 /* If this is a sibling call and the machine has register windows, the
1299 register window has to be unwound before calling the routine, so
1300 arguments have to go into the incoming registers. */
1301 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1302 argpos < n_named_args);
1303 #else
1304 args[i].tail_call_reg = args[i].reg;
1305 #endif
1306
1307 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1308 if (args[i].reg)
1309 args[i].partial
1310 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1311 argpos < n_named_args);
1312 #endif
1313
1314 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1315
1316 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1317 it means that we are to pass this arg in the register(s) designated
1318 by the PARALLEL, but also to pass it in the stack. */
1319 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1320 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1321 args[i].pass_on_stack = 1;
1322
1323 /* If this is an addressable type, we must preallocate the stack
1324 since we must evaluate the object into its final location.
1325
1326 If this is to be passed in both registers and the stack, it is simpler
1327 to preallocate. */
1328 if (TREE_ADDRESSABLE (type)
1329 || (args[i].pass_on_stack && args[i].reg != 0))
1330 *must_preallocate = 1;
1331
1332 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1333 we cannot consider this function call constant. */
1334 if (TREE_ADDRESSABLE (type))
1335 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1336
1337 /* Compute the stack-size of this argument. */
1338 if (args[i].reg == 0 || args[i].partial != 0
1339 || reg_parm_stack_space > 0
1340 || args[i].pass_on_stack)
1341 locate_and_pad_parm (mode, type,
1342 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1343 1,
1344 #else
1345 args[i].reg != 0,
1346 #endif
1347 fndecl, args_size, &args[i].offset,
1348 &args[i].size, &alignment_pad);
1349
1350 #ifndef ARGS_GROW_DOWNWARD
1351 args[i].slot_offset = *args_size;
1352 #endif
1353
1354 args[i].alignment_pad = alignment_pad;
1355
1356 /* If a part of the arg was put into registers,
1357 don't include that part in the amount pushed. */
1358 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1359 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1360 / (PARM_BOUNDARY / BITS_PER_UNIT)
1361 * (PARM_BOUNDARY / BITS_PER_UNIT));
1362
1363 /* Update ARGS_SIZE, the total stack space for args so far. */
1364
1365 args_size->constant += args[i].size.constant;
1366 if (args[i].size.var)
1367 {
1368 ADD_PARM_SIZE (*args_size, args[i].size.var);
1369 }
1370
1371 /* Since the slot offset points to the bottom of the slot,
1372 we must record it after incrementing if the args grow down. */
1373 #ifdef ARGS_GROW_DOWNWARD
1374 args[i].slot_offset = *args_size;
1375
1376 args[i].slot_offset.constant = -args_size->constant;
1377 if (args_size->var)
1378 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1379 #endif
1380
1381 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1382 have been used, etc. */
1383
1384 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1385 argpos < n_named_args);
1386 }
1387 }
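
/* A minimal illustrative sketch (the `sketch_' name is hypothetical, not
   part of GCC): the index dance at the top of
   initialize_argument_information, isolated.  Whichever direction the
   written argument list is walked, ARGS[0] ends up holding the first
   argument to be *pushed*.  */
static void
sketch_arg_fill_order (num_actuals)
     int num_actuals;
{
  int i = PUSH_ARGS_REVERSED ? num_actuals - 1 : 0;
  int inc = PUSH_ARGS_REVERSED ? -1 : 1;
  int argpos;

  for (argpos = 0; argpos < num_actuals; argpos++, i += inc)
    ;	/* args[i] receives the argument written at position ARGPOS */
}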
1388
1389 /* Update ARGS_SIZE to contain the total size for the argument block.
1390 Return the original constant component of the argument block's size.
1391
1392 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1393 for arguments passed in registers. */
1394
1395 static int
1396 compute_argument_block_size (reg_parm_stack_space, args_size,
1397 preferred_stack_boundary)
1398 int reg_parm_stack_space;
1399 struct args_size *args_size;
1400 int preferred_stack_boundary ATTRIBUTE_UNUSED;
1401 {
1402 int unadjusted_args_size = args_size->constant;
1403
1404 /* For accumulate outgoing args mode we don't need to align, since the frame
1405 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1406 backends from generating misaligned frame sizes. */
1407 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1408 preferred_stack_boundary = STACK_BOUNDARY;
1409
1410 /* Compute the actual size of the argument block required. The variable
1411 and constant sizes must be combined, the size may have to be rounded,
1412 and there may be a minimum required size. */
1413
1414 if (args_size->var)
1415 {
1416 args_size->var = ARGS_SIZE_TREE (*args_size);
1417 args_size->constant = 0;
1418
1419 preferred_stack_boundary /= BITS_PER_UNIT;
1420 if (preferred_stack_boundary > 1)
1421 {
1422 /* We don't handle this case yet. To handle it correctly we have
1423 to add the delta, round and subtract the delta.
1424 Currently no machine description requires this support. */
1425 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1426 abort ();
1427 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1428 }
1429
1430 if (reg_parm_stack_space > 0)
1431 {
1432 args_size->var
1433 = size_binop (MAX_EXPR, args_size->var,
1434 ssize_int (reg_parm_stack_space));
1435
1436 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1437 /* The area corresponding to register parameters is not to count in
1438 the size of the block we need. So make the adjustment. */
1439 args_size->var
1440 = size_binop (MINUS_EXPR, args_size->var,
1441 ssize_int (reg_parm_stack_space));
1442 #endif
1443 }
1444 }
1445 else
1446 {
1447 preferred_stack_boundary /= BITS_PER_UNIT;
1448 if (preferred_stack_boundary < 1)
1449 preferred_stack_boundary = 1;
1450 args_size->constant = (((args_size->constant
1451 + stack_pointer_delta
1452 + preferred_stack_boundary - 1)
1453 / preferred_stack_boundary
1454 * preferred_stack_boundary)
1455 - stack_pointer_delta);
1456
1457 args_size->constant = MAX (args_size->constant,
1458 reg_parm_stack_space);
1459
1460 #ifdef MAYBE_REG_PARM_STACK_SPACE
1461 if (reg_parm_stack_space == 0)
1462 args_size->constant = 0;
1463 #endif
1464
1465 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1466 args_size->constant -= reg_parm_stack_space;
1467 #endif
1468 }
1469 return unadjusted_args_size;
1470 }
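
/* A minimal illustrative sketch (the `sketch_' name is hypothetical, not
   part of GCC): the constant-size rounding in compute_argument_block_size
   aligns the *resulting stack pointer*, not the block size itself, which
   is why stack_pointer_delta is added before rounding and subtracted
   afterwards.  E.g. constant == 20, delta == 4, 16-byte boundary:
   (20 + 4 + 15) / 16 * 16 - 4 == 28, leaving the stack pointer at
   delta 32, a multiple of 16.  */
static int
sketch_round_args_size (constant, delta, boundary)
     int constant;
     int delta;
     int boundary;
{
  return ((constant + delta + boundary - 1) / boundary * boundary) - delta;
}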
1471
1472 /* Precompute parameters as needed for a function call.
1473
1474 FLAGS is mask of ECF_* constants.
1475
1476 NUM_ACTUALS is the number of arguments.
1477
1478 ARGS is an array containing information for each argument; this
1479 routine fills in the INITIAL_VALUE and VALUE fields for each
1480 precomputed argument. */
1481
1482 static void
1483 precompute_arguments (flags, num_actuals, args)
1484 int flags;
1485 int num_actuals;
1486 struct arg_data *args;
1487 {
1488 int i;
1489
1490 /* If this function call is cse'able, precompute all the parameters.
1491 Note that if the parameter is constructed into a temporary, this will
1492 cause an additional copy because the parameter will be constructed
1493 into a temporary location and then copied into the outgoing arguments.
1494 If a parameter contains a call to alloca and this function uses the
1495 stack, precompute the parameter. */
1496
1497 /* If we preallocated the stack space, and some arguments must be passed
1498 on the stack, then we must precompute any parameter which contains a
1499 function call which will store arguments on the stack.
1500 Otherwise, evaluating the parameter may clobber previous parameters
1501 which have already been stored into the stack. (We have code to avoid
1502 such a case by saving the outgoing stack arguments, but it results in
1503 worse code.) */
1504
1505 for (i = 0; i < num_actuals; i++)
1506 if ((flags & ECF_LIBCALL_BLOCK)
1507 || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1508 {
1509 enum machine_mode mode;
1510
1511 /* If this is an addressable type, we cannot pre-evaluate it. */
1512 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1513 abort ();
1514
1515 args[i].value
1516 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1517
1518 /* ANSI doesn't require a sequence point here,
1519 but PCC has one, so this will avoid some problems. */
1520 emit_queue ();
1521
1522 args[i].initial_value = args[i].value
1523 = protect_from_queue (args[i].value, 0);
1524
1525 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1526 if (mode != args[i].mode)
1527 {
1528 args[i].value
1529 = convert_modes (args[i].mode, mode,
1530 args[i].value, args[i].unsignedp);
1531 #ifdef PROMOTE_FOR_CALL_ONLY
1532 /* CSE will replace this only if it contains args[i].value
1533 pseudo, so convert it down to the declared mode using
1534 a SUBREG. */
1535 if (GET_CODE (args[i].value) == REG
1536 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1537 {
1538 args[i].initial_value
1539 = gen_lowpart_SUBREG (mode, args[i].value);
1540 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1541 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1542 args[i].unsignedp);
1543 }
1544 #endif
1545 }
1546 }
1547 }
1548
1549 /* Given the current state of MUST_PREALLOCATE and information about
1550 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1551 compute and return the final value for MUST_PREALLOCATE. */
1552
1553 static int
1554 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1555 int must_preallocate;
1556 int num_actuals;
1557 struct arg_data *args;
1558 struct args_size *args_size;
1559 {
1560 /* See if we have or want to preallocate stack space.
1561
1562 If we would have to push a partially-in-regs parm
1563 before other stack parms, preallocate stack space instead.
1564
1565 If the size of some parm is not a multiple of the required stack
1566 alignment, we must preallocate.
1567
1568 If the total size of arguments that would otherwise create a copy in
1569 a temporary (such as a CALL) is more than half the total argument list
1570 size, preallocation is faster.
1571
1572 Another reason to preallocate is if we have a machine (like the m88k)
1573 where stack alignment is required to be maintained between every
1574 pair of insns, not just when the call is made. However, we assume here
1575 that such machines either do not have push insns (and hence preallocation
1576 would occur anyway) or the problem is taken care of with
1577 PUSH_ROUNDING. */
1578
1579 if (! must_preallocate)
1580 {
1581 int partial_seen = 0;
1582 int copy_to_evaluate_size = 0;
1583 int i;
1584
1585 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1586 {
1587 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1588 partial_seen = 1;
1589 else if (partial_seen && args[i].reg == 0)
1590 must_preallocate = 1;
1591
1592 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1593 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1594 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1595 || TREE_CODE (args[i].tree_value) == COND_EXPR
1596 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1597 copy_to_evaluate_size
1598 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1599 }
1600
1601 if (copy_to_evaluate_size * 2 >= args_size->constant
1602 && args_size->constant > 0)
1603 must_preallocate = 1;
1604 }
1605 return must_preallocate;
1606 }
1607
1608 /* If we preallocated stack space, compute the address of each argument
1609 and store it into the ARGS array.
1610
1611 We need not ensure it is a valid memory address here; it will be
1612 validized when it is used.
1613
1614 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1615
1616 static void
1617 compute_argument_addresses (args, argblock, num_actuals)
1618 struct arg_data *args;
1619 rtx argblock;
1620 int num_actuals;
1621 {
1622 if (argblock)
1623 {
1624 rtx arg_reg = argblock;
1625 int i, arg_offset = 0;
1626
1627 if (GET_CODE (argblock) == PLUS)
1628 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1629
1630 for (i = 0; i < num_actuals; i++)
1631 {
1632 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1633 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1634 rtx addr;
1635
1636 /* Skip this parm if it will not be passed on the stack. */
1637 if (! args[i].pass_on_stack && args[i].reg != 0)
1638 continue;
1639
1640 if (GET_CODE (offset) == CONST_INT)
1641 addr = plus_constant (arg_reg, INTVAL (offset));
1642 else
1643 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1644
1645 addr = plus_constant (addr, arg_offset);
1646 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1647 set_mem_attributes (args[i].stack,
1648 TREE_TYPE (args[i].tree_value), 1);
1649
1650 if (GET_CODE (slot_offset) == CONST_INT)
1651 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1652 else
1653 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1654
1655 addr = plus_constant (addr, arg_offset);
1656 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1657 set_mem_attributes (args[i].stack_slot,
1658 TREE_TYPE (args[i].tree_value), 1);
1659
1660 /* Function incoming arguments may overlap with sibling call
1661 outgoing arguments and we cannot allow reordering of reads
1662 from function arguments with stores to outgoing arguments
1663 of sibling calls. */
1664 set_mem_alias_set (args[i].stack, 0);
1665 set_mem_alias_set (args[i].stack_slot, 0);
1666 }
1667 }
1668 }
1669
1670 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1671 in a call instruction.
1672
1673 FNDECL is the tree node for the target function. For an indirect call
1674 FNDECL will be NULL_TREE.
1675
1676 EXP is the CALL_EXPR for this call. */
1677
1678 static rtx
1679 rtx_for_function_call (fndecl, exp)
1680 tree fndecl;
1681 tree exp;
1682 {
1683 rtx funexp;
1684
1685 /* Get the function to call, in the form of RTL. */
1686 if (fndecl)
1687 {
1688 /* If this is the first use of the function, see if we need to
1689 make an external definition for it. */
1690 if (! TREE_USED (fndecl))
1691 {
1692 assemble_external (fndecl);
1693 TREE_USED (fndecl) = 1;
1694 }
1695
1696 /* Get a SYMBOL_REF rtx for the function address. */
1697 funexp = XEXP (DECL_RTL (fndecl), 0);
1698 }
1699 else
1700 /* Generate an rtx (probably a pseudo-register) for the address. */
1701 {
1702 rtx funaddr;
1703 push_temp_slots ();
1704 funaddr = funexp
1705 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1706 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1707 emit_queue ();
1708 }
1709 return funexp;
1710 }
1711
1712 /* Do the register loads required for any wholly-register parms or any
1713 parms which are passed both on the stack and in a register. Their
1714 expressions were already evaluated.
1715
1716 Mark all register-parms as living through the call, putting these USE
1717 insns in the CALL_INSN_FUNCTION_USAGE field. */
1718
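/* Editor's note, with assumed target parameters: for a 12-byte
   BLKmode argument on a machine with UNITS_PER_WORD == 4 and
   partial == 0, the nregs computation below gives (12 + 3) / 4 == 3,
   so move_block_to_reg copies three words into consecutive hard
   registers; a non-BLKmode, non-partial argument instead gets
   nregs == -1 and a single move insn.  */
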
1719 static void
1720 load_register_parameters (args, num_actuals, call_fusage, flags)
1721 struct arg_data *args;
1722 int num_actuals;
1723 rtx *call_fusage;
1724 int flags;
1725 {
1726 int i, j;
1727
1728 #ifdef LOAD_ARGS_REVERSED
1729 for (i = num_actuals - 1; i >= 0; i--)
1730 #else
1731 for (i = 0; i < num_actuals; i++)
1732 #endif
1733 {
1734 rtx reg = ((flags & ECF_SIBCALL)
1735 ? args[i].tail_call_reg : args[i].reg);
1736 int partial = args[i].partial;
1737 int nregs;
1738
1739 if (reg)
1740 {
1741 /* Set to non-negative if we must move a word at a time, even if just
1742 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1743 we just use a normal move insn. This value can be zero if the
1744 argument is a zero-size structure with no fields. */
1745 nregs = (partial ? partial
1746 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1747 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1748 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1749 : -1));
1750
1751 /* Handle calls that pass values in multiple non-contiguous
1752 locations. The Irix 6 ABI has examples of this. */
1753
1754 if (GET_CODE (reg) == PARALLEL)
1755 emit_group_load (reg, args[i].value,
1756 int_size_in_bytes (TREE_TYPE (args[i].tree_value)));
1757
1758 /* In the simple case, just do the move. For a normal partial arg,
1759 store_one_arg has already loaded the register for us. In all other
1760 cases, load the register(s) from memory. */
1761
1762 else if (nregs == -1)
1763 emit_move_insn (reg, args[i].value);
1764
1765 /* If we have pre-computed the values to put in the registers in
1766 the case of non-aligned structures, copy them in now. */
1767
1768 else if (args[i].n_aligned_regs != 0)
1769 for (j = 0; j < args[i].n_aligned_regs; j++)
1770 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1771 args[i].aligned_regs[j]);
1772
1773 else if (partial == 0 || args[i].pass_on_stack)
1774 move_block_to_reg (REGNO (reg),
1775 validize_mem (args[i].value), nregs,
1776 args[i].mode);
1777
1778 /* Handle calls that pass values in multiple non-contiguous
1779 locations. The Irix 6 ABI has examples of this. */
1780 if (GET_CODE (reg) == PARALLEL)
1781 use_group_regs (call_fusage, reg);
1782 else if (nregs == -1)
1783 use_reg (call_fusage, reg);
1784 else
1785 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1786 }
1787 }
1788 }
1789
1790 /* Try to integrate the function. See expand_inline_function for documentation
1791 about the parameters. */
1792
1793 static rtx
1794 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1795 tree fndecl;
1796 tree actparms;
1797 rtx target;
1798 int ignore;
1799 tree type;
1800 rtx structure_value_addr;
1801 {
1802 rtx temp;
1803 rtx before_call;
1804 int i;
1805 rtx old_stack_level = 0;
1806 int reg_parm_stack_space = 0;
1807
1808 #ifdef REG_PARM_STACK_SPACE
1809 #ifdef MAYBE_REG_PARM_STACK_SPACE
1810 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1811 #else
1812 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1813 #endif
1814 #endif
1815
1816 before_call = get_last_insn ();
1817
1818 timevar_push (TV_INTEGRATION);
1819
1820 temp = expand_inline_function (fndecl, actparms, target,
1821 ignore, type,
1822 structure_value_addr);
1823
1824 timevar_pop (TV_INTEGRATION);
1825
1826 /* If inlining succeeded, return. */
1827 if (temp != (rtx) (size_t) - 1)
1828 {
1829 if (ACCUMULATE_OUTGOING_ARGS)
1830 {
1831 /* If the outgoing argument list must be preserved, push
1832 the stack before executing the inlined function if it
1833 makes any calls. */
1834
1835 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1836 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1837 break;
1838
1839 if (stack_arg_under_construction || i >= 0)
1840 {
1841 rtx first_insn
1842 = before_call ? NEXT_INSN (before_call) : get_insns ();
1843 rtx insn = NULL_RTX, seq;
1844
1845 /* Look for a call in the inline function code.
1846 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1847 nonzero then there is a call and it is not necessary
1848 to scan the insns. */
1849
1850 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1851 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1852 if (GET_CODE (insn) == CALL_INSN)
1853 break;
1854
1855 if (insn)
1856 {
1857 /* Reserve enough stack space so that the largest
1858 argument list of any function call in the inline
1859 function does not overlap the argument list being
1860 evaluated. This is usually an overestimate because
1861 allocate_dynamic_stack_space reserves space for an
1862 outgoing argument list in addition to the requested
1863 space, but there is no way to ask for stack space such
1864 that an argument list of a certain length can be
1865 safely constructed.
1866
1867 Add the stack space reserved for register arguments, if
1868 any, in the inline function. What is really needed is the
1869 largest value of reg_parm_stack_space in the inline
1870 function, but that is not available. Using the current
1871 value of reg_parm_stack_space is wrong, but gives
1872 correct results on all supported machines. */
1873
1874 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1875 + reg_parm_stack_space);
1876
1877 start_sequence ();
1878 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1879 allocate_dynamic_stack_space (GEN_INT (adjust),
1880 NULL_RTX, BITS_PER_UNIT);
1881 seq = get_insns ();
1882 end_sequence ();
1883 emit_insns_before (seq, first_insn);
1884 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1885 }
1886 }
1887 }
1888
1889 /* If the result is equivalent to TARGET, return TARGET to simplify
1890 checks in store_expr. They can be equivalent but not equal in the
1891 case of a function that returns BLKmode. */
1892 if (temp != target && rtx_equal_p (temp, target))
1893 return target;
1894 return temp;
1895 }
1896
1897 /* If inlining failed, mark FNDECL as needing to be compiled
1898 separately after all. If function was declared inline,
1899 give a warning. */
1900 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1901 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1902 {
1903 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1904 warning ("called from here");
1905 }
1906 (*lang_hooks.mark_addressable) (fndecl);
1907 return (rtx) (size_t) - 1;
1908 }
1909
1910 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1911 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1912 bytes, then we would need to push some additional bytes to pad the
1913 arguments. So, we compute an adjustment to the stack pointer by an
1914 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1915 bytes. Then, when the arguments are pushed the stack will be perfectly
1916 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1917 be popped after the call. Returns the adjustment. */
1918
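/* Editor's note -- a worked example with hypothetical values.  Let
   preferred_unit_stack_boundary == 16, stack_pointer_delta == 4,
   unadjusted_args_size == 20 and pending_stack_adjust == 28.  Then
   unadjusted_alignment == (4 + 20) % 16 == 8, and subtracting
   28 % 16 == 12 turns it into -4, so the adjustment becomes
   28 + (-4) == 24.  Popping 24 bytes leaves stack_pointer_delta at
   -20, and pushing the 20 bytes of arguments then lands the stack
   exactly on the 16-byte boundary.  ARGS_SIZE->CONSTANT is set to
   28 - 24 + 20 == 24 bytes to pop after the call.  */
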
1919 static int
1920 combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1921 args_size,
1922 preferred_unit_stack_boundary)
1923 int unadjusted_args_size;
1924 struct args_size *args_size;
1925 int preferred_unit_stack_boundary;
1926 {
1927 /* The number of bytes to pop so that the stack will be
1928 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1929 HOST_WIDE_INT adjustment;
1930 /* The alignment of the stack after the arguments are pushed, if we
1931 just pushed the arguments without adjusting the stack here. */
1932 HOST_WIDE_INT unadjusted_alignment;
1933
1934 unadjusted_alignment
1935 = ((stack_pointer_delta + unadjusted_args_size)
1936 % preferred_unit_stack_boundary);
1937
1938 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1939 as possible -- leaving just enough left to cancel out the
1940 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1941 PENDING_STACK_ADJUST is non-negative, and congruent to
1942 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1943
1944 /* Begin by trying to pop all the bytes. */
1945 unadjusted_alignment
1946 = (unadjusted_alignment
1947 - (pending_stack_adjust % preferred_unit_stack_boundary));
1948 adjustment = pending_stack_adjust;
1949 /* Push enough additional bytes that the stack will be aligned
1950 after the arguments are pushed. */
1951 if (preferred_unit_stack_boundary > 1)
1952 {
1953 if (unadjusted_alignment > 0)
1954 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1955 else
1956 adjustment += unadjusted_alignment;
1957 }
1958
1959 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1960 bytes after the call. The right number is the entire
1961 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1962 by the arguments in the first place. */
1963 args_size->constant
1964 = pending_stack_adjust - adjustment + unadjusted_args_size;
1965
1966 return adjustment;
1967 }
1968
1969 /* Scan expression X to check whether it dereferences any argument slots
1970 we have already clobbered by tail call arguments (as noted in the
1971 stored_args_map bitmap).
1972 Return non-zero if X dereferences such argument slots,
1973 zero otherwise. */
1974
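/* Editor's illustration of the MEM forms matched below: an access
   (mem (reg argp)) yields slot offset i == 0, while
   (mem (plus (reg argp) (const_int 8))) yields i == 8; a 4-byte
   access at i == 8 then tests bits 8..11 of stored_args_map (with
   the offset negated first when ARGS_GROW_DOWNWARD).  Here "argp"
   stands for current_function_internal_arg_pointer.  */
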
1975 static int
1976 check_sibcall_argument_overlap_1 (x)
1977 rtx x;
1978 {
1979 RTX_CODE code;
1980 int i, j;
1981 unsigned int k;
1982 const char *fmt;
1983
1984 if (x == NULL_RTX)
1985 return 0;
1986
1987 code = GET_CODE (x);
1988
1989 if (code == MEM)
1990 {
1991 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1992 i = 0;
1993 else if (GET_CODE (XEXP (x, 0)) == PLUS
1994 && XEXP (XEXP (x, 0), 0) ==
1995 current_function_internal_arg_pointer
1996 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1997 i = INTVAL (XEXP (XEXP (x, 0), 1));
1998 else
1999 return 0;
2000
2001 #ifdef ARGS_GROW_DOWNWARD
2002 i = -i - GET_MODE_SIZE (GET_MODE (x));
2003 #endif
2004
2005 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
2006 if (i + k < stored_args_map->n_bits
2007 && TEST_BIT (stored_args_map, i + k))
2008 return 1;
2009
2010 return 0;
2011 }
2012
2013 /* Scan all subexpressions. */
2014 fmt = GET_RTX_FORMAT (code);
2015 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2016 {
2017 if (*fmt == 'e')
2018 {
2019 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2020 return 1;
2021 }
2022 else if (*fmt == 'E')
2023 {
2024 for (j = 0; j < XVECLEN (x, i); j++)
2025 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2026 return 1;
2027 }
2028 }
2029 return 0;
2030 }
2031
2032 /* Scan the sequence after INSN to check whether it dereferences any
2033 argument slots we have already clobbered by tail call arguments (as
2034 noted in the stored_args_map bitmap). Afterwards, add the stack slots
2035 for ARG to the stored_args_map bitmap. Return non-zero if the sequence
2036 after INSN dereferences such argument slots, zero otherwise. */
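/* Editor's note, with assumed values: for an ARG with
   slot_offset.constant == 8 and size.constant == 4, the loop below
   sets bits 8..11 of stored_args_map; with ARGS_GROW_DOWNWARD the
   slot offset is negative (say -12), and low == 12 - 4 == 8 marks
   the same range.  */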
2037
2038 static int
2039 check_sibcall_argument_overlap (insn, arg)
2040 rtx insn;
2041 struct arg_data *arg;
2042 {
2043 int low, high;
2044
2045 if (insn == NULL_RTX)
2046 insn = get_insns ();
2047 else
2048 insn = NEXT_INSN (insn);
2049
2050 for (; insn; insn = NEXT_INSN (insn))
2051 if (INSN_P (insn)
2052 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2053 break;
2054
2055 #ifdef ARGS_GROW_DOWNWARD
2056 low = -arg->slot_offset.constant - arg->size.constant;
2057 #else
2058 low = arg->slot_offset.constant;
2059 #endif
2060
2061 for (high = low + arg->size.constant; low < high; low++)
2062 SET_BIT (stored_args_map, low);
2063 return insn != NULL_RTX;
2064 }
2065
2066 /* Generate all the code for a function call
2067 and return an rtx for its value.
2068 Store the value in TARGET (specified as an rtx) if convenient.
2069 If the value is stored in TARGET then TARGET is returned.
2070 If IGNORE is nonzero, then we ignore the value of the function call. */
2071
2072 rtx
2073 expand_call (exp, target, ignore)
2074 tree exp;
2075 rtx target;
2076 int ignore;
2077 {
2078 /* Nonzero if we are currently expanding a call. */
2079 static int currently_expanding_call = 0;
2080
2081 /* List of actual parameters. */
2082 tree actparms = TREE_OPERAND (exp, 1);
2083 /* RTX for the function to be called. */
2084 rtx funexp;
2085 /* Sequence of insns to perform a tail recursive "call". */
2086 rtx tail_recursion_insns = NULL_RTX;
2087 /* Sequence of insns to perform a normal "call". */
2088 rtx normal_call_insns = NULL_RTX;
2089 /* Sequence of insns to perform a sibling ("tail") call. */
2090 rtx tail_call_insns = NULL_RTX;
2091 /* Data type of the function. */
2092 tree funtype;
2093 /* Declaration of the function being called,
2094 or 0 if the function is computed (not known by name). */
2095 tree fndecl = 0;
2096 rtx insn;
2097 int try_tail_call = 1;
2098 int try_tail_recursion = 1;
2099 int pass;
2100
2101 /* Register in which non-BLKmode value will be returned,
2102 or 0 if no value or if value is BLKmode. */
2103 rtx valreg;
2104 /* Address where we should return a BLKmode value;
2105 0 if value not BLKmode. */
2106 rtx structure_value_addr = 0;
2107 /* Nonzero if that address is being passed by treating it as
2108 an extra, implicit first parameter. Otherwise,
2109 it is passed by being copied directly into struct_value_rtx. */
2110 int structure_value_addr_parm = 0;
2111 /* Size of aggregate value wanted, or zero if none wanted
2112 or if we are using the non-reentrant PCC calling convention
2113 or expecting the value in registers. */
2114 HOST_WIDE_INT struct_value_size = 0;
2115 /* Nonzero if called function returns an aggregate in memory PCC style,
2116 by returning the address of where to find it. */
2117 int pcc_struct_value = 0;
2118
2119 /* Number of actual parameters in this call, including struct value addr. */
2120 int num_actuals;
2121 /* Number of named args. Args after this are anonymous ones
2122 and they must all go on the stack. */
2123 int n_named_args;
2124
2125 /* Vector of information about each argument.
2126 Arguments are numbered in the order they will be pushed,
2127 not the order they are written. */
2128 struct arg_data *args;
2129
2130 /* Total size in bytes of all the stack-parms scanned so far. */
2131 struct args_size args_size;
2132 struct args_size adjusted_args_size;
2133 /* Size of arguments before any adjustments (such as rounding). */
2134 int unadjusted_args_size;
2135 /* Data on reg parms scanned so far. */
2136 CUMULATIVE_ARGS args_so_far;
2137 /* Nonzero if a reg parm has been scanned. */
2138 int reg_parm_seen;
2140
2141 /* Nonzero if we must avoid push-insns in the args for this call.
2142 If stack space is allocated for register parameters, but not by the
2143 caller, then it is preallocated in the fixed part of the stack frame.
2144 So the entire argument block must then be preallocated (i.e., we
2145 ignore PUSH_ROUNDING in that case). */
2146
2147 int must_preallocate = !PUSH_ARGS;
2148
2149 /* Size of the stack reserved for parameter registers. */
2150 int reg_parm_stack_space = 0;
2151
2152 /* Address of space preallocated for stack parms
2153 (on machines that lack push insns), or 0 if space not preallocated. */
2154 rtx argblock = 0;
2155
2156 /* Mask of ECF_ flags. */
2157 int flags = 0;
2158 /* Nonzero if this is a call to an inline function. */
2159 int is_integrable = 0;
2160 #ifdef REG_PARM_STACK_SPACE
2161 /* Define the boundary of the register parm stack space that needs to be
2162 saved, if any. */
2163 int low_to_save = -1, high_to_save;
2164 rtx save_area = 0; /* Place that it is saved */
2165 #endif
2166
2167 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2168 char *initial_stack_usage_map = stack_usage_map;
2169 int old_stack_arg_under_construction = 0;
2170
2171 rtx old_stack_level = 0;
2172 int old_pending_adj = 0;
2173 int old_inhibit_defer_pop = inhibit_defer_pop;
2174 int old_stack_allocated;
2175 rtx call_fusage;
2176 tree p = TREE_OPERAND (exp, 0);
2177 int i;
2178 /* The alignment of the stack, in bits. */
2179 HOST_WIDE_INT preferred_stack_boundary;
2180 /* The alignment of the stack, in bytes. */
2181 HOST_WIDE_INT preferred_unit_stack_boundary;
2182
2183 /* See if this is a "nothrow" function call. */
2184 if (TREE_NOTHROW (exp))
2185 flags |= ECF_NOTHROW;
2186
2187 /* See if we can find a DECL-node for the actual function.
2188 As a result, decide whether this is a call to an integrable function. */
2189
2190 fndecl = get_callee_fndecl (exp);
2191 if (fndecl)
2192 {
2193 if (!flag_no_inline
2194 && fndecl != current_function_decl
2195 && DECL_INLINE (fndecl)
2196 && DECL_SAVED_INSNS (fndecl)
2197 && DECL_SAVED_INSNS (fndecl)->inlinable)
2198 is_integrable = 1;
2199 else if (! TREE_ADDRESSABLE (fndecl))
2200 {
2201 /* In case this function later becomes inlinable,
2202 record that there was already a non-inline call to it.
2203
2204 Use abstraction instead of setting TREE_ADDRESSABLE
2205 directly. */
2206 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2207 && optimize > 0)
2208 {
2209 warning_with_decl (fndecl, "can't inline call to `%s'");
2210 warning ("called from here");
2211 }
2212 (*lang_hooks.mark_addressable) (fndecl);
2213 }
2214
2215 flags |= flags_from_decl_or_type (fndecl);
2216 }
2217
2218 /* If we don't have a specific function to call, see if we have
2219 attributes set in the type. */
2220 else
2221 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2222
2223 #ifdef REG_PARM_STACK_SPACE
2224 #ifdef MAYBE_REG_PARM_STACK_SPACE
2225 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2226 #else
2227 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2228 #endif
2229 #endif
2230
2231 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2232 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2233 must_preallocate = 1;
2234 #endif
2235
2236 /* Warn if this value is an aggregate type,
2237 regardless of which calling convention we are using for it. */
2238 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2239 warning ("function call has aggregate value");
2240
2241 /* Set up a place to return a structure. */
2242
2243 /* Cater to broken compilers. */
2244 if (aggregate_value_p (exp))
2245 {
2246 /* This call returns a big structure. */
2247 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2248
2249 #ifdef PCC_STATIC_STRUCT_RETURN
2250 {
2251 pcc_struct_value = 1;
2252 /* Easier than making that case work right. */
2253 if (is_integrable)
2254 {
2255 /* In case this is a static function, note that it has been
2256 used. */
2257 if (! TREE_ADDRESSABLE (fndecl))
2258 (*lang_hooks.mark_addressable) (fndecl);
2259 is_integrable = 0;
2260 }
2261 }
2262 #else /* not PCC_STATIC_STRUCT_RETURN */
2263 {
2264 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2265
2266 if (target && GET_CODE (target) == MEM)
2267 structure_value_addr = XEXP (target, 0);
2268 else
2269 {
2270 /* For variable-sized objects, we must be called with a target
2271 specified. If we were to allocate space on the stack here,
2272 we would have no way of knowing when to free it. */
2273 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2274
2275 mark_temp_addr_taken (d);
2276 structure_value_addr = XEXP (d, 0);
2277 target = 0;
2278 }
2279 }
2280 #endif /* not PCC_STATIC_STRUCT_RETURN */
2281 }
2282
2283 /* If called function is inline, try to integrate it. */
2284
2285 if (is_integrable)
2286 {
2287 rtx temp = try_to_integrate (fndecl, actparms, target,
2288 ignore, TREE_TYPE (exp),
2289 structure_value_addr);
2290 if (temp != (rtx) (size_t) - 1)
2291 return temp;
2292 }
2293
2294 /* Figure out the amount to which the stack should be aligned. */
2295 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2296
2297 /* Operand 0 is a pointer-to-function; get the type of the function. */
2298 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2299 if (! POINTER_TYPE_P (funtype))
2300 abort ();
2301 funtype = TREE_TYPE (funtype);
2302
2303 /* See if this is a call to a function that can return more than once
2304 or a call to longjmp or malloc. */
2305 flags |= special_function_p (fndecl, flags);
2306
2307 if (flags & ECF_MAY_BE_ALLOCA)
2308 current_function_calls_alloca = 1;
2309
2310 /* If struct_value_rtx is 0, it means pass the address
2311 as if it were an extra parameter. */
2312 if (structure_value_addr && struct_value_rtx == 0)
2313 {
2314 /* If structure_value_addr is a REG other than
2315 virtual_outgoing_args_rtx, we can always use it. If it
2316 is not a REG, we must always copy it into a register.
2317 If it is virtual_outgoing_args_rtx, we must copy it to another
2318 register in some cases. */
2319 rtx temp = (GET_CODE (structure_value_addr) != REG
2320 || (ACCUMULATE_OUTGOING_ARGS
2321 && stack_arg_under_construction
2322 && structure_value_addr == virtual_outgoing_args_rtx)
2323 ? copy_addr_to_reg (structure_value_addr)
2324 : structure_value_addr);
2325
2326 actparms
2327 = tree_cons (error_mark_node,
2328 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2329 temp),
2330 actparms);
2331 structure_value_addr_parm = 1;
2332 }
2333
2334 /* Count the arguments and set NUM_ACTUALS. */
2335 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2336 num_actuals++;
2337
2338 /* Compute number of named args.
2339 Normally, don't include the last named arg if anonymous args follow.
2340 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2341 (If no anonymous args follow, the result of list_length is actually
2342 one too large. This is harmless.)
2343
2344 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2345 zero, this machine will be able to place unnamed args that were
2346 passed in registers into the stack. So treat all args as named.
2347 This allows the insns emitted for a specific argument list to be
2348 independent of the function declaration.
2349
2350 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2351 reliable way to pass unnamed args in registers, so we must force
2352 them into memory. */
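/* Editor's note -- a hypothetical example of the computation below:
   for a prototype `void f (int a, int b, ...)', TYPE_ARG_TYPES has
   length 2, and with STRICT_ARGUMENT_NAMING zero we subtract one,
   giving n_named_args == 1; `b', the last named parm, is deliberately
   not counted because anonymous args follow it.  One more is added
   when the struct value address is passed as a parm.  */
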
2353
2354 if ((STRICT_ARGUMENT_NAMING
2355 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2356 && TYPE_ARG_TYPES (funtype) != 0)
2357 n_named_args
2358 = (list_length (TYPE_ARG_TYPES (funtype))
2359 /* Don't include the last named arg. */
2360 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2361 /* Count the struct value address, if it is passed as a parm. */
2362 + structure_value_addr_parm);
2363 else
2364 /* If we know nothing, treat all args as named. */
2365 n_named_args = num_actuals;
2366
2367 /* Start updating where the next arg would go.
2368
2369 On some machines (such as the PA) indirect calls have a different
2370 calling convention than normal calls. The last argument in
2371 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2372 or not. */
2373 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2374
2375 /* Make a vector to hold all the information about each arg. */
2376 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
2377 memset ((char *) args, 0, num_actuals * sizeof (struct arg_data));
2378
2379 /* Build up entries in the ARGS array, compute the size of the
2380 arguments into ARGS_SIZE, etc. */
2381 initialize_argument_information (num_actuals, args, &args_size,
2382 n_named_args, actparms, fndecl,
2383 &args_so_far, reg_parm_stack_space,
2384 &old_stack_level, &old_pending_adj,
2385 &must_preallocate, &flags);
2386
2387 if (args_size.var)
2388 {
2389 /* If this function requires a variable-sized argument list, don't
2390 try to make a cse'able block for this call. We may be able to
2391 do this eventually, but it is too complicated to keep track of
2392 what insns go in the cse'able block and which don't. */
2393
2394 flags &= ~ECF_LIBCALL_BLOCK;
2395 must_preallocate = 1;
2396 }
2397
2398 /* Now make final decision about preallocating stack space. */
2399 must_preallocate = finalize_must_preallocate (must_preallocate,
2400 num_actuals, args,
2401 &args_size);
2402
2403 /* If the structure value address will reference the stack pointer, we
2404 must stabilize it. We don't need to do this if we know that we are
2405 not going to adjust the stack pointer in processing this call. */
2406
2407 if (structure_value_addr
2408 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2409 || reg_mentioned_p (virtual_outgoing_args_rtx,
2410 structure_value_addr))
2411 && (args_size.var
2412 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2413 structure_value_addr = copy_to_reg (structure_value_addr);
2414
2415 /* Tail calls can make things harder to debug, and we've traditionally
2416 pushed these optimizations into -O2. Don't try if we're already
2417 expanding a call, as that means we're an argument. Don't try if
2418 there are cleanups, as we know there's code to follow the call.
2419 
2420 If rtx_equal_function_value_matters is false, that means we've
2421 finished with regular parsing, which means that some of the
2422 machinery we use to generate tail-calls is no longer in place.
2423 This is most often true of sjlj-exceptions, which we couldn't
2424 tail-call to anyway. */
2425
2426 if (currently_expanding_call++ != 0
2427 || !flag_optimize_sibling_calls
2428 || !rtx_equal_function_value_matters
2429 || any_pending_cleanups (1)
2430 || args_size.var)
2431 try_tail_call = try_tail_recursion = 0;
2432
2433 /* Tail recursion fails when we are not dealing with recursive calls. */
2434 if (!try_tail_recursion
2435 || TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
2436 || TREE_OPERAND (TREE_OPERAND (exp, 0), 0) != current_function_decl)
2437 try_tail_recursion = 0;
2438
2439 /* Other reasons for tail call optimization to fail. */
2440 if (
2441 #ifdef HAVE_sibcall_epilogue
2442 !HAVE_sibcall_epilogue
2443 #else
2444 1
2445 #endif
2446 || !try_tail_call
2447 /* Doing sibling call optimization needs some work, since
2448 structure_value_addr can be allocated on the stack.
2449 It does not seem worth the effort since few optimizable
2450 sibling calls will return a structure. */
2451 || structure_value_addr != NULL_RTX
2452 /* If the register holding the address is a callee saved
2453 register, then we lose. We have no way to prevent that,
2454 so we only allow calls to named functions. */
2455 /* ??? This could be done by having the insn constraints
2456 use a register class that is all call-clobbered. Any
2457 reload insns generated to fix things up would appear
2458 before the sibcall_epilogue. */
2459 || fndecl == NULL_TREE
2460 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP))
2461 || TREE_THIS_VOLATILE (fndecl)
2462 || !FUNCTION_OK_FOR_SIBCALL (fndecl)
2463 /* If this function requires more stack slots than the current
2464 function, we cannot change it into a sibling call. */
2465 || args_size.constant > current_function_args_size
2466 /* If the callee pops its own arguments, then it must pop exactly
2467 the same number of arguments as the current function. */
2468 || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2469 != RETURN_POPS_ARGS (current_function_decl,
2470 TREE_TYPE (current_function_decl),
2471 current_function_args_size))
2472 try_tail_call = 0;
2473
2474 if (try_tail_call || try_tail_recursion)
2475 {
2476 int end, inc;
2477 actparms = NULL_TREE;
2478 /* Ok, we're going to give the tail call the old college try.
2479 This means we're going to evaluate the function arguments
2480 up to three times. There are two degrees of badness we can
2481 encounter, those that can be unsaved and those that can't.
2482 (See unsafe_for_reeval commentary for details.)
2483
2484 Generate a new argument list. Pass safe arguments through
2485 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2486 For hard badness, evaluate them now and put their resulting
2487 rtx in a temporary VAR_DECL.
2488
2489 initialize_argument_information has ordered the array for the
2490 order to be pushed, and we must remember this when reconstructing
2491 the original argument order. */
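
/* Editor's sketch of the three cases below, with assumed examples: a
   plain variable reference is typically safe and passes through
   unchanged; mild badness is handled by wrapping the tree in an
   UNSAVE_EXPR so that it can be expanded a second time; hard badness
   (an expression whose evaluation cannot safely be repeated) is
   expanded once right here, and later uses go through the temporary
   VAR_DECL that captures its rtx.  */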
2492
2493 if (PUSH_ARGS_REVERSED)
2494 {
2495 inc = 1;
2496 i = 0;
2497 end = num_actuals;
2498 }
2499 else
2500 {
2501 inc = -1;
2502 i = num_actuals - 1;
2503 end = -1;
2504 }
2505
2506 for (; i != end; i += inc)
2507 {
2508 switch (unsafe_for_reeval (args[i].tree_value))
2509 {
2510 case 0: /* Safe. */
2511 break;
2512
2513 case 1: /* Mildly unsafe. */
2514 args[i].tree_value = unsave_expr (args[i].tree_value);
2515 break;
2516
2517 case 2: /* Wildly unsafe. */
2518 {
2519 tree var = build_decl (VAR_DECL, NULL_TREE,
2520 TREE_TYPE (args[i].tree_value));
2521 SET_DECL_RTL (var,
2522 expand_expr (args[i].tree_value, NULL_RTX,
2523 VOIDmode, EXPAND_NORMAL));
2524 args[i].tree_value = var;
2525 }
2526 break;
2527
2528 default:
2529 abort ();
2530 }
2531 /* We need to build actparms for optimize_tail_recursion. We can
2532 safely throw away TREE_PURPOSE, since it is unused by this
2533 function. */
2534 if (try_tail_recursion)
2535 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2536 }
2537 /* Expanding one of those dangerous arguments could have added
2538 cleanups, but otherwise give it a whirl. */
2539 if (any_pending_cleanups (1))
2540 try_tail_call = try_tail_recursion = 0;
2541 }
2542
2543 /* Generate a tail recursion sequence when calling ourselves. */
2544
2545 if (try_tail_recursion)
2546 {
2547 /* We want to emit any pending stack adjustments before the tail
2548 recursion "call". That way we know any adjustment after the tail
2549 recursion call can be ignored if we indeed use the tail recursion
2550 call expansion. */
2551 int save_pending_stack_adjust = pending_stack_adjust;
2552 int save_stack_pointer_delta = stack_pointer_delta;
2553
2554 /* Emit any queued insns now; otherwise they would end up in
2555 only one of the alternates. */
2556 emit_queue ();
2557
2558 /* Use a new sequence to hold any RTL we generate. We do not even
2559 know if we will use this RTL yet. The final decision can not be
2560 made until after RTL generation for the entire function is
2561 complete. */
2562 start_sequence ();
2563 /* If expanding any of the arguments creates cleanups, we can't
2564 do a tailcall. So, we'll need to pop the pending cleanups
2565 list. If, however, all goes well, and there are no cleanups
2566 then the call to expand_start_target_temps will have no
2567 effect. */
2568 expand_start_target_temps ();
2569 if (optimize_tail_recursion (actparms, get_last_insn ()))
2570 {
2571 if (any_pending_cleanups (1))
2572 try_tail_call = try_tail_recursion = 0;
2573 else
2574 tail_recursion_insns = get_insns ();
2575 }
2576 expand_end_target_temps ();
2577 end_sequence ();
2578
2579 /* Restore the original pending stack adjustment for the sibling and
2580 normal call cases below. */
2581 pending_stack_adjust = save_pending_stack_adjust;
2582 stack_pointer_delta = save_stack_pointer_delta;
2583 }
2584
2585 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2586 {
2587 /* A fork duplicates the profile information, and an exec discards
2588 it. We can't rely on fork/exec to be paired. So write out the
2589 profile information we have gathered so far, and clear it. */
2590 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2591 is subject to race conditions, just as with multithreaded
2592 programs. */
2593
2594 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"),
2595 LCT_ALWAYS_RETURN,
2596 VOIDmode, 0);
2597 }
2598
2599 /* Ensure current function's preferred stack boundary is at least
2600 what we need. We don't have to increase alignment for recursive
2601 functions. */
2602 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2603 && fndecl != current_function_decl)
2604 cfun->preferred_stack_boundary = preferred_stack_boundary;
2605
2606 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2607
2608 function_call_count++;
2609
2610 /* We want to make two insn chains; one for a sibling call, the other
2611 for a normal call. We will select one of the two chains after
2612 initial RTL generation is complete. */
2613 for (pass = 0; pass < 2; pass++)
2614 {
2615 int sibcall_failure = 0;
2616 /* We want to emit any pending stack adjustments before the tail
2617 recursion "call". That way we know any adjustment after the tail
2618 recursion call can be ignored if we indeed use the tail recursion
2619 call expansion. */
2620 int save_pending_stack_adjust = 0;
2621 int save_stack_pointer_delta = 0;
2622 rtx insns;
2623 rtx before_call, next_arg_reg;
2624
2625 if (pass == 0)
2626 {
2627 if (! try_tail_call)
2628 continue;
2629
2630 /* Emit any queued insns now; otherwise they would end up in
2631 only one of the alternates. */
2632 emit_queue ();
2633
2634 /* State variables we need to save and restore between
2635 iterations. */
2636 save_pending_stack_adjust = pending_stack_adjust;
2637 save_stack_pointer_delta = stack_pointer_delta;
2638 }
2639 if (pass)
2640 flags &= ~ECF_SIBCALL;
2641 else
2642 flags |= ECF_SIBCALL;
2643
2644 /* Other state variables that we must reinitialize each time
2645 through the loop (that are not initialized by the loop itself). */
2646 argblock = 0;
2647 call_fusage = 0;
2648
2649 /* Start a new sequence for the normal call case.
2650
2651 From this point on, if the sibling call fails, we want to set
2652 sibcall_failure instead of continuing the loop. */
2653 start_sequence ();
2654
2655 if (pass == 0)
2656 {
2657 /* We know at this point that there are not currently any
2658 pending cleanups. If, however, in the process of evaluating
2659 the arguments we were to create some, we'll need to be
2660 able to get rid of them. */
2661 expand_start_target_temps ();
2662 }
2663
2664 /* Don't let pending stack adjusts add up to too much.
2665 Also, do all pending adjustments now if there is any chance
2666 this might be a call to alloca or if we are expanding a sibling
2667 call sequence or if we are calling a function that is to return
2668 with stack pointer depressed. */
2669 if (pending_stack_adjust >= 32
2670 || (pending_stack_adjust > 0
2671 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2672 || pass == 0)
2673 do_pending_stack_adjust ();
2674
2675 /* When calling a const function, we must pop the stack args right away,
2676 so that the pop is deleted or moved with the call. */
2677 if (pass && (flags & ECF_LIBCALL_BLOCK))
2678 NO_DEFER_POP;
2679
2680 #ifdef FINAL_REG_PARM_STACK_SPACE
2681 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2682 args_size.var);
2683 #endif
2684 /* Precompute any arguments as needed. */
2685 if (pass)
2686 precompute_arguments (flags, num_actuals, args);
2687
2688 /* Now we are about to start emitting insns that can be deleted
2689 if a libcall is deleted. */
2690 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2691 start_sequence ();
2692
2693 adjusted_args_size = args_size;
2694 /* Compute the actual size of the argument block required. The variable
2695 and constant sizes must be combined, the size may have to be rounded,
2696 and there may be a minimum required size. When generating a sibcall
2697 pattern, do not round up, since we'll be re-using whatever space our
2698 caller provided. */
2699 unadjusted_args_size
2700 = compute_argument_block_size (reg_parm_stack_space,
2701 &adjusted_args_size,
2702 (pass == 0 ? 0
2703 : preferred_stack_boundary));
2704
2705 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2706
2707 /* The argument block when performing a sibling call is the
2708 incoming argument block. */
2709 if (pass == 0)
2710 {
2711 argblock = virtual_incoming_args_rtx;
2712 stored_args_map = sbitmap_alloc (args_size.constant);
2713 sbitmap_zero (stored_args_map);
2714 }
2715
2716 /* If we have no actual push instructions, or shouldn't use them,
2717 make space for all args right now. */
2718 else if (adjusted_args_size.var != 0)
2719 {
2720 if (old_stack_level == 0)
2721 {
2722 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2723 old_pending_adj = pending_stack_adjust;
2724 pending_stack_adjust = 0;
2725 /* stack_arg_under_construction says whether a stack arg is
2726 being constructed at the old stack level. Pushing the stack
2727 gets a clean outgoing argument block. */
2728 old_stack_arg_under_construction = stack_arg_under_construction;
2729 stack_arg_under_construction = 0;
2730 }
2731 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2732 }
2733 else
2734 {
2735 /* Note that we must go through the motions of allocating an argument
2736 block even if the size is zero because we may be storing args
2737 in the area reserved for register arguments, which may be part of
2738 the stack frame. */
2739
2740 int needed = adjusted_args_size.constant;
2741
2742 /* Store the maximum argument space used. It will be pushed by
2743 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2744 checking). */
2745
2746 if (needed > current_function_outgoing_args_size)
2747 current_function_outgoing_args_size = needed;
2748
2749 if (must_preallocate)
2750 {
2751 if (ACCUMULATE_OUTGOING_ARGS)
2752 {
2753 /* Since the stack pointer will never be pushed, it is
2754 possible for the evaluation of a parm to clobber
2755 something we have already written to the stack.
2756 Since most function calls on RISC machines do not use
2757 the stack, this is uncommon, but must work correctly.
2758
2759 Therefore, we save any area of the stack that was already
2760 written and that we are using. Here we set up to do this
2761 by making a new stack usage map from the old one. The
2762 actual save will be done by store_one_arg.
2763
2764 Another approach might be to try to reorder the argument
2765 evaluations to avoid this conflicting stack usage. */
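
/* Editor's note on the map's encoding, as inferred from its use here:
   a nonzero byte stack_usage_map[i] records that byte i of the
   outgoing argument area has already been written, and
   highest_outgoing_arg_in_use is one past the highest byte currently
   tracked; store_one_arg consults the map to decide which areas need
   saving.  */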
2766
2767 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2768 /* Since we will be writing into the entire argument area,
2769 the map must be allocated for its entire size, not just
2770 the part that is the responsibility of the caller. */
2771 needed += reg_parm_stack_space;
2772 #endif
2773
2774 #ifdef ARGS_GROW_DOWNWARD
2775 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2776 needed + 1);
2777 #else
2778 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2779 needed);
2780 #endif
2781 stack_usage_map
2782 = (char *) alloca (highest_outgoing_arg_in_use);
2783
2784 if (initial_highest_arg_in_use)
2785 memcpy (stack_usage_map, initial_stack_usage_map,
2786 initial_highest_arg_in_use);
2787
2788 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2789 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2790 (highest_outgoing_arg_in_use
2791 - initial_highest_arg_in_use));
2792 needed = 0;
2793
2794 /* The address of the outgoing argument list must not be
2795 copied to a register here, because argblock would be left
2796 pointing to the wrong place after the call to
2797 allocate_dynamic_stack_space below. */
2798
2799 argblock = virtual_outgoing_args_rtx;
2800 }
2801 else
2802 {
2803 if (inhibit_defer_pop == 0)
2804 {
2805 /* Try to reuse some or all of the pending_stack_adjust
2806 to get this space. */
2807 needed
2808 = (combine_pending_stack_adjustment_and_call
2809 (unadjusted_args_size,
2810 &adjusted_args_size,
2811 preferred_unit_stack_boundary));
2812
2813 /* combine_pending_stack_adjustment_and_call computes
2814 an adjustment before the arguments are allocated.
2815 Account for them and see whether or not the stack
2816 needs to go up or down. */
2817 needed = unadjusted_args_size - needed;
2818
2819 if (needed < 0)
2820 {
2821 /* We're releasing stack space. */
2822 /* ??? We can avoid any adjustment at all if we're
2823 already aligned. FIXME. */
2824 pending_stack_adjust = -needed;
2825 do_pending_stack_adjust ();
2826 needed = 0;
2827 }
2828 else
2829 /* We need to allocate space. We'll do that in
2830 push_block below. */
2831 pending_stack_adjust = 0;
2832 }
2833
2834 /* Special case this because overhead of `push_block' in
2835 this case is non-trivial. */
2836 if (needed == 0)
2837 argblock = virtual_outgoing_args_rtx;
2838 else
2839 argblock = push_block (GEN_INT (needed), 0, 0);
2840
2841 /* We only really need to call `copy_to_reg' in the case
2842 where push insns are going to be used to pass ARGBLOCK
2843 to a function call in ARGS. In that case, the stack
2844 pointer changes value from the allocation point to the
2845 call point, and hence the value of
2846 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But we might
2847 as well always do it. */
2848 argblock = copy_to_reg (argblock);
2849
2850 /* The save/restore code in store_one_arg handles all
2851 cases except one: a constructor call (including a C
2852 function returning a BLKmode struct) to initialize
2853 an argument. */
2854 if (stack_arg_under_construction)
2855 {
2856 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2857 rtx push_size = GEN_INT (reg_parm_stack_space
2858 + adjusted_args_size.constant);
2859 #else
2860 rtx push_size = GEN_INT (adjusted_args_size.constant);
2861 #endif
2862 if (old_stack_level == 0)
2863 {
2864 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2865 NULL_RTX);
2866 old_pending_adj = pending_stack_adjust;
2867 pending_stack_adjust = 0;
2868 /* stack_arg_under_construction says whether a stack
2869 arg is being constructed at the old stack level.
2870 Pushing the stack gets a clean outgoing argument
2871 block. */
2872 old_stack_arg_under_construction
2873 = stack_arg_under_construction;
2874 stack_arg_under_construction = 0;
2875 /* Make a new map for the new argument list. */
2876 stack_usage_map = (char *)
2877 alloca (highest_outgoing_arg_in_use);
2878 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2879 highest_outgoing_arg_in_use = 0;
2880 }
2881 allocate_dynamic_stack_space (push_size, NULL_RTX,
2882 BITS_PER_UNIT);
2883 }
2884 /* If argument evaluation might modify the stack pointer,
2885 copy the address of the argument list to a register. */
2886 for (i = 0; i < num_actuals; i++)
2887 if (args[i].pass_on_stack)
2888 {
2889 argblock = copy_addr_to_reg (argblock);
2890 break;
2891 }
2892 }
2893 }
2894 }
2895
2896 compute_argument_addresses (args, argblock, num_actuals);
2897
2898 /* If we push args individually in reverse order, perform stack alignment
2899 before the first push (the last arg). */
2900 if (PUSH_ARGS_REVERSED && argblock == 0
2901 && adjusted_args_size.constant != unadjusted_args_size)
2902 {
2903 /* When the stack adjustment is pending, we get better code
2904 by combining the adjustments. */
2905 if (pending_stack_adjust
2906 && ! (flags & ECF_LIBCALL_BLOCK)
2907 && ! inhibit_defer_pop)
2908 {
2909 pending_stack_adjust
2910 = (combine_pending_stack_adjustment_and_call
2911 (unadjusted_args_size,
2912 &adjusted_args_size,
2913 preferred_unit_stack_boundary));
2914 do_pending_stack_adjust ();
2915 }
2916 else if (argblock == 0)
2917 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2918 - unadjusted_args_size));
2919 }
2920 /* Now that the stack is properly aligned, pops can't safely
2921 be deferred during the evaluation of the arguments. */
2922 NO_DEFER_POP;
2923
2924 funexp = rtx_for_function_call (fndecl, exp);
2925
2926 /* Figure out the register where the value, if any, will come back. */
2927 valreg = 0;
2928 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2929 && ! structure_value_addr)
2930 {
2931 if (pcc_struct_value)
2932 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2933 fndecl, (pass == 0));
2934 else
2935 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2936 }
2937
2938 /* Precompute all register parameters. It isn't safe to compute anything
2939 once we have started filling any specific hard regs. */
2940 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2941
2942 #ifdef REG_PARM_STACK_SPACE
2943 /* Save the fixed argument area if it's part of the caller's frame and
2944 is clobbered by argument setup for this call. */
2945 if (ACCUMULATE_OUTGOING_ARGS && pass)
2946 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2947 &low_to_save, &high_to_save);
2948 #endif
2949
2950 /* Now store (and compute if necessary) all non-register parms.
2951 These come before register parms, since they can require block-moves,
2952 which could clobber the registers used for register parms.
2953 Parms which have partial registers are not stored here,
2954 but we do preallocate space here if they want that. */
2955
2956 for (i = 0; i < num_actuals; i++)
2957 if (args[i].reg == 0 || args[i].pass_on_stack)
2958 {
2959 rtx before_arg = get_last_insn ();
2960
2961 if (store_one_arg (&args[i], argblock, flags,
2962 adjusted_args_size.var != 0,
2963 reg_parm_stack_space)
2964 || (pass == 0
2965 && check_sibcall_argument_overlap (before_arg,
2966 &args[i])))
2967 sibcall_failure = 1;
2968 }
2969
2970 /* If we have a parm that is passed in registers but not in memory
2971 and whose alignment does not permit a direct copy into registers,
2972 make a group of pseudos that correspond to each register that we
2973 will later fill. */
2974 if (STRICT_ALIGNMENT)
2975 store_unaligned_arguments_into_pseudos (args, num_actuals);
2976
2977 /* Now store any partially-in-registers parm.
2978 This is the last place a block-move can happen. */
2979 if (reg_parm_seen)
2980 for (i = 0; i < num_actuals; i++)
2981 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2982 {
2983 rtx before_arg = get_last_insn ();
2984
2985 if (store_one_arg (&args[i], argblock, flags,
2986 adjusted_args_size.var != 0,
2987 reg_parm_stack_space)
2988 || (pass == 0
2989 && check_sibcall_argument_overlap (before_arg,
2990 &args[i])))
2991 sibcall_failure = 1;
2992 }
2993
2994 /* If we pushed args in forward order, perform stack alignment
2995 after pushing the last arg. */
2996 if (!PUSH_ARGS_REVERSED && argblock == 0)
2997 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2998 - unadjusted_args_size));
2999
3000 /* If register arguments require space on the stack and stack space
3001 was not preallocated, allocate stack space here for arguments
3002 passed in registers. */
3003 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3004 if (!ACCUMULATE_OUTGOING_ARGS
3005 && must_preallocate == 0 && reg_parm_stack_space > 0)
3006 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3007 #endif
3008
3009 /* Pass the function the address in which to return a
3010 structure value. */
3011 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3012 {
3013 emit_move_insn (struct_value_rtx,
3014 force_reg (Pmode,
3015 force_operand (structure_value_addr,
3016 NULL_RTX)));
3017
3018 if (GET_CODE (struct_value_rtx) == REG)
3019 use_reg (&call_fusage, struct_value_rtx);
3020 }
3021
3022 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3023 reg_parm_seen, pass == 0);
3024
3025 load_register_parameters (args, num_actuals, &call_fusage, flags);
3026
3027 /* Perform postincrements before actually calling the function. */
3028 emit_queue ();
3029
3030 /* Save a pointer to the last insn before the call, so that we can
3031 later safely search backwards to find the CALL_INSN. */
3032 before_call = get_last_insn ();
3033
3034 /* Set up next argument register. For sibling calls on machines
3035 with register windows this should be the incoming register. */
3036 #ifdef FUNCTION_INCOMING_ARG
3037 if (pass == 0)
3038 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3039 void_type_node, 1);
3040 else
3041 #endif
3042 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3043 void_type_node, 1);
3044
3045 /* All arguments and registers used for the call must be set up by
3046 now! */
3047
3048 /* Stack must be properly aligned now. */
3049 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3050 abort ();
3051
3052 /* Generate the actual call instruction. */
3053 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3054 adjusted_args_size.constant, struct_value_size,
3055 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3056 flags, & args_so_far);
3057
3058 /* Verify that we've deallocated all the stack we used. */
3059 if (pass
3060 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
3061 abort ();
3062
3063 /* If call is cse'able, make appropriate pair of reg-notes around it.
3064 Test valreg so we don't crash; may safely ignore `const'
3065 if return type is void. Disable for PARALLEL return values, because
3066 we have no way to move such values into a pseudo register. */
3067 if (pass && (flags & ECF_LIBCALL_BLOCK))
3068 {
3069 rtx insns;
3070
3071 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
3072 {
3073 insns = get_insns ();
3074 end_sequence ();
3075 emit_insns (insns);
3076 }
3077 else
3078 {
3079 rtx note = 0;
3080 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3081
3082 /* Mark the return value as a pointer if needed. */
3083 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3084 mark_reg_pointer (temp,
3085 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3086
3087 /* Construct an "equal form" for the value which mentions all the
3088 arguments in order as well as the function name. */
3089 for (i = 0; i < num_actuals; i++)
3090 note = gen_rtx_EXPR_LIST (VOIDmode,
3091 args[i].initial_value, note);
3092 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3093
3094 insns = get_insns ();
3095 end_sequence ();
3096
3097 if (flags & ECF_PURE)
3098 note = gen_rtx_EXPR_LIST (VOIDmode,
3099 gen_rtx_USE (VOIDmode,
3100 gen_rtx_MEM (BLKmode,
3101 gen_rtx_SCRATCH (VOIDmode))),
3102 note);
3103
3104 emit_libcall_block (insns, temp, valreg, note);
3105
3106 valreg = temp;
3107 }
3108 }
3109 else if (pass && (flags & ECF_MALLOC))
3110 {
3111 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3112 rtx last, insns;
3113
3114 /* The return value from a malloc-like function is a pointer. */
3115 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3116 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3117
3118 emit_move_insn (temp, valreg);
3119
3120 /* The return value from a malloc-like function can not alias
3121 anything else. */
3122 last = get_last_insn ();
3123 REG_NOTES (last) =
3124 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3125
3126 /* Write out the sequence. */
3127 insns = get_insns ();
3128 end_sequence ();
3129 emit_insns (insns);
3130 valreg = temp;
3131 }
3132
3133 /* For calls to `setjmp', etc., inform flow.c it should complain
3134 if nonvolatile values are live. For functions that cannot return,
3135 inform flow that control does not fall through. */
3136
3137 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3138 {
3139 /* The barrier must be emitted
3140 immediately after the CALL_INSN. Some ports emit more
3141 than just a CALL_INSN above, so we must search for it here. */
3142
3143 rtx last = get_last_insn ();
3144 while (GET_CODE (last) != CALL_INSN)
3145 {
3146 last = PREV_INSN (last);
3147 /* There was no CALL_INSN? */
3148 if (last == before_call)
3149 abort ();
3150 }
3151
3152 emit_barrier_after (last);
3153 }
3154
3155 if (flags & ECF_LONGJMP)
3156 current_function_calls_longjmp = 1;
3157
3158 /* If this function is returning into a memory location marked as
3159 readonly, it means it is initializing that location. But we normally
3160 treat functions as not clobbering such locations, so we need to
3161 specify that this one does. */
3162 if (target != 0 && GET_CODE (target) == MEM
3163 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
3164 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3165
3166 /* If value type not void, return an rtx for the value. */
3167
3168 /* If there are cleanups to be called, don't use a hard reg as target.
3169 We need to double check this and see if it matters anymore. */
3170 if (any_pending_cleanups (1))
3171 {
3172 if (target && REG_P (target)
3173 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3174 target = 0;
3175 sibcall_failure = 1;
3176 }
3177
3178 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3179 || ignore)
3180 target = const0_rtx;
3181 else if (structure_value_addr)
3182 {
3183 if (target == 0 || GET_CODE (target) != MEM)
3184 {
3185 target
3186 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3187 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3188 structure_value_addr));
3189 set_mem_attributes (target, exp, 1);
3190 }
3191 }
3192 else if (pcc_struct_value)
3193 {
3194 /* This is the special C++ case where we need to
3195 know what the true target was. We take care to
3196 never use this value more than once in one expression. */
3197 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3198 copy_to_reg (valreg));
3199 set_mem_attributes (target, exp, 1);
3200 }
3201 /* Handle calls that return values in multiple non-contiguous locations.
3202 The Irix 6 ABI has examples of this. */
3203 else if (GET_CODE (valreg) == PARALLEL)
3204 {
3205 if (target == 0)
3206 {
3207 /* This will only be assigned once, so it can be readonly. */
3208 tree nt = build_qualified_type (TREE_TYPE (exp),
3209 (TYPE_QUALS (TREE_TYPE (exp))
3210 | TYPE_QUAL_CONST));
3211
3212 target = assign_temp (nt, 0, 1, 1);
3213 preserve_temp_slots (target);
3214 }
3215
3216 if (! rtx_equal_p (target, valreg))
3217 emit_group_store (target, valreg,
3218 int_size_in_bytes (TREE_TYPE (exp)));
3219
3220 /* We can not support sibling calls for this case. */
3221 sibcall_failure = 1;
3222 }
3223 else if (target
3224 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3225 && GET_MODE (target) == GET_MODE (valreg))
3226 {
3227 /* TARGET and VALREG cannot be equal at this point because the
3228 latter would not have REG_FUNCTION_VALUE_P true, while the
3229 former would if it were referring to the same register.
3230
3231 If they refer to the same register, this move will be a no-op,
3232 except when function inlining is being done. */
3233 emit_move_insn (target, valreg);
3234 }
3235 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3236 {
3237 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3238
3239 /* We can not support sibling calls for this case. */
3240 sibcall_failure = 1;
3241 }
3242 else
3243 target = copy_to_reg (valreg);
3244
3245 #ifdef PROMOTE_FUNCTION_RETURN
3246 /* If we promoted this return value, make the proper SUBREG. TARGET
3247 might be const0_rtx here, so be careful. */
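/* Editor's note, with hypothetical target parameters: if an SImode
   (4-byte) value was promoted to a DImode (8-byte) register on a
   big-endian machine with 4-byte words, the computation below gives
   offset == 8 - 4 == 4, the position of the low-order word of the
   promoted register; neither endianness correction fires when both
   WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN are set.  */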
3248 if (GET_CODE (target) == REG
3249 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3250 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3251 {
3252 tree type = TREE_TYPE (exp);
3253 int unsignedp = TREE_UNSIGNED (type);
3254 int offset = 0;
3255
3256 /* If we don't promote as expected, something is wrong. */
3257 if (GET_MODE (target)
3258 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3259 abort ();
3260
3261 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3262 && GET_MODE_SIZE (GET_MODE (target))
3263 > GET_MODE_SIZE (TYPE_MODE (type)))
3264 {
3265 offset = GET_MODE_SIZE (GET_MODE (target))
3266 - GET_MODE_SIZE (TYPE_MODE (type));
3267 if (! BYTES_BIG_ENDIAN)
3268 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3269 else if (! WORDS_BIG_ENDIAN)
3270 offset %= UNITS_PER_WORD;
3271 }
3272 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3273 SUBREG_PROMOTED_VAR_P (target) = 1;
3274 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3275 }
3276 #endif
3277
3278 /* If size of args is variable or this was a constructor call for a stack
3279 argument, restore saved stack-pointer value. */
3280
3281 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3282 {
3283 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3284 pending_stack_adjust = old_pending_adj;
3285 stack_arg_under_construction = old_stack_arg_under_construction;
3286 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3287 stack_usage_map = initial_stack_usage_map;
3288 sibcall_failure = 1;
3289 }
3290 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3291 {
3292 #ifdef REG_PARM_STACK_SPACE
3293 if (save_area)
3294 {
3295 restore_fixed_argument_area (save_area, argblock,
3296 high_to_save, low_to_save);
3297 }
3298 #endif
3299
3300 /* If we saved any argument areas, restore them. */
3301 for (i = 0; i < num_actuals; i++)
3302 if (args[i].save_area)
3303 {
3304 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3305 rtx stack_area
3306 = gen_rtx_MEM (save_mode,
3307 memory_address (save_mode,
3308 XEXP (args[i].stack_slot, 0)));
3309
3310 if (save_mode != BLKmode)
3311 emit_move_insn (stack_area, args[i].save_area);
3312 else
3313 emit_block_move (stack_area,
3314 validize_mem (args[i].save_area),
3315 GEN_INT (args[i].size.constant));
3316 }
3317
3318 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3319 stack_usage_map = initial_stack_usage_map;
3320 }
3321
3322 /* If this was alloca, record the new stack level for nonlocal gotos.
3323 Check for the handler slots since we might not have a save area
3324 for non-local gotos. */
3325
3326 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3327 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3328
3329 /* Free up storage we no longer need. */
3330 for (i = 0; i < num_actuals; ++i)
3331 if (args[i].aligned_regs)
3332 free (args[i].aligned_regs);
3333
3334 if (pass == 0)
3335 {
3336 /* Undo the fake expand_start_target_temps we did earlier. If
3337 there had been any cleanups created, we've already set
3338 sibcall_failure. */
3339 expand_end_target_temps ();
3340 }
3341
3342 insns = get_insns ();
3343 end_sequence ();
3344
3345 if (pass == 0)
3346 {
3347 tail_call_insns = insns;
3348
3349 /* Restore the pending stack adjustment now that we have
3350 finished generating the sibling call sequence. */
3351
3352 pending_stack_adjust = save_pending_stack_adjust;
3353 stack_pointer_delta = save_stack_pointer_delta;
3354
3355 /* Prepare arg structure for next iteration. */
3356 for (i = 0; i < num_actuals; i++)
3357 {
3358 args[i].value = 0;
3359 args[i].aligned_regs = 0;
3360 args[i].stack = 0;
3361 }
3362
3363 sbitmap_free (stored_args_map);
3364 }
3365 else
3366 normal_call_insns = insns;
3367
3368 /* If something prevents making this a sibling call,
3369 zero out the sequence. */
3370 if (sibcall_failure)
3371 tail_call_insns = NULL_RTX;
3372 }
3373
3374 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3375 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3376 can happen if the arguments to this function call an inline
3377 function whose expansion contains another CALL_PLACEHOLDER.
3378
3379 If there are any CALL_PLACEHOLDERs in any of these sequences, replace
3380 them with their normal call. */
3381
3382 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3383 if (GET_CODE (insn) == CALL_INSN
3384 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3385 replace_call_placeholder (insn, sibcall_use_normal);
3386
3387 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3388 if (GET_CODE (insn) == CALL_INSN
3389 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3390 replace_call_placeholder (insn, sibcall_use_normal);
3391
3392 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3393 if (GET_CODE (insn) == CALL_INSN
3394 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3395 replace_call_placeholder (insn, sibcall_use_normal);
3396
3397 /* If this was a potential tail recursion site, then emit a
3398 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3399 One of them will be selected later. */
3400 if (tail_recursion_insns || tail_call_insns)
3401 {
3402 /* The tail recursion label must be kept around. We could expose
3403 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3404 and makes determining true tail recursion sites difficult.
3405
3406 So we set LABEL_PRESERVE_P here, then clear it when we select
3407 one of the call sequences after rtl generation is complete. */
3408 if (tail_recursion_insns)
3409 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3410 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3411 tail_call_insns,
3412 tail_recursion_insns,
3413 tail_recursion_label));
3414 }
3415 else
3416 emit_insns (normal_call_insns);
3417
3418 currently_expanding_call--;
3419
3420 /* If this function returns with the stack pointer depressed, ensure
3421 this block saves and restores the stack pointer, show it was
3422 changed, and adjust for any outgoing arg space. */
3423 if (flags & ECF_SP_DEPRESSED)
3424 {
3425 clear_pending_stack_adjust ();
3426 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3427 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3428 save_stack_pointer ();
3429 }
3430
3431 return target;
3432 }
3433 \f
3434 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3435 The RETVAL parameter specifies whether the return value needs to be saved;
3436 the other parameters are documented in the emit_library_call function below. */
3437
3438 static rtx
3439 emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
3440 int retval;
3441 rtx orgfun;
3442 rtx value;
3443 enum libcall_type fn_type;
3444 enum machine_mode outmode;
3445 int nargs;
3446 va_list p;
3447 {
3448 /* Total size in bytes of all the stack-parms scanned so far. */
3449 struct args_size args_size;
3450 /* Size of arguments before any adjustments (such as rounding). */
3451 struct args_size original_args_size;
3452 int argnum;
3453 rtx fun;
3454 int inc;
3455 int count;
3456 struct args_size alignment_pad;
3457 rtx argblock = 0;
3458 CUMULATIVE_ARGS args_so_far;
3459 struct arg
3460 {
3461 rtx value;
3462 enum machine_mode mode;
3463 rtx reg;
3464 int partial;
3465 struct args_size offset;
3466 struct args_size size;
3467 rtx save_area;
3468 };
3469 struct arg *argvec;
3470 int old_inhibit_defer_pop = inhibit_defer_pop;
3471 rtx call_fusage = 0;
3472 rtx mem_value = 0;
3473 rtx valreg;
3474 int pcc_struct_value = 0;
3475 int struct_value_size = 0;
3476 int flags;
3477 int reg_parm_stack_space = 0;
3478 int needed;
3479 rtx before_call;
3480 tree tfom; /* type_for_mode (outmode, 0) */
3481
3482 #ifdef REG_PARM_STACK_SPACE
3483 /* Define the boundary of the register parm stack space that needs to be
3484 saved, if any. */
3485 int low_to_save = -1, high_to_save = 0;
3486 rtx save_area = 0; /* Place that it is saved. */
3487 #endif
3488
3489 /* Size of the stack reserved for parameter registers. */
3490 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3491 char *initial_stack_usage_map = stack_usage_map;
3492
3493 #ifdef REG_PARM_STACK_SPACE
3494 #ifdef MAYBE_REG_PARM_STACK_SPACE
3495 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3496 #else
3497 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3498 #endif
3499 #endif
3500
3501 /* By default, library functions cannot throw. */
3502 flags = ECF_NOTHROW;
3503
3504 switch (fn_type)
3505 {
3506 case LCT_NORMAL:
3507 break;
3508 case LCT_CONST:
3509 flags |= ECF_CONST;
3510 break;
3511 case LCT_PURE:
3512 flags |= ECF_PURE;
3513 break;
3514 case LCT_CONST_MAKE_BLOCK:
3515 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3516 break;
3517 case LCT_PURE_MAKE_BLOCK:
3518 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3519 break;
3520 case LCT_NORETURN:
3521 flags |= ECF_NORETURN;
3522 break;
3523 case LCT_THROW:
3524 flags = ECF_NORETURN;
3525 break;
3526 case LCT_ALWAYS_RETURN:
3527 flags = ECF_ALWAYS_RETURN;
3528 break;
3529 case LCT_RETURNS_TWICE:
3530 flags = ECF_RETURNS_TWICE;
3531 break;
3532 }
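/* Note that the LCT_THROW, LCT_ALWAYS_RETURN and LCT_RETURNS_TWICE cases
   above use plain assignment rather than `|=', discarding the default
   ECF_NOTHROW; presumably such calls must not be assumed non-throwing.  */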
3533 fun = orgfun;
3534
3535 /* Ensure current function's preferred stack boundary is at least
3536 what we need. */
3537 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3538 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3539
3540 /* If this kind of value comes back in memory,
3541 decide where in memory it should come back. */
3542 if (outmode != VOIDmode)
3543 {
3544 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3545 if (aggregate_value_p (tfom))
3546 {
3547 #ifdef PCC_STATIC_STRUCT_RETURN
3548 rtx pointer_reg
3549 = hard_function_value (build_pointer_type (tfom), 0, 0);
3550 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3551 pcc_struct_value = 1;
3552 if (value == 0)
3553 value = gen_reg_rtx (outmode);
3554 #else /* not PCC_STATIC_STRUCT_RETURN */
3555 struct_value_size = GET_MODE_SIZE (outmode);
3556 if (value != 0 && GET_CODE (value) == MEM)
3557 mem_value = value;
3558 else
3559 mem_value = assign_temp (tfom, 0, 1, 1);
3560 #endif
3561 /* This call returns a big structure. */
3562 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3563 }
3564 }
3565 else
3566 tfom = void_type_node;
3567
3568 /* ??? Unfinished: must pass the memory address as an argument. */
3569
3570 /* Copy all the libcall-arguments out of the varargs data
3571 and into a vector ARGVEC.
3572
3573 Compute how to pass each argument. We only support a very small subset
3574 of the full argument passing conventions to limit complexity here since
3575 library functions shouldn't have many args. */
3576
3577 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3578 memset ((char *) argvec, 0, (nargs + 1) * sizeof (struct arg));
3579
3580 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3581 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3582 #else
3583 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3584 #endif
3585
3586 args_size.constant = 0;
3587 args_size.var = 0;
3588
3589 count = 0;
3590
3591 /* Now we are about to start emitting insns that can be deleted
3592 if a libcall is deleted. */
3593 if (flags & ECF_LIBCALL_BLOCK)
3594 start_sequence ();
3595
3596 push_temp_slots ();
3597
3598 /* If there's a structure value address to be passed,
3599 either pass it in the special place, or pass it as an extra argument. */
3600 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3601 {
3602 rtx addr = XEXP (mem_value, 0);
3603 nargs++;
3604
3605 /* Make sure it is a reasonable operand for a move or push insn. */
3606 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3607 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3608 addr = force_operand (addr, NULL_RTX);
3609
3610 argvec[count].value = addr;
3611 argvec[count].mode = Pmode;
3612 argvec[count].partial = 0;
3613
3614 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3615 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3616 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3617 abort ();
3618 #endif
3619
3620 locate_and_pad_parm (Pmode, NULL_TREE,
3621 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3622 1,
3623 #else
3624 argvec[count].reg != 0,
3625 #endif
3626 NULL_TREE, &args_size, &argvec[count].offset,
3627 &argvec[count].size, &alignment_pad);
3628
3629 if (argvec[count].reg == 0 || argvec[count].partial != 0
3630 || reg_parm_stack_space > 0)
3631 args_size.constant += argvec[count].size.constant;
3632
3633 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3634
3635 count++;
3636 }
3637
3638 for (; count < nargs; count++)
3639 {
3640 rtx val = va_arg (p, rtx);
3641 enum machine_mode mode = va_arg (p, enum machine_mode);
3642
3643 /* We cannot convert the arg value to the mode the library wants here;
3644 must do it earlier where we know the signedness of the arg. */
3645 if (mode == BLKmode
3646 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3647 abort ();
3648
3649 /* On some machines, there's no way to pass a float to a library fcn.
3650 Pass it as a double instead. */
3651 #ifdef LIBGCC_NEEDS_DOUBLE
3652 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3653 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3654 #endif
3655
3656 /* There's no need to call protect_from_queue, because
3657 either emit_move_insn or emit_push_insn will do that. */
3658
3659 /* Make sure it is a reasonable operand for a move or push insn. */
3660 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3661 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3662 val = force_operand (val, NULL_RTX);
3663
3664 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3665 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3666 {
3667 rtx slot;
3668 int must_copy = 1
3669 #ifdef FUNCTION_ARG_CALLEE_COPIES
3670 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3671 NULL_TREE, 1)
3672 #endif
3673 ;
3674
3675 if (GET_CODE (val) == MEM && ! must_copy)
3676 slot = val;
3677 else if (must_copy)
3678 {
3679 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3680 0, 1, 1);
3681 emit_move_insn (slot, val);
3682 }
3683 else
3684 {
3685 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3686
3687 slot = gen_rtx_MEM (mode,
3688 expand_expr (build1 (ADDR_EXPR,
3689 build_pointer_type
3690 (type),
3691 make_tree (type, val)),
3692 NULL_RTX, VOIDmode, 0));
3693 }
3694
3695 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3696 gen_rtx_USE (VOIDmode, slot),
3697 call_fusage);
3698 if (must_copy)
3699 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3700 gen_rtx_CLOBBER (VOIDmode,
3701 slot),
3702 call_fusage);
3703
3704 mode = Pmode;
3705 val = force_operand (XEXP (slot, 0), NULL_RTX);
3706 }
3707 #endif
3708
3709 argvec[count].value = val;
3710 argvec[count].mode = mode;
3711
3712 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3713
3714 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3715 argvec[count].partial
3716 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3717 #else
3718 argvec[count].partial = 0;
3719 #endif
3720
3721 locate_and_pad_parm (mode, NULL_TREE,
3722 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3723 1,
3724 #else
3725 argvec[count].reg != 0,
3726 #endif
3727 NULL_TREE, &args_size, &argvec[count].offset,
3728 &argvec[count].size, &alignment_pad);
3729
3730 if (argvec[count].size.var)
3731 abort ();
3732
3733 if (reg_parm_stack_space == 0 && argvec[count].partial)
3734 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3735
3736 if (argvec[count].reg == 0 || argvec[count].partial != 0
3737 || reg_parm_stack_space > 0)
3738 args_size.constant += argvec[count].size.constant;
3739
3740 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3741 }
3742
3743 #ifdef FINAL_REG_PARM_STACK_SPACE
3744 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3745 args_size.var);
3746 #endif
3747 /* If this machine requires an external definition for library
3748 functions, write one out. */
3749 assemble_external_libcall (fun);
3750
3751 original_args_size = args_size;
3752 args_size.constant = (((args_size.constant
3753 + stack_pointer_delta
3754 + STACK_BYTES - 1)
3755 / STACK_BYTES
3756 * STACK_BYTES)
3757 - stack_pointer_delta);
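/* For example, with STACK_BYTES == 16, args_size.constant == 20 and
   stack_pointer_delta == 8, this yields ((20 + 8 + 15) / 16) * 16 - 8
   == 24, so the total adjustment 24 + 8 is a multiple of 16.  */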
3758
3759 args_size.constant = MAX (args_size.constant,
3760 reg_parm_stack_space);
3761
3762 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3763 args_size.constant -= reg_parm_stack_space;
3764 #endif
3765
3766 if (args_size.constant > current_function_outgoing_args_size)
3767 current_function_outgoing_args_size = args_size.constant;
3768
3769 if (ACCUMULATE_OUTGOING_ARGS)
3770 {
3771 /* Since the stack pointer will never be pushed, it is possible for
3772 the evaluation of a parm to clobber something we have already
3773 written to the stack. Since most function calls on RISC machines
3774 do not use the stack, this is uncommon, but must work correctly.
3775
3776 Therefore, we save any area of the stack that was already written
3777 and that we are using. Here we set up to do this by making a new
3778 stack usage map from the old one.
3779
3780 Another approach might be to try to reorder the argument
3781 evaluations to avoid this conflicting stack usage. */
3782
3783 needed = args_size.constant;
3784
3785 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3786 /* Since we will be writing into the entire argument area, the
3787 map must be allocated for its entire size, not just the part that
3788 is the responsibility of the caller. */
3789 needed += reg_parm_stack_space;
3790 #endif
3791
3792 #ifdef ARGS_GROW_DOWNWARD
3793 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3794 needed + 1);
3795 #else
3796 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3797 needed);
3798 #endif
3799 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3800
3801 if (initial_highest_arg_in_use)
3802 memcpy (stack_usage_map, initial_stack_usage_map,
3803 initial_highest_arg_in_use);
3804
3805 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3806 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3807 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3808 needed = 0;
3809
3810 /* We must be careful to use virtual regs before they're instantiated,
3811 and real regs afterwards. Loop optimization, for example, can create
3812 new libcalls after we've instantiated the virtual regs, and if we
3813 use virtuals anyway, they won't match the rtl patterns. */
3814
3815 if (virtuals_instantiated)
3816 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3817 else
3818 argblock = virtual_outgoing_args_rtx;
3819 }
3820 else
3821 {
3822 if (!PUSH_ARGS)
3823 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3824 }
3825
3826 /* If we push args individually in reverse order, perform stack alignment
3827 before the first push (the last arg). */
3828 if (argblock == 0 && PUSH_ARGS_REVERSED)
3829 anti_adjust_stack (GEN_INT (args_size.constant
3830 - original_args_size.constant));
3831
3832 if (PUSH_ARGS_REVERSED)
3833 {
3834 inc = -1;
3835 argnum = nargs - 1;
3836 }
3837 else
3838 {
3839 inc = 1;
3840 argnum = 0;
3841 }
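/* Thus with, say, nargs == 3 and PUSH_ARGS_REVERSED, the loops below
   visit argvec[2], argvec[1], argvec[0]; otherwise argvec[0] through
   argvec[2] in order.  */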
3842
3843 #ifdef REG_PARM_STACK_SPACE
3844 if (ACCUMULATE_OUTGOING_ARGS)
3845 {
3846 /* The argument list is the property of the called routine and it
3847 may clobber it. If the fixed area has been used for previous
3848 parameters, we must save and restore it.
3849
3850 Here we compute the boundary of the area that needs to be saved, if any. */
3851
3852 #ifdef ARGS_GROW_DOWNWARD
3853 for (count = 0; count < reg_parm_stack_space + 1; count++)
3854 #else
3855 for (count = 0; count < reg_parm_stack_space; count++)
3856 #endif
3857 {
3858 if (count >= highest_outgoing_arg_in_use
3859 || stack_usage_map[count] == 0)
3860 continue;
3861
3862 if (low_to_save == -1)
3863 low_to_save = count;
3864
3865 high_to_save = count;
3866 }
3867
3868 if (low_to_save >= 0)
3869 {
3870 int num_to_save = high_to_save - low_to_save + 1;
3871 enum machine_mode save_mode
3872 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3873 rtx stack_area;
3874
3875 /* If we don't have the required alignment, must do this in BLKmode. */
3876 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3877 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3878 save_mode = BLKmode;
3879
3880 #ifdef ARGS_GROW_DOWNWARD
3881 stack_area = gen_rtx_MEM (save_mode,
3882 memory_address (save_mode,
3883 plus_constant (argblock,
3884 -high_to_save)));
3885 #else
3886 stack_area = gen_rtx_MEM (save_mode,
3887 memory_address (save_mode,
3888 plus_constant (argblock,
3889 low_to_save)));
3890 #endif
3891 if (save_mode == BLKmode)
3892 {
3893 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3894 set_mem_align (save_area, PARM_BOUNDARY);
3895 emit_block_move (validize_mem (save_area), stack_area,
3896 GEN_INT (num_to_save));
3897 }
3898 else
3899 {
3900 save_area = gen_reg_rtx (save_mode);
3901 emit_move_insn (save_area, stack_area);
3902 }
3903 }
3904 }
3905 #endif
3906
3907 /* Push the args that need to be pushed. */
3908
3909 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3910 are to be pushed. */
3911 for (count = 0; count < nargs; count++, argnum += inc)
3912 {
3913 enum machine_mode mode = argvec[argnum].mode;
3914 rtx val = argvec[argnum].value;
3915 rtx reg = argvec[argnum].reg;
3916 int partial = argvec[argnum].partial;
3917 int lower_bound = 0, upper_bound = 0, i;
3918
3919 if (! (reg != 0 && partial == 0))
3920 {
3921 if (ACCUMULATE_OUTGOING_ARGS)
3922 {
3923 /* If this is being stored into a pre-allocated, fixed-size,
3924 stack area, save any previous data at that location. */
3925
3926 #ifdef ARGS_GROW_DOWNWARD
3927 /* stack_slot is negative, but we want to index stack_usage_map
3928 with positive values. */
3929 upper_bound = -argvec[argnum].offset.constant + 1;
3930 lower_bound = upper_bound - argvec[argnum].size.constant;
3931 #else
3932 lower_bound = argvec[argnum].offset.constant;
3933 upper_bound = lower_bound + argvec[argnum].size.constant;
3934 #endif
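/* E.g. a slot at offset -8 with size 4 when args grow downward gives
   bounds [5, 9), i.e. stack_usage_map[5] through [8]; offset 8 with
   size 4 otherwise gives [8, 12).  */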
3935
3936 for (i = lower_bound; i < upper_bound; i++)
3937 if (stack_usage_map[i]
3938 /* Don't store things in the fixed argument area at this
3939 point; it has already been saved. */
3940 && i > reg_parm_stack_space)
3941 break;
3942
3943 if (i != upper_bound)
3944 {
3945 /* We need to make a save area. See what mode we can make
3946 it. */
3947 enum machine_mode save_mode
3948 = mode_for_size (argvec[argnum].size.constant
3949 * BITS_PER_UNIT,
3950 MODE_INT, 1);
3951 rtx stack_area
3952 = gen_rtx_MEM
3953 (save_mode,
3954 memory_address
3955 (save_mode,
3956 plus_constant (argblock,
3957 argvec[argnum].offset.constant)));
3958 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3959
3960 emit_move_insn (argvec[argnum].save_area, stack_area);
3961 }
3962 }
3963
3964 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3965 argblock, GEN_INT (argvec[argnum].offset.constant),
3966 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3967
3968 /* Now mark the segment we just used. */
3969 if (ACCUMULATE_OUTGOING_ARGS)
3970 for (i = lower_bound; i < upper_bound; i++)
3971 stack_usage_map[i] = 1;
3972
3973 NO_DEFER_POP;
3974 }
3975 }
3976
3977 /* If we pushed args in forward order, perform stack alignment
3978 after pushing the last arg. */
3979 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3980 anti_adjust_stack (GEN_INT (args_size.constant
3981 - original_args_size.constant));
3982
3983 if (PUSH_ARGS_REVERSED)
3984 argnum = nargs - 1;
3985 else
3986 argnum = 0;
3987
3988 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
3989
3990 /* Now load any reg parms into their regs. */
3991
3992 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3993 are to be pushed. */
3994 for (count = 0; count < nargs; count++, argnum += inc)
3995 {
3996 rtx val = argvec[argnum].value;
3997 rtx reg = argvec[argnum].reg;
3998 int partial = argvec[argnum].partial;
3999
4000 /* Handle calls that pass values in multiple non-contiguous
4001 locations. The PA64 has examples of this for library calls. */
4002 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4003 emit_group_load (reg, val, GET_MODE_SIZE (GET_MODE (val)));
4004 else if (reg != 0 && partial == 0)
4005 emit_move_insn (reg, val);
4006
4007 NO_DEFER_POP;
4008 }
4009
4010 /* Any regs containing parms remain in use through the call. */
4011 for (count = 0; count < nargs; count++)
4012 {
4013 rtx reg = argvec[count].reg;
4014 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4015 use_group_regs (&call_fusage, reg);
4016 else if (reg != 0)
4017 use_reg (&call_fusage, reg);
4018 }
4019
4020 /* Pass the function the address in which to return a structure value. */
4021 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
4022 {
4023 emit_move_insn (struct_value_rtx,
4024 force_reg (Pmode,
4025 force_operand (XEXP (mem_value, 0),
4026 NULL_RTX)));
4027 if (GET_CODE (struct_value_rtx) == REG)
4028 use_reg (&call_fusage, struct_value_rtx);
4029 }
4030
4031 /* Don't allow popping to be deferred, since then
4032 cse'ing of library calls could delete a call and leave the pop. */
4033 NO_DEFER_POP;
4034 valreg = (mem_value == 0 && outmode != VOIDmode
4035 ? hard_libcall_value (outmode) : NULL_RTX);
4036
4037 /* Stack must be properly aligned now. */
4038 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4039 abort ();
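/* E.g. with a 128-bit PREFERRED_STACK_BOUNDARY the mask is 15, so any
   stack_pointer_delta that is not a multiple of 16 bytes aborts here.  */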
4040
4041 before_call = get_last_insn ();
4042
4043 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4044 will set inhibit_defer_pop to that value. */
4045 /* The return type is needed to decide how many bytes the function pops.
4046 Signedness plays no role in that, so for simplicity, we pretend it's
4047 always signed. We also assume that the list of arguments passed has
4048 no impact, so we pretend it is unknown. */
4049
4050 emit_call_1 (fun,
4051 get_identifier (XSTR (orgfun, 0)),
4052 build_function_type (tfom, NULL_TREE),
4053 original_args_size.constant, args_size.constant,
4054 struct_value_size,
4055 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4056 valreg,
4057 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4058
4059 /* For calls to `setjmp', etc., inform flow.c it should complain
4060 if nonvolatile values are live. For functions that cannot return,
4061 inform flow that control does not fall through. */
4062
4063 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4064 {
4065 /* The barrier note must be emitted
4066 immediately after the CALL_INSN. Some ports emit more than
4067 just a CALL_INSN above, so we must search for it here. */
4068
4069 rtx last = get_last_insn ();
4070 while (GET_CODE (last) != CALL_INSN)
4071 {
4072 last = PREV_INSN (last);
4073 /* There was no CALL_INSN? */
4074 if (last == before_call)
4075 abort ();
4076 }
4077
4078 emit_barrier_after (last);
4079 }
4080
4081 /* Now restore inhibit_defer_pop to its actual original value. */
4082 OK_DEFER_POP;
4083
4084 /* If call is cse'able, make appropriate pair of reg-notes around it.
4085 Test valreg so we don't crash; may safely ignore `const'
4086 if return type is void. Disable for PARALLEL return values, because
4087 we have no way to move such values into a pseudo register. */
4088 if (flags & ECF_LIBCALL_BLOCK)
4089 {
4090 rtx insns;
4091
4092 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
4093 {
4094 insns = get_insns ();
4095 end_sequence ();
4096 emit_insns (insns);
4097 }
4098 else
4099 {
4100 rtx note = 0;
4101 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4102 int i;
4103
4104 /* Construct an "equal form" for the value which mentions all the
4105 arguments in order as well as the function name. */
4106 for (i = 0; i < nargs; i++)
4107 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4108 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4109
4110 insns = get_insns ();
4111 end_sequence ();
4112
4113 if (flags & ECF_PURE)
4114 note = gen_rtx_EXPR_LIST (VOIDmode,
4115 gen_rtx_USE (VOIDmode,
4116 gen_rtx_MEM (BLKmode,
4117 gen_rtx_SCRATCH (VOIDmode))),
4118 note);
4119
4120 emit_libcall_block (insns, temp, valreg, note);
4121
4122 valreg = temp;
4123 }
4124 }
4125 pop_temp_slots ();
4126
4127 /* Copy the value to the right place. */
4128 if (outmode != VOIDmode && retval)
4129 {
4130 if (mem_value)
4131 {
4132 if (value == 0)
4133 value = mem_value;
4134 if (value != mem_value)
4135 emit_move_insn (value, mem_value);
4136 }
4137 else if (value != 0)
4138 emit_move_insn (value, hard_libcall_value (outmode));
4139 else
4140 value = hard_libcall_value (outmode);
4141 }
4142
4143 if (ACCUMULATE_OUTGOING_ARGS)
4144 {
4145 #ifdef REG_PARM_STACK_SPACE
4146 if (save_area)
4147 {
4148 enum machine_mode save_mode = GET_MODE (save_area);
4149 #ifdef ARGS_GROW_DOWNWARD
4150 rtx stack_area
4151 = gen_rtx_MEM (save_mode,
4152 memory_address (save_mode,
4153 plus_constant (argblock,
4154 - high_to_save)));
4155 #else
4156 rtx stack_area
4157 = gen_rtx_MEM (save_mode,
4158 memory_address (save_mode,
4159 plus_constant (argblock, low_to_save)));
4160 #endif
4161
4162 set_mem_align (stack_area, PARM_BOUNDARY);
4163 if (save_mode != BLKmode)
4164 emit_move_insn (stack_area, save_area);
4165 else
4166 emit_block_move (stack_area, validize_mem (save_area),
4167 GEN_INT (high_to_save - low_to_save + 1));
4168 }
4169 #endif
4170
4171 /* If we saved any argument areas, restore them. */
4172 for (count = 0; count < nargs; count++)
4173 if (argvec[count].save_area)
4174 {
4175 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4176 rtx stack_area
4177 = gen_rtx_MEM (save_mode,
4178 memory_address
4179 (save_mode,
4180 plus_constant (argblock,
4181 argvec[count].offset.constant)));
4182
4183 emit_move_insn (stack_area, argvec[count].save_area);
4184 }
4185
4186 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4187 stack_usage_map = initial_stack_usage_map;
4188 }
4189
4190 return value;
4191
4192 }
4193 \f
4194 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4196 for a value of mode OUTMODE,
4197 with NARGS different arguments, passed as alternating rtx values
4198 and machine_modes to convert them to.
4199 The rtx values should have been passed through protect_from_queue already.
4200
4201 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4202 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4203 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4204 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
4205 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
4206 or other LCT_ value for other types of library calls. */
4207
4208 void
4209 emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
4210 enum machine_mode outmode, int nargs, ...))
4211 {
4212 VA_OPEN (p, nargs);
4213 VA_FIXEDARG (p, rtx, orgfun);
4214 VA_FIXEDARG (p, int, fn_type);
4215 VA_FIXEDARG (p, enum machine_mode, outmode);
4216 VA_FIXEDARG (p, int, nargs);
4217
4218 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4219
4220 VA_CLOSE (p);
4221 }
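/* A typical use, sketched after calls found elsewhere in the compiler
   (the libfunc and operands here are illustrative only):

     emit_library_call (bzero_libfunc, LCT_NORMAL, VOIDmode, 2,
                        object, Pmode, size, TYPE_MODE (sizetype));

   i.e. NARGS rtx/mode pairs follow the fixed arguments.  */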
4222 \f
4223 /* Like emit_library_call except that an extra argument, VALUE,
4224 comes second and says where to store the result.
4225 (If VALUE is zero, this function chooses a convenient way
4226 to return the value.)
4227
4228 This function returns an rtx for where the value is to be found.
4229 If VALUE is nonzero, VALUE is returned. */
4230
4231 rtx
4232 emit_library_call_value VPARAMS((rtx orgfun, rtx value,
4233 enum libcall_type fn_type,
4234 enum machine_mode outmode, int nargs, ...))
4235 {
4236 rtx result;
4237
4238 VA_OPEN (p, nargs);
4239 VA_FIXEDARG (p, rtx, orgfun);
4240 VA_FIXEDARG (p, rtx, value);
4241 VA_FIXEDARG (p, int, fn_type);
4242 VA_FIXEDARG (p, enum machine_mode, outmode);
4243 VA_FIXEDARG (p, int, nargs);
4244
4245 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4246 nargs, p);
4247
4248 VA_CLOSE (p);
4249
4250 return result;
4251 }
4252 \f
4253 /* Store a single argument for a function call
4254 into the register or memory area where it must be passed.
4255 *ARG describes the argument value and where to pass it.
4256
4257 ARGBLOCK is the address of the stack-block for all the arguments,
4258 or 0 on a machine where arguments are pushed individually.
4259
4260 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4261 so must be careful about how the stack is used.
4262
4263 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4264 argument stack. This is used under ACCUMULATE_OUTGOING_ARGS to indicate
4265 that we need not worry about saving and restoring the stack.
4266
4267 FNDECL is the declaration of the function we are calling.
4268
4269 Return non-zero if this arg should cause sibcall failure,
4270 zero otherwise. */
4271
4272 static int
4273 store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
4274 struct arg_data *arg;
4275 rtx argblock;
4276 int flags;
4277 int variable_size ATTRIBUTE_UNUSED;
4278 int reg_parm_stack_space;
4279 {
4280 tree pval = arg->tree_value;
4281 rtx reg = 0;
4282 int partial = 0;
4283 int used = 0;
4284 int i, lower_bound = 0, upper_bound = 0;
4285 int sibcall_failure = 0;
4286
4287 if (TREE_CODE (pval) == ERROR_MARK)
4288 return 1;
4289
4290 /* Push a new temporary level for any temporaries we make for
4291 this argument. */
4292 push_temp_slots ();
4293
4294 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4295 {
4296 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4297 save any previous data at that location. */
4298 if (argblock && ! variable_size && arg->stack)
4299 {
4300 #ifdef ARGS_GROW_DOWNWARD
4301 /* stack_slot is negative, but we want to index stack_usage_map
4302 with positive values. */
4303 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4304 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4305 else
4306 upper_bound = 0;
4307
4308 lower_bound = upper_bound - arg->size.constant;
4309 #else
4310 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4311 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4312 else
4313 lower_bound = 0;
4314
4315 upper_bound = lower_bound + arg->size.constant;
4316 #endif
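/* E.g. if the slot address is (plus (reg) (const_int 16)) and the
   argument occupies 8 bytes, lower_bound is 16 and upper_bound 24, so
   the loop below checks stack_usage_map[16] through [23].  */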
4317
4318 for (i = lower_bound; i < upper_bound; i++)
4319 if (stack_usage_map[i]
4320 /* Don't store things in the fixed argument area at this point;
4321 it has already been saved. */
4322 && i > reg_parm_stack_space)
4323 break;
4324
4325 if (i != upper_bound)
4326 {
4327 /* We need to make a save area. See what mode we can make it. */
4328 enum machine_mode save_mode
4329 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4330 rtx stack_area
4331 = gen_rtx_MEM (save_mode,
4332 memory_address (save_mode,
4333 XEXP (arg->stack_slot, 0)));
4334
4335 if (save_mode == BLKmode)
4336 {
4337 tree ot = TREE_TYPE (arg->tree_value);
4338 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4339 | TYPE_QUAL_CONST));
4340
4341 arg->save_area = assign_temp (nt, 0, 1, 1);
4342 preserve_temp_slots (arg->save_area);
4343 emit_block_move (validize_mem (arg->save_area), stack_area,
4344 expr_size (arg->tree_value));
4345 }
4346 else
4347 {
4348 arg->save_area = gen_reg_rtx (save_mode);
4349 emit_move_insn (arg->save_area, stack_area);
4350 }
4351 }
4352 }
4353 /* Now that we have saved any slots that will be overwritten by this
4354 store, mark all slots this store will use. We must do this before
4355 we actually expand the argument since the expansion itself may
4356 trigger library calls which might need to use the same stack slot. */
4357 if (argblock && ! variable_size && arg->stack)
4358 for (i = lower_bound; i < upper_bound; i++)
4359 stack_usage_map[i] = 1;
4360 }
4361
4362 /* If this isn't going to be placed on both the stack and in registers,
4363 set up the register and number of words. */
4364 if (! arg->pass_on_stack)
4365 {
4366 if (flags & ECF_SIBCALL)
4367 reg = arg->tail_call_reg;
4368 else
4369 reg = arg->reg;
4370 partial = arg->partial;
4371 }
4372
4373 if (reg != 0 && partial == 0)
4374 /* Being passed entirely in a register. We shouldn't be called in
4375 this case. */
4376 abort ();
4377
4378 /* If this arg needs special alignment, don't load the registers
4379 here. */
4380 if (arg->n_aligned_regs != 0)
4381 reg = 0;
4382
4383 /* If this is being passed partially in a register, we can't evaluate
4384 it directly into its stack slot. Otherwise, we can. */
4385 if (arg->value == 0)
4386 {
4387 /* stack_arg_under_construction is nonzero if a function argument is
4388 being evaluated directly into the outgoing argument list and
4389 expand_call must take special action to preserve the argument list
4390 if it is called recursively.
4391
4392 For scalar function arguments stack_usage_map is sufficient to
4393 determine which stack slots must be saved and restored. Scalar
4394 arguments in general have pass_on_stack == 0.
4395
4396 If this argument is initialized by a function which takes the
4397 address of the argument (a C++ constructor or a C function
4398 returning a BLKmode structure), then stack_usage_map is
4399 insufficient and expand_call must push the stack around the
4400 function call. Such arguments have pass_on_stack == 1.
4401
4402 Note that it is always safe to set stack_arg_under_construction,
4403 but this generates suboptimal code if set when not needed. */
4404
4405 if (arg->pass_on_stack)
4406 stack_arg_under_construction++;
4407
4408 arg->value = expand_expr (pval,
4409 (partial
4410 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4411 ? NULL_RTX : arg->stack,
4412 VOIDmode, 0);
4413
4414 /* If the mode doesn't agree (because we promoted the object, or for
4415 any other reason), convert the mode now. */
4416
4417 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4418 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4419 arg->value, arg->unsignedp);
4420
4421 if (arg->pass_on_stack)
4422 stack_arg_under_construction--;
4423 }
4424
4425 /* Don't allow anything left on stack from computation
4426 of argument to alloca. */
4427 if (flags & ECF_MAY_BE_ALLOCA)
4428 do_pending_stack_adjust ();
4429
4430 if (arg->value == arg->stack)
4431 /* If the value is already in the stack slot, we are done. */
4432 ;
4433 else if (arg->mode != BLKmode)
4434 {
4435 int size;
4436
4437 /* Argument is a scalar, not entirely passed in registers.
4438 (If part is passed in registers, arg->partial says how much
4439 and emit_push_insn will take care of putting it there.)
4440
4441 Push it, and if its size is less than the
4442 amount of space allocated to it,
4443 also bump the stack pointer by the additional space.
4444 Note that in C the default argument promotions
4445 will prevent such mismatches. */
4446
4447 size = GET_MODE_SIZE (arg->mode);
4448 /* Compute how much space the push instruction will push.
4449 On many machines, pushing a byte will advance the stack
4450 pointer by a halfword. */
4451 #ifdef PUSH_ROUNDING
4452 size = PUSH_ROUNDING (size);
4453 #endif
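/* For instance, a port that always pushes an even number of bytes might
   define PUSH_ROUNDING (BYTES) as (((BYTES) + 1) & ~1), making a 1-byte
   push occupy 2 bytes.  */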
4454 used = size;
4455
4456 /* Compute how much space the argument should get:
4457 round up to a multiple of the alignment for arguments. */
4458 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4459 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4460 / (PARM_BOUNDARY / BITS_PER_UNIT))
4461 * (PARM_BOUNDARY / BITS_PER_UNIT));
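/* E.g. with a 32-bit PARM_BOUNDARY (4-byte units), a 2-byte argument
   gets used = ((2 + 3) / 4) * 4 == 4 bytes of space.  */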
4462
4463 /* This isn't already where we want it on the stack, so put it there.
4464 This can either be done with push or copy insns. */
4465 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4466 partial, reg, used - size, argblock,
4467 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4468 ARGS_SIZE_RTX (arg->alignment_pad));
4469
4470 /* Unless this is a partially-in-register argument, the argument is now
4471 in the stack. */
4472 if (partial == 0)
4473 arg->value = arg->stack;
4474 }
4475 else
4476 {
4477 /* BLKmode, at least partly to be pushed. */
4478
4479 int excess;
4480 rtx size_rtx;
4481
4482 /* Pushing a nonscalar.
4483 If part is passed in registers, PARTIAL says how much
4484 and emit_push_insn will take care of putting it there. */
4485
4486 /* Round its size up to a multiple
4487 of the allocation unit for arguments. */
4488
4489 if (arg->size.var != 0)
4490 {
4491 excess = 0;
4492 size_rtx = ARGS_SIZE_RTX (arg->size);
4493 }
4494 else
4495 {
4496 /* PUSH_ROUNDING has no effect on us, because
4497 emit_push_insn for BLKmode is careful to avoid it. */
4498 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4499 + partial * UNITS_PER_WORD);
4500 size_rtx = expr_size (pval);
4501 }
4502
4503 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4504 {
4505 /* emit_push_insn might not work properly if arg->value and
4506 argblock + arg->offset areas overlap. */
4507 rtx x = arg->value;
4508 int i = 0;
4509
4510 if (XEXP (x, 0) == current_function_internal_arg_pointer
4511 || (GET_CODE (XEXP (x, 0)) == PLUS
4512 && XEXP (XEXP (x, 0), 0) ==
4513 current_function_internal_arg_pointer
4514 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4515 {
4516 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4517 i = INTVAL (XEXP (XEXP (x, 0), 1));
4518
4519 /* expand_call should ensure this. */
4520 if (arg->offset.var || GET_CODE (size_rtx) != CONST_INT)
4521 abort ();
4522
4523 if (arg->offset.constant > i)
4524 {
4525 if (arg->offset.constant < i + INTVAL (size_rtx))
4526 sibcall_failure = 1;
4527 }
4528 else if (arg->offset.constant < i)
4529 {
4530 if (i < arg->offset.constant + INTVAL (size_rtx))
4531 sibcall_failure = 1;
4532 }
4533 }
4534 }
4535
4536 /* Special handling is required if part of the parameter lies in the
4537 register parameter area. The argument may be copied into the stack
4538 slot using memcpy(), but the original contents of the register
4539 parameter area will be restored after the memcpy() call.
4540
4541 To ensure that the part that lies in the register parameter area
4542 is copied correctly, we emit a separate push for that part. This
4543 push should be small enough to avoid a call to memcpy(). */
4544 #ifndef STACK_PARMS_IN_REG_PARM_AREA
4545 if (arg->reg && arg->pass_on_stack)
4546 #else
4547 if (1)
4548 #endif
4549 {
4550 if (arg->offset.constant < reg_parm_stack_space && arg->offset.var)
4551 error ("variable offset is passed partially in stack and in reg");
4552 else if (arg->offset.constant < reg_parm_stack_space && arg->size.var)
4553 error ("variable size is passed partially in stack and in reg");
4554 else if (arg->offset.constant < reg_parm_stack_space
4555 && ((arg->offset.constant + arg->size.constant)
4556 > reg_parm_stack_space))
4557 {
4558 rtx size_rtx1 = GEN_INT (reg_parm_stack_space - arg->offset.constant);
4559 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx1,
4560 TYPE_ALIGN (TREE_TYPE (pval)), partial, reg,
4561 excess, argblock, ARGS_SIZE_RTX (arg->offset),
4562 reg_parm_stack_space,
4563 ARGS_SIZE_RTX (arg->alignment_pad));
4564 }
4565 }
4566
4568 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4569 TYPE_ALIGN (TREE_TYPE (pval)), partial, reg, excess,
4570 argblock, ARGS_SIZE_RTX (arg->offset),
4571 reg_parm_stack_space,
4572 ARGS_SIZE_RTX (arg->alignment_pad));
4573
4574 /* Unless this is a partially-in-register argument, the argument is now
4575 in the stack.
4576
4577 ??? Unlike the case above, in which we want the actual
4578 address of the data, so that we can load it directly into a
4579 register, here we want the address of the stack slot, so that
4580 it's properly aligned for word-by-word copying or something
4581 like that. It's not clear that this is always correct. */
4582 if (partial == 0)
4583 arg->value = arg->stack_slot;
4584 }
4585
4586 /* Once we have pushed something, pops can't safely
4587 be deferred during the rest of the arguments. */
4588 NO_DEFER_POP;
4589
4590 /* ANSI doesn't require a sequence point here,
4591 but PCC has one, so this will avoid some problems. */
4592 emit_queue ();
4593
4594 /* Free any temporary slots made in processing this argument. Show
4595 that we might have taken the address of something and pushed that
4596 as an operand. */
4597 preserve_temp_slots (NULL_RTX);
4598 free_temp_slots ();
4599 pop_temp_slots ();
4600
4601 return sibcall_failure;
4602 }