1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "expr.h"
28 #include "function.h"
29 #include "regs.h"
30 #include "insn-flags.h"
31 #include "toplev.h"
32 #include "output.h"
33 #include "tm_p.h"
34 #include "timevar.h"
35
36 #ifndef ACCUMULATE_OUTGOING_ARGS
37 #define ACCUMULATE_OUTGOING_ARGS 0
38 #endif
39
40 /* Supply a default definition for PUSH_ARGS. */
41 #ifndef PUSH_ARGS
42 #ifdef PUSH_ROUNDING
43 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
44 #else
45 #define PUSH_ARGS 0
46 #endif
47 #endif
48
49 #if !defined FUNCTION_OK_FOR_SIBCALL
50 #define FUNCTION_OK_FOR_SIBCALL(DECL) 1
51 #endif
52
53 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
54 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
55 #endif
56
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
59
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
62
63 #ifdef PUSH_ROUNDING
64
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED PUSH_ARGS
67 #endif
68
69 #endif
70
71 #ifndef PUSH_ARGS_REVERSED
72 #define PUSH_ARGS_REVERSED 0
73 #endif
74
75 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
76 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
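/* For example, with PREFERRED_STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8
   (illustrative values for a 64-bit-aligned stack with 8-bit bytes),
   STACK_BYTES evaluates to 8.  */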
77
78 /* Data structure and subroutines used within expand_call. */
79
80 struct arg_data
81 {
82 /* Tree node for this argument. */
83 tree tree_value;
84 /* Mode for value; TYPE_MODE unless promoted. */
85 enum machine_mode mode;
86 /* Current RTL value for argument, or 0 if it isn't precomputed. */
87 rtx value;
89 /* Initially-computed RTL value for argument; only for const functions. */
89 rtx initial_value;
91 /* Register to pass this argument in, 0 if passed on stack, or a
91 PARALLEL if the arg is to be copied into multiple non-contiguous
92 registers. */
93 rtx reg;
94 /* Register to pass this argument in when generating tail call sequence.
95 This is not the same register as for normal calls on machines with
96 register windows. */
97 rtx tail_call_reg;
98 /* If REG was promoted from the actual mode of the argument expression,
99 indicates whether the promotion is sign- or zero-extended. */
100 int unsignedp;
101 /* Number of registers to use. 0 means put the whole arg in registers.
102 Also 0 if not passed in registers. */
103 int partial;
104 /* Non-zero if argument must be passed on stack.
105 Note that some arguments may be passed on the stack
106 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
107 pass_on_stack identifies arguments that *cannot* go in registers. */
108 int pass_on_stack;
109 /* Offset of this argument from beginning of stack-args. */
110 struct args_size offset;
111 /* Similar, but offset to the start of the stack slot. Different from
112 OFFSET if this arg pads downward. */
113 struct args_size slot_offset;
114 /* Size of this argument on the stack, rounded up for any padding it gets;
115 parts of the argument passed in registers do not count.
116 If REG_PARM_STACK_SPACE is defined, then register parms
117 are counted here as well. */
118 struct args_size size;
119 /* Location on the stack at which parameter should be stored. The store
120 has already been done if STACK == VALUE. */
121 rtx stack;
122 /* Location on the stack of the start of this argument slot. This can
123 differ from STACK if this arg pads downward. This location is known
124 to be aligned to FUNCTION_ARG_BOUNDARY. */
125 rtx stack_slot;
126 /* Place that this stack area has been saved, if needed. */
127 rtx save_area;
128 /* If an argument's alignment does not permit direct copying into registers,
129 copy in smaller-sized pieces into pseudos. These are stored in a
130 block pointed to by this field. The next field says how many
131 word-sized pseudos we made. */
132 rtx *aligned_regs;
133 int n_aligned_regs;
134 /* The amount that the stack pointer needs to be adjusted to
135 force alignment for the next argument. */
136 struct args_size alignment_pad;
137 };
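/* Illustrative sketch (not compiled): an argument passed partly in
   registers and partly on the stack might be described with

	args[i].reg	= some hard register;
	args[i].partial = 2;		(two words go in registers)
	args[i].stack	= MEM for the stack part;

   with the remaining fields filled in by
   initialize_argument_information below.  */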
138
139 /* A vector of one char per byte of stack space. A byte is non-zero if
140 the corresponding stack location has been used.
141 This vector is used to prevent a function call within an argument from
142 clobbering any stack already set up. */
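142 
/* For instance (illustrative): after a 4-byte argument has been stored at
   offset 8 of the outgoing argument block, bytes 8 through 11 of this map
   are non-zero, and any nested call must not clobber those bytes.  */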
143 static char *stack_usage_map;
144
145 /* Size of STACK_USAGE_MAP. */
146 static int highest_outgoing_arg_in_use;
147
148 /* stack_arg_under_construction is nonzero when an argument may be
149 initialized with a constructor call (including a C function that
150 returns a BLKmode struct) and expand_call must take special action
151 to make sure the object being constructed does not overlap the
152 argument list for the constructor call. */
153 int stack_arg_under_construction;
154
155 static int calls_function PARAMS ((tree, int));
156 static int calls_function_1 PARAMS ((tree, int));
157
158 /* Nonzero if this is a call to a `const' function. */
159 #define ECF_CONST 1
160 /* Nonzero if this is a call to a `volatile' (i.e. noreturn) function. */
161 #define ECF_NORETURN 2
162 /* Nonzero if this is a call to malloc or a related function. */
163 #define ECF_MALLOC 4
164 /* Nonzero if it is plausible that this is a call to alloca. */
165 #define ECF_MAY_BE_ALLOCA 8
166 /* Nonzero if this is a call to a function that won't throw an exception. */
167 #define ECF_NOTHROW 16
168 /* Nonzero if this is a call to setjmp or a related function. */
169 #define ECF_RETURNS_TWICE 32
170 /* Nonzero if this is a call to `longjmp'. */
171 #define ECF_LONGJMP 64
172 /* Nonzero if this is a syscall that makes a new process in the image of
173 the current one. */
174 #define ECF_FORK_OR_EXEC 128
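/* Nonzero if this call is to be emitted as a sibling (tail) call.  */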
175 #define ECF_SIBCALL 256
176 /* Nonzero if this is a call to a "pure" function (like a const function,
177 but one that may read memory). */
178 #define ECF_PURE 512
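/* These values are bit masks, so several may be combined; e.g. a call
   known to be to `malloc' and known not to throw might plausibly carry
   (ECF_MALLOC | ECF_NOTHROW).  */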
179
180 static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
181 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
182 rtx, int, rtx, int));
183 static void precompute_register_parameters PARAMS ((int,
184 struct arg_data *,
185 int *));
186 static void store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
187 int));
188 static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
189 int));
190 static int finalize_must_preallocate PARAMS ((int, int,
191 struct arg_data *,
192 struct args_size *));
193 static void precompute_arguments PARAMS ((int, int,
194 struct arg_data *));
195 static int compute_argument_block_size PARAMS ((int,
196 struct args_size *,
197 int));
198 static void initialize_argument_information PARAMS ((int,
199 struct arg_data *,
200 struct args_size *,
201 int, tree, tree,
202 CUMULATIVE_ARGS *,
203 int, rtx *, int *,
204 int *, int *));
205 static void compute_argument_addresses PARAMS ((struct arg_data *,
206 rtx, int));
207 static rtx rtx_for_function_call PARAMS ((tree, tree));
208 static void load_register_parameters PARAMS ((struct arg_data *,
209 int, rtx *, int));
210 static int libfunc_nothrow PARAMS ((rtx));
211 static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx, int,
212 enum machine_mode,
213 int, va_list));
214 static int special_function_p PARAMS ((tree, int));
215 static int flags_from_decl_or_type PARAMS ((tree));
216 static rtx try_to_integrate PARAMS ((tree, tree, rtx,
217 int, tree, rtx));
218 static int combine_pending_stack_adjustment_and_call
219 PARAMS ((int, struct args_size *, int));
220
221 #ifdef REG_PARM_STACK_SPACE
222 static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
223 static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
224 #endif
225 \f
226 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
227 `alloca'.
228
229 If WHICH is 0, return 1 if EXP contains a call to any function.
230 Actually, we need only return 1 if evaluating EXP would require pushing
231 arguments on the stack, but that is too difficult to compute, so we just
232 assume any function call might require the stack. */
233
234 static tree calls_function_save_exprs;
235
236 static int
237 calls_function (exp, which)
238 tree exp;
239 int which;
240 {
241 int val;
242
243 calls_function_save_exprs = 0;
244 val = calls_function_1 (exp, which);
245 calls_function_save_exprs = 0;
246 return val;
247 }
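/* Illustrative (hypothetical) uses: calls_function (exp, 0) asks whether
   evaluating EXP may call any function at all, while calls_function (exp, 1)
   asks only about possible calls to alloca:

	if (calls_function (arg, 1))
	  ... ARG may allocate stack space, so be conservative ...  */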
248
249 /* Recursive function to do the work of the above function. */
250
251 static int
252 calls_function_1 (exp, which)
253 tree exp;
254 int which;
255 {
256 register int i;
257 enum tree_code code = TREE_CODE (exp);
258 int class = TREE_CODE_CLASS (code);
259 int length = first_rtl_op (code);
260
261 /* If this code is language-specific, we don't know what it will do. */
262 if ((int) code >= NUM_TREE_CODES)
263 return 1;
264
265 switch (code)
266 {
267 case CALL_EXPR:
268 if (which == 0)
269 return 1;
270 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
271 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
272 == FUNCTION_DECL)
273 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
274 0)
275 & ECF_MAY_BE_ALLOCA))
276 return 1;
277
278 break;
279
280 case SAVE_EXPR:
281 if (SAVE_EXPR_RTL (exp) != 0)
282 return 0;
283 if (value_member (exp, calls_function_save_exprs))
284 return 0;
285 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
286 calls_function_save_exprs);
287 return (TREE_OPERAND (exp, 0) != 0
288 && calls_function_1 (TREE_OPERAND (exp, 0), which));
289
290 case BLOCK:
291 {
292 register tree local;
293 register tree subblock;
294
295 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
296 if (DECL_INITIAL (local) != 0
297 && calls_function_1 (DECL_INITIAL (local), which))
298 return 1;
299
300 for (subblock = BLOCK_SUBBLOCKS (exp);
301 subblock;
302 subblock = TREE_CHAIN (subblock))
303 if (calls_function_1 (subblock, which))
304 return 1;
305 }
306 return 0;
307
308 case TREE_LIST:
309 for (; exp != 0; exp = TREE_CHAIN (exp))
310 if (calls_function_1 (TREE_VALUE (exp), which))
311 return 1;
312 return 0;
313
314 default:
315 break;
316 }
317
318 /* Only expressions, references, and blocks can contain calls. */
319 if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
320 return 0;
321
322 for (i = 0; i < length; i++)
323 if (TREE_OPERAND (exp, i) != 0
324 && calls_function_1 (TREE_OPERAND (exp, i), which))
325 return 1;
326
327 return 0;
328 }
329 \f
330 /* Force FUNEXP into a form suitable for the address of a CALL,
331 and return that as an rtx. Also load the static chain register
332 if FNDECL is a nested function.
333
334 CALL_FUSAGE points to a variable holding the prospective
335 CALL_INSN_FUNCTION_USAGE information. */
336
337 rtx
338 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
339 rtx funexp;
340 tree fndecl;
341 rtx *call_fusage;
342 int reg_parm_seen;
343 {
344 rtx static_chain_value = 0;
345
346 funexp = protect_from_queue (funexp, 0);
347
348 if (fndecl != 0)
349 /* Get possible static chain value for nested function in C. */
350 static_chain_value = lookup_static_chain (fndecl);
351
352 /* Make a valid memory address and copy constants through pseudo-regs,
353 but not for a constant address if -fno-function-cse. */
354 if (GET_CODE (funexp) != SYMBOL_REF)
355 /* If we are using registers for parameters, force the
356 function address into a register now. */
357 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
358 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
359 : memory_address (FUNCTION_MODE, funexp));
360 else
361 {
362 #ifndef NO_FUNCTION_CSE
363 if (optimize && ! flag_no_function_cse)
364 #ifdef NO_RECURSIVE_FUNCTION_CSE
365 if (fndecl != current_function_decl)
366 #endif
367 funexp = force_reg (Pmode, funexp);
368 #endif
369 }
370
371 if (static_chain_value != 0)
372 {
373 emit_move_insn (static_chain_rtx, static_chain_value);
374
375 if (GET_CODE (static_chain_rtx) == REG)
376 use_reg (call_fusage, static_chain_rtx);
377 }
378
379 return funexp;
380 }
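/* A sketch of the intended calling sequence (illustrative only, not a
   definitive call site):

	funexp = rtx_for_function_call (fndecl, exp);
	...
	funexp = prepare_call_address (funexp, fndecl, &call_fusage,
				       reg_parm_seen);  */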
381
382 /* Generate instructions to call function FUNEXP,
383 and optionally pop the results.
384 The CALL_INSN is the first insn generated.
385
386 FNDECL is the declaration node of the function. This is given to the
387 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
388
389 FUNTYPE is the data type of the function. This is given to the macro
390 RETURN_POPS_ARGS to determine whether this function pops its own args.
391 We used to allow an identifier for library functions, but that doesn't
392 work when the return type is an aggregate type and the calling convention
393 says that the pointer to this aggregate is to be popped by the callee.
394
395 STACK_SIZE is the number of bytes of arguments on the stack,
396 ROUNDED_STACK_SIZE is that number rounded up to
397 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
398 both to put into the call insn and to generate explicit popping
399 code if necessary.
400
401 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
402 It is zero if this call doesn't want a structure value.
403
404 NEXT_ARG_REG is the rtx that results from executing
405 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
406 just after all the args have had their registers assigned.
407 This could be whatever you like, but normally it is the first
408 arg-register beyond those used for args in this call,
409 or 0 if all the arg-registers are used in this call.
410 It is passed on to `gen_call' so you can put this info in the call insn.
411
412 VALREG is a hard register in which a value is returned,
413 or 0 if the call does not return a value.
414
415 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
416 the args to this call were processed.
417 We restore `inhibit_defer_pop' to that value.
418
419 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
420 denote registers used by the called function. */
421
422 static void
423 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
424 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
425 call_fusage, ecf_flags)
426 rtx funexp;
427 tree fndecl ATTRIBUTE_UNUSED;
428 tree funtype ATTRIBUTE_UNUSED;
429 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
430 HOST_WIDE_INT rounded_stack_size;
431 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
432 rtx next_arg_reg;
433 rtx valreg;
434 int old_inhibit_defer_pop;
435 rtx call_fusage;
436 int ecf_flags;
437 {
438 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
439 rtx call_insn;
440 int already_popped = 0;
441 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
442 #if defined (HAVE_call) && defined (HAVE_call_value)
443 rtx struct_value_size_rtx;
444 struct_value_size_rtx = GEN_INT (struct_value_size);
445 #endif
446
447 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
448 need be done; nor do we want to load it into a register as an optimization,
449 because prepare_call_address already did that if it should be done. */
450 if (GET_CODE (funexp) != SYMBOL_REF)
451 funexp = memory_address (FUNCTION_MODE, funexp);
452
453 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
454 if ((ecf_flags & ECF_SIBCALL)
455 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
456 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
457 || stack_size == 0))
458 {
459 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
460 rtx pat;
461
462 /* If this subroutine pops its own args, record that in the call insn
463 if possible, for the sake of frame pointer elimination. */
464
465 if (valreg)
466 pat = GEN_SIBCALL_VALUE_POP (valreg,
467 gen_rtx_MEM (FUNCTION_MODE, funexp),
468 rounded_stack_size_rtx, next_arg_reg,
469 n_pop);
470 else
471 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
472 rounded_stack_size_rtx, next_arg_reg, n_pop);
473
474 emit_call_insn (pat);
475 already_popped = 1;
476 }
477 else
478 #endif
479
480 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
481 /* If the target has "call" or "call_value" insns, then prefer them
482 if no arguments are actually popped. If the target does not have
483 "call" or "call_value" insns, then we must use the popping versions
484 even if the call has no arguments to pop. */
485 #if defined (HAVE_call) && defined (HAVE_call_value)
486 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
487 && n_popped > 0)
488 #else
489 if (HAVE_call_pop && HAVE_call_value_pop)
490 #endif
491 {
492 rtx n_pop = GEN_INT (n_popped);
493 rtx pat;
494
495 /* If this subroutine pops its own args, record that in the call insn
496 if possible, for the sake of frame pointer elimination. */
497
498 if (valreg)
499 pat = GEN_CALL_VALUE_POP (valreg,
500 gen_rtx_MEM (FUNCTION_MODE, funexp),
501 rounded_stack_size_rtx, next_arg_reg, n_pop);
502 else
503 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
504 rounded_stack_size_rtx, next_arg_reg, n_pop);
505
506 emit_call_insn (pat);
507 already_popped = 1;
508 }
509 else
510 #endif
511
512 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
513 if ((ecf_flags & ECF_SIBCALL)
514 && HAVE_sibcall && HAVE_sibcall_value)
515 {
516 if (valreg)
517 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
518 gen_rtx_MEM (FUNCTION_MODE, funexp),
519 rounded_stack_size_rtx,
520 next_arg_reg, NULL_RTX));
521 else
522 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
523 rounded_stack_size_rtx, next_arg_reg,
524 struct_value_size_rtx));
525 }
526 else
527 #endif
528
529 #if defined (HAVE_call) && defined (HAVE_call_value)
530 if (HAVE_call && HAVE_call_value)
531 {
532 if (valreg)
533 emit_call_insn (GEN_CALL_VALUE (valreg,
534 gen_rtx_MEM (FUNCTION_MODE, funexp),
535 rounded_stack_size_rtx, next_arg_reg,
536 NULL_RTX));
537 else
538 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
539 rounded_stack_size_rtx, next_arg_reg,
540 struct_value_size_rtx));
541 }
542 else
543 #endif
544 abort ();
545
546 /* Find the CALL insn we just emitted. */
547 for (call_insn = get_last_insn ();
548 call_insn && GET_CODE (call_insn) != CALL_INSN;
549 call_insn = PREV_INSN (call_insn))
550 ;
551
552 if (! call_insn)
553 abort ();
554
555 /* Mark memory as used for "pure" function call. */
556 if (ecf_flags & ECF_PURE)
557 {
558 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
559 gen_rtx_USE (VOIDmode,
560 gen_rtx_MEM (BLKmode,
561 gen_rtx_SCRATCH (VOIDmode))), call_fusage);
562 }
563
564 /* Put the register usage information on the CALL. If there is already
565 some usage information, put ours at the end. */
566 if (CALL_INSN_FUNCTION_USAGE (call_insn))
567 {
568 rtx link;
569
570 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
571 link = XEXP (link, 1))
572 ;
573
574 XEXP (link, 1) = call_fusage;
575 }
576 else
577 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
578
579 /* If this is a const call, then set the insn's unchanging bit. */
580 if (ecf_flags & (ECF_CONST | ECF_PURE))
581 CONST_CALL_P (call_insn) = 1;
582
583 /* If this call can't throw, attach a REG_EH_REGION reg note to that
584 effect. */
585 if (ecf_flags & ECF_NOTHROW)
586 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
587 REG_NOTES (call_insn));
588
589 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
590
591 /* Restore this now, so that we do defer pops for this call's args
592 if the context of the call as a whole permits. */
593 inhibit_defer_pop = old_inhibit_defer_pop;
594
595 if (n_popped > 0)
596 {
597 if (!already_popped)
598 CALL_INSN_FUNCTION_USAGE (call_insn)
599 = gen_rtx_EXPR_LIST (VOIDmode,
600 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
601 CALL_INSN_FUNCTION_USAGE (call_insn));
602 rounded_stack_size -= n_popped;
603 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
604 stack_pointer_delta -= n_popped;
605 }
606
607 if (!ACCUMULATE_OUTGOING_ARGS)
608 {
609 /* If returning from the subroutine does not automatically pop the args,
610 we need an instruction to pop them sooner or later.
611 Perhaps do it now; perhaps just record how much space to pop later.
612
613 If returning from the subroutine does pop the args, indicate that the
614 stack pointer will be changed. */
615
616 if (rounded_stack_size != 0)
617 {
618 if (flag_defer_pop && inhibit_defer_pop == 0
619 && !(ecf_flags & (ECF_CONST | ECF_PURE)))
620 pending_stack_adjust += rounded_stack_size;
621 else
622 adjust_stack (rounded_stack_size_rtx);
623 }
624 }
625 /* When we accumulate outgoing args, we must avoid any stack manipulations.
626 Restore the stack pointer to its original value now. Usually
627 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
628 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
629 popping variants of functions exist as well.
630
631 ??? We may optimize similarly to defer_pop above, but it is
632 probably not worthwhile.
633
634 ??? It will be worthwhile to enable combine_stack_adjustments even for
635 such machines. */
636 else if (n_popped)
637 anti_adjust_stack (GEN_INT (n_popped));
638 }
639
640 /* Determine if the function identified by NAME and FNDECL is one with
641 special properties we wish to know about.
642
643 For example, if the function might return more than one time (setjmp), then
644 set RETURNS_TWICE to a nonzero value.
645
646 Similarly set LONGJMP if the function is in the longjmp family.
647
648 Set MALLOC for any of the standard memory allocation functions which
649 allocate from the heap.
650
651 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
652 space from the stack such as alloca. */
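/* For example, per the name tests below: a file-scope `extern'
   declaration named "setjmp" or "vfork" yields ECF_RETURNS_TWICE,
   "longjmp" yields ECF_LONGJMP, "alloca" yields ECF_MAY_BE_ALLOCA,
   and "fork" or the exec family yields ECF_FORK_OR_EXEC.  */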
653
654 static int
655 special_function_p (fndecl, flags)
656 tree fndecl;
657 int flags;
658 {
659 if (! (flags & ECF_MALLOC)
660 && fndecl && DECL_NAME (fndecl)
661 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
662 /* Exclude functions not at the file scope, or not `extern',
663 since they are not the magic functions we would otherwise
664 think they are. */
665 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
666 {
667 char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
668 char *tname = name;
669
670 /* We assume that alloca will always be called by name. It
671 makes no sense to pass it as a pointer-to-function to
672 anything that does not understand its behavior. */
673 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
674 && name[0] == 'a'
675 && ! strcmp (name, "alloca"))
676 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
677 && name[0] == '_'
678 && ! strcmp (name, "__builtin_alloca"))))
679 flags |= ECF_MAY_BE_ALLOCA;
680
681 /* Disregard prefix _, __ or __x. */
682 if (name[0] == '_')
683 {
684 if (name[1] == '_' && name[2] == 'x')
685 tname += 3;
686 else if (name[1] == '_')
687 tname += 2;
688 else
689 tname += 1;
690 }
691
692 if (tname[0] == 's')
693 {
694 if ((tname[1] == 'e'
695 && (! strcmp (tname, "setjmp")
696 || ! strcmp (tname, "setjmp_syscall")))
697 || (tname[1] == 'i'
698 && ! strcmp (tname, "sigsetjmp"))
699 || (tname[1] == 'a'
700 && ! strcmp (tname, "savectx")))
701 flags |= ECF_RETURNS_TWICE;
702
703 if (tname[1] == 'i'
704 && ! strcmp (tname, "siglongjmp"))
705 flags |= ECF_LONGJMP;
706 }
707 else if ((tname[0] == 'q' && tname[1] == 's'
708 && ! strcmp (tname, "qsetjmp"))
709 || (tname[0] == 'v' && tname[1] == 'f'
710 && ! strcmp (tname, "vfork")))
711 flags |= ECF_RETURNS_TWICE;
712
713 else if (tname[0] == 'l' && tname[1] == 'o'
714 && ! strcmp (tname, "longjmp"))
715 flags |= ECF_LONGJMP;
716
717 else if ((tname[0] == 'f' && tname[1] == 'o'
718 && ! strcmp (tname, "fork"))
719 /* Linux specific: __clone. Check NAME to insist on the
720 leading underscores, to avoid polluting the ISO / POSIX
721 namespace. */
722 || (name[0] == '_' && name[1] == '_'
723 && ! strcmp (tname, "clone"))
724 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
725 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
726 && (tname[5] == '\0'
727 || ((tname[5] == 'p' || tname[5] == 'e')
728 && tname[6] == '\0'))))
729 flags |= ECF_FORK_OR_EXEC;
730
731 /* Do not add any more malloc-like functions to this list,
732 instead mark them as malloc functions using the malloc attribute.
733 Note, realloc is not suitable for attribute malloc since
734 it may return the same address across multiple calls.
735 C++ operator new is not suitable because it is not required
736 to return a unique pointer; indeed, the standard placement new
737 just returns its argument. */
738 else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
739 && (! strcmp (tname, "malloc")
740 || ! strcmp (tname, "calloc")
741 || ! strcmp (tname, "strdup")))
742 flags |= ECF_MALLOC;
743 }
744 return flags;
745 }
746
747 /* Return nonzero if the tree represents a call to setjmp or a related function that can return twice. */
748 int
749 setjmp_call_p (fndecl)
750 tree fndecl;
751 {
752 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
753 }
754
755 /* Detect flags (function attributes) from the function decl or type node. */
756 static int
757 flags_from_decl_or_type (exp)
758 tree exp;
759 {
760 int flags = 0;
761 /* ??? We can't set IS_MALLOC for function types? */
762 if (DECL_P (exp))
763 {
764 /* The function exp may have the `malloc' attribute. */
765 if (DECL_IS_MALLOC (exp))
766 flags |= ECF_MALLOC;
767
768 /* The function exp may have the `pure' attribute. */
769 if (DECL_IS_PURE (exp))
770 flags |= ECF_PURE;
771
772 if (TREE_NOTHROW (exp))
773 flags |= ECF_NOTHROW;
774 }
775
776 if (TREE_READONLY (exp) && !TREE_THIS_VOLATILE (exp))
777 flags |= ECF_CONST;
778
779 if (TREE_THIS_VOLATILE (exp))
780 flags |= ECF_NORETURN;
781
782 return flags;
783 }
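/* For instance, a declaration carrying the GNU `malloc' attribute is
   given ECF_MALLOC here, and a declaration that is TREE_READONLY but
   not TREE_THIS_VOLATILE (a `const' function) is given ECF_CONST.  */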
784
785
786 /* Precompute all register parameters as described by ARGS, storing values
787 into fields within the ARGS array.
788
789 NUM_ACTUALS indicates the total number of elements in the ARGS array.
790
791 Set REG_PARM_SEEN if we encounter a register parameter. */
792
793 static void
794 precompute_register_parameters (num_actuals, args, reg_parm_seen)
795 int num_actuals;
796 struct arg_data *args;
797 int *reg_parm_seen;
798 {
799 int i;
800
801 *reg_parm_seen = 0;
802
803 for (i = 0; i < num_actuals; i++)
804 if (args[i].reg != 0 && ! args[i].pass_on_stack)
805 {
806 *reg_parm_seen = 1;
807
808 if (args[i].value == 0)
809 {
810 push_temp_slots ();
811 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
812 VOIDmode, 0);
813 preserve_temp_slots (args[i].value);
814 pop_temp_slots ();
815
816 /* ANSI doesn't require a sequence point here,
817 but PCC has one, so this will avoid some problems. */
818 emit_queue ();
819 }
820
821 /* If we are to promote the function arg to a wider mode,
822 do it now. */
823
824 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
825 args[i].value
826 = convert_modes (args[i].mode,
827 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
828 args[i].value, args[i].unsignedp);
829
830 /* If the value is expensive, and we are inside an appropriately
831 short loop, put the value into a pseudo and then put the pseudo
832 into the hard reg.
833
834 For small register classes, also do this if this call uses
835 register parameters. This is to avoid reload conflicts while
836 loading the parameter registers. */
837
838 if ((! (GET_CODE (args[i].value) == REG
839 || (GET_CODE (args[i].value) == SUBREG
840 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
841 && args[i].mode != BLKmode
842 && rtx_cost (args[i].value, SET) > 2
843 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
844 || preserve_subexpressions_p ()))
845 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
846 }
847 }
848
849 #ifdef REG_PARM_STACK_SPACE
850
851 /* The argument list is the property of the called routine and it
852 may clobber it. If the fixed area has been used for previous
853 parameters, we must save and restore it. */
854
855 static rtx
856 save_fixed_argument_area (reg_parm_stack_space, argblock,
857 low_to_save, high_to_save)
858 int reg_parm_stack_space;
859 rtx argblock;
860 int *low_to_save;
861 int *high_to_save;
862 {
863 int i;
864 rtx save_area = NULL_RTX;
865
866 /* Compute the boundary of the area that needs to be saved, if any. */
867 #ifdef ARGS_GROW_DOWNWARD
868 for (i = 0; i < reg_parm_stack_space + 1; i++)
869 #else
870 for (i = 0; i < reg_parm_stack_space; i++)
871 #endif
872 {
873 if (i >= highest_outgoing_arg_in_use
874 || stack_usage_map[i] == 0)
875 continue;
876
877 if (*low_to_save == -1)
878 *low_to_save = i;
879
880 *high_to_save = i;
881 }
882
883 if (*low_to_save >= 0)
884 {
885 int num_to_save = *high_to_save - *low_to_save + 1;
886 enum machine_mode save_mode
887 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
888 rtx stack_area;
889
890 /* If we don't have the required alignment, we must do this in BLKmode. */
891 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
892 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
893 save_mode = BLKmode;
894
895 #ifdef ARGS_GROW_DOWNWARD
896 stack_area
897 = gen_rtx_MEM (save_mode,
898 memory_address (save_mode,
899 plus_constant (argblock,
900 - *high_to_save)));
901 #else
902 stack_area = gen_rtx_MEM (save_mode,
903 memory_address (save_mode,
904 plus_constant (argblock,
905 *low_to_save)));
906 #endif
907 if (save_mode == BLKmode)
908 {
909 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
910 /* Cannot use emit_block_move here because it may be implemented via a
911 library call, which would reenter this code and cause fatal infinite
912 recursion. */
913 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
914 PARM_BOUNDARY);
915 }
916 else
917 {
918 save_area = gen_reg_rtx (save_mode);
919 emit_move_insn (save_area, stack_area);
920 }
921 }
922 return save_area;
923 }
924
925 static void
926 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
927 rtx save_area;
928 rtx argblock;
929 int high_to_save;
930 int low_to_save;
931 {
932 enum machine_mode save_mode = GET_MODE (save_area);
933 #ifdef ARGS_GROW_DOWNWARD
934 rtx stack_area
935 = gen_rtx_MEM (save_mode,
936 memory_address (save_mode,
937 plus_constant (argblock,
938 - high_to_save)));
939 #else
940 rtx stack_area
941 = gen_rtx_MEM (save_mode,
942 memory_address (save_mode,
943 plus_constant (argblock,
944 low_to_save)));
945 #endif
946
947 if (save_mode != BLKmode)
948 emit_move_insn (stack_area, save_area);
949 else
950 /* Cannot use emit_block_move here because it may be implemented via a
951 library call, which would reenter this code and cause fatal infinite
952 recursion. */
953 move_by_pieces (stack_area, validize_mem (save_area),
954 high_to_save - low_to_save + 1, PARM_BOUNDARY);
955 }
956 #endif
957
958 /* For any elements in ARGS that refer to parameters passed in
959 registers, but not in memory, and whose alignment does not permit a
960 direct copy into registers, copy the values into a group of pseudos
961 which we will later copy into the appropriate hard registers.
962
963 Pseudos for each unaligned argument will be stored into the array
964 args[argnum].aligned_regs. The caller is responsible for deallocating
965 the aligned_regs array if it is nonzero. */
966
967 static void
968 store_unaligned_arguments_into_pseudos (args, num_actuals)
969 struct arg_data *args;
970 int num_actuals;
971 {
972 int i, j;
973
974 for (i = 0; i < num_actuals; i++)
975 if (args[i].reg != 0 && ! args[i].pass_on_stack
976 && args[i].mode == BLKmode
977 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
978 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
979 {
980 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
981 int big_endian_correction = 0;
982
983 args[i].n_aligned_regs
984 = args[i].partial ? args[i].partial
985 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
986
987 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
988 * args[i].n_aligned_regs);
989
990 /* Structures smaller than a word are aligned to the least
991 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
992 this means we must skip the empty high order bytes when
993 calculating the bit offset. */
994 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
995 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
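	  /* E.g. with bytes == 3, BITS_PER_UNIT == 8 and BITS_PER_WORD == 32
	     (illustrative figures), the correction is 32 - 24 = 8 bits.  */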
996
997 for (j = 0; j < args[i].n_aligned_regs; j++)
998 {
999 rtx reg = gen_reg_rtx (word_mode);
1000 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1001 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1002 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1003
1004 args[i].aligned_regs[j] = reg;
1005
1006 /* There is no need to restrict this code to loading items
1007 in TYPE_ALIGN sized hunks. The bitfield instructions can
1008 load up entire word sized registers efficiently.
1009
1010 ??? This may not be needed anymore.
1011 We used to emit a clobber here but that doesn't let later
1012 passes optimize the instructions we emit. By storing 0 into
1013 the register later passes know the first AND to zero out the
1014 bitfield being set in the register is unnecessary. The store
1015 of 0 will be deleted as will at least the first AND. */
1016
1017 emit_move_insn (reg, const0_rtx);
1018
1019 bytes -= bitsize / BITS_PER_UNIT;
1020 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
1021 extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1022 word_mode, word_mode, bitalign,
1023 BITS_PER_WORD),
1024 bitalign, BITS_PER_WORD);
1025 }
1026 }
1027 }
1028
1029 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1030 ACTPARMS.
1031
1032 NUM_ACTUALS is the total number of parameters.
1033
1034 N_NAMED_ARGS is the total number of named arguments.
1035
1036 FNDECL is the tree node for the target of this call (if known).
1037
1038 ARGS_SO_FAR holds state needed by the target to know where to place
1039 the next argument.
1040
1041 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1042 for arguments which are passed in registers.
1043
1044 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1045 and may be modified by this routine.
1046
1047 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1048 flags which may be modified by this routine. */
1049
1050 static void
1051 initialize_argument_information (num_actuals, args, args_size, n_named_args,
1052 actparms, fndecl, args_so_far,
1053 reg_parm_stack_space, old_stack_level,
1054 old_pending_adj, must_preallocate,
1055 ecf_flags)
1056 int num_actuals ATTRIBUTE_UNUSED;
1057 struct arg_data *args;
1058 struct args_size *args_size;
1059 int n_named_args ATTRIBUTE_UNUSED;
1060 tree actparms;
1061 tree fndecl;
1062 CUMULATIVE_ARGS *args_so_far;
1063 int reg_parm_stack_space;
1064 rtx *old_stack_level;
1065 int *old_pending_adj;
1066 int *must_preallocate;
1067 int *ecf_flags;
1068 {
1069 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1070 int inc;
1071
1072 /* Count arg position in order args appear. */
1073 int argpos;
1074
1075 struct args_size alignment_pad;
1076 int i;
1077 tree p;
1078
1079 args_size->constant = 0;
1080 args_size->var = 0;
1081
1082 /* In this loop, we consider args in the order they are written.
1083 We fill up ARGS from the front or from the back if necessary
1084 so that in any case the first arg to be pushed ends up at the front. */
1085
1086 if (PUSH_ARGS_REVERSED)
1087 {
1088 i = num_actuals - 1, inc = -1;
1089 /* In this case, must reverse order of args
1090 so that we compute and push the last arg first. */
1091 }
1092 else
1093 {
1094 i = 0, inc = 1;
1095 }
1096
1097 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1098 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1099 {
1100 tree type = TREE_TYPE (TREE_VALUE (p));
1101 int unsignedp;
1102 enum machine_mode mode;
1103
1104 args[i].tree_value = TREE_VALUE (p);
1105
1106 /* Replace erroneous argument with constant zero. */
1107 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1108 args[i].tree_value = integer_zero_node, type = integer_type_node;
1109
1110 /* If TYPE is a transparent union, pass things the way we would
1111 pass the first field of the union. We have already verified that
1112 the modes are the same. */
1113 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1114 type = TREE_TYPE (TYPE_FIELDS (type));
1115
1116 /* Decide where to pass this arg.
1117
1118 args[i].reg is nonzero if all or part is passed in registers.
1119
1120 args[i].partial is nonzero if part but not all is passed in registers,
1121 and the exact value says how many words are passed in registers.
1122
1123 args[i].pass_on_stack is nonzero if the argument must at least be
1124 computed on the stack. It may then be loaded back into registers
1125 if args[i].reg is nonzero.
1126
1127 These decisions are driven by the FUNCTION_... macros and must agree
1128 with those made by function.c. */
1129
1130 /* See if this argument should be passed by invisible reference. */
1131 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1132 && contains_placeholder_p (TYPE_SIZE (type)))
1133 || TREE_ADDRESSABLE (type)
1134 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1135 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1136 type, argpos < n_named_args)
1137 #endif
1138 )
1139 {
1140 /* If we're compiling a thunk, pass through invisible
1141 references instead of making a copy. */
1142 if (current_function_is_thunk
1143 #ifdef FUNCTION_ARG_CALLEE_COPIES
1144 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1145 type, argpos < n_named_args)
1146 /* If it's in a register, we must make a copy of it too. */
1147 /* ??? Is this a sufficient test? Is there a better one? */
1148 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1149 && REG_P (DECL_RTL (args[i].tree_value)))
1150 && ! TREE_ADDRESSABLE (type))
1151 #endif
1152 )
1153 {
1154 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1155 new object from the argument. If we are passing by
1156 invisible reference, the callee will do that for us, so we
1157 can strip off the TARGET_EXPR. This is not always safe,
1158 but it is safe in the only case where this is a useful
1159 optimization; namely, when the argument is a plain object.
1160 In that case, the frontend is just asking the backend to
1161 make a bitwise copy of the argument. */
1162
1163 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1164 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1165 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1166 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1167
1168 args[i].tree_value = build1 (ADDR_EXPR,
1169 build_pointer_type (type),
1170 args[i].tree_value);
1171 type = build_pointer_type (type);
1172 }
1173 else
1174 {
1175 /* We make a copy of the object and pass the address to the
1176 function being called. */
1177 rtx copy;
1178
1179 if (!COMPLETE_TYPE_P (type)
1180 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1181 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1182 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1183 STACK_CHECK_MAX_VAR_SIZE))))
1184 {
1185 /* This is a variable-sized object. Make space on the stack
1186 for it. */
1187 rtx size_rtx = expr_size (TREE_VALUE (p));
1188
1189 if (*old_stack_level == 0)
1190 {
1191 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1192 *old_pending_adj = pending_stack_adjust;
1193 pending_stack_adjust = 0;
1194 }
1195
1196 copy = gen_rtx_MEM (BLKmode,
1197 allocate_dynamic_stack_space
1198 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1199 set_mem_attributes (copy, type, 1);
1200 }
1201 else
1202 copy = assign_temp (type, 0, 1, 0);
1203
1204 store_expr (args[i].tree_value, copy, 0);
1205 *ecf_flags &= ~(ECF_CONST | ECF_PURE);
1206
1207 args[i].tree_value = build1 (ADDR_EXPR,
1208 build_pointer_type (type),
1209 make_tree (type, copy));
1210 type = build_pointer_type (type);
1211 }
1212 }
1213
1214 mode = TYPE_MODE (type);
1215 unsignedp = TREE_UNSIGNED (type);
1216
1217 #ifdef PROMOTE_FUNCTION_ARGS
1218 mode = promote_mode (type, mode, &unsignedp, 1);
1219 #endif
1220
1221 args[i].unsignedp = unsignedp;
1222 args[i].mode = mode;
1223
1224 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1225 argpos < n_named_args);
1226 #ifdef FUNCTION_INCOMING_ARG
1227 /* If this is a sibling call and the machine has register windows, the
1228 register window has to be unwound before calling the routine, so
1229 arguments have to go into the incoming registers. */
1230 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1231 argpos < n_named_args);
1232 #else
1233 args[i].tail_call_reg = args[i].reg;
1234 #endif
1235
1236 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1237 if (args[i].reg)
1238 args[i].partial
1239 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1240 argpos < n_named_args);
1241 #endif
1242
1243 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1244
1245 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1246 it means that we are to pass this arg in the register(s) designated
1247 by the PARALLEL, but also to pass it in the stack. */
1248 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1249 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1250 args[i].pass_on_stack = 1;
1251
1252 /* If this is an addressable type, we must preallocate the stack
1253 since we must evaluate the object into its final location.
1254
1255 If this is to be passed in both registers and the stack, it is simpler
1256 to preallocate. */
1257 if (TREE_ADDRESSABLE (type)
1258 || (args[i].pass_on_stack && args[i].reg != 0))
1259 *must_preallocate = 1;
1260
1261 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1262 we cannot consider this function call constant. */
1263 if (TREE_ADDRESSABLE (type))
1264 *ecf_flags &= ~(ECF_CONST | ECF_PURE);
1265
1266 /* Compute the stack-size of this argument. */
1267 if (args[i].reg == 0 || args[i].partial != 0
1268 || reg_parm_stack_space > 0
1269 || args[i].pass_on_stack)
1270 locate_and_pad_parm (mode, type,
1271 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1272 1,
1273 #else
1274 args[i].reg != 0,
1275 #endif
1276 fndecl, args_size, &args[i].offset,
1277 &args[i].size, &alignment_pad);
1278
1279 #ifndef ARGS_GROW_DOWNWARD
1280 args[i].slot_offset = *args_size;
1281 #endif
1282
1283 args[i].alignment_pad = alignment_pad;
1284
1285 /* If a part of the arg was put into registers,
1286 don't include that part in the amount pushed. */
1287 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1288 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1289 / (PARM_BOUNDARY / BITS_PER_UNIT)
1290 * (PARM_BOUNDARY / BITS_PER_UNIT));
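	/* E.g. with args[i].partial == 1, UNITS_PER_WORD == 4, PARM_BOUNDARY == 32
	   and BITS_PER_UNIT == 8 (illustrative figures), 4 bytes are removed
	   from the amount counted as pushed.  */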
1291
1292 /* Update ARGS_SIZE, the total stack space for args so far. */
1293
1294 args_size->constant += args[i].size.constant;
1295 if (args[i].size.var)
1296 {
1297 ADD_PARM_SIZE (*args_size, args[i].size.var);
1298 }
1299
1300 /* Since the slot offset points to the bottom of the slot,
1301 we must record it after incrementing if the args grow down. */
1302 #ifdef ARGS_GROW_DOWNWARD
1303 args[i].slot_offset = *args_size;
1304
1305 args[i].slot_offset.constant = -args_size->constant;
1306 if (args_size->var)
1307 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1308 #endif
1309
1310 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1311 have been used, etc. */
1312
1313 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1314 argpos < n_named_args);
1315 }
1316 }
1317
1318 /* Update ARGS_SIZE to contain the total size for the argument block.
1319 Return the original constant component of the argument block's size.
1320
1321 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1322 for arguments passed in registers. */
1323
1324 static int
1325 compute_argument_block_size (reg_parm_stack_space, args_size,
1326 preferred_stack_boundary)
1327 int reg_parm_stack_space;
1328 struct args_size *args_size;
1329 int preferred_stack_boundary ATTRIBUTE_UNUSED;
1330 {
1331 int unadjusted_args_size = args_size->constant;
1332
1333 /* In accumulate-outgoing-args mode we don't need to align, since the frame
1334 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1335 backends from generating misaligned frame sizes. */
1336 #ifdef STACK_BOUNDARY
1337 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1338 preferred_stack_boundary = STACK_BOUNDARY;
1339 #endif
1340
1341 /* Compute the actual size of the argument block required. The variable
1342 and constant sizes must be combined, the size may have to be rounded,
1343 and there may be a minimum required size. */
1344
1345 if (args_size->var)
1346 {
1347 args_size->var = ARGS_SIZE_TREE (*args_size);
1348 args_size->constant = 0;
1349
1350 #ifdef PREFERRED_STACK_BOUNDARY
1351 preferred_stack_boundary /= BITS_PER_UNIT;
1352 if (preferred_stack_boundary > 1)
1353 {
1354 /* We don't handle this case yet. To handle it correctly we have
1355 to add the delta, round, and subtract the delta.
1356 Currently no machine description requires this support. */
1357 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1358 abort ();
1359 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1360 }
1361 #endif
1362
1363 if (reg_parm_stack_space > 0)
1364 {
1365 args_size->var
1366 = size_binop (MAX_EXPR, args_size->var,
1367 ssize_int (reg_parm_stack_space));
1368
1369 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1370 /* The area corresponding to register parameters is not to count in
1371 the size of the block we need. So make the adjustment. */
1372 args_size->var
1373 = size_binop (MINUS_EXPR, args_size->var,
1374 ssize_int (reg_parm_stack_space));
1375 #endif
1376 }
1377 }
1378 else
1379 {
1380 #ifdef PREFERRED_STACK_BOUNDARY
1381 preferred_stack_boundary /= BITS_PER_UNIT;
1382 if (preferred_stack_boundary < 1)
1383 preferred_stack_boundary = 1;
1384 args_size->constant = (((args_size->constant
1385 + stack_pointer_delta
1386 + preferred_stack_boundary - 1)
1387 / preferred_stack_boundary
1388 * preferred_stack_boundary)
1389 - stack_pointer_delta);
1390 #endif
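	/* E.g. with args_size->constant == 20, stack_pointer_delta == 4 and a
	   preferred boundary of 16 bytes (illustrative figures), 20 + 4 = 24 is
	   rounded up to 32 and the delta removed again, giving 28.  */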
1391
1392 args_size->constant = MAX (args_size->constant,
1393 reg_parm_stack_space);
1394
1395 #ifdef MAYBE_REG_PARM_STACK_SPACE
1396 if (reg_parm_stack_space == 0)
1397 args_size->constant = 0;
1398 #endif
1399
1400 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1401 args_size->constant -= reg_parm_stack_space;
1402 #endif
1403 }
1404 return unadjusted_args_size;
1405 }
1406
1407 /* Precompute parameters as needed for a function call.
1408
1409 FLAGS is mask of ECF_* constants.
1410
1411 NUM_ACTUALS is the number of arguments.
1412
1413 ARGS is an array containing information for each argument; this routine
1414 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1415 */
1416
1417 static void
1418 precompute_arguments (flags, num_actuals, args)
1419 int flags;
1420 int num_actuals;
1421 struct arg_data *args;
1422 {
1423 int i;
1424
1425 /* If this function call is cse'able, precompute all the parameters.
1426 Note that if the parameter is constructed into a temporary, this will
1427 cause an additional copy because the parameter will be constructed
1428 into a temporary location and then copied into the outgoing arguments.
1429 If a parameter contains a call to alloca and this function uses the
1430 stack, precompute the parameter. */
1431
1432 /* If we preallocated the stack space, and some arguments must be passed
1433 on the stack, then we must precompute any parameter which contains a
1434 function call which will store arguments on the stack.
1435 Otherwise, evaluating the parameter may clobber previous parameters
1436 which have already been stored into the stack. (We have code to avoid
1437 such a case by saving the outgoing stack arguments, but it results in
1438 worse code.) */
1439
1440 for (i = 0; i < num_actuals; i++)
1441 if ((flags & (ECF_CONST | ECF_PURE))
1442 || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1443 {
1444 /* If this is an addressable type, we cannot pre-evaluate it. */
1445 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1446 abort ();
1447
1448 push_temp_slots ();
1449
1450 args[i].value
1451 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1452
1453 preserve_temp_slots (args[i].value);
1454 pop_temp_slots ();
1455
1456 /* ANSI doesn't require a sequence point here,
1457 but PCC has one, so this will avoid some problems. */
1458 emit_queue ();
1459
1460 args[i].initial_value = args[i].value
1461 = protect_from_queue (args[i].value, 0);
1462
1463 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1464 {
1465 args[i].value
1466 = convert_modes (args[i].mode,
1467 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1468 args[i].value, args[i].unsignedp);
1469 #ifdef PROMOTE_FOR_CALL_ONLY
1470 /* CSE will replace this only if it contains args[i].value
1471 pseudo, so convert it down to the declared mode using
1472 a SUBREG. */
1473 if (GET_CODE (args[i].value) == REG
1474 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1475 {
1476 args[i].initial_value
1477 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1478 args[i].value, 0);
1479 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1480 SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
1481 = args[i].unsignedp;
1482 }
1483 #endif
1484 }
1485 }
1486 }
1487
1488 /* Given the current state of MUST_PREALLOCATE and information about
1489 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1490 compute and return the final value for MUST_PREALLOCATE. */
1491
1492 static int
1493 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1494 int must_preallocate;
1495 int num_actuals;
1496 struct arg_data *args;
1497 struct args_size *args_size;
1498 {
1499 /* See if we have or want to preallocate stack space.
1500
1501 If we would have to push a partially-in-regs parm
1502 before other stack parms, preallocate stack space instead.
1503
1504 If the size of some parm is not a multiple of the required stack
1505 alignment, we must preallocate.
1506
1507 If the total size of arguments that would otherwise create a copy in
1508 a temporary (such as a CALL) is more than half the total argument list
1509 size, preallocation is faster.
1510
1511 Another reason to preallocate is if we have a machine (like the m88k)
1512 where stack alignment is required to be maintained between every
1513 pair of insns, not just when the call is made. However, we assume here
1514 that such machines either do not have push insns (and hence preallocation
1515 would occur anyway) or the problem is taken care of with
1516 PUSH_ROUNDING. */
1517
1518 if (! must_preallocate)
1519 {
1520 int partial_seen = 0;
1521 int copy_to_evaluate_size = 0;
1522 int i;
1523
1524 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1525 {
1526 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1527 partial_seen = 1;
1528 else if (partial_seen && args[i].reg == 0)
1529 must_preallocate = 1;
1530
1531 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1532 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1533 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1534 || TREE_CODE (args[i].tree_value) == COND_EXPR
1535 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1536 copy_to_evaluate_size
1537 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1538 }
1539
1540 if (copy_to_evaluate_size * 2 >= args_size->constant
1541 && args_size->constant > 0)
1542 must_preallocate = 1;
1543 }
1544 return must_preallocate;
1545 }
1546
1547 /* If we preallocated stack space, compute the address of each argument
1548 and store it into the ARGS array.
1549
1550 We need not ensure it is a valid memory address here; it will be
1551 validized when it is used.
1552
1553 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1554
1555 static void
1556 compute_argument_addresses (args, argblock, num_actuals)
1557 struct arg_data *args;
1558 rtx argblock;
1559 int num_actuals;
1560 {
1561 if (argblock)
1562 {
1563 rtx arg_reg = argblock;
1564 int i, arg_offset = 0;
1565
1566 if (GET_CODE (argblock) == PLUS)
1567 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1568
1569 for (i = 0; i < num_actuals; i++)
1570 {
1571 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1572 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1573 rtx addr;
1574
1575 /* Skip this parm if it will not be passed on the stack. */
1576 if (! args[i].pass_on_stack && args[i].reg != 0)
1577 continue;
1578
1579 if (GET_CODE (offset) == CONST_INT)
1580 addr = plus_constant (arg_reg, INTVAL (offset));
1581 else
1582 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1583
1584 addr = plus_constant (addr, arg_offset);
1585 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1586 set_mem_attributes (args[i].stack,
1587 TREE_TYPE (args[i].tree_value), 1);
1588
1589 if (GET_CODE (slot_offset) == CONST_INT)
1590 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1591 else
1592 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1593
1594 addr = plus_constant (addr, arg_offset);
1595 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1596 set_mem_attributes (args[i].stack_slot,
1597 TREE_TYPE (args[i].tree_value), 1);
1598 }
1599 }
1600 }
1601
1602 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1603 in a call instruction.
1604
1605 FNDECL is the tree node for the target function. For an indirect call
1606 FNDECL will be NULL_TREE.
1607
1608 EXP is the CALL_EXPR for this call. */
1609
1610 static rtx
1611 rtx_for_function_call (fndecl, exp)
1612 tree fndecl;
1613 tree exp;
1614 {
1615 rtx funexp;
1616
1617 /* Get the function to call, in the form of RTL. */
1618 if (fndecl)
1619 {
1620 /* If this is the first use of the function, see if we need to
1621 make an external definition for it. */
1622 if (! TREE_USED (fndecl))
1623 {
1624 assemble_external (fndecl);
1625 TREE_USED (fndecl) = 1;
1626 }
1627
1628 /* Get a SYMBOL_REF rtx for the function address. */
1629 funexp = XEXP (DECL_RTL (fndecl), 0);
1630 }
1631 else
1632 /* Generate an rtx (probably a pseudo-register) for the address. */
1633 {
1634 rtx funaddr;
1635 push_temp_slots ();
1636 funaddr = funexp =
1637 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1638 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1639
1640 /* Check that the function is executable. */
1641 if (current_function_check_memory_usage)
1642 {
1643 #ifdef POINTERS_EXTEND_UNSIGNED
1644 /* It might be OK to convert funexp in place, but there's
1645 a lot going on between here and when it happens naturally
1646 that this seems safer. */
1647 funaddr = convert_memory_address (Pmode, funexp);
1648 #endif
1649 emit_library_call (chkr_check_exec_libfunc, 1,
1650 VOIDmode, 1,
1651 funaddr, Pmode);
1652 }
1653 emit_queue ();
1654 }
1655 return funexp;
1656 }
1657
1658 /* Do the register loads required for any wholly-register parms or any
1659 parms which are passed both on the stack and in a register. Their
1660 expressions were already evaluated.
1661
1662 Mark all register-parms as living through the call, putting these USE
1663 insns in the CALL_INSN_FUNCTION_USAGE field. */
1664
1665 static void
1666 load_register_parameters (args, num_actuals, call_fusage, flags)
1667 struct arg_data *args;
1668 int num_actuals;
1669 rtx *call_fusage;
1670 int flags;
1671 {
1672 int i, j;
1673
1674 #ifdef LOAD_ARGS_REVERSED
1675 for (i = num_actuals - 1; i >= 0; i--)
1676 #else
1677 for (i = 0; i < num_actuals; i++)
1678 #endif
1679 {
1680 rtx reg = ((flags & ECF_SIBCALL)
1681 ? args[i].tail_call_reg : args[i].reg);
1682 int partial = args[i].partial;
1683 int nregs;
1684
1685 if (reg)
1686 {
1687 /* Set to non-negative if we must move a word at a time, even if just
1688 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1689 we just use a normal move insn. This value can be zero if the
1690 argument is a zero size structure with no fields. */
1691 nregs = (partial ? partial
1692 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1693 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1694 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1695 : -1));
1696
1697 /* Handle calls that pass values in multiple non-contiguous
1698 locations. The Irix 6 ABI has examples of this. */
1699
1700 if (GET_CODE (reg) == PARALLEL)
1701 emit_group_load (reg, args[i].value,
1702 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1703 TYPE_ALIGN (TREE_TYPE (args[i].tree_value)));
1704
1705 /* If simple case, just do move. If normal partial, store_one_arg
1706 has already loaded the register for us. In all other cases,
1707 load the register(s) from memory. */
1708
1709 else if (nregs == -1)
1710 emit_move_insn (reg, args[i].value);
1711
1712 /* If we have pre-computed the values to put in the registers in
1713 the case of non-aligned structures, copy them in now. */
1714
1715 else if (args[i].n_aligned_regs != 0)
1716 for (j = 0; j < args[i].n_aligned_regs; j++)
1717 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1718 args[i].aligned_regs[j]);
1719
1720 else if (partial == 0 || args[i].pass_on_stack)
1721 move_block_to_reg (REGNO (reg),
1722 validize_mem (args[i].value), nregs,
1723 args[i].mode);
1724
1725 /* Handle calls that pass values in multiple non-contiguous
1726 locations. The Irix 6 ABI has examples of this. */
1727 if (GET_CODE (reg) == PARALLEL)
1728 use_group_regs (call_fusage, reg);
1729 else if (nregs == -1)
1730 use_reg (call_fusage, reg);
1731 else
1732 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1733 }
1734 }
1735 }
1736
1737 /* Try to integrate function. See expand_inline_function for documentation
1738 about the parameters. */
1739
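/* Returns the value of the inlined call on success, or the sentinel
   (rtx) (HOST_WIDE_INT) -1 (the same value expand_inline_function
   returns) if the function could not be integrated after all.  */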
1740 static rtx
1741 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1742 tree fndecl;
1743 tree actparms;
1744 rtx target;
1745 int ignore;
1746 tree type;
1747 rtx structure_value_addr;
1748 {
1749 rtx temp;
1750 rtx before_call;
1751 int i;
1752 rtx old_stack_level = 0;
1753 int reg_parm_stack_space = 0;
1754
1755 #ifdef REG_PARM_STACK_SPACE
1756 #ifdef MAYBE_REG_PARM_STACK_SPACE
1757 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1758 #else
1759 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1760 #endif
1761 #endif
1762
1763 before_call = get_last_insn ();
1764
1765 timevar_push (TV_INTEGRATION);
1766
1767 temp = expand_inline_function (fndecl, actparms, target,
1768 ignore, type,
1769 structure_value_addr);
1770
1771 timevar_pop (TV_INTEGRATION);
1772
1773 /* If inlining succeeded, return. */
1774 if (temp != (rtx) (HOST_WIDE_INT) - 1)
1775 {
1776 if (ACCUMULATE_OUTGOING_ARGS)
1777 {
1778 /* If the outgoing argument list must be preserved, push
1779 the stack before executing the inlined function if it
1780 makes any calls. */
1781
1782 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1783 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1784 break;
1785
1786 if (stack_arg_under_construction || i >= 0)
1787 {
1788 rtx first_insn
1789 = before_call ? NEXT_INSN (before_call) : get_insns ();
1790 rtx insn = NULL_RTX, seq;
1791
1792 /* Look for a call in the inline function code.
1793 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1794 nonzero then there is a call and it is not necessary
1795 to scan the insns. */
1796
1797 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1798 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1799 if (GET_CODE (insn) == CALL_INSN)
1800 break;
1801
1802 if (insn)
1803 {
1804 /* Reserve enough stack space so that the largest
1805 argument list of any function call in the inline
1806 function does not overlap the argument list being
1807 evaluated. This is usually an overestimate because
1808 allocate_dynamic_stack_space reserves space for an
1809 outgoing argument list in addition to the requested
1810 space, but there is no way to ask for stack space such
1811 that an argument list of a certain length can be
1812 safely constructed.
1813
1814 Add the stack space reserved for register arguments, if
1815 any, in the inline function. What is really needed is the
1816 largest value of reg_parm_stack_space in the inline
1817 function, but that is not available. Using the current
1818 value of reg_parm_stack_space is wrong, but gives
1819 correct results on all supported machines. */
1820
1821 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1822 + reg_parm_stack_space);
1823
1824 start_sequence ();
1825 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1826 allocate_dynamic_stack_space (GEN_INT (adjust),
1827 NULL_RTX, BITS_PER_UNIT);
1828 seq = get_insns ();
1829 end_sequence ();
1830 emit_insns_before (seq, first_insn);
1831 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1832 }
1833 }
1834 }
1835
1836 /* If the result is equivalent to TARGET, return TARGET to simplify
1837 checks in store_expr. They can be equivalent but not equal in the
1838 case of a function that returns BLKmode. */
1839 if (temp != target && rtx_equal_p (temp, target))
1840 return target;
1841 return temp;
1842 }
1843
1844 /* If inlining failed, mark FNDECL as needing to be compiled
1845 separately after all. If the function was declared inline,
1846 give a warning. */
1847 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1848 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1849 {
1850 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1851 warning ("called from here");
1852 }
1853 mark_addressable (fndecl);
1854 return (rtx) (HOST_WIDE_INT) - 1;
1855 }
1856
1857 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1858 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1859 bytes, then we would need to push some additional bytes to pad the
1860 arguments. So, we compute an adjustment to the stack pointer for an
1861 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1862 bytes. Then, when the arguments are pushed the stack will be perfectly
1863 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1864 be popped after the call. Returns the adjustment. */
1865
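/* A worked example: with PREFERRED_UNIT_STACK_BOUNDARY == 16,
   stack_pointer_delta == 12, UNADJUSTED_ARGS_SIZE == 8 and
   PENDING_STACK_ADJUST == 20, the unadjusted alignment is
   (12 + 8) % 16 == 4, and 4 - (20 % 16) == 0, so the adjustment
   becomes 20 - 16 == 4.  Popping 4 bytes leaves the stack 8 bytes
   deep; pushing the 8 argument bytes brings it to 16, exactly
   aligned.  ARGS_SIZE->CONSTANT becomes 20 - 4 + 8 == 24.  */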
1866 static int
1867 combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1868 args_size,
1869 preferred_unit_stack_boundary)
1870 int unadjusted_args_size;
1871 struct args_size *args_size;
1872 int preferred_unit_stack_boundary;
1873 {
1874 /* The number of bytes to pop so that the stack will be
1875 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1876 HOST_WIDE_INT adjustment;
1877 /* The alignment of the stack after the arguments are pushed, if we
1878 just pushed the arguments without adjusting the stack here. */
1879 HOST_WIDE_INT unadjusted_alignment;
1880
1881 unadjusted_alignment
1882 = ((stack_pointer_delta + unadjusted_args_size)
1883 % preferred_unit_stack_boundary);
1884
1885 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1886 as possible -- leaving just enough left to cancel out the
1887 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1888 PENDING_STACK_ADJUST is non-negative, and congruent to
1889 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1890
1891 /* Begin by trying to pop all the bytes. */
1892 unadjusted_alignment
1893 = (unadjusted_alignment
1894 - (pending_stack_adjust % preferred_unit_stack_boundary));
1895 adjustment = pending_stack_adjust;
1896 /* Push enough additional bytes that the stack will be aligned
1897 after the arguments are pushed. */
1898 if (unadjusted_alignment >= 0)
1899 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1900 else
1901 adjustment += unadjusted_alignment;
1902
1903 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1904 bytes after the call. The right number is the entire
1905 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1906 by the arguments in the first place. */
1907 args_size->constant
1908 = pending_stack_adjust - adjustment + unadjusted_args_size;
1909
1910 return adjustment;
1911 }
1912
1913 /* Generate all the code for a function call
1914 and return an rtx for its value.
1915 Store the value in TARGET (specified as an rtx) if convenient.
1916 If the value is stored in TARGET then TARGET is returned.
1917 If IGNORE is nonzero, then we ignore the value of the function call. */
1918
1919 rtx
1920 expand_call (exp, target, ignore)
1921 tree exp;
1922 rtx target;
1923 int ignore;
1924 {
1925 /* Nonzero if we are currently expanding a call. */
1926 static int currently_expanding_call = 0;
1927
1928 /* List of actual parameters. */
1929 tree actparms = TREE_OPERAND (exp, 1);
1930 /* RTX for the function to be called. */
1931 rtx funexp;
1932 /* Sequence of insns to perform a tail recursive "call". */
1933 rtx tail_recursion_insns = NULL_RTX;
1934 /* Sequence of insns to perform a normal "call". */
1935 rtx normal_call_insns = NULL_RTX;
1936 /* Sequence of insns to perform a sibling "call". */
1937 rtx tail_call_insns = NULL_RTX;
1938 /* Data type of the function. */
1939 tree funtype;
1940 /* Declaration of the function being called,
1941 or 0 if the function is computed (not known by name). */
1942 tree fndecl = 0;
1943 char *name = 0;
1944 rtx insn;
1945 int try_tail_call = 1;
1946 int try_tail_recursion = 1;
1947 int pass;
1948
1949 /* Register in which non-BLKmode value will be returned,
1950 or 0 if no value or if value is BLKmode. */
1951 rtx valreg;
1952 /* Address where we should return a BLKmode value;
1953 0 if value not BLKmode. */
1954 rtx structure_value_addr = 0;
1955 /* Nonzero if that address is being passed by treating it as
1956 an extra, implicit first parameter. Otherwise,
1957 it is passed by being copied directly into struct_value_rtx. */
1958 int structure_value_addr_parm = 0;
1959 /* Size of aggregate value wanted, or zero if none wanted
1960 or if we are using the non-reentrant PCC calling convention
1961 or expecting the value in registers. */
1962 HOST_WIDE_INT struct_value_size = 0;
1963 /* Nonzero if called function returns an aggregate in memory PCC style,
1964 by returning the address of where to find it. */
1965 int pcc_struct_value = 0;
1966
1967 /* Number of actual parameters in this call, including struct value addr. */
1968 int num_actuals;
1969 /* Number of named args. Args after this are anonymous ones
1970 and they must all go on the stack. */
1971 int n_named_args;
1972
1973 /* Vector of information about each argument.
1974 Arguments are numbered in the order they will be pushed,
1975 not the order they are written. */
1976 struct arg_data *args;
1977
1978 /* Total size in bytes of all the stack-parms scanned so far. */
1979 struct args_size args_size;
1980 struct args_size adjusted_args_size;
1981 /* Size of arguments before any adjustments (such as rounding). */
1982 int unadjusted_args_size;
1983 /* Data on reg parms scanned so far. */
1984 CUMULATIVE_ARGS args_so_far;
1985 /* Nonzero if a reg parm has been scanned. */
1986 int reg_parm_seen;
1988
1989 /* Nonzero if we must avoid push-insns in the args for this call.
1990 If stack space is allocated for register parameters, but not by the
1991 caller, then it is preallocated in the fixed part of the stack frame.
1992 So the entire argument block must then be preallocated (i.e., we
1993 ignore PUSH_ROUNDING in that case). */
1994
1995 int must_preallocate = !PUSH_ARGS;
1996
1997 /* Size of the stack reserved for parameter registers. */
1998 int reg_parm_stack_space = 0;
1999
2000 /* Address of space preallocated for stack parms
2001 (on machines that lack push insns), or 0 if space not preallocated. */
2002 rtx argblock = 0;
2003
2004 /* Mask of ECF_ flags. */
2005 int flags = 0;
2006 /* Nonzero if this is a call to an inline function. */
2007 int is_integrable = 0;
2008 #ifdef REG_PARM_STACK_SPACE
2009 /* Define the boundary of the register parm stack space that needs to be
2010 saved, if any. */
2011 int low_to_save = -1, high_to_save;
2012 rtx save_area = 0; /* Place that it is saved */
2013 #endif
2014
2015 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2016 char *initial_stack_usage_map = stack_usage_map;
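/* These record the state of the outgoing argument area on entry so it
   can be restored after the call; each nonzero byte in
   STACK_USAGE_MAP marks a byte of that area already in use.  */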
2017 int old_stack_arg_under_construction = 0;
2018
2019 rtx old_stack_level = 0;
2020 int old_pending_adj = 0;
2021 int old_inhibit_defer_pop = inhibit_defer_pop;
2022 int old_stack_allocated;
2023 rtx call_fusage;
2024 register tree p;
2025 register int i;
2026 /* The alignment of the stack, in bits. */
2027 HOST_WIDE_INT preferred_stack_boundary;
2028 /* The alignment of the stack, in bytes. */
2029 HOST_WIDE_INT preferred_unit_stack_boundary;
2030
2031 /* The value of the function call can be put in a hard register. But
2032 if -fcheck-memory-usage, code which invokes functions (and thus
2033 damages some hard registers) can be inserted before using the value.
2034 So, target is always a pseudo-register in that case. */
2035 if (current_function_check_memory_usage)
2036 target = 0;
2037
2038 /* See if this is a "nothrow" function call. */
2039 if (TREE_NOTHROW (exp))
2040 flags |= ECF_NOTHROW;
2041
2042 /* See if we can find a DECL-node for the actual function.
2043 As a result, decide whether this is a call to an integrable function. */
2044
2045 fndecl = get_callee_fndecl (exp);
2046 if (fndecl)
2047 {
2048 if (!flag_no_inline
2049 && fndecl != current_function_decl
2050 && DECL_INLINE (fndecl)
2051 && DECL_SAVED_INSNS (fndecl)
2052 && DECL_SAVED_INSNS (fndecl)->inlinable)
2053 is_integrable = 1;
2054 else if (! TREE_ADDRESSABLE (fndecl))
2055 {
2056 /* In case this function later becomes inlinable,
2057 record that there was already a non-inline call to it.
2058
2059 Use abstraction instead of setting TREE_ADDRESSABLE
2060 directly. */
2061 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2062 && optimize > 0)
2063 {
2064 warning_with_decl (fndecl, "can't inline call to `%s'");
2065 warning ("called from here");
2066 }
2067 mark_addressable (fndecl);
2068 }
2069
2070 flags |= flags_from_decl_or_type (fndecl);
2071 }
2072
2073 /* If we don't have a specific function to call, see if we have
2074 attributes set in the type. */
2075 else
2076 {
2077 p = TREE_OPERAND (exp, 0);
2078 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2079 }
2080
2081 #ifdef REG_PARM_STACK_SPACE
2082 #ifdef MAYBE_REG_PARM_STACK_SPACE
2083 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2084 #else
2085 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2086 #endif
2087 #endif
2088
2089 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2090 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2091 must_preallocate = 1;
2092 #endif
2093
2094 /* Warn if this value is an aggregate type,
2095 regardless of which calling convention we are using for it. */
2096 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2097 warning ("function call has aggregate value");
2098
2099 /* Set up a place to return a structure. */
2100
2101 /* Cater to broken compilers. */
2102 if (aggregate_value_p (exp))
2103 {
2104 /* This call returns a big structure. */
2105 flags &= ~(ECF_CONST | ECF_PURE);
2106
2107 #ifdef PCC_STATIC_STRUCT_RETURN
2108 {
2109 pcc_struct_value = 1;
2110 /* Easier than making that case work right. */
2111 if (is_integrable)
2112 {
2113 /* In case this is a static function, note that it has been
2114 used. */
2115 if (! TREE_ADDRESSABLE (fndecl))
2116 mark_addressable (fndecl);
2117 is_integrable = 0;
2118 }
2119 }
2120 #else /* not PCC_STATIC_STRUCT_RETURN */
2121 {
2122 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2123
2124 if (target && GET_CODE (target) == MEM)
2125 structure_value_addr = XEXP (target, 0);
2126 else
2127 {
2128 /* Assign a temporary to hold the value. */
2129 tree d;
2130
2131 /* For variable-sized objects, we must be called with a target
2132 specified. If we were to allocate space on the stack here,
2133 we would have no way of knowing when to free it. */
2134
2135 if (struct_value_size < 0)
2136 abort ();
2137
2138 /* This DECL is just something to feed to mark_addressable;
2139 it doesn't get pushed. */
2140 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
2141 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
2142 mark_addressable (d);
2143 mark_temp_addr_taken (DECL_RTL (d));
2144 structure_value_addr = XEXP (DECL_RTL (d), 0);
2145 TREE_USED (d) = 1;
2146 target = 0;
2147 }
2148 }
2149 #endif /* not PCC_STATIC_STRUCT_RETURN */
2150 }
2151
2152 /* If called function is inline, try to integrate it. */
2153
2154 if (is_integrable)
2155 {
2156 rtx temp = try_to_integrate (fndecl, actparms, target,
2157 ignore, TREE_TYPE (exp),
2158 structure_value_addr);
2159 if (temp != (rtx) (HOST_WIDE_INT) - 1)
2160 return temp;
2161 }
2162
2163 if (fndecl && DECL_NAME (fndecl))
2164 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
2165
2166 /* Figure out the amount to which the stack should be aligned. */
2167 #ifdef PREFERRED_STACK_BOUNDARY
2168 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2169 #else
2170 preferred_stack_boundary = STACK_BOUNDARY;
2171 #endif
2172
2173 /* Operand 0 is a pointer-to-function; get the type of the function. */
2174 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2175 if (! POINTER_TYPE_P (funtype))
2176 abort ();
2177 funtype = TREE_TYPE (funtype);
2178
2179 /* See if this is a call to a function that can return more than once
2180 or a call to longjmp or malloc. */
2181 flags |= special_function_p (fndecl, flags);
2182
2183 if (flags & ECF_MAY_BE_ALLOCA)
2184 current_function_calls_alloca = 1;
2185
2186 /* If struct_value_rtx is 0, it means pass the address
2187 as if it were an extra parameter. */
2188 if (structure_value_addr && struct_value_rtx == 0)
2189 {
2190 /* If structure_value_addr is a REG other than
2191 virtual_outgoing_args_rtx, we can always use it. If it
2192 is not a REG, we must always copy it into a register.
2193 If it is virtual_outgoing_args_rtx, we must copy it to another
2194 register in some cases. */
2195 rtx temp = (GET_CODE (structure_value_addr) != REG
2196 || (ACCUMULATE_OUTGOING_ARGS
2197 && stack_arg_under_construction
2198 && structure_value_addr == virtual_outgoing_args_rtx)
2199 ? copy_addr_to_reg (structure_value_addr)
2200 : structure_value_addr);
2201
2202 actparms
2203 = tree_cons (error_mark_node,
2204 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2205 temp),
2206 actparms);
2207 structure_value_addr_parm = 1;
2208 }
2209
2210 /* Count the arguments and set NUM_ACTUALS. */
2211 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2212 num_actuals++;
2213
2214 /* Compute number of named args.
2215 Normally, don't include the last named arg if anonymous args follow.
2216 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2217 (If no anonymous args follow, the result of list_length is actually
2218 one too large. This is harmless.)
2219
2220 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2221 zero, this machine will be able to place unnamed args that were
2222 passed in registers into the stack. So treat all args as named.
2223 This allows the insns emitted for a specific argument list to be
2224 independent of the function declaration.
2225
2226 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2227 reliable way to pass unnamed args in registers, so we must force
2228 them into memory. */
2229
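/* For example, for `int f (int, int, ...)' TYPE_ARG_TYPES has two
   entries and no terminating void entry, so subtracting one below
   drops the last named argument; for `int f (int, int)' the chain
   ends in a void entry and the subtraction merely cancels it.  */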
2230 if ((STRICT_ARGUMENT_NAMING
2231 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2232 && TYPE_ARG_TYPES (funtype) != 0)
2233 n_named_args
2234 = (list_length (TYPE_ARG_TYPES (funtype))
2235 /* Don't include the last named arg. */
2236 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2237 /* Count the struct value address, if it is passed as a parm. */
2238 + structure_value_addr_parm);
2239 else
2240 /* If we know nothing, treat all args as named. */
2241 n_named_args = num_actuals;
2242
2243 /* Start updating where the next arg would go.
2244
2245 On some machines (such as the PA) indirect calls have a different
2246 calling convention than normal calls. The last argument in
2247 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2248 or not. */
2249 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2250
2251
2252 /* Make a vector to hold all the information about each arg. */
2253 args = (struct arg_data *) alloca (num_actuals
2254 * sizeof (struct arg_data));
2255 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
2256
2257 /* Build up entries in the ARGS array, compute the size of the arguments
2258 into ARGS_SIZE, etc. */
2259 initialize_argument_information (num_actuals, args, &args_size,
2260 n_named_args, actparms, fndecl,
2261 &args_so_far, reg_parm_stack_space,
2262 &old_stack_level, &old_pending_adj,
2263 &must_preallocate, &flags);
2264
2265 if (args_size.var)
2266 {
2267 /* If this function requires a variable-sized argument list, don't
2268 try to make a cse'able block for this call. We may be able to
2269 do this eventually, but it is too complicated to keep track of
2270 what insns go in the cse'able block and which don't. */
2271
2272 flags &= ~(ECF_CONST | ECF_PURE);
2273 must_preallocate = 1;
2274 }
2275
2276 /* Now make final decision about preallocating stack space. */
2277 must_preallocate = finalize_must_preallocate (must_preallocate,
2278 num_actuals, args,
2279 &args_size);
2280
2281 /* If the structure value address will reference the stack pointer, we
2282 must stabilize it. We don't need to do this if we know that we are
2283 not going to adjust the stack pointer in processing this call. */
2284
2285 if (structure_value_addr
2286 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2287 || reg_mentioned_p (virtual_outgoing_args_rtx,
2288 structure_value_addr))
2289 && (args_size.var
2290 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2291 structure_value_addr = copy_to_reg (structure_value_addr);
2292
2293 /* Tail calls can make things harder to debug, and we've traditionally
2294 pushed these optimizations into -O2. Don't try if we're already
2295 expanding a call, as that means we're an argument. Similarly, if
2296 there are pending loops or cleanups we know there's code to follow
2297 the call.
2298
2299 If rtx_equal_function_value_matters is false, that means we've
2300 finished with regular parsing. Which means that some of the
2301 machinery we use to generate tail-calls is no longer in place.
2302 This is most often true of sjlj-exceptions, which we couldn't
2303 tail-call to anyway. */
2304
2305 if (currently_expanding_call++ != 0
2306 || !flag_optimize_sibling_calls
2307 || !rtx_equal_function_value_matters
2308 || !stmt_loop_nest_empty ()
2309 || any_pending_cleanups (1)
2310 || args_size.var)
2311 try_tail_call = try_tail_recursion = 0;
2312
2313 /* Tail recursion fails when we are not dealing with recursive calls. */
2314 if (!try_tail_recursion
2315 || TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
2316 || TREE_OPERAND (TREE_OPERAND (exp, 0), 0) != current_function_decl)
2317 try_tail_recursion = 0;
2318
2319 /* Check the remaining ways that tail call optimization can fail. */
2320 if (
2321 #ifdef HAVE_sibcall_epilogue
2322 !HAVE_sibcall_epilogue
2323 #else
2324 1
2325 #endif
2326 || !try_tail_call
2327 /* Doing sibling call optimization needs some work, since
2328 structure_value_addr can be allocated on the stack.
2329 It does not seem worth the effort since few optimizable
2330 sibling calls will return a structure. */
2331 || structure_value_addr != NULL_RTX
2332 /* If the register holding the address is a callee saved
2333 register, then we lose. We have no way to prevent that,
2334 so we only allow calls to named functions. */
2335 /* ??? This could be done by having the insn constraints
2336 use a register class that is all call-clobbered. Any
2337 reload insns generated to fix things up would appear
2338 before the sibcall_epilogue. */
2339 || fndecl == NULL_TREE
2340 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP))
2341 || !FUNCTION_OK_FOR_SIBCALL (fndecl)
2342 /* If this function requires more stack slots than the current
2343 function, we cannot change it into a sibling call. */
2344 || args_size.constant > current_function_args_size
2345 /* If the callee pops its own arguments, then it must pop exactly
2346 the same number of arguments as the current function. */
2347 || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2348 != RETURN_POPS_ARGS (current_function_decl,
2349 TREE_TYPE (current_function_decl),
2350 current_function_args_size))
2351 try_tail_call = 0;
2352
2353 if (try_tail_call || try_tail_recursion)
2354 {
2355 int end, inc;
2356 actparms = NULL_TREE;
2357 /* Ok, we're going to give the tail call the old college try.
2358 This means we're going to evaluate the function arguments
2359 up to three times. There are two degrees of badness we can
2360 encounter, those that can be unsaved and those that can't.
2361 (See unsafe_for_reeval commentary for details.)
2362
2363 Generate a new argument list. Pass safe arguments through
2364 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2365 For hard badness, evaluate them now and put their resulting
2366 rtx in a temporary VAR_DECL.
2367
2368 initialize_argument_information has ordered the array for the
2369 order to be pushed, and we must remember this when reconstructing
2370 the original argument order. */
2371
2372 if (PUSH_ARGS_REVERSED)
2373 {
2374 inc = 1;
2375 i = 0;
2376 end = num_actuals;
2377 }
2378 else
2379 {
2380 inc = -1;
2381 i = num_actuals - 1;
2382 end = -1;
2383 }
2384
2385 for (; i != end; i += inc)
2386 {
2387 switch (unsafe_for_reeval (args[i].tree_value))
2388 {
2389 case 0: /* Safe. */
2390 break;
2391
2392 case 1: /* Mildly unsafe. */
2393 args[i].tree_value = unsave_expr (args[i].tree_value);
2394 break;
2395
2396 case 2: /* Wildly unsafe. */
2397 {
2398 tree var = build_decl (VAR_DECL, NULL_TREE,
2399 TREE_TYPE (args[i].tree_value));
2400 DECL_RTL (var) = expand_expr (args[i].tree_value, NULL_RTX,
2401 VOIDmode, EXPAND_NORMAL);
2402 args[i].tree_value = var;
2403 }
2404 break;
2405
2406 default:
2407 abort ();
2408 }
2409 /* We need to build actparms for optimize_tail_recursion. We can
2410 safely trash away TREE_PURPOSE, since it is unused by this
2411 function. */
2412 if (try_tail_recursion)
2413 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2414 }
2415 /* Expanding one of those dangerous arguments could have added
2416 cleanups, but otherwise give it a whirl. */
2417 if (any_pending_cleanups (1))
2418 try_tail_call = try_tail_recursion = 0;
2419 }
2420
2421 /* Generate a tail recursion sequence when calling ourselves. */
2422
2423 if (try_tail_recursion)
2424 {
2425 /* We want to emit any pending stack adjustments before the tail
2426 recursion "call". That way we know any adjustment after the tail
2427 recursion call can be ignored if we indeed use the tail recursion
2428 call expansion. */
2429 int save_pending_stack_adjust = pending_stack_adjust;
2430 int save_stack_pointer_delta = stack_pointer_delta;
2431
2432 /* Use a new sequence to hold any RTL we generate. We do not even
2433 know if we will use this RTL yet. The final decision can not be
2434 made until after RTL generation for the entire function is
2435 complete. */
2436 start_sequence ();
2437 /* If expanding any of the arguments creates cleanups, we can't
2438 do a tailcall. So, we'll need to pop the pending cleanups
2439 list. If, however, all goes well, and there are no cleanups
2440 then the call to expand_start_target_temps will have no
2441 effect. */
2442 expand_start_target_temps ();
2443 if (optimize_tail_recursion (actparms, get_last_insn ()))
2444 {
2445 if (any_pending_cleanups (1))
2446 try_tail_call = try_tail_recursion = 0;
2447 else
2448 tail_recursion_insns = get_insns ();
2449 }
2450 expand_end_target_temps ();
2451 end_sequence ();
2452
2453 /* Restore the original pending stack adjustment for the sibling and
2454 normal call cases below. */
2455 pending_stack_adjust = save_pending_stack_adjust;
2456 stack_pointer_delta = save_stack_pointer_delta;
2457 }
2458
2459 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2460 {
2461 /* A fork duplicates the profile information, and an exec discards
2462 it. We can't rely on fork/exec to be paired. So write out the
2463 profile information we have gathered so far, and clear it. */
2464 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2465 is subject to race conditions, just as with multithreaded
2466 programs. */
2467
2468 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"), 0,
2469 VOIDmode, 0);
2470 }
2471
2472 /* Ensure current function's preferred stack boundary is at least
2473 what we need. We don't have to increase alignment for recursive
2474 functions. */
2475 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2476 && fndecl != current_function_decl)
2477 cfun->preferred_stack_boundary = preferred_stack_boundary;
2478
2479 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2480
2481 function_call_count++;
2482
2483 /* We want to make two insn chains; one for a sibling call, the other
2484 for a normal call. We will select one of the two chains after
2485 initial RTL generation is complete. */
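/* Pass 0 builds the sibling call sequence (with ECF_SIBCALL set
   below); pass 1 builds the normal call sequence.  */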
2486 for (pass = 0; pass < 2; pass++)
2487 {
2488 int sibcall_failure = 0;
2489 /* We want to emit any pending stack adjustments before the tail
2490 recursion "call". That way we know any adjustment after the tail
2491 recursion call can be ignored if we indeed use the tail recursion
2492 call expansion. */
2493 int save_pending_stack_adjust;
2494 int save_stack_pointer_delta;
2495 rtx insns;
2496 rtx before_call, next_arg_reg;
2497
2498 if (pass == 0)
2499 {
2500 if (! try_tail_call)
2501 continue;
2502
2503 /* Emit any queued insns now; otherwise they would end up in
2504 only one of the alternates. */
2505 emit_queue ();
2506
2507 /* State variables we need to save and restore between
2508 iterations. */
2509 save_pending_stack_adjust = pending_stack_adjust;
2510 save_stack_pointer_delta = stack_pointer_delta;
2511 }
2512 if (pass)
2513 flags &= ~ECF_SIBCALL;
2514 else
2515 flags |= ECF_SIBCALL;
2516
2517 /* Other state variables that we must reinitialize each time
2518 through the loop (that are not initialized by the loop itself). */
2519 argblock = 0;
2520 call_fusage = 0;
2521
2522 /* Start a new sequence for the normal call case.
2523
2524 From this point on, if the sibling call fails, we want to set
2525 sibcall_failure instead of continuing the loop. */
2526 start_sequence ();
2527
2528 if (pass == 0)
2529 {
2530 /* We know at this point that there are not currently any
2531 pending cleanups. If, however, in the process of evaluating
2532 the arguments we were to create some, we'll need to be
2533 able to get rid of them. */
2534 expand_start_target_temps ();
2535 }
2536
2537 /* When calling a const function, we must pop the stack args right away,
2538 so that the pop is deleted or moved with the call. */
2539 if (flags & (ECF_CONST | ECF_PURE))
2540 NO_DEFER_POP;
2541
2542 /* Don't let pending stack adjusts add up to too much.
2543 Also, do all pending adjustments now if there is any chance
2544 this might be a call to alloca or if we are expanding a sibling
2545 call sequence. */
2546 if (pending_stack_adjust >= 32
2547 || (pending_stack_adjust > 0 && (flags & ECF_MAY_BE_ALLOCA))
2548 || pass == 0)
2549 do_pending_stack_adjust ();
2550
2551 /* Push the temporary stack slot level so that we can free any
2552 temporaries we make. */
2553 push_temp_slots ();
2554
2555
2556 #ifdef FINAL_REG_PARM_STACK_SPACE
2557 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2558 args_size.var);
2559 #endif
2560 /* Precompute any arguments as needed. */
2561 if (pass)
2562 precompute_arguments (flags, num_actuals, args);
2563
2564 /* Now we are about to start emitting insns that can be deleted
2565 if a libcall is deleted. */
2566 if (flags & (ECF_CONST | ECF_PURE | ECF_MALLOC))
2567 start_sequence ();
2568
2569 adjusted_args_size = args_size;
2570 /* Compute the actual size of the argument block required. The variable
2571 and constant sizes must be combined, the size may have to be rounded,
2572 and there may be a minimum required size. When generating a sibcall
2573 pattern, do not round up, since we'll be re-using whatever space our
2574 caller provided. */
2575 unadjusted_args_size
2576 = compute_argument_block_size (reg_parm_stack_space, &adjusted_args_size,
2577 (pass == 0 ? 0
2578 : preferred_stack_boundary));
2579
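/* Record the net stack space allocated so far, so that after the
   call we can verify it has all been deallocated.  */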
2580 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2581
2582 /* The argument block when performing a sibling call is the
2583 incoming argument block. */
2584 if (pass == 0)
2585 argblock = virtual_incoming_args_rtx;
2586
2587 /* If we have no actual push instructions, or shouldn't use them,
2588 make space for all args right now. */
2589 else if (adjusted_args_size.var != 0)
2590 {
2591 if (old_stack_level == 0)
2592 {
2593 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2594 old_pending_adj = pending_stack_adjust;
2595 pending_stack_adjust = 0;
2596 /* stack_arg_under_construction says whether a stack arg is
2597 being constructed at the old stack level. Pushing the stack
2598 gets a clean outgoing argument block. */
2599 old_stack_arg_under_construction = stack_arg_under_construction;
2600 stack_arg_under_construction = 0;
2601 }
2602 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2603 }
2604 else
2605 {
2606 /* Note that we must go through the motions of allocating an argument
2607 block even if the size is zero because we may be storing args
2608 in the area reserved for register arguments, which may be part of
2609 the stack frame. */
2610
2611 int needed = adjusted_args_size.constant;
2612
2613 /* Store the maximum argument space used. It will be pushed by
2614 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2615 checking). */
2616
2617 if (needed > current_function_outgoing_args_size)
2618 current_function_outgoing_args_size = needed;
2619
2620 if (must_preallocate)
2621 {
2622 if (ACCUMULATE_OUTGOING_ARGS)
2623 {
2624 /* Since the stack pointer will never be pushed, it is
2625 possible for the evaluation of a parm to clobber
2626 something we have already written to the stack.
2627 Since most function calls on RISC machines do not use
2628 the stack, this is uncommon, but must work correctly.
2629
2630 Therefore, we save any area of the stack that was already
2631 written and that we are using. Here we set up to do this
2632 by making a new stack usage map from the old one. The
2633 actual save will be done by store_one_arg.
2634
2635 Another approach might be to try to reorder the argument
2636 evaluations to avoid this conflicting stack usage. */
2637
2638 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2639 /* Since we will be writing into the entire argument area,
2640 the map must be allocated for its entire size, not just
2641 the part that is the responsibility of the caller. */
2642 needed += reg_parm_stack_space;
2643 #endif
2644
2645 #ifdef ARGS_GROW_DOWNWARD
2646 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2647 needed + 1);
2648 #else
2649 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2650 needed);
2651 #endif
2652 stack_usage_map
2653 = (char *) alloca (highest_outgoing_arg_in_use);
2654
2655 if (initial_highest_arg_in_use)
2656 bcopy (initial_stack_usage_map, stack_usage_map,
2657 initial_highest_arg_in_use);
2658
2659 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2660 bzero (&stack_usage_map[initial_highest_arg_in_use],
2661 (highest_outgoing_arg_in_use
2662 - initial_highest_arg_in_use));
2663 needed = 0;
2664
2665 /* The address of the outgoing argument list must not be
2666 copied to a register here, because argblock would be left
2667 pointing to the wrong place after the call to
2668 allocate_dynamic_stack_space below. */
2669
2670 argblock = virtual_outgoing_args_rtx;
2671 }
2672 else
2673 {
2674 if (inhibit_defer_pop == 0)
2675 {
2676 /* Try to reuse some or all of the pending_stack_adjust
2677 to get this space. */
2678 needed
2679 = (combine_pending_stack_adjustment_and_call
2680 (unadjusted_args_size,
2681 &adjusted_args_size,
2682 preferred_unit_stack_boundary));
2683
2684 /* combine_pending_stack_adjustment_and_call computes
2685 an adjustment before the arguments are allocated.
2686 Account for them and see whether or not the stack
2687 needs to go up or down. */
2688 needed = unadjusted_args_size - needed;
2689
2690 if (needed < 0)
2691 {
2692 /* We're releasing stack space. */
2693 /* ??? We can avoid any adjustment at all if we're
2694 already aligned. FIXME. */
2695 pending_stack_adjust = -needed;
2696 do_pending_stack_adjust ();
2697 needed = 0;
2698 }
2699 else
2700 /* We need to allocate space. We'll do that in
2701 push_block below. */
2702 pending_stack_adjust = 0;
2703 }
2704
2705 /* Special case this because overhead of `push_block' in
2706 this case is non-trivial. */
2707 if (needed == 0)
2708 argblock = virtual_outgoing_args_rtx;
2709 else
2710 argblock = push_block (GEN_INT (needed), 0, 0);
2711
2712 /* We only really need to call `copy_to_reg' in the case
2713 where push insns are going to be used to pass ARGBLOCK
2714 to a function call in ARGS. In that case, the stack
2715 pointer changes value from the allocation point to the
2716 call point, and hence the value of
2717 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But we might
2718 as well always do it. */
2719 argblock = copy_to_reg (argblock);
2720
2721 /* The save/restore code in store_one_arg handles all
2722 cases except one: a constructor call (including a C
2723 function returning a BLKmode struct) to initialize
2724 an argument. */
2725 if (stack_arg_under_construction)
2726 {
2727 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2728 rtx push_size = GEN_INT (reg_parm_stack_space
2729 + adjusted_args_size.constant);
2730 #else
2731 rtx push_size = GEN_INT (adjusted_args_size.constant);
2732 #endif
2733 if (old_stack_level == 0)
2734 {
2735 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2736 NULL_RTX);
2737 old_pending_adj = pending_stack_adjust;
2738 pending_stack_adjust = 0;
2739 /* stack_arg_under_construction says whether a stack
2740 arg is being constructed at the old stack level.
2741 Pushing the stack gets a clean outgoing argument
2742 block. */
2743 old_stack_arg_under_construction
2744 = stack_arg_under_construction;
2745 stack_arg_under_construction = 0;
2746 /* Make a new map for the new argument list. */
2747 stack_usage_map = (char *)
2748 alloca (highest_outgoing_arg_in_use);
2749 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2750 highest_outgoing_arg_in_use = 0;
2751 }
2752 allocate_dynamic_stack_space (push_size, NULL_RTX,
2753 BITS_PER_UNIT);
2754 }
2755 /* If argument evaluation might modify the stack pointer,
2756 copy the address of the argument list to a register. */
2757 for (i = 0; i < num_actuals; i++)
2758 if (args[i].pass_on_stack)
2759 {
2760 argblock = copy_addr_to_reg (argblock);
2761 break;
2762 }
2763 }
2764 }
2765 }
2766
2767 compute_argument_addresses (args, argblock, num_actuals);
2768
2769 #ifdef PREFERRED_STACK_BOUNDARY
2770 /* If we push args individually in reverse order, perform stack alignment
2771 before the first push (the last arg). */
2772 if (PUSH_ARGS_REVERSED && argblock == 0
2773 && adjusted_args_size.constant != unadjusted_args_size)
2774 {
2775 /* When the stack adjustment is pending, we get better code
2776 by combining the adjustments. */
2777 if (pending_stack_adjust
2778 && ! (flags & (ECF_CONST | ECF_PURE))
2779 && ! inhibit_defer_pop)
2780 {
2781 pending_stack_adjust
2782 = (combine_pending_stack_adjustment_and_call
2783 (unadjusted_args_size,
2784 &adjusted_args_size,
2785 preferred_unit_stack_boundary));
2786 do_pending_stack_adjust ();
2787 }
2788 else if (argblock == 0)
2789 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2790 - unadjusted_args_size));
2791 }
2792 /* Now that the stack is properly aligned, pops can't safely
2793 be deferred during the evaluation of the arguments. */
2794 NO_DEFER_POP;
2795 #endif
2796
2797 /* Don't try to defer pops if preallocating, not even from the first arg,
2798 since ARGBLOCK probably refers to the SP. */
2799 if (argblock)
2800 NO_DEFER_POP;
2801
2802 funexp = rtx_for_function_call (fndecl, exp);
2803
2804 /* Figure out the register where the value, if any, will come back. */
2805 valreg = 0;
2806 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2807 && ! structure_value_addr)
2808 {
2809 if (pcc_struct_value)
2810 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2811 fndecl, (pass == 0));
2812 else
2813 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2814 }
2815
2816 /* Precompute all register parameters. It isn't safe to compute anything
2817 once we have started filling any specific hard regs. */
2818 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2819
2820 #ifdef REG_PARM_STACK_SPACE
2821 /* Save the fixed argument area if it's part of the caller's frame and
2822 is clobbered by argument setup for this call. */
2823 if (ACCUMULATE_OUTGOING_ARGS && pass)
2824 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2825 &low_to_save, &high_to_save);
2826 #endif
2827
2828 /* Now store (and compute if necessary) all non-register parms.
2829 These come before register parms, since they can require block-moves,
2830 which could clobber the registers used for register parms.
2831 Parms which have partial registers are not stored here,
2832 but we do preallocate space here if they want that. */
2833
2834 for (i = 0; i < num_actuals; i++)
2835 if (args[i].reg == 0 || args[i].pass_on_stack)
2836 store_one_arg (&args[i], argblock, flags,
2837 adjusted_args_size.var != 0, reg_parm_stack_space);
2838
2839 /* If we have a parm that is passed in registers but not in memory
2840 and whose alignment does not permit a direct copy into registers,
2841 make a group of pseudos that correspond to each register that we
2842 will later fill. */
2843 if (STRICT_ALIGNMENT)
2844 store_unaligned_arguments_into_pseudos (args, num_actuals);
2845
2846 /* Now store any partially-in-registers parm.
2847 This is the last place a block-move can happen. */
2848 if (reg_parm_seen)
2849 for (i = 0; i < num_actuals; i++)
2850 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2851 store_one_arg (&args[i], argblock, flags,
2852 adjusted_args_size.var != 0, reg_parm_stack_space);
2853
2854 #ifdef PREFERRED_STACK_BOUNDARY
2855 /* If we pushed args in forward order, perform stack alignment
2856 after pushing the last arg. */
2857 if (!PUSH_ARGS_REVERSED && argblock == 0)
2858 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2859 - unadjusted_args_size));
2860 #endif
2861
2862 /* If register arguments require space on the stack and stack space
2863 was not preallocated, allocate stack space here for arguments
2864 passed in registers. */
2865 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2866 if (!ACCUMULATE_OUTGOING_ARGS
2867 && must_preallocate == 0 && reg_parm_stack_space > 0)
2868 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2869 #endif
2870
2871 /* Pass the function the address in which to return a
2872 structure value. */
2873 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2874 {
2875 emit_move_insn (struct_value_rtx,
2876 force_reg (Pmode,
2877 force_operand (structure_value_addr,
2878 NULL_RTX)));
2879
2880 /* Mark the memory for the aggregate as write-only. */
2881 if (current_function_check_memory_usage)
2882 emit_library_call (chkr_set_right_libfunc, 1,
2883 VOIDmode, 3,
2884 structure_value_addr, ptr_mode,
2885 GEN_INT (struct_value_size),
2886 TYPE_MODE (sizetype),
2887 GEN_INT (MEMORY_USE_WO),
2888 TYPE_MODE (integer_type_node));
2889
2890 if (GET_CODE (struct_value_rtx) == REG)
2891 use_reg (&call_fusage, struct_value_rtx);
2892 }
2893
2894 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
2895 reg_parm_seen);
2896
2897 load_register_parameters (args, num_actuals, &call_fusage, flags);
2898
2899 /* Perform postincrements before actually calling the function. */
2900 emit_queue ();
2901
2902 /* Save a pointer to the last insn before the call, so that we can
2903 later safely search backwards to find the CALL_INSN. */
2904 before_call = get_last_insn ();
2905
2906 /* Set up next argument register. For sibling calls on machines
2907 with register windows this should be the incoming register. */
2908 #ifdef FUNCTION_INCOMING_ARG
2909 if (pass == 0)
2910 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2911 void_type_node, 1);
2912 else
2913 #endif
2914 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2915 void_type_node, 1);
2916
2917 /* All arguments and registers used for the call must be set up by
2918 now! */
2919
2920 #ifdef PREFERRED_STACK_BOUNDARY
2921 /* Stack must be properly aligned now. */
2922 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
2923 abort ();
2924 #endif
2925
2926 /* Generate the actual call instruction. */
2927 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2928 adjusted_args_size.constant, struct_value_size,
2929 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2930 flags);
2931
2932 /* Verify that we've deallocated all the stack we used. */
2933 if (pass
2934 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
2935 abort ();
2936
2937 /* If call is cse'able, make appropriate pair of reg-notes around it.
2938 Test valreg so we don't crash; may safely ignore `const'
2939 if return type is void. Disable for PARALLEL return values, because
2940 we have no way to move such values into a pseudo register. */
2941 if (pass
2942 && (flags & (ECF_CONST | ECF_PURE))
2943 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2944 {
2945 rtx note = 0;
2946 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2947 rtx insns;
2948
2949 /* Mark the return value as a pointer if needed. */
2950 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2951 mark_reg_pointer (temp, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2952
2953 /* Construct an "equal form" for the value which mentions all the
2954 arguments in order as well as the function name. */
2955 for (i = 0; i < num_actuals; i++)
2956 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2957 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2958
2959 insns = get_insns ();
2960 end_sequence ();
2961
2962 if (flags & ECF_PURE)
2963 note = gen_rtx_EXPR_LIST (VOIDmode,
2964 gen_rtx_USE (VOIDmode,
2965 gen_rtx_MEM (BLKmode,
2966 gen_rtx_SCRATCH (VOIDmode))), note);
2967
2968 emit_libcall_block (insns, temp, valreg, note);
2969
2970 valreg = temp;
2971 }
2972 else if (flags & (ECF_CONST | ECF_PURE))
2973 {
2974 /* Otherwise, just write out the sequence without a note. */
2975 rtx insns = get_insns ();
2976
2977 end_sequence ();
2978 emit_insns (insns);
2979 }
2980 else if (flags & ECF_MALLOC)
2981 {
2982 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2983 rtx last, insns;
2984
2985 /* The return value from a malloc-like function is a pointer. */
2986 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2987 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2988
2989 emit_move_insn (temp, valreg);
2990
2991 /* The return value from a malloc-like function can not alias
2992 anything else. */
2993 last = get_last_insn ();
2994 REG_NOTES (last) =
2995 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2996
2997 /* Write out the sequence. */
2998 insns = get_insns ();
2999 end_sequence ();
3000 emit_insns (insns);
3001 valreg = temp;
3002 }
3003
3004 /* For calls to `setjmp', etc., inform flow.c it should complain
3005 if nonvolatile values are live. For functions that cannot return,
3006 inform flow that control does not fall through. */
3007
3008 if ((flags & (ECF_RETURNS_TWICE | ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3009 {
3010 /* The barrier or NOTE_INSN_SETJMP note must be emitted
3011 immediately after the CALL_INSN. Some ports emit more
3012 than just a CALL_INSN above, so we must search for it here. */
3013
3014 rtx last = get_last_insn ();
3015 while (GET_CODE (last) != CALL_INSN)
3016 {
3017 last = PREV_INSN (last);
3018 /* There was no CALL_INSN? */
3019 if (last == before_call)
3020 abort ();
3021 }
3022
3023 if (flags & ECF_RETURNS_TWICE)
3024 {
3025 emit_note_after (NOTE_INSN_SETJMP, last);
3026 current_function_calls_setjmp = 1;
3027 }
3028 else
3029 emit_barrier_after (last);
3030 }
3031
3032 if (flags & ECF_LONGJMP)
3033 current_function_calls_longjmp = 1;
3034
3035 /* If this function is returning into a memory location marked as
3036 readonly, it means it is initializing that location. But we normally
3037 treat functions as not clobbering such locations, so we need to
3038 specify that this one does. */
3039 if (target != 0 && GET_CODE (target) == MEM
3040 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
3041 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3042
3043 /* If value type not void, return an rtx for the value. */
3044
3045 /* If there are cleanups to be called, don't use a hard reg as target.
3046 We need to double check this and see if it matters anymore. */
3047 if (any_pending_cleanups (1))
3048 {
3049 if (target && REG_P (target)
3050 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3051 target = 0;
3052 sibcall_failure = 1;
3053 }
3054
3055 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3056 || ignore)
3057 {
3058 target = const0_rtx;
3059 }
3060 else if (structure_value_addr)
3061 {
3062 if (target == 0 || GET_CODE (target) != MEM)
3063 {
3064 target
3065 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3066 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3067 structure_value_addr));
3068 set_mem_attributes (target, exp, 1);
3069 }
3070 }
3071 else if (pcc_struct_value)
3072 {
3073 /* This is the special C++ case where we need to
3074 know what the true target was. We take care to
3075 never use this value more than once in one expression. */
3076 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3077 copy_to_reg (valreg));
3078 set_mem_attributes (target, exp, 1);
3079 }
3080 /* Handle calls that return values in multiple non-contiguous locations.
3081 The Irix 6 ABI has examples of this. */
3082 else if (GET_CODE (valreg) == PARALLEL)
3083 {
3084 int bytes = int_size_in_bytes (TREE_TYPE (exp));
3085
3086 if (target == 0)
3087 {
3088 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)),
3089 bytes, 0);
3090 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
3091 preserve_temp_slots (target);
3092 }
3093
3094 if (! rtx_equal_p (target, valreg))
3095 emit_group_store (target, valreg, bytes,
3096 TYPE_ALIGN (TREE_TYPE (exp)));
3097
3098 /* We can not support sibling calls for this case. */
3099 sibcall_failure = 1;
3100 }
3101 else if (target
3102 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3103 && GET_MODE (target) == GET_MODE (valreg))
3104 {
3105 /* TARGET and VALREG cannot be equal at this point because the
3106 latter would not have REG_FUNCTION_VALUE_P true, while the
3107 former would if it were referring to the same register.
3108
3109 If they refer to the same register, this move will be a no-op,
3110 except when function inlining is being done. */
3111 emit_move_insn (target, valreg);
3112 }
3113 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3114 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3115 else
3116 target = copy_to_reg (valreg);
3117
3118 #ifdef PROMOTE_FUNCTION_RETURN
3119 /* If we promoted this return value, make the proper SUBREG. TARGET
3120 might be const0_rtx here, so be careful. */
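/* For example, a `short' value returned in a promoted SImode register
   becomes (subreg:HI (reg:SI ...) 0), with the SUBREG_PROMOTED bits
   recording how it was extended.  */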
3121 if (GET_CODE (target) == REG
3122 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3123 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3124 {
3125 tree type = TREE_TYPE (exp);
3126 int unsignedp = TREE_UNSIGNED (type);
3127
3128 /* If we don't promote as expected, something is wrong. */
3129 if (GET_MODE (target)
3130 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3131 abort ();
3132
3133 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
3134 SUBREG_PROMOTED_VAR_P (target) = 1;
3135 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
3136 }
3137 #endif
3138
3139 /* If size of args is variable or this was a constructor call for a stack
3140 argument, restore saved stack-pointer value. */
3141
3142 if (old_stack_level)
3143 {
3144 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3145 pending_stack_adjust = old_pending_adj;
3146 stack_arg_under_construction = old_stack_arg_under_construction;
3147 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3148 stack_usage_map = initial_stack_usage_map;
3149 sibcall_failure = 1;
3150 }
3151 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3152 {
3153 #ifdef REG_PARM_STACK_SPACE
3154 if (save_area)
3155 {
3156 restore_fixed_argument_area (save_area, argblock,
3157 high_to_save, low_to_save);
3158 }
3159 #endif
3160
3161 /* If we saved any argument areas, restore them. */
3162 for (i = 0; i < num_actuals; i++)
3163 if (args[i].save_area)
3164 {
3165 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3166 rtx stack_area
3167 = gen_rtx_MEM (save_mode,
3168 memory_address (save_mode,
3169 XEXP (args[i].stack_slot, 0)));
3170
3171 if (save_mode != BLKmode)
3172 emit_move_insn (stack_area, args[i].save_area);
3173 else
3174 emit_block_move (stack_area,
3175 validize_mem (args[i].save_area),
3176 GEN_INT (args[i].size.constant),
3177 PARM_BOUNDARY);
3178 }
3179
3180 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3181 stack_usage_map = initial_stack_usage_map;
3182 }
3183
3184 /* If this was alloca, record the new stack level for nonlocal gotos.
3185 Check for the handler slots since we might not have a save area
3186 for non-local gotos. */
3187
3188 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3189 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3190
3191 pop_temp_slots ();
3192
3193 /* Free up storage we no longer need. */
3194 for (i = 0; i < num_actuals; ++i)
3195 if (args[i].aligned_regs)
3196 free (args[i].aligned_regs);
3197
3198 if (pass == 0)
3199 {
3200 /* Undo the fake expand_start_target_temps we did earlier. If
3201 there had been any cleanups created, we've already set
3202 sibcall_failure. */
3203 expand_end_target_temps ();
3204 }
3205
3206 insns = get_insns ();
3207 end_sequence ();
3208
3209 if (pass == 0)
3210 {
3211 tail_call_insns = insns;
3212
3213 /* If something prevents making this a sibling call,
3214 zero out the sequence. */
3215 if (sibcall_failure)
3216 tail_call_insns = NULL_RTX;
3217 /* Restore the pending stack adjustment now that we have
3218 finished generating the sibling call sequence. */
3219
3220 pending_stack_adjust = save_pending_stack_adjust;
3221 stack_pointer_delta = save_stack_pointer_delta;
3222
3223 /* Prepare arg structure for next iteration. */
3224 for (i = 0 ; i < num_actuals ; i++)
3225 {
3226 args[i].value = 0;
3227 args[i].aligned_regs = 0;
3228 args[i].stack = 0;
3229 }
3230 }
3231 else
3232 normal_call_insns = insns;
3233 }
3234
3235 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3236 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3237 can happen if the arguments to this function call an inline
3238 function whose expansion contains another CALL_PLACEHOLDER.
3239
3240 If there are any C_Ps in any of these sequences, replace them
3241 with their normal call. */
3242
3243 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3244 if (GET_CODE (insn) == CALL_INSN
3245 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3246 replace_call_placeholder (insn, sibcall_use_normal);
3247
3248 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3249 if (GET_CODE (insn) == CALL_INSN
3250 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3251 replace_call_placeholder (insn, sibcall_use_normal);
3252
3253 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3254 if (GET_CODE (insn) == CALL_INSN
3255 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3256 replace_call_placeholder (insn, sibcall_use_normal);
3257
3258 /* If this was a potential tail recursion site, then emit a
3259 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3260 One of them will be selected later. */
3261 if (tail_recursion_insns || tail_call_insns)
3262 {
3263 /* The tail recursion label must be kept around. We could expose
3264 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3265 and makes determining true tail recursion sites difficult.
3266
3267 So we set LABEL_PRESERVE_P here, then clear it when we select
3268 one of the call sequences after rtl generation is complete. */
3269 if (tail_recursion_insns)
3270 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3271 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3272 tail_call_insns,
3273 tail_recursion_insns,
3274 tail_recursion_label));
3275 }
3276 else
3277 emit_insns (normal_call_insns);
3278
3279 currently_expanding_call--;
3280
3281 return target;
3282 }
3283 \f
3284 /* Returns nonzero if FUN is the symbol for a library function which
3285    cannot throw. */
3286
3287 static int
3288 libfunc_nothrow (fun)
3289 rtx fun;
3290 {
3291 if (fun == throw_libfunc
3292 || fun == rethrow_libfunc
3293 || fun == sjthrow_libfunc
3294 || fun == sjpopnthrow_libfunc)
3295 return 0;
3296
3297 return 1;
3298 }
3299 \f
3300 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3301    The RETVAL parameter specifies whether the return value needs to be saved;
3302    the other parameters are documented in the emit_library_call function below. */
3303 static rtx
3304 emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
3305 int retval;
3306 rtx orgfun;
3307 rtx value;
3308 int fn_type;
3309 enum machine_mode outmode;
3310 int nargs;
3311 va_list p;
3312 {
3313 /* Total size in bytes of all the stack-parms scanned so far. */
3314 struct args_size args_size;
3315 /* Size of arguments before any adjustments (such as rounding). */
3316 struct args_size original_args_size;
3317 register int argnum;
3318 rtx fun;
3319 int inc;
3320 int count;
3321 struct args_size alignment_pad;
3322 rtx argblock = 0;
3323 CUMULATIVE_ARGS args_so_far;
3324 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3325 struct args_size offset; struct args_size size; rtx save_area; };
3326 struct arg *argvec;
3327 int old_inhibit_defer_pop = inhibit_defer_pop;
3328 rtx call_fusage = 0;
3329 rtx mem_value = 0;
3330 rtx valreg;
3331 int pcc_struct_value = 0;
3332 int struct_value_size = 0;
3333 int flags = 0;
3334 int reg_parm_stack_space = 0;
3335 int needed;
3336
3337 #ifdef REG_PARM_STACK_SPACE
3338 /* Define the boundary of the register parm stack space that needs to be
3339    saved, if any. */
3340 int low_to_save = -1, high_to_save = 0;
3341   rtx save_area = 0;		/* Place where it is saved.  */
3342 #endif
3343
3344 /* Size of the stack reserved for parameter registers. */
3345 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3346 char *initial_stack_usage_map = stack_usage_map;
3347
3348 #ifdef REG_PARM_STACK_SPACE
3349 #ifdef MAYBE_REG_PARM_STACK_SPACE
3350 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3351 #else
3352 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3353 #endif
3354 #endif
3355
3356 if (fn_type == 1)
3357 flags |= ECF_CONST;
3358 else if (fn_type == 2)
3359 flags |= ECF_PURE;
3360 fun = orgfun;
3361
3362 if (libfunc_nothrow (fun))
3363 flags |= ECF_NOTHROW;
3364
3365 #ifdef PREFERRED_STACK_BOUNDARY
3366 /* Ensure current function's preferred stack boundary is at least
3367 what we need. */
3368 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3369 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3370 #endif
3371
3372 /* If this kind of value comes back in memory,
3373 decide where in memory it should come back. */
3374 if (outmode != VOIDmode && aggregate_value_p (type_for_mode (outmode, 0)))
3375 {
3376 #ifdef PCC_STATIC_STRUCT_RETURN
3377 rtx pointer_reg
3378 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3379 0, 0);
3380 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3381 pcc_struct_value = 1;
3382 if (value == 0)
3383 value = gen_reg_rtx (outmode);
3384 #else /* not PCC_STATIC_STRUCT_RETURN */
3385 struct_value_size = GET_MODE_SIZE (outmode);
3386 if (value != 0 && GET_CODE (value) == MEM)
3387 mem_value = value;
3388 else
3389 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3390 #endif
3391
3392 /* This call returns a big structure. */
3393 flags &= ~(ECF_CONST | ECF_PURE);
3394 }
3395
3396 /* ??? Unfinished: must pass the memory address as an argument. */
3397
3398 /* Copy all the libcall-arguments out of the varargs data
3399 and into a vector ARGVEC.
3400
3401 Compute how to pass each argument. We only support a very small subset
3402 of the full argument passing conventions to limit complexity here since
3403 library functions shouldn't have many args. */
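/* The variable arguments arrive as (rtx, machine_mode) pairs.  For
   example, a caller emitting a two-argument libcall (LIBFUNC, OP0 and
   OP1 are hypothetical names here) would have written

     emit_library_call (libfunc, 0, VOIDmode, 2,
			op0, SImode,
			op1, SImode);

   and the loop further below retrieves each VAL/MODE pair with va_arg.  */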
3404
3405 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3406 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3407
3408 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3409
3410 args_size.constant = 0;
3411 args_size.var = 0;
3412
3413 count = 0;
3414
3415 /* Now we are about to start emitting insns that can be deleted
3416 if a libcall is deleted. */
3417 if (flags & (ECF_CONST | ECF_PURE))
3418 start_sequence ();
3419
3420 push_temp_slots ();
3421
3422 /* If there's a structure value address to be passed,
3423 either pass it in the special place, or pass it as an extra argument. */
3424 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3425 {
3426 rtx addr = XEXP (mem_value, 0);
3427 nargs++;
3428
3429 /* Make sure it is a reasonable operand for a move or push insn. */
3430 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3431 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3432 addr = force_operand (addr, NULL_RTX);
3433
3434 argvec[count].value = addr;
3435 argvec[count].mode = Pmode;
3436 argvec[count].partial = 0;
3437
3438 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3439 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3440 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3441 abort ();
3442 #endif
3443
3444 locate_and_pad_parm (Pmode, NULL_TREE,
3445 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3446 1,
3447 #else
3448 argvec[count].reg != 0,
3449 #endif
3450 NULL_TREE, &args_size, &argvec[count].offset,
3451 &argvec[count].size, &alignment_pad);
3452
3454 if (argvec[count].reg == 0 || argvec[count].partial != 0
3455 || reg_parm_stack_space > 0)
3456 args_size.constant += argvec[count].size.constant;
3457
3458 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3459
3460 count++;
3461 }
3462
3463 for (; count < nargs; count++)
3464 {
3465 rtx val = va_arg (p, rtx);
3466 enum machine_mode mode = va_arg (p, enum machine_mode);
3467
3468 /* We cannot convert the arg value to the mode the library wants here;
3469 must do it earlier where we know the signedness of the arg. */
3470 if (mode == BLKmode
3471 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3472 abort ();
3473
3474 /* On some machines, there's no way to pass a float to a library fcn.
3475 Pass it as a double instead. */
3476 #ifdef LIBGCC_NEEDS_DOUBLE
3477 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3478 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3479 #endif
3480
3481 /* There's no need to call protect_from_queue, because
3482 either emit_move_insn or emit_push_insn will do that. */
3483
3484 /* Make sure it is a reasonable operand for a move or push insn. */
3485 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3486 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3487 val = force_operand (val, NULL_RTX);
3488
3489 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3490 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3491 {
3492 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3493 be viewed as just an efficiency improvement. */
3494 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3495 emit_move_insn (slot, val);
3496 val = force_operand (XEXP (slot, 0), NULL_RTX);
3497 mode = Pmode;
3498 }
3499 #endif
3500
3501 argvec[count].value = val;
3502 argvec[count].mode = mode;
3503
3504 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3505
3506 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3507 argvec[count].partial
3508 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3509 #else
3510 argvec[count].partial = 0;
3511 #endif
3512
3513 locate_and_pad_parm (mode, NULL_TREE,
3514 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3515 1,
3516 #else
3517 argvec[count].reg != 0,
3518 #endif
3519 NULL_TREE, &args_size, &argvec[count].offset,
3520 &argvec[count].size, &alignment_pad);
3521
3522 if (argvec[count].size.var)
3523 abort ();
3524
3525 if (reg_parm_stack_space == 0 && argvec[count].partial)
3526 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3527
3528 if (argvec[count].reg == 0 || argvec[count].partial != 0
3529 || reg_parm_stack_space > 0)
3530 args_size.constant += argvec[count].size.constant;
3531
3532 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3533 }
3534
3535 #ifdef FINAL_REG_PARM_STACK_SPACE
3536 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3537 args_size.var);
3538 #endif
3539 /* If this machine requires an external definition for library
3540 functions, write one out. */
3541 assemble_external_libcall (fun);
3542
3543 original_args_size = args_size;
3544 #ifdef PREFERRED_STACK_BOUNDARY
3545 args_size.constant = (((args_size.constant
3546 + stack_pointer_delta
3547 + STACK_BYTES - 1)
3548 / STACK_BYTES
3549 * STACK_BYTES)
3550 - stack_pointer_delta);
3551 #endif
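/* A worked example, assuming STACK_BYTES == 16: with
   args_size.constant == 20 and stack_pointer_delta == 4, the rounding
   above yields ((20 + 4 + 15) / 16) * 16 - 4 == 28, so the total
   adjustment 4 + 28 == 32 is a multiple of STACK_BYTES and the stack
   pointer stays aligned once the arguments are pushed.  */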
3552
3553 args_size.constant = MAX (args_size.constant,
3554 reg_parm_stack_space);
3555
3556 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3557 args_size.constant -= reg_parm_stack_space;
3558 #endif
3559
3560 if (args_size.constant > current_function_outgoing_args_size)
3561 current_function_outgoing_args_size = args_size.constant;
3562
3563 if (ACCUMULATE_OUTGOING_ARGS)
3564 {
3565 /* Since the stack pointer will never be pushed, it is possible for
3566 the evaluation of a parm to clobber something we have already
3567 written to the stack. Since most function calls on RISC machines
3568 do not use the stack, this is uncommon, but must work correctly.
3569
3570 Therefore, we save any area of the stack that was already written
3571 and that we are using. Here we set up to do this by making a new
3572 stack usage map from the old one.
3573
3574 Another approach might be to try to reorder the argument
3575 evaluations to avoid this conflicting stack usage. */
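/* Each byte of stack_usage_map covers one byte of the outgoing argument
   area; a nonzero entry means that byte has already been written.  For
   example, once an SImode argument has been stored at offset 8, bytes 8
   through 11 are set to 1 by the marking loop further below.  */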
3576
3577 needed = args_size.constant;
3578
3579 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3580 /* Since we will be writing into the entire argument area, the
3581 map must be allocated for its entire size, not just the part that
3582 is the responsibility of the caller. */
3583 needed += reg_parm_stack_space;
3584 #endif
3585
3586 #ifdef ARGS_GROW_DOWNWARD
3587 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3588 needed + 1);
3589 #else
3590 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3591 needed);
3592 #endif
3593 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3594
3595 if (initial_highest_arg_in_use)
3596 bcopy (initial_stack_usage_map, stack_usage_map,
3597 initial_highest_arg_in_use);
3598
3599 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3600 bzero (&stack_usage_map[initial_highest_arg_in_use],
3601 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3602 needed = 0;
3603
3604 /* The address of the outgoing argument list must not be copied to a
3605 register here, because argblock would be left pointing to the
3606 	 wrong place after the call to allocate_dynamic_stack_space
3607 	 below.  */
3608
3609 argblock = virtual_outgoing_args_rtx;
3610 }
3611 else
3612 {
3613 if (!PUSH_ARGS)
3614 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3615 }
3616
3617 #ifdef PREFERRED_STACK_BOUNDARY
3618 /* If we push args individually in reverse order, perform stack alignment
3619 before the first push (the last arg). */
3620 if (argblock == 0 && PUSH_ARGS_REVERSED)
3621 anti_adjust_stack (GEN_INT (args_size.constant
3622 - original_args_size.constant));
3623 #endif
3624
3625 if (PUSH_ARGS_REVERSED)
3626 {
3627 inc = -1;
3628 argnum = nargs - 1;
3629 }
3630 else
3631 {
3632 inc = 1;
3633 argnum = 0;
3634 }
3635
3636 #ifdef REG_PARM_STACK_SPACE
3637 if (ACCUMULATE_OUTGOING_ARGS)
3638 {
3639 /* The argument list is the property of the called routine and it
3640 may clobber it. If the fixed area has been used for previous
3641 parameters, we must save and restore it.
3642
3643 	 Here we compute the boundary of the area that needs to be saved, if any. */
3644
3645 #ifdef ARGS_GROW_DOWNWARD
3646 for (count = 0; count < reg_parm_stack_space + 1; count++)
3647 #else
3648 for (count = 0; count < reg_parm_stack_space; count++)
3649 #endif
3650 {
3651 if (count >= highest_outgoing_arg_in_use
3652 || stack_usage_map[count] == 0)
3653 continue;
3654
3655 if (low_to_save == -1)
3656 low_to_save = count;
3657
3658 high_to_save = count;
3659 }
3660
3661 if (low_to_save >= 0)
3662 {
3663 int num_to_save = high_to_save - low_to_save + 1;
3664 enum machine_mode save_mode
3665 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3666 rtx stack_area;
3667
3668 /* If we don't have the required alignment, must do this in BLKmode. */
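	  /* For instance, on a hypothetical target where
	     BIGGEST_ALIGNMENT / UNITS_PER_WORD is at least 8: with
	     save_mode == DImode (8 bytes) and low_to_save == 4, the
	     mask test below is nonzero, so we fall back to BLKmode.  */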
3669 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3670 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3671 save_mode = BLKmode;
3672
3673 #ifdef ARGS_GROW_DOWNWARD
3674 stack_area = gen_rtx_MEM (save_mode,
3675 memory_address (save_mode,
3676 plus_constant (argblock,
3677 - high_to_save)));
3678 #else
3679 stack_area = gen_rtx_MEM (save_mode,
3680 memory_address (save_mode,
3681 plus_constant (argblock,
3682 low_to_save)));
3683 #endif
3684 if (save_mode == BLKmode)
3685 {
3686 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3687 emit_block_move (validize_mem (save_area), stack_area,
3688 GEN_INT (num_to_save), PARM_BOUNDARY);
3689 }
3690 else
3691 {
3692 save_area = gen_reg_rtx (save_mode);
3693 emit_move_insn (save_area, stack_area);
3694 }
3695 }
3696 }
3697 #endif
3698
3699 /* Push the args that need to be pushed. */
3700
3701 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3702 are to be pushed. */
3703 for (count = 0; count < nargs; count++, argnum += inc)
3704 {
3705 register enum machine_mode mode = argvec[argnum].mode;
3706 register rtx val = argvec[argnum].value;
3707 rtx reg = argvec[argnum].reg;
3708 int partial = argvec[argnum].partial;
3709 int lower_bound = 0, upper_bound = 0, i;
3710
3711 if (! (reg != 0 && partial == 0))
3712 {
3713 if (ACCUMULATE_OUTGOING_ARGS)
3714 {
3715 /* If this is being stored into a pre-allocated, fixed-size,
3716 stack area, save any previous data at that location. */
3717
3718 #ifdef ARGS_GROW_DOWNWARD
3719 /* stack_slot is negative, but we want to index stack_usage_map
3720 with positive values. */
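	  /* For instance, offset.constant == -8 with size.constant == 4
	     gives upper_bound == 9 and lower_bound == 5, so bytes 5
	     through 8 of stack_usage_map are examined below.  */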
3721 upper_bound = -argvec[argnum].offset.constant + 1;
3722 lower_bound = upper_bound - argvec[argnum].size.constant;
3723 #else
3724 lower_bound = argvec[argnum].offset.constant;
3725 upper_bound = lower_bound + argvec[argnum].size.constant;
3726 #endif
3727
3728 for (i = lower_bound; i < upper_bound; i++)
3729 if (stack_usage_map[i]
3730 /* Don't store things in the fixed argument area at this
3731 point; it has already been saved. */
3732 && i > reg_parm_stack_space)
3733 break;
3734
3735 if (i != upper_bound)
3736 {
3737 /* We need to make a save area. See what mode we can make
3738 it. */
3739 enum machine_mode save_mode
3740 = mode_for_size (argvec[argnum].size.constant
3741 * BITS_PER_UNIT,
3742 MODE_INT, 1);
3743 rtx stack_area
3744 = gen_rtx_MEM
3745 (save_mode,
3746 memory_address
3747 (save_mode,
3748 plus_constant (argblock,
3749 argvec[argnum].offset.constant)));
3750 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3751
3752 emit_move_insn (argvec[argnum].save_area, stack_area);
3753 }
3754 }
3755
3756 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3757 argblock, GEN_INT (argvec[argnum].offset.constant),
3758 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3759
3760 /* Now mark the segment we just used. */
3761 if (ACCUMULATE_OUTGOING_ARGS)
3762 for (i = lower_bound; i < upper_bound; i++)
3763 stack_usage_map[i] = 1;
3764
3765 NO_DEFER_POP;
3766 }
3767 }
3768
3769 #ifdef PREFERRED_STACK_BOUNDARY
3770 /* If we pushed args in forward order, perform stack alignment
3771 after pushing the last arg. */
3772 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3773 anti_adjust_stack (GEN_INT (args_size.constant
3774 - original_args_size.constant));
3775 #endif
3776
3777 if (PUSH_ARGS_REVERSED)
3778 argnum = nargs - 1;
3779 else
3780 argnum = 0;
3781
3782 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3783
3784 /* Now load any reg parms into their regs. */
3785
3786 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3787 are to be pushed. */
3788 for (count = 0; count < nargs; count++, argnum += inc)
3789 {
3790 register rtx val = argvec[argnum].value;
3791 rtx reg = argvec[argnum].reg;
3792 int partial = argvec[argnum].partial;
3793
3794 /* Handle calls that pass values in multiple non-contiguous
3795 locations. The PA64 has examples of this for library calls. */
3796 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3797 emit_group_load (reg, val,
3798 GET_MODE_SIZE (GET_MODE (val)),
3799 GET_MODE_ALIGNMENT (GET_MODE (val)));
3800 else if (reg != 0 && partial == 0)
3801 emit_move_insn (reg, val);
3802
3803 NO_DEFER_POP;
3804 }
3805
3806 /* Any regs containing parms remain in use through the call. */
3807 for (count = 0; count < nargs; count++)
3808 {
3809 rtx reg = argvec[count].reg;
3810 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3811 use_group_regs (&call_fusage, reg);
3812 else if (reg != 0)
3813 use_reg (&call_fusage, reg);
3814 }
3815
3816 /* Pass the function the address in which to return a structure value. */
3817 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3818 {
3819 emit_move_insn (struct_value_rtx,
3820 force_reg (Pmode,
3821 force_operand (XEXP (mem_value, 0),
3822 NULL_RTX)));
3823 if (GET_CODE (struct_value_rtx) == REG)
3824 use_reg (&call_fusage, struct_value_rtx);
3825 }
3826
3827 /* Don't allow popping to be deferred, since then
3828 cse'ing of library calls could delete a call and leave the pop. */
3829 NO_DEFER_POP;
3830 valreg = (mem_value == 0 && outmode != VOIDmode
3831 ? hard_libcall_value (outmode) : NULL_RTX);
3832
3833 #ifdef PREFERRED_STACK_BOUNDARY
3834 /* Stack must be properly aligned now. */
3835 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
3836     abort ();
3837 #endif
3838
3839 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3840 will set inhibit_defer_pop to that value. */
3841 /* The return type is needed to decide how many bytes the function pops.
3842 Signedness plays no role in that, so for simplicity, we pretend it's
3843 always signed. We also assume that the list of arguments passed has
3844 no impact, so we pretend it is unknown. */
3845
3846 emit_call_1 (fun,
3847 get_identifier (XSTR (orgfun, 0)),
3848 build_function_type (outmode == VOIDmode ? void_type_node
3849 : type_for_mode (outmode, 0), NULL_TREE),
3850 original_args_size.constant, args_size.constant,
3851 struct_value_size,
3852 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3853 valreg,
3854 old_inhibit_defer_pop + 1, call_fusage, flags);
3855
3856 /* Now restore inhibit_defer_pop to its actual original value. */
3857 OK_DEFER_POP;
3858
3859 /* If call is cse'able, make appropriate pair of reg-notes around it.
3860 Test valreg so we don't crash; may safely ignore `const'
3861 if return type is void. Disable for PARALLEL return values, because
3862 we have no way to move such values into a pseudo register. */
3863 if ((flags & (ECF_CONST | ECF_PURE))
3864 && valreg != 0 && GET_CODE (valreg) != PARALLEL)
3865 {
3866 rtx note = 0;
3867 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3868 rtx insns;
3869 int i;
3870
3871 /* Construct an "equal form" for the value which mentions all the
3872 arguments in order as well as the function name. */
3873 for (i = 0; i < nargs; i++)
3874 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3875 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
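      /* For a two-argument libcall the note built above has the shape
	 (expr_list FUN (expr_list ARG1 (expr_list ARG0 nil))): the
	 function comes first, followed by the arguments, most recently
	 consed first.  */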
3876
3877 insns = get_insns ();
3878 end_sequence ();
3879
3880 if (flags & ECF_PURE)
3881 note = gen_rtx_EXPR_LIST (VOIDmode,
3882 gen_rtx_USE (VOIDmode,
3883 gen_rtx_MEM (BLKmode,
3884 gen_rtx_SCRATCH (VOIDmode))), note);
3885
3886 emit_libcall_block (insns, temp, valreg, note);
3887
3888 valreg = temp;
3889 }
3890 else if (flags & (ECF_CONST | ECF_PURE))
3891 {
3892 /* Otherwise, just write out the sequence without a note. */
3893 rtx insns = get_insns ();
3894
3895 end_sequence ();
3896 emit_insns (insns);
3897 }
3898 pop_temp_slots ();
3899
3900 /* Copy the value to the right place. */
3901 if (outmode != VOIDmode && retval)
3902 {
3903 if (mem_value)
3904 {
3905 if (value == 0)
3906 value = mem_value;
3907 if (value != mem_value)
3908 emit_move_insn (value, mem_value);
3909 }
3910 else if (value != 0)
3911 emit_move_insn (value, hard_libcall_value (outmode));
3912 else
3913 value = hard_libcall_value (outmode);
3914 }
3915
3916 if (ACCUMULATE_OUTGOING_ARGS)
3917 {
3918 #ifdef REG_PARM_STACK_SPACE
3919 if (save_area)
3920 {
3921 enum machine_mode save_mode = GET_MODE (save_area);
3922 #ifdef ARGS_GROW_DOWNWARD
3923 rtx stack_area
3924 = gen_rtx_MEM (save_mode,
3925 memory_address (save_mode,
3926 plus_constant (argblock,
3927 - high_to_save)));
3928 #else
3929 rtx stack_area
3930 = gen_rtx_MEM (save_mode,
3931 memory_address (save_mode,
3932 plus_constant (argblock, low_to_save)));
3933 #endif
3934 if (save_mode != BLKmode)
3935 emit_move_insn (stack_area, save_area);
3936 else
3937 emit_block_move (stack_area, validize_mem (save_area),
3938 GEN_INT (high_to_save - low_to_save + 1),
3939 PARM_BOUNDARY);
3940 }
3941 #endif
3942
3943 /* If we saved any argument areas, restore them. */
3944 for (count = 0; count < nargs; count++)
3945 if (argvec[count].save_area)
3946 {
3947 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3948 rtx stack_area
3949 = gen_rtx_MEM (save_mode,
3950 memory_address
3951 (save_mode,
3952 plus_constant (argblock,
3953 argvec[count].offset.constant)));
3954
3955 emit_move_insn (stack_area, argvec[count].save_area);
3956 }
3957
3958 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3959 stack_usage_map = initial_stack_usage_map;
3960 }
3961
3962 return value;
3964 }
3965 \f
3966 /* Output a library call to function FUN (a SYMBOL_REF rtx)
3967    for a value of mode OUTMODE,
3968    with NARGS different arguments, passed as alternating rtx values
3969    and machine_modes to convert them to.
3970    The rtx values should have been passed through protect_from_queue
3971    already.
3972 
3973    FN_TYPE is zero for `normal' calls, one for `const' calls (which
3974    will be enclosed in REG_LIBCALL/REG_RETVAL notes) and two for `pure'
3975    calls, which are handled like `const' calls with an extra
3976    (use (mem (scratch))). */
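/* A typical use looks like the following sketch (memset_libfunc is a
   real libfunc, but the operand rtxes here are illustrative only):

     emit_library_call (memset_libfunc, 0, VOIDmode, 3,
			addr, Pmode,
			const0_rtx, TYPE_MODE (integer_type_node),
			size, TYPE_MODE (sizetype));

   Each argument rtx is followed by the machine mode to convert it to.  */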
3977
3978 void
3979 emit_library_call VPARAMS((rtx orgfun, int fn_type, enum machine_mode outmode,
3980 int nargs, ...))
3981 {
3982 #ifndef ANSI_PROTOTYPES
3983 rtx orgfun;
3984 int fn_type;
3985 enum machine_mode outmode;
3986 int nargs;
3987 #endif
3988 va_list p;
3989
3990 VA_START (p, nargs);
3991
3992 #ifndef ANSI_PROTOTYPES
3993 orgfun = va_arg (p, rtx);
3994 fn_type = va_arg (p, int);
3995 outmode = va_arg (p, enum machine_mode);
3996 nargs = va_arg (p, int);
3997 #endif
3998
3999 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4000
4001 va_end (p);
4002 }
4003 \f
4004 /* Like emit_library_call except that an extra argument, VALUE,
4005 comes second and says where to store the result.
4006 (If VALUE is zero, this function chooses a convenient way
4007    to return the value.)
4008
4009 This function returns an rtx for where the value is to be found.
4010 If VALUE is nonzero, VALUE is returned. */
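/* For example (a hypothetical sketch in the style of the optabs code),
   a `const' two-operand SImode libcall could be emitted as

     target = emit_library_call_value (libfunc, NULL_RTX, 1, SImode, 2,
				       op0, SImode, op1, SImode);

   where fn_type 1 marks the call `const'.  */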
4011
4012 rtx
4013 emit_library_call_value VPARAMS((rtx orgfun, rtx value, int fn_type,
4014 enum machine_mode outmode, int nargs, ...))
4015 {
4016 #ifndef ANSI_PROTOTYPES
4017 rtx orgfun;
4018 rtx value;
4019 int fn_type;
4020 enum machine_mode outmode;
4021 int nargs;
4022 #endif
4023 va_list p;
4024
4025 VA_START (p, nargs);
4026
4027 #ifndef ANSI_PROTOTYPES
4028 orgfun = va_arg (p, rtx);
4029 value = va_arg (p, rtx);
4030 fn_type = va_arg (p, int);
4031 outmode = va_arg (p, enum machine_mode);
4032 nargs = va_arg (p, int);
4033 #endif
4034
4035 value = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode, nargs, p);
4036
4037 va_end (p);
4038
4039 return value;
4040 }
4041 \f
4042 #if 0
4043 /* Return an rtx which represents a suitable home on the stack
4044 given TYPE, the type of the argument looking for a home.
4045 This is called only for BLKmode arguments.
4046
4047 SIZE is the size needed for this target.
4048 ARGS_ADDR is the address of the bottom of the argument block for this call.
4049 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
4050 if this machine uses push insns. */
4051
4052 static rtx
4053 target_for_arg (type, size, args_addr, offset)
4054 tree type;
4055 rtx size;
4056 rtx args_addr;
4057 struct args_size offset;
4058 {
4059 rtx target;
4060 rtx offset_rtx = ARGS_SIZE_RTX (offset);
4061
4062   /* We avoid calling memory_address when possible,
4063 because we want to address as close to the stack
4064 as possible. For non-variable sized arguments,
4065 this will be stack-pointer relative addressing. */
4066 if (GET_CODE (offset_rtx) == CONST_INT)
4067 target = plus_constant (args_addr, INTVAL (offset_rtx));
4068 else
4069 {
4070 /* I have no idea how to guarantee that this
4071 will work in the presence of register parameters. */
4072 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
4073 target = memory_address (QImode, target);
4074 }
4075
4076 return gen_rtx_MEM (BLKmode, target);
4077 }
4078 #endif
4079 \f
4080 /* Store a single argument for a function call
4081 into the register or memory area where it must be passed.
4082 *ARG describes the argument value and where to pass it.
4083
4084 ARGBLOCK is the address of the stack-block for all the arguments,
4085 or 0 on a machine where arguments are pushed individually.
4086
4087    FLAGS is a bitmask of ECF_* flags; if ECF_MAY_BE_ALLOCA is set, this
4088    could be a call to `alloca', so we must be careful about how the
4089    stack is used.  VARIABLE_SIZE nonzero says that this was a
4090    variable-sized outgoing argument area; it is used when
4091    ACCUMULATE_OUTGOING_ARGS to indicate that we need not worry about
4092    saving and restoring the stack.
4093 
4094    REG_PARM_STACK_SPACE is the size of the stack area reserved for parameter registers. */
4095
4096 static void
4097 store_one_arg (arg, argblock, flags, variable_size,
4098 reg_parm_stack_space)
4099 struct arg_data *arg;
4100 rtx argblock;
4101 int flags;
4102 int variable_size ATTRIBUTE_UNUSED;
4103 int reg_parm_stack_space;
4104 {
4105 register tree pval = arg->tree_value;
4106 rtx reg = 0;
4107 int partial = 0;
4108 int used = 0;
4109 int i, lower_bound = 0, upper_bound = 0;
4110
4111 if (TREE_CODE (pval) == ERROR_MARK)
4112 return;
4113
4114 /* Push a new temporary level for any temporaries we make for
4115 this argument. */
4116 push_temp_slots ();
4117
4118 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4119 {
4120 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4121 save any previous data at that location. */
4122 if (argblock && ! variable_size && arg->stack)
4123 {
4124 #ifdef ARGS_GROW_DOWNWARD
4125 /* stack_slot is negative, but we want to index stack_usage_map
4126 with positive values. */
4127 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4128 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4129 else
4130 upper_bound = 0;
4131
4132 lower_bound = upper_bound - arg->size.constant;
4133 #else
4134 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4135 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4136 else
4137 lower_bound = 0;
4138
4139 upper_bound = lower_bound + arg->size.constant;
4140 #endif
4141
4142 for (i = lower_bound; i < upper_bound; i++)
4143 if (stack_usage_map[i]
4144 /* Don't store things in the fixed argument area at this point;
4145 it has already been saved. */
4146 && i > reg_parm_stack_space)
4147 break;
4148
4149 if (i != upper_bound)
4150 {
4151 /* We need to make a save area. See what mode we can make it. */
4152 enum machine_mode save_mode
4153 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4154 rtx stack_area
4155 = gen_rtx_MEM (save_mode,
4156 memory_address (save_mode,
4157 XEXP (arg->stack_slot, 0)));
4158
4159 if (save_mode == BLKmode)
4160 {
4161 arg->save_area = assign_stack_temp (BLKmode,
4162 arg->size.constant, 0);
4163 MEM_SET_IN_STRUCT_P (arg->save_area,
4164 AGGREGATE_TYPE_P (TREE_TYPE
4165 (arg->tree_value)));
4166 preserve_temp_slots (arg->save_area);
4167 emit_block_move (validize_mem (arg->save_area), stack_area,
4168 GEN_INT (arg->size.constant),
4169 PARM_BOUNDARY);
4170 }
4171 else
4172 {
4173 arg->save_area = gen_reg_rtx (save_mode);
4174 emit_move_insn (arg->save_area, stack_area);
4175 }
4176 }
4177 }
4178 /* Now that we have saved any slots that will be overwritten by this
4179 store, mark all slots this store will use. We must do this before
4180 we actually expand the argument since the expansion itself may
4181 trigger library calls which might need to use the same stack slot. */
4182 if (argblock && ! variable_size && arg->stack)
4183 for (i = lower_bound; i < upper_bound; i++)
4184 stack_usage_map[i] = 1;
4185 }
4186
4187 /* If this isn't going to be placed on both the stack and in registers,
4188 set up the register and number of words. */
4189 if (! arg->pass_on_stack)
4190 reg = arg->reg, partial = arg->partial;
4191
4192 if (reg != 0 && partial == 0)
4193 /* Being passed entirely in a register. We shouldn't be called in
4194 this case. */
4195 abort ();
4196
4197 /* If this arg needs special alignment, don't load the registers
4198 here. */
4199 if (arg->n_aligned_regs != 0)
4200 reg = 0;
4201
4202 /* If this is being passed partially in a register, we can't evaluate
4203 it directly into its stack slot. Otherwise, we can. */
4204 if (arg->value == 0)
4205 {
4206 /* stack_arg_under_construction is nonzero if a function argument is
4207 being evaluated directly into the outgoing argument list and
4208 expand_call must take special action to preserve the argument list
4209 if it is called recursively.
4210
4211 For scalar function arguments stack_usage_map is sufficient to
4212 determine which stack slots must be saved and restored. Scalar
4213 arguments in general have pass_on_stack == 0.
4214
4215 If this argument is initialized by a function which takes the
4216 address of the argument (a C++ constructor or a C function
4217 returning a BLKmode structure), then stack_usage_map is
4218 insufficient and expand_call must push the stack around the
4219 function call. Such arguments have pass_on_stack == 1.
4220
4221 Note that it is always safe to set stack_arg_under_construction,
4222 but this generates suboptimal code if set when not needed. */
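      /* For example (hypothetical source): in `f (g ())' where g
	 returns a BLKmode structure, g's result may be constructed
	 directly in the slot it occupies in f's outgoing argument list,
	 so that list must be protected for the duration of the call
	 to g.  */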
4223
4224 if (arg->pass_on_stack)
4225 stack_arg_under_construction++;
4226
4227 arg->value = expand_expr (pval,
4228 (partial
4229 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4230 ? NULL_RTX : arg->stack,
4231 VOIDmode, 0);
4232
4233 /* If we are promoting object (or for any other reason) the mode
4234 doesn't agree, convert the mode. */
4235
4236 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4237 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4238 arg->value, arg->unsignedp);
4239
4240 if (arg->pass_on_stack)
4241 stack_arg_under_construction--;
4242 }
4243
4244 /* Don't allow anything left on stack from computation
4245 of argument to alloca. */
4246 if (flags & ECF_MAY_BE_ALLOCA)
4247 do_pending_stack_adjust ();
4248
4249 if (arg->value == arg->stack)
4250 {
4251 /* If the value is already in the stack slot, we are done. */
4252 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
4253 {
4254 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4255 XEXP (arg->stack, 0), Pmode,
4256 ARGS_SIZE_RTX (arg->size),
4257 TYPE_MODE (sizetype),
4258 GEN_INT (MEMORY_USE_RW),
4259 TYPE_MODE (integer_type_node));
4260 }
4261 }
4262 else if (arg->mode != BLKmode)
4263 {
4264 register int size;
4265
4266 /* Argument is a scalar, not entirely passed in registers.
4267 (If part is passed in registers, arg->partial says how much
4268 and emit_push_insn will take care of putting it there.)
4269
4270 Push it, and if its size is less than the
4271 amount of space allocated to it,
4272 also bump stack pointer by the additional space.
4273 Note that in C the default argument promotions
4274 will prevent such mismatches. */
4275
4276 size = GET_MODE_SIZE (arg->mode);
4277 /* Compute how much space the push instruction will push.
4278 On many machines, pushing a byte will advance the stack
4279 pointer by a halfword. */
4280 #ifdef PUSH_ROUNDING
4281 size = PUSH_ROUNDING (size);
4282 #endif
4283 used = size;
4284
4285 /* Compute how much space the argument should get:
4286 round up to a multiple of the alignment for arguments. */
4287 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4288 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4289 / (PARM_BOUNDARY / BITS_PER_UNIT))
4290 * (PARM_BOUNDARY / BITS_PER_UNIT));
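      /* For instance, when padding is required and PARM_BOUNDARY == 32,
	 a 2-byte argument (assuming PUSH_ROUNDING left size at 2) gets
	 used == ((2 + 3) / 4) * 4 == 4 bytes of argument space.  */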
4291
4292 /* This isn't already where we want it on the stack, so put it there.
4293 This can either be done with push or copy insns. */
4294 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
4295 partial, reg, used - size, argblock,
4296 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4297 ARGS_SIZE_RTX (arg->alignment_pad));
4298 }
4299 else
4300 {
4301 /* BLKmode, at least partly to be pushed. */
4302
4303 register int excess;
4304 rtx size_rtx;
4305
4306 /* Pushing a nonscalar.
4307 If part is passed in registers, PARTIAL says how much
4308 and emit_push_insn will take care of putting it there. */
4309
4310 /* Round its size up to a multiple
4311 of the allocation unit for arguments. */
4312
4313 if (arg->size.var != 0)
4314 {
4315 excess = 0;
4316 size_rtx = ARGS_SIZE_RTX (arg->size);
4317 }
4318 else
4319 {
4320 /* PUSH_ROUNDING has no effect on us, because
4321 emit_push_insn for BLKmode is careful to avoid it. */
4322 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4323 + partial * UNITS_PER_WORD);
4324 size_rtx = expr_size (pval);
4325 }
4326
4327 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4328 TYPE_ALIGN (TREE_TYPE (pval)), partial, reg, excess,
4329 argblock, ARGS_SIZE_RTX (arg->offset),
4330 reg_parm_stack_space,
4331 ARGS_SIZE_RTX (arg->alignment_pad));
4332 }
4333
4335 /* Unless this is a partially-in-register argument, the argument is now
4336 in the stack.
4337
4338 ??? Note that this can change arg->value from arg->stack to
4339 arg->stack_slot and it matters when they are not the same.
4340 It isn't totally clear that this is correct in all cases. */
4341 if (partial == 0)
4342 arg->value = arg->stack_slot;
4343
4344 /* Once we have pushed something, pops can't safely
4345 be deferred during the rest of the arguments. */
4346 NO_DEFER_POP;
4347
4348 /* ANSI doesn't require a sequence point here,
4349 but PCC has one, so this will avoid some problems. */
4350 emit_queue ();
4351
4352 /* Free any temporary slots made in processing this argument. Show
4353 that we might have taken the address of something and pushed that
4354 as an operand. */
4355 preserve_temp_slots (NULL_RTX);
4356 free_temp_slots ();
4357 pop_temp_slots ();
4358 }