/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "regs.h"
#include "insn-flags.h"
#include "toplev.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED  /* If it's last to first */
#endif

#endif
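
/* For illustration (not part of the original source): on a target where
   STACK_GROWS_DOWNWARD is defined but ARGS_GROW_DOWNWARD is not -- the
   common arrangement on machines like the VAX or x86 -- the two `defined'
   tests above differ, so PUSH_ARGS_REVERSED is defined and expand_call
   walks the actual-argument list from last to first, pushing each one.
   If both grow the same way, or the target lacks PUSH_ROUNDING (no push
   insns), arguments are instead processed first to last into a
   preallocated block.  */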

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
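
/* A worked example (illustrative values, not from the source): with
   STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8, STACK_BYTES is 8, so the
   argument-block sizes computed below get rounded up to multiples of
   8 bytes.  */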

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it
     gets; parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Place where this stack area has been saved, if needed.  */
  rtx save_area;
#endif
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
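
/* For illustration (hypothetical values, not from the source): an `int'
   argument passed entirely in a register on a typical RISC target might
   be described by expand_call as

     tree_value    = <the argument's INTEGER expression>
     mode          = SImode (possibly promoted from a narrower mode)
     reg           = (reg:SI 4)   -- the first argument register, say
     partial       = 0            -- the whole arg is in registers
     pass_on_stack = 0

   whereas a large struct passed by value would instead get reg == 0,
   with OFFSET and SIZE describing its slot in the outgoing block.  */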

#ifdef ACCUMULATE_OUTGOING_ARGS
/* A vector of one char per byte of stack space.  A byte is non-zero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
#endif

static int calls_function	PROTO((tree, int));
static int calls_function_1	PROTO((tree, int));
static void emit_call_1		PROTO((rtx, tree, tree, HOST_WIDE_INT,
				       HOST_WIDE_INT, rtx, rtx,
				       int, rtx, int));
static void store_one_arg	PROTO ((struct arg_data *, rtx, int, int,
					tree, int));
\f
/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we only need to return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

static tree calls_function_save_exprs;

static int
calls_function (exp, which)
     tree exp;
     int which;
{
  int val;
  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}
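
/* Usage sketch (drawn from expand_call below): the precompute loop asks

     calls_function (args[i].tree_value, 1)

   to see whether evaluating an argument might call alloca (and so move
   the stack pointer), and

     calls_function (args[i].tree_value, 0)

   to see whether it might call any function at all, which matters once
   outgoing arguments have started to be stored on the stack.  */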

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  register int i;
  enum tree_code code = TREE_CODE (exp);
  int type = TREE_CODE_CLASS (code);
  int length = tree_code_length[(int) code];

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  /* Only expressions and references can contain calls.  */
  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
      && type != 'b')
    return 0;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
	return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL))
	{
	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

	  if ((DECL_BUILT_IN (fndecl)
	       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
	      || (DECL_SAVED_INSNS (fndecl)
		  && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
		      & FUNCTION_FLAGS_CALLS_ALLOCA)))
	    return 1;
	}

      /* Third operand is RTL.  */
      length = 2;
      break;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
	register tree local;

	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;
      }
      {
	register tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      return 0;

    case METHOD_CALL_EXPR:
      length = 3;
      break;

    case WITH_CLEANUP_EXPR:
      length = 1;
      break;

    case RTL_EXPR:
      return 0;

    default:
      break;
    }

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
\f
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   rounded up to STACK_BOUNDARY; zero if the size is variable.
   This is both to put into the call insn and
   to generate explicit popping code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.

   IS_CONST is true if this is a `const' call.  */

static void
emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
	     next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
	     is_const)
     rtx funexp;
     tree fndecl;
     tree funtype;
     HOST_WIDE_INT stack_size;
     HOST_WIDE_INT struct_value_size;
     rtx next_arg_reg;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int is_const;
{
  rtx stack_size_rtx = GEN_INT (stack_size);
  rtx struct_value_size_rtx = GEN_INT (struct_value_size);
  rtx call_insn;
#ifndef ACCUMULATE_OUTGOING_ARGS
  int already_popped = 0;
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#ifndef ACCUMULATE_OUTGOING_ARGS
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  if (HAVE_call_pop && HAVE_call_value_pop
      && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
	  || stack_size == 0))
    {
      rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = gen_call_value_pop (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (gen_call_value (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (is_const)
    CONST_CALL_P (call_insn) = 1;

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

#ifndef ACCUMULATE_OUTGOING_ARGS
  /* If returning from the subroutine does not automatically pop the args,
     we need an instruction to pop them sooner or later.
     Perhaps do it now; perhaps just record how much space to pop later.

     If returning from the subroutine does pop the args, indicate that the
     stack pointer will be changed.  */

  if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
      stack_size_rtx = GEN_INT (stack_size);
    }

  if (stack_size != 0)
    {
      if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
	pending_stack_adjust += stack_size;
      else
	adjust_stack (stack_size_rtx);
    }
#endif
}

/* Generate all the code for a function call
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (exp, target, ignore)
     tree exp;
     rtx target;
     int ignore;
{
  /* List of actual parameters.  */
  tree actparms = TREE_OPERAND (exp, 1);
  /* RTX for the function to be called.  */
  rtx funexp;
  /* Data type of the function.  */
  tree funtype;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  char *name = 0;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;
  /* Count arg position in order args appear.  */
  int argpos;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */

#ifdef PUSH_ROUNDING
  int must_preallocate = 0;
#else
  int must_preallocate = 1;
#endif

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;
  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Nonzero if it is plausible that this is a call to alloca.  */
  int may_be_alloca;
  /* Nonzero if this is a call to malloc or a related function.  */
  int is_malloc;
  /* Nonzero if this is a call to setjmp or a related function.  */
  int returns_twice;
  /* Nonzero if this is a call to `longjmp'.  */
  int is_longjmp;
  /* Nonzero if this is a call to an inline function.  */
  int is_integrable = 0;
  /* Nonzero if this is a call to a `const' function.
     Note that only explicitly named functions are handled as `const' here.  */
  int is_const = 0;
  /* Nonzero if this is a call to a `volatile' function.  */
  int is_volatile = 0;
#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save = -1, high_to_save;
  rtx save_area = 0;		/* Place where it is saved */
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;
  int old_stack_arg_under_construction;
#endif

  rtx old_stack_level = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  register tree p;
  register int i, j;

  /* The value of the function call can be put in a hard register.  But
     if -fcheck-memory-usage, code which invokes functions (and thus
     damages some hard registers) can be inserted before using the value.
     So, target is always a pseudo-register in that case.  */
  if (flag_check_memory_usage)
    target = 0;

  /* See if we can find a DECL-node for the actual function.
     As a result, decide whether this is a call to an integrable function.  */

  p = TREE_OPERAND (exp, 0);
  if (TREE_CODE (p) == ADDR_EXPR)
    {
      fndecl = TREE_OPERAND (p, 0);
      if (TREE_CODE (fndecl) != FUNCTION_DECL)
	fndecl = 0;
      else
	{
	  if (!flag_no_inline
	      && fndecl != current_function_decl
	      && DECL_INLINE (fndecl)
	      && DECL_SAVED_INSNS (fndecl)
	      && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
	    is_integrable = 1;
	  else if (! TREE_ADDRESSABLE (fndecl))
	    {
	      /* In case this function later becomes inlinable,
		 record that there was already a non-inline call to it.

		 Use abstraction instead of setting TREE_ADDRESSABLE
		 directly.  */
	      if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
		  && optimize > 0)
		{
		  warning_with_decl (fndecl, "can't inline call to `%s'");
		  warning ("called from here");
		}
	      mark_addressable (fndecl);
	    }

	  if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
	      && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
	    is_const = 1;

	  if (TREE_THIS_VOLATILE (fndecl))
	    is_volatile = 1;
	}
    }

  /* If we don't have a specific function to call, see if we have a
     constant or `noreturn' function from the type.  */
  if (fndecl == 0)
    {
      is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
      is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
    }

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
#endif

#if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
  if (reg_parm_stack_space > 0)
    must_preallocate = 1;
#endif

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    warning ("function call has aggregate value");

  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp))
    {
      /* This call returns a big structure.  */
      is_const = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = 1;
	/* Easier than making that case work right.  */
	if (is_integrable)
	  {
	    /* In case this is a static function, note that it has been
	       used.  */
	    if (! TREE_ADDRESSABLE (fndecl))
	      mark_addressable (fndecl);
	    is_integrable = 0;
	  }
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	struct_value_size = int_size_in_bytes (TREE_TYPE (exp));

	if (target && GET_CODE (target) == MEM)
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* Assign a temporary to hold the value.  */
	    tree d;

	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */

	    if (struct_value_size < 0)
	      abort ();

	    /* This DECL is just something to feed to mark_addressable;
	       it doesn't get pushed.  */
	    d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
	    DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
	    mark_addressable (d);
	    structure_value_addr = XEXP (DECL_RTL (d), 0);
	    TREE_USED (d) = 1;
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }

  /* If called function is inline, try to integrate it.  */

  if (is_integrable)
    {
      rtx temp;
#ifdef ACCUMULATE_OUTGOING_ARGS
      rtx before_call = get_last_insn ();
#endif

      temp = expand_inline_function (fndecl, actparms, target,
				     ignore, TREE_TYPE (exp),
				     structure_value_addr);

      /* If inlining succeeded, return.  */
      if (temp != (rtx) (HOST_WIDE_INT) -1)
	{
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* If the outgoing argument list must be preserved, push
	     the stack before executing the inlined function if it
	     makes any calls.  */

	  for (i = reg_parm_stack_space - 1; i >= 0; i--)
	    if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
	      break;

	  if (stack_arg_under_construction || i >= 0)
	    {
	      rtx first_insn
		= before_call ? NEXT_INSN (before_call) : get_insns ();
	      rtx insn = NULL_RTX;
	      rtx seq;

	      /* Look for a call in the inline function code.
		 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
		 nonzero then there is a call and it is not necessary
		 to scan the insns.  */

	      if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
		for (insn = first_insn; insn; insn = NEXT_INSN (insn))
		  if (GET_CODE (insn) == CALL_INSN)
		    break;

	      /* INSN is still NULL_RTX if we skipped the scan above, in
		 which case OUTGOING_ARGS_SIZE is known to be nonzero and
		 there is a call.  */
	      if (insn != 0
		  || OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) != 0)
		{
		  /* Reserve enough stack space so that the largest
		     argument list of any function call in the inline
		     function does not overlap the argument list being
		     evaluated.  This is usually an overestimate because
		     allocate_dynamic_stack_space reserves space for an
		     outgoing argument list in addition to the requested
		     space, but there is no way to ask for stack space such
		     that an argument list of a certain length can be
		     safely constructed.

		     Add the stack space reserved for register arguments, if
		     any, in the inline function.  What is really needed is the
		     largest value of reg_parm_stack_space in the inline
		     function, but that is not available.  Using the current
		     value of reg_parm_stack_space is wrong, but gives
		     correct results on all supported machines.  */

		  int adjust = (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl))
				+ reg_parm_stack_space);

		  start_sequence ();
		  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		  allocate_dynamic_stack_space (GEN_INT (adjust),
						NULL_RTX, BITS_PER_UNIT);
		  seq = get_insns ();
		  end_sequence ();
		  emit_insns_before (seq, first_insn);
		  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
		}
	    }
#endif

	  /* If the result is equivalent to TARGET, return TARGET to simplify
	     checks in store_expr.  They can be equivalent but not equal in the
	     case of a function that returns BLKmode.  */
	  if (temp != target && rtx_equal_p (temp, target))
	    return target;
	  return temp;
	}

      /* If inlining failed, mark FNDECL as needing to be compiled
	 separately after all.  If function was declared inline,
	 give a warning.  */
      if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
	  && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
	{
	  warning_with_decl (fndecl, "inlining failed in call to `%s'");
	  warning ("called from here");
	}
      mark_addressable (fndecl);
    }

  /* When calling a const function, we must pop the stack args right away,
     so that the pop is deleted or moved with the call.  */
  if (is_const)
    NO_DEFER_POP;

  function_call_count++;

  if (fndecl && DECL_NAME (fndecl))
    name = IDENTIFIER_POINTER (DECL_NAME (fndecl));

#if 0
  /* Unless it's a call to a specific function that isn't alloca,
     if it has one argument, we must assume it might be alloca.  */

  may_be_alloca
    = (!(fndecl != 0 && strcmp (name, "alloca"))
       && actparms != 0
       && TREE_CHAIN (actparms) == 0);
#else
  /* We assume that alloca will always be called by name.  It
     makes no sense to pass it as a pointer-to-function to
     anything that does not understand its behavior.  */
  may_be_alloca
    = (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
		 && name[0] == 'a'
		 && ! strcmp (name, "alloca"))
		|| (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
		    && name[0] == '_'
		    && ! strcmp (name, "__builtin_alloca"))));
#endif

  /* See if this is a call to a function that can return more than once
     or a call to longjmp.  */

  returns_twice = 0;
  is_longjmp = 0;
  is_malloc = 0;

  if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.  */
      && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
    {
      char *tname = name;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  returns_twice
	    = ((tname[1] == 'e'
		&& (! strcmp (tname, "setjmp")
		    || ! strcmp (tname, "setjmp_syscall")))
	       || (tname[1] == 'i'
		   && ! strcmp (tname, "sigsetjmp"))
	       || (tname[1] == 'a'
		   && ! strcmp (tname, "savectx")));
	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    is_longjmp = 1;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	returns_twice = 1;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	is_longjmp = 1;
      /* XXX should have "malloc" attribute on functions instead
	 of recognizing them by name.  */
      else if (! strcmp (tname, "malloc")
	       || ! strcmp (tname, "calloc")
	       || ! strcmp (tname, "realloc")
	       /* Note use of NAME rather than TNAME here.  These functions
		  are only reserved when preceded with __.  */
	       || ! strcmp (name, "__vn")	/* mangled __builtin_vec_new */
	       || ! strcmp (name, "__nw")	/* mangled __builtin_new */
	       || ! strcmp (name, "__builtin_new")
	       || ! strcmp (name, "__builtin_vec_new"))
	is_malloc = 1;
    }

  if (may_be_alloca)
    current_function_calls_alloca = 1;

  /* Don't let pending stack adjusts add up to too much.
     Also, do all pending adjustments now
     if there is any chance this might be a call to alloca.  */

  if (pending_stack_adjust >= 32
      || (pending_stack_adjust > 0 && may_be_alloca))
    do_pending_stack_adjust ();

  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
  if (TREE_CODE (funtype) != POINTER_TYPE)
    abort ();
  funtype = TREE_TYPE (funtype);

  /* Push the temporary stack slot level so that we can free any temporaries
     we make.  */
  push_temp_slots ();

  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The last argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));

  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  */
  if (structure_value_addr && struct_value_rtx == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (GET_CODE (structure_value_addr) != REG
#ifdef ACCUMULATE_OUTGOING_ARGS
		  || (stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
#endif
		  ? copy_addr_to_reg (structure_value_addr)
		  : structure_value_addr);

      actparms
	= tree_cons (error_mark_node,
		     make_tree (build_pointer_type (TREE_TYPE (funtype)),
				temp),
		     actparms);
      structure_value_addr_parm = 1;
    }

  /* Count the arguments and set NUM_ACTUALS.  */
  for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
  num_actuals = i;

  /* Compute number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is zero,
     this machine will be able to place unnamed args that were passed in
     registers into the stack.  So treat all args as named.  This allows the
     insns emitted for a specific argument list to be independent of the
     function declaration.

     If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
     way to pass unnamed args in registers, so we must force them into
     memory.  */

  if ((STRICT_ARGUMENT_NAMING
#ifndef SETUP_INCOMING_VARARGS
       || 1
#endif
       )
      && TYPE_ARG_TYPES (funtype) != 0)
    n_named_args
      = (list_length (TYPE_ARG_TYPES (funtype))
	 /* Don't include the last named arg.  */
	 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;
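
  /* A worked example (illustrative, not from the source): for a call to
     `int f (int a, int b, ...)' on a target without SETUP_INCOMING_VARARGS
     and with STRICT_ARGUMENT_NAMING zero, list_length (TYPE_ARG_TYPES
     (funtype)) is 2, so n_named_args is 2 - 1 = 1: `b' is treated as
     anonymous and forced to memory along with the true anonymous args,
     where the callee's varargs machinery expects to find them.  */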

  /* Make a vector to hold all the information about each arg.  */
  args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
  bzero ((char *) args, num_actuals * sizeof (struct arg_data));

  args_size.constant = 0;
  args_size.var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

#ifdef PUSH_ARGS_REVERSED
  i = num_actuals - 1, inc = -1;
  /* In this case, must reverse order of args
     so that we compute and push the last arg first.  */
#else
  i = 0, inc = 1;
#endif

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || TYPE_SIZE (type) == 0)
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type),
					      type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one? */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (TYPE_SIZE (type) == 0
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
			  || (TREE_INT_CST_LOW (TYPE_SIZE (type))
			      > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		      old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space (size_rtx,
								    NULL_RTX,
								    TYPE_ALIGN (type)));
		}
	      else
		{
		  int size = int_size_in_bytes (type);
		  copy = assign_stack_temp (TYPE_MODE (type), size, 0);
		}

	      MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);

	      store_expr (args[i].tree_value, copy, 0);
	      is_const = 0;

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;
      args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	is_const = 0;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, &args_size, &args[i].offset,
			     &args[i].size);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;
#endif

      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));
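
      /* A worked example (illustrative values, not from the source): with
	 UNITS_PER_WORD == 4 and PARM_BOUNDARY == 32 bits, an argument with
	 partial == 1 has 4 bytes in registers, and 4 / 4 * 4 == 4 bytes
	 are subtracted from its pushed size.  The division and
	 multiplication round the register portion down to a multiple of
	 the parm boundary before subtracting it.  */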

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size.constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = args_size;

      args[i].slot_offset.constant = -args_size.constant;
      if (args_size.var)
	{
	  SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
	}
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }

#ifdef FINAL_REG_PARM_STACK_SPACE
  reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
						     args_size.var);
#endif

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  original_args_size = args_size;
  if (args_size.var)
    {
      /* If this function requires a variable-sized argument list, don't
	 try to make a cse'able block for this call.  We may be able to do
	 this eventually, but it is too complicated to keep track of what
	 insns go in the cse'able block and which don't.  */

      is_const = 0;
      must_preallocate = 1;

      args_size.var = ARGS_SIZE_TREE (args_size);
      args_size.constant = 0;

#ifdef STACK_BOUNDARY
      if (STACK_BOUNDARY != BITS_PER_UNIT)
	args_size.var = round_up (args_size.var, STACK_BYTES);
#endif

      if (reg_parm_stack_space > 0)
	{
	  args_size.var
	    = size_binop (MAX_EXPR, args_size.var,
			  size_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size.var
	    = size_binop (MINUS_EXPR, args_size.var,
			  size_int (reg_parm_stack_space));
#endif
	}
    }
  else
    {
#ifdef STACK_BOUNDARY
      args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
			     / STACK_BYTES) * STACK_BYTES);
#endif
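
      /* A worked example (illustrative values, not from the source):
	 with STACK_BYTES == 8 and args_size.constant == 13, the
	 expression above yields ((13 + 7) / 8) * 8 == 16, rounding the
	 constant-sized argument block up to the next multiple of the
	 stack boundary.  */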

      args_size.constant = MAX (args_size.constant,
				reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size.constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size.constant -= reg_parm_stack_space;
#endif
    }

  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size.constant
	  && args_size.constant > 0)
	must_preallocate = 1;
    }

  /* If the structure value address will reference the stack pointer, we must
     stabilize it.  We don't need to do this if we know that we are not going
     to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
      && (args_size.var
#ifndef ACCUMULATE_OUTGOING_ARGS
	  || args_size.constant
#endif
	  ))
    structure_value_addr = copy_to_reg (structure_value_addr);

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  */

  for (i = 0; i < num_actuals; i++)
    if (is_const
	|| ((args_size.var != 0 || args_size.constant != 0)
	    && calls_function (args[i].tree_value, 1))
	|| (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
	    && calls_function (args[i].tree_value, 0)))
      {
	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	push_temp_slots ();

	args[i].initial_value = args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	preserve_temp_slots (args[i].value);
	pop_temp_slots ();

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].initial_value, 0);

	if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);
      }

  /* Now we are about to start emitting insns that can be deleted
     if a libcall is deleted.  */
  if (is_const || is_malloc)
    start_sequence ();

  /* If we have no actual push instructions, or shouldn't use them,
     make space for all args right now.  */

  if (args_size.var != 0)
    {
      if (old_stack_level == 0)
	{
	  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	  old_pending_adj = pending_stack_adjust;
	  pending_stack_adjust = 0;
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* stack_arg_under_construction says whether a stack arg is
	     being constructed at the old stack level.  Pushing the stack
	     gets a clean outgoing argument block.  */
	  old_stack_arg_under_construction = stack_arg_under_construction;
	  stack_arg_under_construction = 0;
#endif
	}
      argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
    }
  else
    {
      /* Note that we must go through the motions of allocating an argument
	 block even if the size is zero because we may be storing args
	 in the area reserved for register arguments, which may be part of
	 the stack frame.  */

      int needed = args_size.constant;

      /* Store the maximum argument space used.  It will be pushed by
	 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
	 checking).  */

      if (needed > current_function_outgoing_args_size)
	current_function_outgoing_args_size = needed;

      if (must_preallocate)
	{
#ifdef ACCUMULATE_OUTGOING_ARGS
	  /* Since the stack pointer will never be pushed, it is possible for
	     the evaluation of a parm to clobber something we have already
	     written to the stack.  Since most function calls on RISC machines
	     do not use the stack, this is uncommon, but must work correctly.

	     Therefore, we save any area of the stack that was already written
	     and that we are using.  Here we set up to do this by making a new
	     stack usage map from the old one.  The actual save will be done
	     by store_one_arg.

	     Another approach might be to try to reorder the argument
	     evaluations to avoid this conflicting stack usage.  */

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* Since we will be writing into the entire argument area, the
	     map must be allocated for its entire size, not just the part that
	     is the responsibility of the caller.  */
	  needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
	  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					     needed + 1);
#else
	  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					     needed);
#endif
	  stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);

	  if (initial_highest_arg_in_use)
	    bcopy (initial_stack_usage_map, stack_usage_map,
		   initial_highest_arg_in_use);

	  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	    bzero (&stack_usage_map[initial_highest_arg_in_use],
		   highest_outgoing_arg_in_use - initial_highest_arg_in_use);
	  needed = 0;

	  /* The address of the outgoing argument list must not be copied to a
	     register here, because argblock would be left pointing to the
	     wrong place after the call to allocate_dynamic_stack_space
	     below.  */

	  argblock = virtual_outgoing_args_rtx;

#else /* not ACCUMULATE_OUTGOING_ARGS */
	  if (inhibit_defer_pop == 0)
	    {
	      /* Try to reuse some or all of the pending_stack_adjust
		 to get this space.  Maybe we can avoid any pushing.  */
	      if (needed > pending_stack_adjust)
		{
		  needed -= pending_stack_adjust;
		  pending_stack_adjust = 0;
		}
	      else
		{
		  pending_stack_adjust -= needed;
		  needed = 0;
		}
	    }
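
	  /* A worked example (illustrative values, not from the source):
	     if needed == 16 and pending_stack_adjust == 24, a deferred pop
	     has already left 24 bytes on the stack; 16 of them are reused
	     for this argument block, pending_stack_adjust drops to 8, and
	     needed drops to 0, so no push_block call is required.  */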
1497 /* Special case this because overhead of `push_block' in this
1498 case is non-trivial. */
1499 if (needed == 0)
1500 argblock = virtual_outgoing_args_rtx;
1501 else
1502 argblock = push_block (GEN_INT (needed), 0, 0);
1503
1504 /* We only really need to call `copy_to_reg' in the case where push
1505 insns are going to be used to pass ARGBLOCK to a function
1506 call in ARGS. In that case, the stack pointer changes value
1507 from the allocation point to the call point, and hence
1508 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1509 But might as well always do it. */
1510 argblock = copy_to_reg (argblock);
1511 #endif /* not ACCUMULATE_OUTGOING_ARGS */
1512 }
1513 }
1514
1515 #ifdef ACCUMULATE_OUTGOING_ARGS
1516 /* The save/restore code in store_one_arg handles all cases except one:
1517 a constructor call (including a C function returning a BLKmode struct)
1518 to initialize an argument. */
1519 if (stack_arg_under_construction)
1520 {
1521 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1522 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
1523 #else
1524 rtx push_size = GEN_INT (args_size.constant);
1525 #endif
1526 if (old_stack_level == 0)
1527 {
1528 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1529 old_pending_adj = pending_stack_adjust;
1530 pending_stack_adjust = 0;
1531 /* stack_arg_under_construction says whether a stack arg is
1532 being constructed at the old stack level. Pushing the stack
1533 gets a clean outgoing argument block. */
1534 old_stack_arg_under_construction = stack_arg_under_construction;
1535 stack_arg_under_construction = 0;
1536 /* Make a new map for the new argument list. */
1537 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
1538 bzero (stack_usage_map, highest_outgoing_arg_in_use);
1539 highest_outgoing_arg_in_use = 0;
1540 }
1541 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
1542 }
1543 /* If argument evaluation might modify the stack pointer, copy the
1544 address of the argument list to a register. */
1545 for (i = 0; i < num_actuals; i++)
1546 if (args[i].pass_on_stack)
1547 {
1548 argblock = copy_addr_to_reg (argblock);
1549 break;
1550 }
1551 #endif
1552
1553
1554 /* If we preallocated stack space, compute the address of each argument.
1555 We need not ensure it is a valid memory address here; it will be
1556 validized when it is used. */
1557 if (argblock)
1558 {
1559 rtx arg_reg = argblock;
1560 int arg_offset = 0;
1561
1562 if (GET_CODE (argblock) == PLUS)
1563 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1564
1565 for (i = 0; i < num_actuals; i++)
1566 {
1567 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1568 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1569 rtx addr;
1570
1571 /* Skip this parm if it will not be passed on the stack. */
1572 if (! args[i].pass_on_stack && args[i].reg != 0)
1573 continue;
1574
1575 if (GET_CODE (offset) == CONST_INT)
1576 addr = plus_constant (arg_reg, INTVAL (offset));
1577 else
1578 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1579
1580 addr = plus_constant (addr, arg_offset);
1581 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1582 MEM_IN_STRUCT_P (args[i].stack)
1583 = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));
1584
1585 if (GET_CODE (slot_offset) == CONST_INT)
1586 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1587 else
1588 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1589
1590 addr = plus_constant (addr, arg_offset);
1591 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1592 }
1593 }
1594
1595 #ifdef PUSH_ARGS_REVERSED
1596 #ifdef STACK_BOUNDARY
1597 /* If we push args individually in reverse order, perform stack alignment
1598 before the first push (the last arg). */
1599 if (argblock == 0)
1600 anti_adjust_stack (GEN_INT (args_size.constant
1601 - original_args_size.constant));
1602 #endif
1603 #endif
1604
1605 /* Don't try to defer pops if preallocating, not even from the first arg,
1606 since ARGBLOCK probably refers to the SP. */
1607 if (argblock)
1608 NO_DEFER_POP;
1609
1610 /* Get the function to call, in the form of RTL. */
1611 if (fndecl)
1612 {
1613 /* If this is the first use of the function, see if we need to
1614 make an external definition for it. */
1615 if (! TREE_USED (fndecl))
1616 {
1617 assemble_external (fndecl);
1618 TREE_USED (fndecl) = 1;
1619 }
1620
1621 /* Get a SYMBOL_REF rtx for the function address. */
1622 funexp = XEXP (DECL_RTL (fndecl), 0);
1623 }
1624 else
1625 /* Generate an rtx (probably a pseudo-register) for the address. */
1626 {
1627 push_temp_slots ();
1628 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1629 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1630
1631 /* Check the function is executable. */
1632 if (flag_check_memory_usage)
1633 emit_library_call (chkr_check_exec_libfunc, 1,
1634 VOIDmode, 1,
1635 funexp, ptr_mode);
1636 emit_queue ();
1637 }
1638
1639 /* Figure out the register where the value, if any, will come back. */
1640 valreg = 0;
1641 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1642 && ! structure_value_addr)
1643 {
1644 if (pcc_struct_value)
1645 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1646 fndecl);
1647 else
1648 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1649 }
1650
1651 /* Precompute all register parameters. It isn't safe to compute anything
1652 once we have started filling any specific hard regs. */
1653 reg_parm_seen = 0;
1654 for (i = 0; i < num_actuals; i++)
1655 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1656 {
1657 reg_parm_seen = 1;
1658
1659 if (args[i].value == 0)
1660 {
1661 push_temp_slots ();
1662 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1663 VOIDmode, 0);
1664 preserve_temp_slots (args[i].value);
1665 pop_temp_slots ();
1666
1667 /* ANSI doesn't require a sequence point here,
1668 but PCC has one, so this will avoid some problems. */
1669 emit_queue ();
1670 }
1671
1672 /* If we are to promote the function arg to a wider mode,
1673 do it now. */
1674
1675 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
1676 args[i].value
1677 = convert_modes (args[i].mode,
1678 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1679 args[i].value, args[i].unsignedp);
1680
1681 /* If the value is expensive, and we are inside an appropriately
1682 short loop, put the value into a pseudo and then put the pseudo
1683 into the hard reg.
1684
1685 For small register classes, also do this if this call uses
1686 register parameters. This is to avoid reload conflicts while
1687 loading the parameter registers. */
1688
1689 if ((! (GET_CODE (args[i].value) == REG
1690 || (GET_CODE (args[i].value) == SUBREG
1691 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
1692 && args[i].mode != BLKmode
1693 && rtx_cost (args[i].value, SET) > 2
1694 && ((SMALL_REGISTER_CLASSES && reg_parm_seen)
1695 || preserve_subexpressions_p ()))
1696 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1697 }
1698
1699 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1700
1701 /* The argument list is the property of the called routine, which
1702 may clobber it. If the fixed area has been used for previous
1703 parameters, we must save and restore it.
1704
1705 Here we compute the boundary of the area that needs to be saved, if any. */
1706
1707 #ifdef ARGS_GROW_DOWNWARD
1708 for (i = 0; i < reg_parm_stack_space + 1; i++)
1709 #else
1710 for (i = 0; i < reg_parm_stack_space; i++)
1711 #endif
1712 {
1713 if (i >= highest_outgoing_arg_in_use
1714 || stack_usage_map[i] == 0)
1715 continue;
1716
1717 if (low_to_save == -1)
1718 low_to_save = i;
1719
1720 high_to_save = i;
1721 }
1722
1723 if (low_to_save >= 0)
1724 {
1725 int num_to_save = high_to_save - low_to_save + 1;
1726 enum machine_mode save_mode
1727 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1728 rtx stack_area;
1729
1730 /* If we don't have the required alignment, we must do this in BLKmode. */
1731 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1732 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1733 save_mode = BLKmode;
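/* For instance, if the MIN above works out to 4 and LOW_TO_SAVE is 2,
then 2 & 3 is nonzero, so the save must be done as a BLKmode block
move rather than as one aligned load. */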
1734
1735 #ifdef ARGS_GROW_DOWNWARD
1736 stack_area = gen_rtx_MEM (save_mode,
1737 memory_address (save_mode,
1738 plus_constant (argblock,
1739 - high_to_save)));
1740 #else
1741 stack_area = gen_rtx_MEM (save_mode,
1742 memory_address (save_mode,
1743 plus_constant (argblock,
1744 low_to_save)));
1745 #endif
1746 if (save_mode == BLKmode)
1747 {
1748 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
1749 MEM_IN_STRUCT_P (save_area) = 0;
1750 emit_block_move (validize_mem (save_area), stack_area,
1751 GEN_INT (num_to_save),
1752 PARM_BOUNDARY / BITS_PER_UNIT);
1753 }
1754 else
1755 {
1756 save_area = gen_reg_rtx (save_mode);
1757 emit_move_insn (save_area, stack_area);
1758 }
1759 }
1760 #endif
1761
1762
1763 /* Now store (and compute if necessary) all non-register parms.
1764 These come before register parms, since they can require block-moves,
1765 which could clobber the registers used for register parms.
1766 Parms that are passed partially in registers are not stored here,
1767 but we do preallocate space for them here if they need it. */
1768
1769 for (i = 0; i < num_actuals; i++)
1770 if (args[i].reg == 0 || args[i].pass_on_stack)
1771 store_one_arg (&args[i], argblock, may_be_alloca,
1772 args_size.var != 0, fndecl, reg_parm_stack_space);
1773
1774 /* If we have a parm that is passed in registers but not in memory
1775 and whose alignment does not permit a direct copy into registers,
1776 make a group of pseudos that correspond to each register that we
1777 will later fill. */
1778
1779 if (STRICT_ALIGNMENT)
1780 for (i = 0; i < num_actuals; i++)
1781 if (args[i].reg != 0 && ! args[i].pass_on_stack
1782 && args[i].mode == BLKmode
1783 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1784 < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1785 {
1786 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1787 int big_endian_correction = 0;
1788
1789 args[i].n_aligned_regs
1790 = args[i].partial ? args[i].partial
1791 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
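/* For instance, a 6-byte structure with UNITS_PER_WORD == 4 and no
partial registers needs (6 + 3) / 4 == 2 word-sized pseudos. */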
1792
1793 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1794 * args[i].n_aligned_regs);
1795
1796 /* Structures smaller than a word are aligned to the least
1797 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1798 this means we must skip the empty high order bytes when
1799 calculating the bit offset. */
1800 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1801 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
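/* For instance, with BITS_PER_WORD == 32, a 3-byte structure leaves one
empty high-order byte, so the correction is 32 - 24 == 8 and the
stores below begin 8 bits into the register. */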
1802
1803 for (j = 0; j < args[i].n_aligned_regs; j++)
1804 {
1805 rtx reg = gen_reg_rtx (word_mode);
1806 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1807 int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1808 int bitpos;
1809
1810 args[i].aligned_regs[j] = reg;
1811
1812 /* Clobber REG and move each partword into it. Ensure we don't
1813 go past the end of the structure. Note that the loop below
1814 works because we've already verified that padding
1815 and endianness are compatible.
1816
1817 We used to emit a clobber here, but that doesn't let later
1818 passes optimize the instructions we emit. By storing 0 into
1819 the register, later passes know that the first AND to zero out the
1820 bitfield being set in the register is unnecessary. The store
1821 of 0 will be deleted, as will at least the first AND. */
1822
1823 emit_move_insn (reg, const0_rtx);
1824
1825 for (bitpos = 0;
1826 bitpos < BITS_PER_WORD && bytes > 0;
1827 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1828 {
1829 int xbitpos = bitpos + big_endian_correction;
1830
1831 store_bit_field (reg, bitsize, xbitpos, word_mode,
1832 extract_bit_field (word, bitsize, bitpos, 1,
1833 NULL_RTX, word_mode,
1834 word_mode,
1835 bitsize / BITS_PER_UNIT,
1836 BITS_PER_WORD),
1837 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1838 }
1839 }
1840 }
1841
1842 /* Now store any partially-in-registers parm.
1843 This is the last place a block-move can happen. */
1844 if (reg_parm_seen)
1845 for (i = 0; i < num_actuals; i++)
1846 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1847 store_one_arg (&args[i], argblock, may_be_alloca,
1848 args_size.var != 0, fndecl, reg_parm_stack_space);
1849
1850 #ifndef PUSH_ARGS_REVERSED
1851 #ifdef STACK_BOUNDARY
1852 /* If we pushed args in forward order, perform stack alignment
1853 after pushing the last arg. */
1854 if (argblock == 0)
1855 anti_adjust_stack (GEN_INT (args_size.constant
1856 - original_args_size.constant));
1857 #endif
1858 #endif
1859
1860 /* If register arguments require space on the stack and stack space
1861 was not preallocated, allocate stack space here for arguments
1862 passed in registers. */
1863 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
1864 if (must_preallocate == 0 && reg_parm_stack_space > 0)
1865 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
1866 #endif
1867
1868 /* Pass the function the address in which to return a structure value. */
1869 if (structure_value_addr && ! structure_value_addr_parm)
1870 {
1871 emit_move_insn (struct_value_rtx,
1872 force_reg (Pmode,
1873 force_operand (structure_value_addr,
1874 NULL_RTX)));
1875
1876 /* Mark the memory for the aggregate as write-only. */
1877 if (flag_check_memory_usage)
1878 emit_library_call (chkr_set_right_libfunc, 1,
1879 VOIDmode, 3,
1880 structure_value_addr, ptr_mode,
1881 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
1882 GEN_INT (MEMORY_USE_WO),
1883 TYPE_MODE (integer_type_node));
1884
1885 if (GET_CODE (struct_value_rtx) == REG)
1886 use_reg (&call_fusage, struct_value_rtx);
1887 }
1888
1889 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
1890
1891 /* Now do the register loads required for any wholly-register parms or any
1892 parms which are passed both on the stack and in a register. Their
1893 expressions were already evaluated.
1894
1895 Mark all register-parms as living through the call, putting these USE
1896 insns in the CALL_INSN_FUNCTION_USAGE field. */
1897
1898 #ifdef LOAD_ARGS_REVERSED
1899 for (i = num_actuals - 1; i >= 0; i--)
1900 #else
1901 for (i = 0; i < num_actuals; i++)
1902 #endif
1903 {
1904 rtx reg = args[i].reg;
1905 int partial = args[i].partial;
1906 int nregs;
1907
1908 if (reg)
1909 {
1910 /* Set to non-negative if we must move a word at a time, even if just
1911 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1912 we just use a normal move insn. This value can be zero if the
1913 argument is a zero-size structure with no fields. */
1914 nregs = (partial ? partial
1915 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1916 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1917 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1918 : -1));
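/* For instance, a 10-byte BLKmode argument with UNITS_PER_WORD == 4
gives NREGS == 3, while a DFmode argument passed entirely in registers
gives NREGS == -1 and is handled by a single move insn. */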
1919
1920 /* Handle calls that pass values in multiple non-contiguous
1921 locations. The Irix 6 ABI has examples of this. */
1922
1923 if (GET_CODE (reg) == PARALLEL)
1924 emit_group_load (reg, args[i].value);
1925
1926 /* In the simple case, just do the move. If normal partial, store_one_arg
1927 has already loaded the register for us. In all other cases,
1928 load the register(s) from memory. */
1929
1930 else if (nregs == -1)
1931 emit_move_insn (reg, args[i].value);
1932
1933 /* If we have pre-computed the values to put in the registers in
1934 the case of non-aligned structures, copy them in now. */
1935
1936 else if (args[i].n_aligned_regs != 0)
1937 for (j = 0; j < args[i].n_aligned_regs; j++)
1938 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1939 args[i].aligned_regs[j]);
1940
1941 else if (partial == 0 || args[i].pass_on_stack)
1942 move_block_to_reg (REGNO (reg),
1943 validize_mem (args[i].value), nregs,
1944 args[i].mode);
1945
1946 /* Handle calls that pass values in multiple non-contiguous
1947 locations. The Irix 6 ABI has examples of this. */
1948 if (GET_CODE (reg) == PARALLEL)
1949 use_group_regs (&call_fusage, reg);
1950 else if (nregs == -1)
1951 use_reg (&call_fusage, reg);
1952 else
1953 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
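/* NREGS == 0 (a zero-size structure) still marks one register as used,
since REG itself was assigned to the argument. */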
1954 }
1955 }
1956
1957 /* Perform postincrements before actually calling the function. */
1958 emit_queue ();
1959
1960 /* All arguments and registers used for the call must be set up by now! */
1961
1962 /* Generate the actual call instruction. */
1963 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
1964 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
1965 valreg, old_inhibit_defer_pop, call_fusage, is_const);
1966
1967 /* If call is cse'able, make appropriate pair of reg-notes around it.
1968 Test valreg so we don't crash; may safely ignore `const'
1969 if return type is void. Disable for PARALLEL return values, because
1970 we have no way to move such values into a pseudo register. */
1971 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
1972 {
1973 rtx note = 0;
1974 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1975 rtx insns;
1976
1977 /* Mark the return value as a pointer if needed. */
1978 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
1979 {
1980 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
1981 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
1982 }
1983
1984 /* Construct an "equal form" for the value which mentions all the
1985 arguments in order as well as the function name. */
1986 #ifdef PUSH_ARGS_REVERSED
1987 for (i = 0; i < num_actuals; i++)
1988 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
1989 #else
1990 for (i = num_actuals - 1; i >= 0; i--)
1991 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
1992 #endif
1993 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
1994
1995 insns = get_insns ();
1996 end_sequence ();
1997
1998 emit_libcall_block (insns, temp, valreg, note);
1999
2000 valreg = temp;
2001 }
2002 else if (is_const)
2003 {
2004 /* Otherwise, just write out the sequence without a note. */
2005 rtx insns = get_insns ();
2006
2007 end_sequence ();
2008 emit_insns (insns);
2009 }
2010 else if (is_malloc)
2011 {
2012 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2013 rtx last, insns;
2014
2015 /* The return value from a malloc-like function is a pointer. */
2016 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2017 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2018
2019 emit_move_insn (temp, valreg);
2020
2021 /* The return value from a malloc-like function cannot alias
2022 anything else. */
2023 last = get_last_insn ();
2024 REG_NOTES (last) =
2025 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2026
2027 /* Write out the sequence. */
2028 insns = get_insns ();
2029 end_sequence ();
2030 emit_insns (insns);
2031 valreg = temp;
2032 }
2033
2034 /* For calls to `setjmp', etc., inform flow.c it should complain
2035 if nonvolatile values are live. */
2036
2037 if (returns_twice)
2038 {
2039 emit_note (name, NOTE_INSN_SETJMP);
2040 current_function_calls_setjmp = 1;
2041 }
2042
2043 if (is_longjmp)
2044 current_function_calls_longjmp = 1;
2045
2046 /* Notice functions that cannot return.
2047 If optimizing, insns emitted below will be dead.
2048 If not optimizing, they will exist, which is useful
2049 if the user uses the `return' command in the debugger. */
2050
2051 if (is_volatile || is_longjmp)
2052 emit_barrier ();
2053
2054 /* If value type not void, return an rtx for the value. */
2055
2056 /* If there are cleanups to be called, don't use a hard reg as target.
2057 We need to double-check this and see if it matters anymore. */
2058 if (any_pending_cleanups (1)
2059 && target && REG_P (target)
2060 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2061 target = 0;
2062
2063 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2064 || ignore)
2065 {
2066 target = const0_rtx;
2067 }
2068 else if (structure_value_addr)
2069 {
2070 if (target == 0 || GET_CODE (target) != MEM)
2071 {
2072 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2073 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2074 structure_value_addr));
2075 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2076 }
2077 }
2078 else if (pcc_struct_value)
2079 {
2080 /* This is the special C++ case where we need to
2081 know what the true target was. We take care to
2082 never use this value more than once in one expression. */
2083 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2084 copy_to_reg (valreg));
2085 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2086 }
2087 /* Handle calls that return values in multiple non-contiguous locations.
2088 The Irix 6 ABI has examples of this. */
2089 else if (GET_CODE (valreg) == PARALLEL)
2090 {
2091 if (target == 0)
2092 {
2093 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2094 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2095 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2096 preserve_temp_slots (target);
2097 }
2098
2099 emit_group_store (target, valreg);
2100 }
2101 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2102 && GET_MODE (target) == GET_MODE (valreg))
2103 /* TARGET and VALREG cannot be equal at this point because the latter
2104 would not have REG_FUNCTION_VALUE_P true, while the former would if
2105 it were referring to the same register.
2106
2107 If they refer to the same register, this move will be a no-op, except
2108 when function inlining is being done. */
2109 emit_move_insn (target, valreg);
2110 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2111 {
2112 /* Some machines (the PA for example) want to return all small
2113 structures in registers regardless of the structure's alignment.
2114
2115 Deal with them explicitly by copying from the return registers
2116 into the target MEM locations. */
2117 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2118 rtx src, dst;
2119 int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (exp)), BITS_PER_WORD);
2120 int bitpos, xbitpos, big_endian_correction = 0;
2121
2122 if (target == 0)
2123 {
2124 target = assign_stack_temp (BLKmode, bytes, 0);
2125 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2126 preserve_temp_slots (target);
2127 }
2128
2129 /* This code assumes valreg is at least a full word. If it isn't,
2130 copy it into a new pseudo which is a full word. */
2131 if (GET_MODE (valreg) != BLKmode
2132 && GET_MODE_SIZE (GET_MODE (valreg)) < UNITS_PER_WORD)
2133 valreg = convert_to_mode (word_mode, valreg,
2134 TREE_UNSIGNED (TREE_TYPE (exp)));
2135
2136 /* Structures whose size is not a multiple of a word are aligned
2137 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2138 machine, this means we must skip the empty high order bytes when
2139 calculating the bit offset. */
2140 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2141 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2142 * BITS_PER_UNIT));
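/* For instance, a 6-byte value with UNITS_PER_WORD == 4 and
BITS_PER_WORD == 32 leaves 6 % 4 == 2 bytes in its final word, so the
correction is 32 - 16 == 16. */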
2143
2144 /* Copy the structure BITSIZE bits at a time.
2145
2146 We could probably emit more efficient code for machines
2147 which do not use strict alignment, but it doesn't seem
2148 worth the effort at the current time. */
2149 for (bitpos = 0, xbitpos = big_endian_correction;
2150 bitpos < bytes * BITS_PER_UNIT;
2151 bitpos += bitsize, xbitpos += bitsize)
2152 {
2153
2154 /* We need a new source operand each time xbitpos is on a
2155 word boundary and when xbitpos == big_endian_correction
2156 (the first time through). */
2157 if (xbitpos % BITS_PER_WORD == 0
2158 || xbitpos == big_endian_correction)
2159 src = operand_subword_force (valreg,
2160 xbitpos / BITS_PER_WORD,
2161 BLKmode);
2162
2163 /* We need a new destination operand each time bitpos is on
2164 a word boundary. */
2165 if (bitpos % BITS_PER_WORD == 0)
2166 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, BLKmode);
2167
2168 /* Use xbitpos for the source extraction (right justified) and
2169 bitpos for the destination store (left justified). */
2170 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2171 extract_bit_field (src, bitsize,
2172 xbitpos % BITS_PER_WORD, 1,
2173 NULL_RTX, word_mode,
2174 word_mode,
2175 bitsize / BITS_PER_UNIT,
2176 BITS_PER_WORD),
2177 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2178 }
2179 }
2180 else
2181 target = copy_to_reg (valreg);
2182
2183 #ifdef PROMOTE_FUNCTION_RETURN
2184 /* If we promoted this return value, make the proper SUBREG. TARGET
2185 might be const0_rtx here, so be careful. */
2186 if (GET_CODE (target) == REG
2187 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2188 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2189 {
2190 tree type = TREE_TYPE (exp);
2191 int unsignedp = TREE_UNSIGNED (type);
2192
2193 /* If we don't promote as expected, something is wrong. */
2194 if (GET_MODE (target)
2195 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2196 abort ();
2197
2198 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2199 SUBREG_PROMOTED_VAR_P (target) = 1;
2200 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2201 }
2202 #endif
2203
2204 /* If size of args is variable or this was a constructor call for a stack
2205 argument, restore saved stack-pointer value. */
2206
2207 if (old_stack_level)
2208 {
2209 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2210 pending_stack_adjust = old_pending_adj;
2211 #ifdef ACCUMULATE_OUTGOING_ARGS
2212 stack_arg_under_construction = old_stack_arg_under_construction;
2213 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2214 stack_usage_map = initial_stack_usage_map;
2215 #endif
2216 }
2217 #ifdef ACCUMULATE_OUTGOING_ARGS
2218 else
2219 {
2220 #ifdef REG_PARM_STACK_SPACE
2221 if (save_area)
2222 {
2223 enum machine_mode save_mode = GET_MODE (save_area);
2224 #ifdef ARGS_GROW_DOWNWARD
2225 rtx stack_area
2226 = gen_rtx_MEM (save_mode,
2227 memory_address (save_mode,
2228 plus_constant (argblock,
2229 - high_to_save)));
2230 #else
2231 rtx stack_area
2232 = gen_rtx_MEM (save_mode,
2233 memory_address (save_mode,
2234 plus_constant (argblock,
2235 low_to_save)));
2236 #endif
2237
2238 if (save_mode != BLKmode)
2239 emit_move_insn (stack_area, save_area);
2240 else
2241 emit_block_move (stack_area, validize_mem (save_area),
2242 GEN_INT (high_to_save - low_to_save + 1),
2243 PARM_BOUNDARY / BITS_PER_UNIT);
2244 }
2245 #endif
2246
2247 /* If we saved any argument areas, restore them. */
2248 for (i = 0; i < num_actuals; i++)
2249 if (args[i].save_area)
2250 {
2251 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2252 rtx stack_area
2253 = gen_rtx_MEM (save_mode,
2254 memory_address (save_mode,
2255 XEXP (args[i].stack_slot, 0)));
2256
2257 if (save_mode != BLKmode)
2258 emit_move_insn (stack_area, args[i].save_area);
2259 else
2260 emit_block_move (stack_area, validize_mem (args[i].save_area),
2261 GEN_INT (args[i].size.constant),
2262 PARM_BOUNDARY / BITS_PER_UNIT);
2263 }
2264
2265 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2266 stack_usage_map = initial_stack_usage_map;
2267 }
2268 #endif
2269
2270 /* If this was alloca, record the new stack level for nonlocal gotos.
2271 Check for the handler slots since we might not have a save area
2272 for non-local gotos. */
2273
2274 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
2275 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2276
2277 pop_temp_slots ();
2278
2279 return target;
2280 }
2281 \f
2282 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2283 (emitting the queue unless NO_QUEUE is nonzero),
2284 for a value of mode OUTMODE,
2285 with NARGS different arguments, passed as alternating rtx values
2286 and machine_modes to convert them to.
2287 The rtx values should have been passed through protect_from_queue already.
2288
2289 NO_QUEUE will be true if and only if the library call is a `const' call
2290 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2291 to the variable is_const in expand_call.
2292
2293 NO_QUEUE must be true for const calls, because if it isn't, then
2294 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2295 and will be lost if the libcall sequence is optimized away.
2296
2297 NO_QUEUE must be false for non-const calls, because if it isn't, the
2298 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2299 optimized. For instance, the instruction scheduler may incorrectly
2300 move memory references across the non-const call. */
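/* For example, the checker call made earlier in this file passes one
pointer argument:
emit_library_call (chkr_check_exec_libfunc, 1, VOIDmode, 1,
funexp, ptr_mode);
i.e. FUN, NO_QUEUE, OUTMODE and NARGS, followed by NARGS pairs of
(rtx value, machine_mode). */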
2301
2302 void
2303 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2304 int nargs, ...))
2305 {
2306 #ifndef __STDC__
2307 rtx orgfun;
2308 int no_queue;
2309 enum machine_mode outmode;
2310 int nargs;
2311 #endif
2312 va_list p;
2313 /* Total size in bytes of all the stack-parms scanned so far. */
2314 struct args_size args_size;
2315 /* Size of arguments before any adjustments (such as rounding). */
2316 struct args_size original_args_size;
2317 register int argnum;
2318 rtx fun;
2319 int inc;
2320 int count;
2321 rtx argblock = 0;
2322 CUMULATIVE_ARGS args_so_far;
2323 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2324 struct args_size offset; struct args_size size; rtx save_area; };
2325 struct arg *argvec;
2326 int old_inhibit_defer_pop = inhibit_defer_pop;
2327 rtx call_fusage = 0;
2328 int reg_parm_stack_space = 0;
2329 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2330 /* Define the boundary of the register parm stack space that needs to be
2331 saved, if any. */
2332 int low_to_save = -1, high_to_save;
2333 rtx save_area = 0; /* Place that it is saved */
2334 #endif
2335
2336 #ifdef ACCUMULATE_OUTGOING_ARGS
2337 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2338 char *initial_stack_usage_map = stack_usage_map;
2339 int needed;
2340 #endif
2341
2342 #ifdef REG_PARM_STACK_SPACE
2343 /* Size of the stack reserved for parameter registers. */
2344 #ifdef MAYBE_REG_PARM_STACK_SPACE
2345 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2346 #else
2347 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2348 #endif
2349 #endif
2350
2351 VA_START (p, nargs);
2352
2353 #ifndef __STDC__
2354 orgfun = va_arg (p, rtx);
2355 no_queue = va_arg (p, int);
2356 outmode = va_arg (p, enum machine_mode);
2357 nargs = va_arg (p, int);
2358 #endif
2359
2360 fun = orgfun;
2361
2362 /* Copy all the libcall-arguments out of the varargs data
2363 and into a vector ARGVEC.
2364
2365 Compute how to pass each argument. We only support a very small subset
2366 of the full argument passing conventions to limit complexity here since
2367 library functions shouldn't have many args. */
2368
2369 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2370 bzero ((char *) argvec, nargs * sizeof (struct arg));
2371
2372
2373 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2374
2375 args_size.constant = 0;
2376 args_size.var = 0;
2377
2378 push_temp_slots ();
2379
2380 for (count = 0; count < nargs; count++)
2381 {
2382 rtx val = va_arg (p, rtx);
2383 enum machine_mode mode = va_arg (p, enum machine_mode);
2384
2385 /* We cannot convert the arg value to the mode the library wants here;
2386 must do it earlier where we know the signedness of the arg. */
2387 if (mode == BLKmode
2388 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2389 abort ();
2390
2391 /* On some machines, there's no way to pass a float to a library fcn.
2392 Pass it as a double instead. */
2393 #ifdef LIBGCC_NEEDS_DOUBLE
2394 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2395 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2396 #endif
2397
2398 /* There's no need to call protect_from_queue, because
2399 either emit_move_insn or emit_push_insn will do that. */
2400
2401 /* Make sure it is a reasonable operand for a move or push insn. */
2402 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2403 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2404 val = force_operand (val, NULL_RTX);
2405
2406 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2407 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2408 {
2409 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2410 be viewed as just an efficiency improvement. */
2411 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2412 emit_move_insn (slot, val);
2413 val = force_operand (XEXP (slot, 0), NULL_RTX);
2414 mode = Pmode;
2415 }
2416 #endif
2417
2418 argvec[count].value = val;
2419 argvec[count].mode = mode;
2420
2421 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2422 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2423 abort ();
2424 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2425 argvec[count].partial
2426 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2427 #else
2428 argvec[count].partial = 0;
2429 #endif
2430
2431 locate_and_pad_parm (mode, NULL_TREE,
2432 argvec[count].reg && argvec[count].partial == 0,
2433 NULL_TREE, &args_size, &argvec[count].offset,
2434 &argvec[count].size);
2435
2436 if (argvec[count].size.var)
2437 abort ();
2438
2439 if (reg_parm_stack_space == 0 && argvec[count].partial)
2440 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2441
2442 if (argvec[count].reg == 0 || argvec[count].partial != 0
2443 || reg_parm_stack_space > 0)
2444 args_size.constant += argvec[count].size.constant;
2445
2446 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2447 }
2448 va_end (p);
2449
2450 #ifdef FINAL_REG_PARM_STACK_SPACE
2451 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2452 args_size.var);
2453 #endif
2454
2455 /* If this machine requires an external definition for library
2456 functions, write one out. */
2457 assemble_external_libcall (fun);
2458
2459 original_args_size = args_size;
2460 #ifdef STACK_BOUNDARY
2461 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2462 / STACK_BYTES) * STACK_BYTES);
2463 #endif
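/* For instance, with STACK_BYTES == 8 a 14-byte argument block is
rounded up to ((14 + 7) / 8) * 8 == 16 bytes. */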
2464
2465 args_size.constant = MAX (args_size.constant,
2466 reg_parm_stack_space);
2467
2468 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2469 args_size.constant -= reg_parm_stack_space;
2470 #endif
2471
2472 if (args_size.constant > current_function_outgoing_args_size)
2473 current_function_outgoing_args_size = args_size.constant;
2474
2475 #ifdef ACCUMULATE_OUTGOING_ARGS
2476 /* Since the stack pointer will never be pushed, it is possible for
2477 the evaluation of a parm to clobber something we have already
2478 written to the stack. Since most function calls on RISC machines
2479 do not use the stack, this is uncommon, but must work correctly.
2480
2481 Therefore, we save any area of the stack that was already written
2482 and that we are using. Here we set up to do this by making a new
2483 stack usage map from the old one.
2484
2485 Another approach might be to try to reorder the argument
2486 evaluations to avoid this conflicting stack usage. */
2487
2488 needed = args_size.constant;
2489
2490 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2491 /* Since we will be writing into the entire argument area, the
2492 map must be allocated for its entire size, not just the part that
2493 is the responsibility of the caller. */
2494 needed += reg_parm_stack_space;
2495 #endif
2496
2497 #ifdef ARGS_GROW_DOWNWARD
2498 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2499 needed + 1);
2500 #else
2501 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2502 needed);
2503 #endif
2504 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2505
2506 if (initial_highest_arg_in_use)
2507 bcopy (initial_stack_usage_map, stack_usage_map,
2508 initial_highest_arg_in_use);
2509
2510 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2511 bzero (&stack_usage_map[initial_highest_arg_in_use],
2512 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2513 needed = 0;
2514
2515 /* The address of the outgoing argument list must not be copied to a
2516 register here, because argblock would be left pointing to the
2517 wrong place after the call to allocate_dynamic_stack_space below. */
2519
2520 argblock = virtual_outgoing_args_rtx;
2521 #else /* not ACCUMULATE_OUTGOING_ARGS */
2522 #ifndef PUSH_ROUNDING
2523 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2524 #endif
2525 #endif
2526
2527 #ifdef PUSH_ARGS_REVERSED
2528 #ifdef STACK_BOUNDARY
2529 /* If we push args individually in reverse order, perform stack alignment
2530 before the first push (the last arg). */
2531 if (argblock == 0)
2532 anti_adjust_stack (GEN_INT (args_size.constant
2533 - original_args_size.constant));
2534 #endif
2535 #endif
2536
2537 #ifdef PUSH_ARGS_REVERSED
2538 inc = -1;
2539 argnum = nargs - 1;
2540 #else
2541 inc = 1;
2542 argnum = 0;
2543 #endif
2544
2545 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2546 /* The argument list is the property of the called routine, which
2547 may clobber it. If the fixed area has been used for previous
2548 parameters, we must save and restore it.
2549
2550 Here we compute the boundary of the area that needs to be saved, if any. */
2551
2552 #ifdef ARGS_GROW_DOWNWARD
2553 for (count = 0; count < reg_parm_stack_space + 1; count++)
2554 #else
2555 for (count = 0; count < reg_parm_stack_space; count++)
2556 #endif
2557 {
2558 if (count >= highest_outgoing_arg_in_use
2559 || stack_usage_map[count] == 0)
2560 continue;
2561
2562 if (low_to_save == -1)
2563 low_to_save = count;
2564
2565 high_to_save = count;
2566 }
2567
2568 if (low_to_save >= 0)
2569 {
2570 int num_to_save = high_to_save - low_to_save + 1;
2571 enum machine_mode save_mode
2572 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2573 rtx stack_area;
2574
2575 /* If we don't have the required alignment, we must do this in BLKmode. */
2576 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2577 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2578 save_mode = BLKmode;
2579
2580 #ifdef ARGS_GROW_DOWNWARD
2581 stack_area = gen_rtx_MEM (save_mode,
2582 memory_address (save_mode,
2583 plus_constant (argblock,
2584 - high_to_save)));
2585 #else
2586 stack_area = gen_rtx_MEM (save_mode,
2587 memory_address (save_mode,
2588 plus_constant (argblock,
2589 low_to_save)));
2590 #endif
2591 if (save_mode == BLKmode)
2592 {
2593 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2594 MEM_IN_STRUCT_P (save_area) = 0;
2595 emit_block_move (validize_mem (save_area), stack_area,
2596 GEN_INT (num_to_save),
2597 PARM_BOUNDARY / BITS_PER_UNIT);
2598 }
2599 else
2600 {
2601 save_area = gen_reg_rtx (save_mode);
2602 emit_move_insn (save_area, stack_area);
2603 }
2604 }
2605 #endif
2606
2607 /* Push the args that need to be pushed. */
2608
2609 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2610 are to be pushed. */
2611 for (count = 0; count < nargs; count++, argnum += inc)
2612 {
2613 register enum machine_mode mode = argvec[argnum].mode;
2614 register rtx val = argvec[argnum].value;
2615 rtx reg = argvec[argnum].reg;
2616 int partial = argvec[argnum].partial;
2617 #ifdef ACCUMULATE_OUTGOING_ARGS
2618 int lower_bound, upper_bound, i;
2619 #endif
2620
2621 if (! (reg != 0 && partial == 0))
2622 {
2623 #ifdef ACCUMULATE_OUTGOING_ARGS
2624 /* If this is being stored into a pre-allocated, fixed-size, stack
2625 area, save any previous data at that location. */
2626
2627 #ifdef ARGS_GROW_DOWNWARD
2628 /* stack_slot is negative, but we want to index stack_usage_map
2629 with positive values. */
2630 upper_bound = -argvec[argnum].offset.constant + 1;
2631 lower_bound = upper_bound - argvec[argnum].size.constant;
2632 #else
2633 lower_bound = argvec[argnum].offset.constant;
2634 upper_bound = lower_bound + argvec[argnum].size.constant;
2635 #endif
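/* For instance, under ARGS_GROW_DOWNWARD an argument at offset -8 with
size 4 gives UPPER_BOUND == 9 and LOWER_BOUND == 5; otherwise an
argument at offset 8 with size 4 gives bounds 8 and 12. */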
2636
2637 for (i = lower_bound; i < upper_bound; i++)
2638 if (stack_usage_map[i]
2639 /* Don't store things in the fixed argument area at this point;
2640 it has already been saved. */
2641 && i > reg_parm_stack_space)
2642 break;
2643
2644 if (i != upper_bound)
2645 {
2646 /* We need to make a save area. See what mode we can make it. */
2647 enum machine_mode save_mode
2648 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2649 MODE_INT, 1);
2650 rtx stack_area
2651 = gen_rtx_MEM (save_mode,
2652 memory_address (save_mode,
2653 plus_constant (argblock, argvec[argnum].offset.constant)));
2654 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2655 emit_move_insn (argvec[argnum].save_area, stack_area);
2656 }
2657 #endif
2658 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2659 argblock, GEN_INT (argvec[argnum].offset.constant),
2660 reg_parm_stack_space);
2661
2662 #ifdef ACCUMULATE_OUTGOING_ARGS
2663 /* Now mark the segment we just used. */
2664 for (i = lower_bound; i < upper_bound; i++)
2665 stack_usage_map[i] = 1;
2666 #endif
2667
2668 NO_DEFER_POP;
2669 }
2670 }
2671
2672 #ifndef PUSH_ARGS_REVERSED
2673 #ifdef STACK_BOUNDARY
2674 /* If we pushed args in forward order, perform stack alignment
2675 after pushing the last arg. */
2676 if (argblock == 0)
2677 anti_adjust_stack (GEN_INT (args_size.constant
2678 - original_args_size.constant));
2679 #endif
2680 #endif
2681
2682 #ifdef PUSH_ARGS_REVERSED
2683 argnum = nargs - 1;
2684 #else
2685 argnum = 0;
2686 #endif
2687
2688 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2689
2690 /* Now load any reg parms into their regs. */
2691
2692 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2693 are to be pushed. */
2694 for (count = 0; count < nargs; count++, argnum += inc)
2695 {
2696 register rtx val = argvec[argnum].value;
2697 rtx reg = argvec[argnum].reg;
2698 int partial = argvec[argnum].partial;
2699
2700 if (reg != 0 && partial == 0)
2701 emit_move_insn (reg, val);
2702 NO_DEFER_POP;
2703 }
2704
2705 /* For version 1.37, try deleting this entirely. */
2706 if (! no_queue)
2707 emit_queue ();
2708
2709 /* Any regs containing parms remain in use through the call. */
2710 for (count = 0; count < nargs; count++)
2711 if (argvec[count].reg != 0)
2712 use_reg (&call_fusage, argvec[count].reg);
2713
2714 /* Don't allow popping to be deferred, since then
2715 cse'ing of library calls could delete a call and leave the pop. */
2716 NO_DEFER_POP;
2717
2718 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2719 will set inhibit_defer_pop to that value. */
2720
2721 /* The return type is needed to decide how many bytes the function pops.
2722 Signedness plays no role in that, so for simplicity, we pretend it's
2723 always signed. We also assume that the list of arguments passed has
2724 no impact, so we pretend it is unknown. */
2725
2726 emit_call_1 (fun,
2727 get_identifier (XSTR (orgfun, 0)),
2728 build_function_type (outmode == VOIDmode ? void_type_node
2729 : type_for_mode (outmode, 0), NULL_TREE),
2730 args_size.constant, 0,
2731 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2732 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2733 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2734
2735 pop_temp_slots ();
2736
2737 /* Now restore inhibit_defer_pop to its actual original value. */
2738 OK_DEFER_POP;
2739
2740 #ifdef ACCUMULATE_OUTGOING_ARGS
2741 #ifdef REG_PARM_STACK_SPACE
2742 if (save_area)
2743 {
2744 enum machine_mode save_mode = GET_MODE (save_area);
2745 #ifdef ARGS_GROW_DOWNWARD
2746 rtx stack_area
2747 = gen_rtx_MEM (save_mode,
2748 memory_address (save_mode,
2749 plus_constant (argblock,
2750 - high_to_save)));
2751 #else
2752 rtx stack_area
2753 = gen_rtx_MEM (save_mode,
2754 memory_address (save_mode,
2755 plus_constant (argblock, low_to_save)));
2756 #endif
2757
2758 if (save_mode != BLKmode)
2759 emit_move_insn (stack_area, save_area);
2760 else
2761 emit_block_move (stack_area, validize_mem (save_area),
2762 GEN_INT (high_to_save - low_to_save + 1),
2763 PARM_BOUNDARY / BITS_PER_UNIT);
2764 }
2765 #endif
2766
2767 /* If we saved any argument areas, restore them. */
2768 for (count = 0; count < nargs; count++)
2769 if (argvec[count].save_area)
2770 {
2771 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
2772 rtx stack_area
2773 = gen_rtx_MEM (save_mode,
2774 memory_address (save_mode,
2775 plus_constant (argblock, argvec[count].offset.constant)));
2776
2777 emit_move_insn (stack_area, argvec[count].save_area);
2778 }
2779
2780 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2781 stack_usage_map = initial_stack_usage_map;
2782 #endif
2783 }
2784 \f
2785 /* Like emit_library_call except that an extra argument, VALUE,
2786 comes second and says where to store the result.
2787 (If VALUE is zero, this function chooses a convenient way
2788 to return the value.)
2789
2790 This function returns an rtx for where the value is to be found.
2791 If VALUE is nonzero, VALUE is returned. */
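/* A hypothetical use, computing an SImode result from two SImode
operands (FUN, OP0 and OP1 stand for caller-supplied rtxes):
result = emit_library_call_value (fun, NULL_RTX, 1, SImode, 2,
op0, SImode, op1, SImode); */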
2792
2793 rtx
2794 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2795 enum machine_mode outmode, int nargs, ...))
2796 {
2797 #ifndef __STDC__
2798 rtx orgfun;
2799 rtx value;
2800 int no_queue;
2801 enum machine_mode outmode;
2802 int nargs;
2803 #endif
2804 va_list p;
2805 /* Total size in bytes of all the stack-parms scanned so far. */
2806 struct args_size args_size;
2807 /* Size of arguments before any adjustments (such as rounding). */
2808 struct args_size original_args_size;
2809 register int argnum;
2810 rtx fun;
2811 int inc;
2812 int count;
2813 rtx argblock = 0;
2814 CUMULATIVE_ARGS args_so_far;
2815 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2816 struct args_size offset; struct args_size size; rtx save_area; };
2817 struct arg *argvec;
2818 int old_inhibit_defer_pop = inhibit_defer_pop;
2819 rtx call_fusage = 0;
2820 rtx mem_value = 0;
2821 int pcc_struct_value = 0;
2822 int struct_value_size = 0;
2823 int is_const;
2824 int reg_parm_stack_space = 0;
2825 #ifdef ACCUMULATE_OUTGOING_ARGS
2826 int needed;
2827 #endif
2828
2829 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2830 /* Define the boundary of the register parm stack space that needs to be
2831 saved, if any. */
2832 int low_to_save = -1, high_to_save;
2833 rtx save_area = 0; /* Place that it is saved */
2834 #endif
2835
2836 #ifdef ACCUMULATE_OUTGOING_ARGS
2837 /* Size of the stack reserved for parameter registers. */
2838 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2839 char *initial_stack_usage_map = stack_usage_map;
2840 #endif
2841
2842 #ifdef REG_PARM_STACK_SPACE
2843 #ifdef MAYBE_REG_PARM_STACK_SPACE
2844 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2845 #else
2846 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2847 #endif
2848 #endif
2849
2850 VA_START (p, nargs);
2851
2852 #ifndef __STDC__
2853 orgfun = va_arg (p, rtx);
2854 value = va_arg (p, rtx);
2855 no_queue = va_arg (p, int);
2856 outmode = va_arg (p, enum machine_mode);
2857 nargs = va_arg (p, int);
2858 #endif
2859
2860 is_const = no_queue;
2861 fun = orgfun;
2862
2863 /* If this kind of value comes back in memory,
2864 decide where in memory it should come back. */
2865 if (aggregate_value_p (type_for_mode (outmode, 0)))
2866 {
2867 #ifdef PCC_STATIC_STRUCT_RETURN
2868 rtx pointer_reg
2869 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2870 0);
2871 mem_value = gen_rtx_MEM (outmode, pointer_reg);
2872 pcc_struct_value = 1;
2873 if (value == 0)
2874 value = gen_reg_rtx (outmode);
2875 #else /* not PCC_STATIC_STRUCT_RETURN */
2876 struct_value_size = GET_MODE_SIZE (outmode);
2877 if (value != 0 && GET_CODE (value) == MEM)
2878 mem_value = value;
2879 else
2880 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2881 #endif
2882
2883 /* This call returns a big structure. */
2884 is_const = 0;
2885 }
2886
2887 /* ??? Unfinished: must pass the memory address as an argument. */
2888
2889 /* Copy all the libcall-arguments out of the varargs data
2890 and into a vector ARGVEC.
2891
2892 Compute how to pass each argument. We only support a very small subset
2893 of the full argument passing conventions to limit complexity here since
2894 library functions shouldn't have many args. */
2895
2896 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2897 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
2898
2899 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2900
2901 args_size.constant = 0;
2902 args_size.var = 0;
2903
2904 count = 0;
2905
2906 push_temp_slots ();
2907
2908 /* If there's a structure value address to be passed,
2909 either pass it in the special place, or pass it as an extra argument. */
2910 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
2911 {
2912 rtx addr = XEXP (mem_value, 0);
2913 nargs++;
2914
2915 /* Make sure it is a reasonable operand for a move or push insn. */
2916 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2917 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2918 addr = force_operand (addr, NULL_RTX);
2919
2920 argvec[count].value = addr;
2921 argvec[count].mode = Pmode;
2922 argvec[count].partial = 0;
2923
2924 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
2925 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2926 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
2927 abort ();
2928 #endif
2929
2930 locate_and_pad_parm (Pmode, NULL_TREE,
2931 argvec[count].reg && argvec[count].partial == 0,
2932 NULL_TREE, &args_size, &argvec[count].offset,
2933 &argvec[count].size);
2934
2935
2936 if (argvec[count].reg == 0 || argvec[count].partial != 0
2937 || reg_parm_stack_space > 0)
2938 args_size.constant += argvec[count].size.constant;
2939
2940 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
2941
2942 count++;
2943 }
2944
2945 for (; count < nargs; count++)
2946 {
2947 rtx val = va_arg (p, rtx);
2948 enum machine_mode mode = va_arg (p, enum machine_mode);
2949
2950 /* We cannot convert the arg value to the mode the library wants here;
2951 must do it earlier where we know the signedness of the arg. */
2952 if (mode == BLKmode
2953 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2954 abort ();
2955
2956 /* On some machines, there's no way to pass a float to a library fcn.
2957 Pass it as a double instead. */
2958 #ifdef LIBGCC_NEEDS_DOUBLE
2959 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2960 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2961 #endif
2962
2963 /* There's no need to call protect_from_queue, because
2964 either emit_move_insn or emit_push_insn will do that. */
2965
2966 /* Make sure it is a reasonable operand for a move or push insn. */
2967 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2968 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2969 val = force_operand (val, NULL_RTX);
2970
2971 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2972 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2973 {
2974 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2975 be viewed as just an efficiency improvement. */
2976 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2977 emit_move_insn (slot, val);
2978 val = XEXP (slot, 0);
2979 mode = Pmode;
2980 }
2981 #endif
2982
2983 argvec[count].value = val;
2984 argvec[count].mode = mode;
2985
2986 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2987 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2988 abort ();
2989 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2990 argvec[count].partial
2991 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2992 #else
2993 argvec[count].partial = 0;
2994 #endif
2995
2996 locate_and_pad_parm (mode, NULL_TREE,
2997 argvec[count].reg && argvec[count].partial == 0,
2998 NULL_TREE, &args_size, &argvec[count].offset,
2999 &argvec[count].size);
3000
3001 if (argvec[count].size.var)
3002 abort ();
3003
3004 if (reg_parm_stack_space == 0 && argvec[count].partial)
3005 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3006
3007 if (argvec[count].reg == 0 || argvec[count].partial != 0
3008 || reg_parm_stack_space > 0)
3009 args_size.constant += argvec[count].size.constant;
3010
3011 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3012 }
3013 va_end (p);
3014
3015 #ifdef FINAL_REG_PARM_STACK_SPACE
3016 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3017 args_size.var);
3018 #endif
3019 /* If this machine requires an external definition for library
3020 functions, write one out. */
3021 assemble_external_libcall (fun);
3022
3023 original_args_size = args_size;
3024 #ifdef STACK_BOUNDARY
3025 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3026 / STACK_BYTES) * STACK_BYTES);
3027 #endif
3028
3029 args_size.constant = MAX (args_size.constant,
3030 reg_parm_stack_space);
3031
3032 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3033 args_size.constant -= reg_parm_stack_space;
3034 #endif
3035
3036 if (args_size.constant > current_function_outgoing_args_size)
3037 current_function_outgoing_args_size = args_size.constant;
3038
3039 #ifdef ACCUMULATE_OUTGOING_ARGS
3040 /* Since the stack pointer will never be pushed, it is possible for
3041 the evaluation of a parm to clobber something we have already
3042 written to the stack. Since most function calls on RISC machines
3043 do not use the stack, this is uncommon, but must work correctly.
3044
3045 Therefore, we save any area of the stack that was already written
3046 and that we are using. Here we set up to do this by making a new
3047 stack usage map from the old one.
3048
3049 Another approach might be to try to reorder the argument
3050 evaluations to avoid this conflicting stack usage. */
3051
3052 needed = args_size.constant;
3053
3054 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3055 /* Since we will be writing into the entire argument area, the
3056 map must be allocated for its entire size, not just the part that
3057 is the responsibility of the caller. */
3058 needed += reg_parm_stack_space;
3059 #endif
3060
3061 #ifdef ARGS_GROW_DOWNWARD
3062 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3063 needed + 1);
3064 #else
3065 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3066 needed);
3067 #endif
3068 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3069
3070 if (initial_highest_arg_in_use)
3071 bcopy (initial_stack_usage_map, stack_usage_map,
3072 initial_highest_arg_in_use);
3073
3074 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3075 bzero (&stack_usage_map[initial_highest_arg_in_use],
3076 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3077 needed = 0;
3078
3079 /* The address of the outgoing argument list must not be copied to a
3080 register here, because argblock would be left pointing to the
3081 wrong place after the call to allocate_dynamic_stack_space below. */
3083
3084 argblock = virtual_outgoing_args_rtx;
3085 #else /* not ACCUMULATE_OUTGOING_ARGS */
3086 #ifndef PUSH_ROUNDING
3087 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3088 #endif
3089 #endif
3090
3091 #ifdef PUSH_ARGS_REVERSED
3092 #ifdef STACK_BOUNDARY
3093 /* If we push args individually in reverse order, perform stack alignment
3094 before the first push (the last arg). */
3095 if (argblock == 0)
3096 anti_adjust_stack (GEN_INT (args_size.constant
3097 - original_args_size.constant));
3098 #endif
3099 #endif
3100
3101 #ifdef PUSH_ARGS_REVERSED
3102 inc = -1;
3103 argnum = nargs - 1;
3104 #else
3105 inc = 1;
3106 argnum = 0;
3107 #endif
3108
3109 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3110 /* The argument list is the property of the called routine, which
3111 may clobber it. If the fixed area has been used for previous
3112 parameters, we must save and restore it.
3113
3114 Here we compute the boundary of the area that needs to be saved, if any. */
3115
3116 #ifdef ARGS_GROW_DOWNWARD
3117 for (count = 0; count < reg_parm_stack_space + 1; count++)
3118 #else
3119 for (count = 0; count < reg_parm_stack_space; count++)
3120 #endif
3121 {
3122 if (count >= highest_outgoing_arg_in_use
3123 || stack_usage_map[count] == 0)
3124 continue;
3125
3126 if (low_to_save == -1)
3127 low_to_save = count;
3128
3129 high_to_save = count;
3130 }
3131
3132 if (low_to_save >= 0)
3133 {
3134 int num_to_save = high_to_save - low_to_save + 1;
3135 enum machine_mode save_mode
3136 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3137 rtx stack_area;
3138
3139 /* If we don't have the required alignment, we must do this in BLKmode. */
3140 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3141 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3142 save_mode = BLKmode;
3143
3144 #ifdef ARGS_GROW_DOWNWARD
3145 stack_area = gen_rtx_MEM (save_mode,
3146 memory_address (save_mode,
3147 plus_constant (argblock,
3148 - high_to_save)));
3149 #else
3150 stack_area = gen_rtx_MEM (save_mode,
3151 memory_address (save_mode,
3152 plus_constant (argblock,
3153 low_to_save)));
3154 #endif
3155 if (save_mode == BLKmode)
3156 {
3157 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3158 MEM_IN_STRUCT_P (save_area) = 0;
3159 emit_block_move (validize_mem (save_area), stack_area,
3160 GEN_INT (num_to_save),
3161 PARM_BOUNDARY / BITS_PER_UNIT);
3162 }
3163 else
3164 {
3165 save_area = gen_reg_rtx (save_mode);
3166 emit_move_insn (save_area, stack_area);
3167 }
3168 }
3169 #endif
3170
3171 /* Push the args that need to be pushed. */
3172
3173 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3174 are to be pushed. */
3175 for (count = 0; count < nargs; count++, argnum += inc)
3176 {
3177 register enum machine_mode mode = argvec[argnum].mode;
3178 register rtx val = argvec[argnum].value;
3179 rtx reg = argvec[argnum].reg;
3180 int partial = argvec[argnum].partial;
3181 #ifdef ACCUMULATE_OUTGOING_ARGS
3182 int lower_bound, upper_bound, i;
3183 #endif
3184
3185 if (! (reg != 0 && partial == 0))
3186 {
3187 #ifdef ACCUMULATE_OUTGOING_ARGS
3188 /* If this is being stored into a pre-allocated, fixed-size, stack
3189 area, save any previous data at that location. */
3190
3191 #ifdef ARGS_GROW_DOWNWARD
3192 /* stack_slot is negative, but we want to index stack_usage_map
3193 with positive values. */
3194 upper_bound = -argvec[argnum].offset.constant + 1;
3195 lower_bound = upper_bound - argvec[argnum].size.constant;
3196 #else
3197 lower_bound = argvec[argnum].offset.constant;
3198 upper_bound = lower_bound + argvec[argnum].size.constant;
3199 #endif
3200
3201 for (i = lower_bound; i < upper_bound; i++)
3202 if (stack_usage_map[i]
3203 /* Don't store things in the fixed argument area at this point;
3204 it has already been saved. */
3205 && i > reg_parm_stack_space)
3206 break;
3207
3208 if (i != upper_bound)
3209 {
3210 /* We need to make a save area. See what mode we can make it. */
3211 enum machine_mode save_mode
3212 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3213 MODE_INT, 1);
3214 rtx stack_area
3215 = gen_rtx_MEM (save_mode,
3216 memory_address (save_mode,
3217 plus_constant (argblock,
3218 argvec[argnum].offset.constant)));
3219 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3220 emit_move_insn (argvec[argnum].save_area, stack_area);
3221 }
3222 #endif
3223 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3224 argblock, GEN_INT (argvec[argnum].offset.constant),
3225 reg_parm_stack_space);
3226
3227 #ifdef ACCUMULATE_OUTGOING_ARGS
3228 /* Now mark the segment we just used. */
3229 for (i = lower_bound; i < upper_bound; i++)
3230 stack_usage_map[i] = 1;
3231 #endif
3232
3233 NO_DEFER_POP;
3234 }
3235 }
3236
3237 #ifndef PUSH_ARGS_REVERSED
3238 #ifdef STACK_BOUNDARY
3239 /* If we pushed args in forward order, perform stack alignment
3240 after pushing the last arg. */
3241 if (argblock == 0)
3242 anti_adjust_stack (GEN_INT (args_size.constant
3243 - original_args_size.constant));
3244 #endif
3245 #endif
3246
3247 #ifdef PUSH_ARGS_REVERSED
3248 argnum = nargs - 1;
3249 #else
3250 argnum = 0;
3251 #endif
3252
3253 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3254
3255 /* Now load any reg parms into their regs. */
3256
3257 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3258 are to be pushed. */
3259 for (count = 0; count < nargs; count++, argnum += inc)
3260 {
3261 register rtx val = argvec[argnum].value;
3262 rtx reg = argvec[argnum].reg;
3263 int partial = argvec[argnum].partial;
3264
3265 if (reg != 0 && partial == 0)
3266 emit_move_insn (reg, val);
3267 NO_DEFER_POP;
3268 }
3269
3270 #if 0
3271 /* For version 1.37, try deleting this entirely. */
3272 if (! no_queue)
3273 emit_queue ();
3274 #endif
3275
3276 /* Any regs containing parms remain in use through the call. */
3277 for (count = 0; count < nargs; count++)
3278 if (argvec[count].reg != 0)
3279 use_reg (&call_fusage, argvec[count].reg);
3280
3281 /* Pass the function the address in which to return a structure value. */
3282 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3283 {
3284 emit_move_insn (struct_value_rtx,
3285 force_reg (Pmode,
3286 force_operand (XEXP (mem_value, 0),
3287 NULL_RTX)));
3288 if (GET_CODE (struct_value_rtx) == REG)
3289 use_reg (&call_fusage, struct_value_rtx);
3290 }
3291
3292 /* Don't allow popping to be deferred, since then
3293 cse'ing of library calls could delete a call and leave the pop. */
3294 NO_DEFER_POP;
3295
3296 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3297 will set inhibit_defer_pop to that value. */
3298 /* See the comment in emit_library_call about the function type we build
3299 and pass here. */
3300
3301 emit_call_1 (fun,
3302 get_identifier (XSTR (orgfun, 0)),
3303 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3304 args_size.constant, struct_value_size,
3305 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3306 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3307 old_inhibit_defer_pop + 1, call_fusage, is_const);
3308
3309 /* Now restore inhibit_defer_pop to its actual original value. */
3310 OK_DEFER_POP;
3311
3312 pop_temp_slots ();
3313
3314 /* Copy the value to the right place. */
3315 if (outmode != VOIDmode)
3316 {
3317 if (mem_value)
3318 {
3319 if (value == 0)
3320 value = mem_value;
3321 if (value != mem_value)
3322 emit_move_insn (value, mem_value);
3323 }
3324 else if (value != 0)
3325 emit_move_insn (value, hard_libcall_value (outmode));
3326 else
3327 value = hard_libcall_value (outmode);
3328 }
3329
3330 #ifdef ACCUMULATE_OUTGOING_ARGS
3331 #ifdef REG_PARM_STACK_SPACE
3332 if (save_area)
3333 {
3334 enum machine_mode save_mode = GET_MODE (save_area);
3335 #ifdef ARGS_GROW_DOWNWARD
3336 rtx stack_area
3337 = gen_rtx_MEM (save_mode,
3338 memory_address (save_mode,
3339 plus_constant (argblock,
3340 - high_to_save)));
3341 #else
3342 rtx stack_area
3343 = gen_rtx_MEM (save_mode,
3344 memory_address (save_mode,
3345 plus_constant (argblock, low_to_save)));
3346 #endif
3347 if (save_mode != BLKmode)
3348 emit_move_insn (stack_area, save_area);
3349 else
3350 emit_block_move (stack_area, validize_mem (save_area),
3351 GEN_INT (high_to_save - low_to_save + 1),
3352 PARM_BOUNDARY / BITS_PER_UNIT);
3353 }
3354 #endif
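   /* That restore undoes the save of the reg-parm stack area made
      earlier in this function, before the arguments were pushed.  */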
3355
3356 /* If we saved any argument areas, restore them. */
3357 for (count = 0; count < nargs; count++)
3358 if (argvec[count].save_area)
3359 {
3360 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3361 rtx stack_area
3362 = gen_rtx_MEM (save_mode,
3363 memory_address (save_mode, plus_constant (argblock,
3364 argvec[count].offset.constant)));
3365
3366 emit_move_insn (stack_area, argvec[count].save_area);
3367 }
3368
3369 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3370 stack_usage_map = initial_stack_usage_map;
3371 #endif
3372
3373 return value;
3374 }
3375 \f
3376 #if 0
3377 /* Return an rtx which represents a suitable home on the stack
3378 given TYPE, the type of the argument looking for a home.
3379 This is called only for BLKmode arguments.
3380
3381 SIZE is the size needed for this target.
3382 ARGS_ADDR is the address of the bottom of the argument block for this call.
3383 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3384 if this machine uses push insns. */
3385
3386 static rtx
3387 target_for_arg (type, size, args_addr, offset)
3388 tree type;
3389 rtx size;
3390 rtx args_addr;
3391 struct args_size offset;
3392 {
3393 rtx target;
3394 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3395
3396   /* We avoid calling memory_address when possible, because we want
3397      the address to be as close to the stack pointer as possible.
3398      For non-variable-sized arguments, this will be stack-pointer
3399      relative addressing.  */
3400 if (GET_CODE (offset_rtx) == CONST_INT)
3401 target = plus_constant (args_addr, INTVAL (offset_rtx));
3402 else
3403 {
3404 /* I have no idea how to guarantee that this
3405 will work in the presence of register parameters. */
3406 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3407 target = memory_address (QImode, target);
3408 }
3409
3410 return gen_rtx_MEM (BLKmode, target);
3411 }
3412 #endif
3413 \f
3414 /* Store a single argument for a function call
3415 into the register or memory area where it must be passed.
3416 *ARG describes the argument value and where to pass it.
3417
3418 ARGBLOCK is the address of the stack-block for all the arguments,
3419 or 0 on a machine where arguments are pushed individually.
3420
3421    MAY_BE_ALLOCA nonzero says this could be a call to `alloca',
3422    so we must be careful about how the stack is used.
3423
3424    VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3425    argument stack.  It is used when ACCUMULATE_OUTGOING_ARGS is defined
3426    to indicate that we need not worry about saving and restoring the stack.
3427
3428    FNDECL is the declaration of the function we are calling.

        REG_PARM_STACK_SPACE is the size in bytes of the area of the stack
        reserved for arguments that are also passed in registers (zero if
        the target reserves no such area); it bounds the part of the
        argument space that has already been saved.  */
3429
3430 static void
3431 store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
3432 reg_parm_stack_space)
3433 struct arg_data *arg;
3434 rtx argblock;
3435 int may_be_alloca;
3436 int variable_size;
3437 tree fndecl;
3438 int reg_parm_stack_space;
3439 {
3440 register tree pval = arg->tree_value;
3441 rtx reg = 0;
3442 int partial = 0;
3443 int used = 0;
3444 #ifdef ACCUMULATE_OUTGOING_ARGS
3445 int i, lower_bound, upper_bound;
3446 #endif
3447
3448 if (TREE_CODE (pval) == ERROR_MARK)
3449 return;
3450
3451 /* Push a new temporary level for any temporaries we make for
3452 this argument. */
3453 push_temp_slots ();
3454
3455 #ifdef ACCUMULATE_OUTGOING_ARGS
3456 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3457 save any previous data at that location. */
3458 if (argblock && ! variable_size && arg->stack)
3459 {
3460 #ifdef ARGS_GROW_DOWNWARD
3461      /* The offset in stack_slot is negative here, but we index
3462         stack_usage_map with positive values.  */
3463 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3464 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3465 else
3466 upper_bound = 0;
3467
3468 lower_bound = upper_bound - arg->size.constant;
3469 #else
3470 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3471 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3472 else
3473 lower_bound = 0;
3474
3475 upper_bound = lower_bound + arg->size.constant;
3476 #endif
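      /* A worked instance with hypothetical numbers: under
         ARGS_GROW_DOWNWARD, a 4-byte argument whose slot is at
         (argblock - 16) yields upper_bound == 17 and lower_bound == 13,
         i.e. map indices 13 through 16, covering the bytes from
         argblock - 16 up to argblock - 13.  */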
3477
3478 for (i = lower_bound; i < upper_bound; i++)
3479 if (stack_usage_map[i]
3480 /* Don't store things in the fixed argument area at this point;
3481 it has already been saved. */
3482 && i > reg_parm_stack_space)
3483 break;
3484
3485 if (i != upper_bound)
3486 {
3487      /* We need to make a save area.  See what mode we can make it in.  */
3488 enum machine_mode save_mode
3489 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3490 rtx stack_area
3491 = gen_rtx_MEM (save_mode,
3492 memory_address (save_mode,
3493 XEXP (arg->stack_slot, 0)));
3494
3495 if (save_mode == BLKmode)
3496 {
3497 arg->save_area = assign_stack_temp (BLKmode,
3498 arg->size.constant, 0);
3499 MEM_IN_STRUCT_P (arg->save_area)
3500 = AGGREGATE_TYPE_P (TREE_TYPE (arg->tree_value));
3501 preserve_temp_slots (arg->save_area);
3502 emit_block_move (validize_mem (arg->save_area), stack_area,
3503 GEN_INT (arg->size.constant),
3504 PARM_BOUNDARY / BITS_PER_UNIT);
3505 }
3506 else
3507 {
3508 arg->save_area = gen_reg_rtx (save_mode);
3509 emit_move_insn (arg->save_area, stack_area);
3510 }
3511 }
3512 }
3513 #endif
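   /* mode_for_size returns BLKmode when no integer mode is wide enough
      to hold the slot, which is why the code above handles both a
      register save (small areas) and a block move into a stack
      temporary (large ones).  */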
3514
3515 /* If this isn't going to be placed on both the stack and in registers,
3516 set up the register and number of words. */
3517 if (! arg->pass_on_stack)
3518 reg = arg->reg, partial = arg->partial;
3519
3520 if (reg != 0 && partial == 0)
3521 /* Being passed entirely in a register. We shouldn't be called in
3522 this case. */
3523 abort ();
3524
3525 /* If this arg needs special alignment, don't load the registers
3526 here. */
3527 if (arg->n_aligned_regs != 0)
3528 reg = 0;
3529
3530 /* If this is being passed partially in a register, we can't evaluate
3531 it directly into its stack slot. Otherwise, we can. */
3532 if (arg->value == 0)
3533 {
3534 #ifdef ACCUMULATE_OUTGOING_ARGS
3535 /* stack_arg_under_construction is nonzero if a function argument is
3536 being evaluated directly into the outgoing argument list and
3537 expand_call must take special action to preserve the argument list
3538 if it is called recursively.
3539
3540 For scalar function arguments stack_usage_map is sufficient to
3541 determine which stack slots must be saved and restored. Scalar
3542 arguments in general have pass_on_stack == 0.
3543
3544 If this argument is initialized by a function which takes the
3545 address of the argument (a C++ constructor or a C function
3546 returning a BLKmode structure), then stack_usage_map is
3547 insufficient and expand_call must push the stack around the
3548 function call. Such arguments have pass_on_stack == 1.
3549
3550 Note that it is always safe to set stack_arg_under_construction,
3551 but this generates suboptimal code if set when not needed. */
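      /* As a concrete example of the second case, with hypothetical
         declarations:

             struct big { char c[64]; };
             extern struct big make_big (void);
             extern void use (struct big);

         in the call use (make_big ()), the return value of make_big is
         constructed directly in use's outgoing argument slot, so
         pass_on_stack is 1 and the slot must survive the nested call.  */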
3552
3553 if (arg->pass_on_stack)
3554 stack_arg_under_construction++;
3555 #endif
3556 arg->value = expand_expr (pval,
3557 (partial
3558 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3559 ? NULL_RTX : arg->stack,
3560 VOIDmode, 0);
3561
3562      /* If we are promoting the object (or if for any other reason
3563         the mode doesn't agree), convert the value to the expected mode.  */
3564
3565 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3566 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3567 arg->value, arg->unsignedp);
3568
3569 #ifdef ACCUMULATE_OUTGOING_ARGS
3570 if (arg->pass_on_stack)
3571 stack_arg_under_construction--;
3572 #endif
3573 }
3574
3575   /* Don't leave anything on the stack from the computation
3576      of an argument to alloca.  */
3577 if (may_be_alloca)
3578 do_pending_stack_adjust ();
3579
3580 if (arg->value == arg->stack)
3581 {
3582 /* If the value is already in the stack slot, we are done. */
3583 if (flag_check_memory_usage && GET_CODE (arg->stack) == MEM)
3584 {
3585 if (arg->mode == BLKmode)
3586 abort ();
3587
3588 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3589 XEXP (arg->stack, 0), ptr_mode,
3590 GEN_INT (GET_MODE_SIZE (arg->mode)),
3591 TYPE_MODE (sizetype),
3592 GEN_INT (MEMORY_USE_RW),
3593 TYPE_MODE (integer_type_node));
3594 }
3595 }
3596 else if (arg->mode != BLKmode)
3597 {
3598 register int size;
3599
3600 /* Argument is a scalar, not entirely passed in registers.
3601 (If part is passed in registers, arg->partial says how much
3602 and emit_push_insn will take care of putting it there.)
3603
3604 Push it, and if its size is less than the
3605 amount of space allocated to it,
3606 also bump stack pointer by the additional space.
3607 Note that in C the default argument promotions
3608 will prevent such mismatches. */
3609
3610 size = GET_MODE_SIZE (arg->mode);
3611 /* Compute how much space the push instruction will push.
3612 On many machines, pushing a byte will advance the stack
3613 pointer by a halfword. */
3614 #ifdef PUSH_ROUNDING
3615 size = PUSH_ROUNDING (size);
3616 #endif
3617 used = size;
3618
3619 /* Compute how much space the argument should get:
3620 round up to a multiple of the alignment for arguments. */
3621 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3622 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3623 / (PARM_BOUNDARY / BITS_PER_UNIT))
3624 * (PARM_BOUNDARY / BITS_PER_UNIT));
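      /* A worked instance with hypothetical values: if PARM_BOUNDARY is
         32 and the argument is a 2-byte HImode scalar (and PUSH_ROUNDING,
         if defined, leaves size at 2), then used becomes 4 and
         emit_push_insn is told to allocate the extra used - size == 2
         bytes of padding.  */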
3625
3626      /* The value isn't already where we want it on the stack, so put
3627         it there.  This can be done with either push or copy insns.  */
3628 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3629 partial, reg, used - size, argblock,
3630 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3631 }
3632 else
3633 {
3634 /* BLKmode, at least partly to be pushed. */
3635
3636 register int excess;
3637 rtx size_rtx;
3638
3639 /* Pushing a nonscalar.
3640 If part is passed in registers, PARTIAL says how much
3641 and emit_push_insn will take care of putting it there. */
3642
3643 /* Round its size up to a multiple
3644 of the allocation unit for arguments. */
3645
3646 if (arg->size.var != 0)
3647 {
3648 excess = 0;
3649 size_rtx = ARGS_SIZE_RTX (arg->size);
3650 }
3651 else
3652 {
3653 /* PUSH_ROUNDING has no effect on us, because
3654 emit_push_insn for BLKmode is careful to avoid it. */
3655 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3656 + partial * UNITS_PER_WORD);
3657 size_rtx = expr_size (pval);
3658 }
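      /* EXCESS is the number of bytes by which the argument's stack slot
         exceeds the value itself; partial * UNITS_PER_WORD is added back
         because arg->size.constant does not count the part of the
         argument that was passed in registers.  */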
3659
3660 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3661 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3662 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3663 reg_parm_stack_space);
3664 }
3665
3667 /* Unless this is a partially-in-register argument, the argument is now
3668 in the stack.
3669
3670 ??? Note that this can change arg->value from arg->stack to
3671 arg->stack_slot and it matters when they are not the same.
3672 It isn't totally clear that this is correct in all cases. */
3673 if (partial == 0)
3674 arg->value = arg->stack_slot;
3675
3676 /* Once we have pushed something, pops can't safely
3677 be deferred during the rest of the arguments. */
3678 NO_DEFER_POP;
3679
3680 /* ANSI doesn't require a sequence point here,
3681 but PCC has one, so this will avoid some problems. */
3682 emit_queue ();
3683
3684 /* Free any temporary slots made in processing this argument. Show
3685 that we might have taken the address of something and pushed that
3686 as an operand. */
3687 preserve_temp_slots (NULL_RTX);
3688 free_temp_slots ();
3689 pop_temp_slots ();
3690
3691 #ifdef ACCUMULATE_OUTGOING_ARGS
3692 /* Now mark the segment we just used. */
3693 if (argblock && ! variable_size && arg->stack)
3694 for (i = lower_bound; i < upper_bound; i++)
3695 stack_usage_map[i] = 1;
3696 #endif
3697 }