/* gcc/calls.c (from a gcc.git snapshot) */
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "rtl.h"
24 #include "tree.h"
25 #include "flags.h"
26 #include "expr.h"
27 #include "regs.h"
28 #include "insn-flags.h"
29 #include "toplev.h"
30 #include "output.h"
31
/* If the target gives no preferred stack alignment, fall back to the
   required STACK_BOUNDARY.  */
#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
52
53 /* Data structure and subroutines used within expand_call. */
54
/* Per-argument bookkeeping used by expand_call while laying out and
   emitting one function call.  One of these describes each actual
   parameter, including an implicit structure-value address if any.  */
struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Non-zero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets,
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
#endif
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
108
#ifdef ACCUMULATE_OUTGOING_ARGS
/* A vector of one char per byte of stack space.  A byte is non-zero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
#endif
126
/* Forward declarations for the file-local helpers defined below.  */

static int calls_function	PROTO ((tree, int));
static int calls_function_1	PROTO ((tree, int));
static void emit_call_1		PROTO ((rtx, tree, tree, HOST_WIDE_INT,
					HOST_WIDE_INT, rtx, rtx,
					int, rtx, int));
static void special_function_p	PROTO ((char *, tree, int *, int *,
					int *, int *));
static void precompute_register_parameters	PROTO ((int, struct arg_data *,
							int *));
static void store_one_arg	PROTO ((struct arg_data *, rtx, int, int,
					int));
static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
							   int));

/* Only needed when the register-parm area must be saved around nested
   calls that could clobber it.  */
#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
static rtx save_fixed_argument_area	PROTO ((int, rtx, int *, int *));
static void restore_fixed_argument_area	PROTO ((rtx, rtx, int, int));
#endif
145 \f
146 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
147 `alloca'.
148
149 If WHICH is 0, return 1 if EXP contains a call to any function.
150 Actually, we only need return 1 if evaluating EXP would require pushing
151 arguments on the stack, but that is too difficult to compute, so we just
152 assume any function call might require the stack. */
153
/* List of SAVE_EXPRs already examined by calls_function_1, so that each
   shared SAVE_EXPR node is walked at most once per query.  Reset by
   calls_function before and after each walk.  */
static tree calls_function_save_exprs;
155
156 static int
157 calls_function (exp, which)
158 tree exp;
159 int which;
160 {
161 int val;
162 calls_function_save_exprs = 0;
163 val = calls_function_1 (exp, which);
164 calls_function_save_exprs = 0;
165 return val;
166 }
167
168 static int
169 calls_function_1 (exp, which)
170 tree exp;
171 int which;
172 {
173 register int i;
174 enum tree_code code = TREE_CODE (exp);
175 int type = TREE_CODE_CLASS (code);
176 int length = tree_code_length[(int) code];
177
178 /* If this code is language-specific, we don't know what it will do. */
179 if ((int) code >= NUM_TREE_CODES)
180 return 1;
181
182 /* Only expressions and references can contain calls. */
183 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
184 && type != 'b')
185 return 0;
186
187 switch (code)
188 {
189 case CALL_EXPR:
190 if (which == 0)
191 return 1;
192 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
193 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
194 == FUNCTION_DECL))
195 {
196 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
197
198 if ((DECL_BUILT_IN (fndecl)
199 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
200 || (DECL_SAVED_INSNS (fndecl)
201 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
202 & FUNCTION_FLAGS_CALLS_ALLOCA)))
203 return 1;
204 }
205
206 /* Third operand is RTL. */
207 length = 2;
208 break;
209
210 case SAVE_EXPR:
211 if (SAVE_EXPR_RTL (exp) != 0)
212 return 0;
213 if (value_member (exp, calls_function_save_exprs))
214 return 0;
215 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
216 calls_function_save_exprs);
217 return (TREE_OPERAND (exp, 0) != 0
218 && calls_function_1 (TREE_OPERAND (exp, 0), which));
219
220 case BLOCK:
221 {
222 register tree local;
223
224 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
225 if (DECL_INITIAL (local) != 0
226 && calls_function_1 (DECL_INITIAL (local), which))
227 return 1;
228 }
229 {
230 register tree subblock;
231
232 for (subblock = BLOCK_SUBBLOCKS (exp);
233 subblock;
234 subblock = TREE_CHAIN (subblock))
235 if (calls_function_1 (subblock, which))
236 return 1;
237 }
238 return 0;
239
240 case METHOD_CALL_EXPR:
241 length = 3;
242 break;
243
244 case WITH_CLEANUP_EXPR:
245 length = 1;
246 break;
247
248 case RTL_EXPR:
249 return 0;
250
251 default:
252 break;
253 }
254
255 for (i = 0; i < length; i++)
256 if (TREE_OPERAND (exp, i) != 0
257 && calls_function_1 (TREE_OPERAND (exp, i), which))
258 return 1;
259
260 return 0;
261 }
262 \f
263 /* Force FUNEXP into a form suitable for the address of a CALL,
264 and return that as an rtx. Also load the static chain register
265 if FNDECL is a nested function.
266
267 CALL_FUSAGE points to a variable holding the prospective
268 CALL_INSN_FUNCTION_USAGE information. */
269
rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
{
  rtx static_chain_value = 0;

  /* Any pending increments on FUNEXP must be emitted before we use it.  */
  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  Otherwise later reloads of
       the address could conflict with the parameter registers.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else
    {
#ifndef NO_FUNCTION_CSE
      /* Copy the constant address into a pseudo so CSE can share it
	 across calls; when NO_RECURSIVE_FUNCTION_CSE is defined, skip
	 this for self-recursive calls.  Note the #ifdef'd inner `if'
	 guards the same assignment in both configurations.  */
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      /* Record the chain register in CALL_FUSAGE so the call insn keeps
	 it live up to the call.  */
      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
314
315 /* Generate instructions to call function FUNEXP,
316 and optionally pop the results.
317 The CALL_INSN is the first insn generated.
318
319 FNDECL is the declaration node of the function. This is given to the
320 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
321
322 FUNTYPE is the data type of the function. This is given to the macro
323 RETURN_POPS_ARGS to determine whether this function pops its own args.
324 We used to allow an identifier for library functions, but that doesn't
325 work when the return type is an aggregate type and the calling convention
326 says that the pointer to this aggregate is to be popped by the callee.
327
328 STACK_SIZE is the number of bytes of arguments on the stack,
329 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
330 This is both to put into the call insn and
331 to generate explicit popping code if necessary.
332
333 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
334 It is zero if this call doesn't want a structure value.
335
336 NEXT_ARG_REG is the rtx that results from executing
337 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
338 just after all the args have had their registers assigned.
339 This could be whatever you like, but normally it is the first
340 arg-register beyond those used for args in this call,
341 or 0 if all the arg-registers are used in this call.
342 It is passed on to `gen_call' so you can put this info in the call insn.
343
344 VALREG is a hard register in which a value is returned,
345 or 0 if the call does not return a value.
346
347 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
348 the args to this call were processed.
349 We restore `inhibit_defer_pop' to that value.
350
351 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
352 denote registers used by the called function.
353
354 IS_CONST is true if this is a `const' call. */
355
static void
emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
	     next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
	     is_const)
     rtx funexp;
     tree fndecl ATTRIBUTE_UNUSED;
     tree funtype ATTRIBUTE_UNUSED;
     HOST_WIDE_INT stack_size;
     HOST_WIDE_INT struct_value_size;
     rtx next_arg_reg;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int is_const;
{
  rtx stack_size_rtx = GEN_INT (stack_size);
  rtx struct_value_size_rtx = GEN_INT (struct_value_size);
  rtx call_insn;
#ifndef ACCUMULATE_OUTGOING_ARGS
  /* Set when a call_pop pattern already accounted for the argument pop.  */
  int already_popped = 0;
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#ifndef ACCUMULATE_OUTGOING_ARGS
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* Prefer the combined call-and-pop patterns when the callee pops its
     own arguments (or there is nothing to pop).  */
  if (HAVE_call_pop && HAVE_call_value_pop
      && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
	  || stack_size == 0))
    {
      rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = gen_call_value_pop (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#endif

  /* Otherwise fall through to the plain call patterns.  The dangling
     `else' above chains into this `if' when both #ifs are active.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (gen_call_value (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  The pattern emitted above may
     have been a PARALLEL or a SEQUENCE, so scan backwards for it.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      /* Walk to the last link of the existing list.  */
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (is_const)
    CONST_CALL_P (call_insn) = 1;

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

#ifndef ACCUMULATE_OUTGOING_ARGS
  /* If returning from the subroutine does not automatically pop the args,
     we need an instruction to pop them sooner or later.
     Perhaps do it now; perhaps just record how much space to pop later.

     If returning from the subroutine does pop the args, indicate that the
     stack pointer will be changed.  */

  if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      /* Whatever the callee pops, we need not pop ourselves.  */
      stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
      stack_size_rtx = GEN_INT (stack_size);
    }

  /* Anything left over is our responsibility: either defer the pop so
     adjacent calls can share one adjustment, or do it right away.  */
  if (stack_size != 0)
    {
      if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
	pending_stack_adjust += stack_size;
      else
	adjust_stack (stack_size_rtx);
    }
#endif
}
488
489 /* Determine if the function identified by NAME and FNDECL is one with
490 special properties we wish to know about.
491
492 For example, if the function might return more than one time (setjmp), then
493 set RETURNS_TWICE to a nonzero value.
494
495 Similarly set IS_LONGJMP for if the function is in the longjmp family.
496
497 Set IS_MALLOC for any of the standard memory allocation functions which
498 allocate from the heap.
499
500 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
501 space from the stack such as alloca. */
502
503 static void
504 special_function_p (name, fndecl, returns_twice, is_longjmp,
505 is_malloc, may_be_alloca)
506 char *name;
507 tree fndecl;
508 int *returns_twice;
509 int *is_longjmp;
510 int *is_malloc;
511 int *may_be_alloca;
512 {
513 *returns_twice = 0;
514 *is_longjmp = 0;
515 *is_malloc = 0;
516 *may_be_alloca = 0;
517
518 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
519 /* Exclude functions not at the file scope, or not `extern',
520 since they are not the magic functions we would otherwise
521 think they are. */
522 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
523 {
524 char *tname = name;
525
526 /* We assume that alloca will always be called by name. It
527 makes no sense to pass it as a pointer-to-function to
528 anything that does not understand its behavior. */
529 *may_be_alloca
530 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
531 && name[0] == 'a'
532 && ! strcmp (name, "alloca"))
533 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
534 && name[0] == '_'
535 && ! strcmp (name, "__builtin_alloca"))));
536
537 /* Disregard prefix _, __ or __x. */
538 if (name[0] == '_')
539 {
540 if (name[1] == '_' && name[2] == 'x')
541 tname += 3;
542 else if (name[1] == '_')
543 tname += 2;
544 else
545 tname += 1;
546 }
547
548 if (tname[0] == 's')
549 {
550 *returns_twice
551 = ((tname[1] == 'e'
552 && (! strcmp (tname, "setjmp")
553 || ! strcmp (tname, "setjmp_syscall")))
554 || (tname[1] == 'i'
555 && ! strcmp (tname, "sigsetjmp"))
556 || (tname[1] == 'a'
557 && ! strcmp (tname, "savectx")));
558 if (tname[1] == 'i'
559 && ! strcmp (tname, "siglongjmp"))
560 *is_longjmp = 1;
561 }
562 else if ((tname[0] == 'q' && tname[1] == 's'
563 && ! strcmp (tname, "qsetjmp"))
564 || (tname[0] == 'v' && tname[1] == 'f'
565 && ! strcmp (tname, "vfork")))
566 *returns_twice = 1;
567
568 else if (tname[0] == 'l' && tname[1] == 'o'
569 && ! strcmp (tname, "longjmp"))
570 *is_longjmp = 1;
571 /* XXX should have "malloc" attribute on functions instead
572 of recognizing them by name. */
573 else if (! strcmp (tname, "malloc")
574 || ! strcmp (tname, "calloc")
575 || ! strcmp (tname, "realloc")
576 /* Note use of NAME rather than TNAME here. These functions
577 are only reserved when preceded with __. */
578 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
579 || ! strcmp (name, "__nw") /* mangled __builtin_new */
580 || ! strcmp (name, "__builtin_new")
581 || ! strcmp (name, "__builtin_vec_new"))
582 *is_malloc = 1;
583 }
584 }
585
586 /* Precompute all register parameters as described by ARGS, storing values
587 into fields within the ARGS array.
588
589 NUM_ACTUALS indicates the total number elements in the ARGS array.
590
591 Set REG_PARM_SEEN if we encounter a register parameter. */
592
593 static void
594 precompute_register_parameters (num_actuals, args, reg_parm_seen)
595 int num_actuals;
596 struct arg_data *args;
597 int *reg_parm_seen;
598 {
599 int i;
600
601 *reg_parm_seen = 0;
602
603 for (i = 0; i < num_actuals; i++)
604 if (args[i].reg != 0 && ! args[i].pass_on_stack)
605 {
606 *reg_parm_seen = 1;
607
608 if (args[i].value == 0)
609 {
610 push_temp_slots ();
611 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
612 VOIDmode, 0);
613 preserve_temp_slots (args[i].value);
614 pop_temp_slots ();
615
616 /* ANSI doesn't require a sequence point here,
617 but PCC has one, so this will avoid some problems. */
618 emit_queue ();
619 }
620
621 /* If we are to promote the function arg to a wider mode,
622 do it now. */
623
624 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
625 args[i].value
626 = convert_modes (args[i].mode,
627 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
628 args[i].value, args[i].unsignedp);
629
630 /* If the value is expensive, and we are inside an appropriately
631 short loop, put the value into a pseudo and then put the pseudo
632 into the hard reg.
633
634 For small register classes, also do this if this call uses
635 register parameters. This is to avoid reload conflicts while
636 loading the parameters registers. */
637
638 if ((! (GET_CODE (args[i].value) == REG
639 || (GET_CODE (args[i].value) == SUBREG
640 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
641 && args[i].mode != BLKmode
642 && rtx_cost (args[i].value, SET) > 2
643 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
644 || preserve_subexpressions_p ()))
645 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
646 }
647 }
648
649 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
650
651 /* The argument list is the property of the called routine and it
652 may clobber it. If the fixed area has been used for previous
653 parameters, we must save and restore it. */
static rtx
save_fixed_argument_area (reg_parm_stack_space, argblock,
			  low_to_save, high_to_save)
     int reg_parm_stack_space;
     rtx argblock;
     int *low_to_save;
     int *high_to_save;
{
  int i;
  rtx save_area = NULL_RTX;

  /* Compute the boundary of the area that needs to be saved, if any.
     The caller is expected to have initialized *LOW_TO_SAVE to -1
     (as expand_call does); on return it holds the lowest in-use byte
     and *HIGH_TO_SAVE the highest, both relative to ARGBLOCK.  */
#ifdef ARGS_GROW_DOWNWARD
  for (i = 0; i < reg_parm_stack_space + 1; i++)
#else
  for (i = 0; i < reg_parm_stack_space; i++)
#endif
    {
      if (i >= highest_outgoing_arg_in_use
	  || stack_usage_map[i] == 0)
	continue;

      if (*low_to_save == -1)
	*low_to_save = i;

      *high_to_save = i;
    }

  if (*low_to_save >= 0)
    {
      int num_to_save = *high_to_save - *low_to_save + 1;
      enum machine_mode save_mode
	= mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
      rtx stack_area;

      /* If we don't have the required alignment, must do this in BLKmode.  */
      if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
				BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	save_mode = BLKmode;

      /* Address the in-use slice of the argument block; it lies below
	 ARGBLOCK when arguments grow downward.  */
#ifdef ARGS_GROW_DOWNWARD
      stack_area = gen_rtx_MEM (save_mode,
				memory_address (save_mode,
						plus_constant (argblock,
							       - *high_to_save)));
#else
      stack_area = gen_rtx_MEM (save_mode,
				memory_address (save_mode,
						plus_constant (argblock,
							       *low_to_save)));
#endif
      if (save_mode == BLKmode)
	{
	  /* Block copy into a stack temporary.  */
	  save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	  MEM_IN_STRUCT_P (save_area) = 0;
	  emit_block_move (validize_mem (save_area), stack_area,
			   GEN_INT (num_to_save),
			   PARM_BOUNDARY / BITS_PER_UNIT);
	}
      else
	{
	  /* Small enough for a single register-mode move.  */
	  save_area = gen_reg_rtx (save_mode);
	  emit_move_insn (save_area, stack_area);
	}
    }
  return save_area;
}
721
722 static void
723 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
724 rtx save_area;
725 rtx argblock;
726 int high_to_save;
727 int low_to_save;
728 {
729 enum machine_mode save_mode = GET_MODE (save_area);
730 #ifdef ARGS_GROW_DOWNWARD
731 rtx stack_area
732 = gen_rtx_MEM (save_mode,
733 memory_address (save_mode,
734 plus_constant (argblock,
735 - high_to_save)));
736 #else
737 rtx stack_area
738 = gen_rtx_MEM (save_mode,
739 memory_address (save_mode,
740 plus_constant (argblock,
741 low_to_save)));
742 #endif
743
744 if (save_mode != BLKmode)
745 emit_move_insn (stack_area, save_area);
746 else
747 emit_block_move (stack_area, validize_mem (save_area),
748 GEN_INT (high_to_save - low_to_save + 1),
749 PARM_BOUNDARY / BITS_PER_UNIT);
750 }
751 #endif
752
753 /* If any elements in ARGS refer to parameters that are to be passed in
754 registers, but not in memory, and whose alignment does not permit a
755 direct copy into registers. Copy the values into a group of pseudos
756 which we will later copy into the appropriate hard registers.
757
758 Pseudos for each unaligned argument will be stored into the array
759 args[argnum].aligned_regs. The caller is responsible for deallocating
760 the aligned_regs array if it is nonzero. */
761
762 static void
763 store_unaligned_arguments_into_pseudos (args, num_actuals)
764 struct arg_data *args;
765 int num_actuals;
766 {
767 int i, j;
768
769 for (i = 0; i < num_actuals; i++)
770 if (args[i].reg != 0 && ! args[i].pass_on_stack
771 && args[i].mode == BLKmode
772 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
773 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
774 {
775 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
776 int big_endian_correction = 0;
777
778 args[i].n_aligned_regs
779 = args[i].partial ? args[i].partial
780 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
781
782 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
783 * args[i].n_aligned_regs);
784
785 /* Structures smaller than a word are aligned to the least
786 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
787 this means we must skip the empty high order bytes when
788 calculating the bit offset. */
789 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
790 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
791
792 for (j = 0; j < args[i].n_aligned_regs; j++)
793 {
794 rtx reg = gen_reg_rtx (word_mode);
795 rtx word = operand_subword_force (args[i].value, j, BLKmode);
796 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
797 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
798
799 args[i].aligned_regs[j] = reg;
800
801 /* There is no need to restrict this code to loading items
802 in TYPE_ALIGN sized hunks. The bitfield instructions can
803 load up entire word sized registers efficiently.
804
805 ??? This may not be needed anymore.
806 We use to emit a clobber here but that doesn't let later
807 passes optimize the instructions we emit. By storing 0 into
808 the register later passes know the first AND to zero out the
809 bitfield being set in the register is unnecessary. The store
810 of 0 will be deleted as will at least the first AND. */
811
812 emit_move_insn (reg, const0_rtx);
813
814 bytes -= bitsize / BITS_PER_UNIT;
815 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
816 extract_bit_field (word, bitsize, 0, 1,
817 NULL_RTX, word_mode,
818 word_mode,
819 bitalign / BITS_PER_UNIT,
820 BITS_PER_WORD),
821 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
822 }
823 }
824 }
825
826 /* Generate all the code for a function call
827 and return an rtx for its value.
828 Store the value in TARGET (specified as an rtx) if convenient.
829 If the value is stored in TARGET then TARGET is returned.
830 If IGNORE is nonzero, then we ignore the value of the function call. */
831
832 rtx
833 expand_call (exp, target, ignore)
834 tree exp;
835 rtx target;
836 int ignore;
837 {
838 /* List of actual parameters. */
839 tree actparms = TREE_OPERAND (exp, 1);
840 /* RTX for the function to be called. */
841 rtx funexp;
842 /* Data type of the function. */
843 tree funtype;
844 /* Declaration of the function being called,
845 or 0 if the function is computed (not known by name). */
846 tree fndecl = 0;
847 char *name = 0;
848
849 /* Register in which non-BLKmode value will be returned,
850 or 0 if no value or if value is BLKmode. */
851 rtx valreg;
852 /* Address where we should return a BLKmode value;
853 0 if value not BLKmode. */
854 rtx structure_value_addr = 0;
855 /* Nonzero if that address is being passed by treating it as
856 an extra, implicit first parameter. Otherwise,
857 it is passed by being copied directly into struct_value_rtx. */
858 int structure_value_addr_parm = 0;
859 /* Size of aggregate value wanted, or zero if none wanted
860 or if we are using the non-reentrant PCC calling convention
861 or expecting the value in registers. */
862 HOST_WIDE_INT struct_value_size = 0;
863 /* Nonzero if called function returns an aggregate in memory PCC style,
864 by returning the address of where to find it. */
865 int pcc_struct_value = 0;
866
867 /* Number of actual parameters in this call, including struct value addr. */
868 int num_actuals;
869 /* Number of named args. Args after this are anonymous ones
870 and they must all go on the stack. */
871 int n_named_args;
872 /* Count arg position in order args appear. */
873 int argpos;
874
875 /* Vector of information about each argument.
876 Arguments are numbered in the order they will be pushed,
877 not the order they are written. */
878 struct arg_data *args;
879
880 /* Total size in bytes of all the stack-parms scanned so far. */
881 struct args_size args_size;
882 /* Size of arguments before any adjustments (such as rounding). */
883 struct args_size original_args_size;
884 /* Data on reg parms scanned so far. */
885 CUMULATIVE_ARGS args_so_far;
886 /* Nonzero if a reg parm has been scanned. */
887 int reg_parm_seen;
888 /* Nonzero if this is an indirect function call. */
889
890 /* Nonzero if we must avoid push-insns in the args for this call.
891 If stack space is allocated for register parameters, but not by the
892 caller, then it is preallocated in the fixed part of the stack frame.
893 So the entire argument block must then be preallocated (i.e., we
894 ignore PUSH_ROUNDING in that case). */
895
896 #ifdef PUSH_ROUNDING
897 int must_preallocate = 0;
898 #else
899 int must_preallocate = 1;
900 #endif
901
902 /* Size of the stack reserved for parameter registers. */
903 int reg_parm_stack_space = 0;
904
905 /* 1 if scanning parms front to back, -1 if scanning back to front. */
906 int inc;
907 /* Address of space preallocated for stack parms
908 (on machines that lack push insns), or 0 if space not preallocated. */
909 rtx argblock = 0;
910
911 /* Nonzero if it is plausible that this is a call to alloca. */
912 int may_be_alloca;
913 /* Nonzero if this is a call to malloc or a related function. */
914 int is_malloc;
915 /* Nonzero if this is a call to setjmp or a related function. */
916 int returns_twice;
917 /* Nonzero if this is a call to `longjmp'. */
918 int is_longjmp;
919 /* Nonzero if this is a call to an inline function. */
920 int is_integrable = 0;
921 /* Nonzero if this is a call to a `const' function.
922 Note that only explicitly named functions are handled as `const' here. */
923 int is_const = 0;
924 /* Nonzero if this is a call to a `volatile' function. */
925 int is_volatile = 0;
926 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
927 /* Define the boundary of the register parm stack space that needs to be
928 save, if any. */
929 int low_to_save = -1, high_to_save;
930 rtx save_area = 0; /* Place that it is saved */
931 #endif
932
933 #ifdef ACCUMULATE_OUTGOING_ARGS
934 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
935 char *initial_stack_usage_map = stack_usage_map;
936 int old_stack_arg_under_construction;
937 #endif
938
939 rtx old_stack_level = 0;
940 int old_pending_adj = 0;
941 int old_inhibit_defer_pop = inhibit_defer_pop;
942 rtx call_fusage = 0;
943 register tree p;
944 register int i, j;
945
946 /* The value of the function call can be put in a hard register. But
947 if -fcheck-memory-usage, code which invokes functions (and thus
948 damages some hard registers) can be inserted before using the value.
949 So, target is always a pseudo-register in that case. */
950 if (current_function_check_memory_usage)
951 target = 0;
952
953 /* See if we can find a DECL-node for the actual function.
954 As a result, decide whether this is a call to an integrable function. */
955
956 p = TREE_OPERAND (exp, 0);
957 if (TREE_CODE (p) == ADDR_EXPR)
958 {
959 fndecl = TREE_OPERAND (p, 0);
960 if (TREE_CODE (fndecl) != FUNCTION_DECL)
961 fndecl = 0;
962 else
963 {
964 if (!flag_no_inline
965 && fndecl != current_function_decl
966 && DECL_INLINE (fndecl)
967 && DECL_SAVED_INSNS (fndecl)
968 && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
969 is_integrable = 1;
970 else if (! TREE_ADDRESSABLE (fndecl))
971 {
972 /* In case this function later becomes inlinable,
973 record that there was already a non-inline call to it.
974
975 Use abstraction instead of setting TREE_ADDRESSABLE
976 directly. */
977 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
978 && optimize > 0)
979 {
980 warning_with_decl (fndecl, "can't inline call to `%s'");
981 warning ("called from here");
982 }
983 mark_addressable (fndecl);
984 }
985
986 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
987 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
988 is_const = 1;
989
990 if (TREE_THIS_VOLATILE (fndecl))
991 is_volatile = 1;
992 }
993 }
994
995 /* If we don't have specific function to call, see if we have a
996 constant or `noreturn' function from the type. */
997 if (fndecl == 0)
998 {
999 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1000 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1001 }
1002
1003 #ifdef REG_PARM_STACK_SPACE
1004 #ifdef MAYBE_REG_PARM_STACK_SPACE
1005 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1006 #else
1007 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1008 #endif
1009 #endif
1010
1011 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1012 if (reg_parm_stack_space > 0)
1013 must_preallocate = 1;
1014 #endif
1015
1016 /* Warn if this value is an aggregate type,
1017 regardless of which calling convention we are using for it. */
1018 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1019 warning ("function call has aggregate value");
1020
1021 /* Set up a place to return a structure. */
1022
1023 /* Cater to broken compilers. */
1024 if (aggregate_value_p (exp))
1025 {
1026 /* This call returns a big structure. */
1027 is_const = 0;
1028
1029 #ifdef PCC_STATIC_STRUCT_RETURN
1030 {
1031 pcc_struct_value = 1;
1032 /* Easier than making that case work right. */
1033 if (is_integrable)
1034 {
1035 /* In case this is a static function, note that it has been
1036 used. */
1037 if (! TREE_ADDRESSABLE (fndecl))
1038 mark_addressable (fndecl);
1039 is_integrable = 0;
1040 }
1041 }
1042 #else /* not PCC_STATIC_STRUCT_RETURN */
1043 {
1044 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1045
1046 if (target && GET_CODE (target) == MEM)
1047 structure_value_addr = XEXP (target, 0);
1048 else
1049 {
1050 /* Assign a temporary to hold the value. */
1051 tree d;
1052
1053 /* For variable-sized objects, we must be called with a target
1054 specified. If we were to allocate space on the stack here,
1055 we would have no way of knowing when to free it. */
1056
1057 if (struct_value_size < 0)
1058 abort ();
1059
1060 /* This DECL is just something to feed to mark_addressable;
1061 it doesn't get pushed. */
1062 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1063 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1064 mark_addressable (d);
1065 structure_value_addr = XEXP (DECL_RTL (d), 0);
1066 TREE_USED (d) = 1;
1067 target = 0;
1068 }
1069 }
1070 #endif /* not PCC_STATIC_STRUCT_RETURN */
1071 }
1072
1073 /* If called function is inline, try to integrate it. */
1074
1075 if (is_integrable)
1076 {
1077 rtx temp;
1078 #ifdef ACCUMULATE_OUTGOING_ARGS
1079 rtx before_call = get_last_insn ();
1080 #endif
1081
1082 temp = expand_inline_function (fndecl, actparms, target,
1083 ignore, TREE_TYPE (exp),
1084 structure_value_addr);
1085
1086 /* If inlining succeeded, return. */
1087 if (temp != (rtx) (HOST_WIDE_INT) -1)
1088 {
1089 #ifdef ACCUMULATE_OUTGOING_ARGS
1090 /* If the outgoing argument list must be preserved, push
1091 the stack before executing the inlined function if it
1092 makes any calls. */
1093
1094 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1095 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1096 break;
1097
1098 if (stack_arg_under_construction || i >= 0)
1099 {
1100 rtx first_insn
1101 = before_call ? NEXT_INSN (before_call) : get_insns ();
1102 rtx insn, seq;
1103
1104 /* Look for a call in the inline function code.
1105 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
1106 nonzero then there is a call and it is not necessary
1107 to scan the insns. */
1108
1109 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
1110 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1111 if (GET_CODE (insn) == CALL_INSN)
1112 break;
1113
1114 if (insn)
1115 {
1116 /* Reserve enough stack space so that the largest
1117 argument list of any function call in the inline
1118 function does not overlap the argument list being
1119 evaluated. This is usually an overestimate because
1120 allocate_dynamic_stack_space reserves space for an
1121 outgoing argument list in addition to the requested
1122 space, but there is no way to ask for stack space such
1123 that an argument list of a certain length can be
1124 safely constructed.
1125
1126 Add the stack space reserved for register arguments, if
1127 any, in the inline function. What is really needed is the
1128 largest value of reg_parm_stack_space in the inline
1129 function, but that is not available. Using the current
1130 value of reg_parm_stack_space is wrong, but gives
1131 correct results on all supported machines. */
1132
1133 int adjust = (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl))
1134 + reg_parm_stack_space);
1135
1136 start_sequence ();
1137 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1138 allocate_dynamic_stack_space (GEN_INT (adjust),
1139 NULL_RTX, BITS_PER_UNIT);
1140 seq = get_insns ();
1141 end_sequence ();
1142 emit_insns_before (seq, first_insn);
1143 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1144 }
1145 }
1146 #endif
1147
1148 /* If the result is equivalent to TARGET, return TARGET to simplify
1149 checks in store_expr. They can be equivalent but not equal in the
1150 case of a function that returns BLKmode. */
1151 if (temp != target && rtx_equal_p (temp, target))
1152 return target;
1153 return temp;
1154 }
1155
1156 /* If inlining failed, mark FNDECL as needing to be compiled
1157 separately after all. If function was declared inline,
1158 give a warning. */
1159 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1160 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1161 {
1162 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1163 warning ("called from here");
1164 }
1165 mark_addressable (fndecl);
1166 }
1167
1168 /* When calling a const function, we must pop the stack args right away,
1169 so that the pop is deleted or moved with the call. */
1170 if (is_const)
1171 NO_DEFER_POP;
1172
1173 function_call_count++;
1174
1175 if (fndecl && DECL_NAME (fndecl))
1176 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1177
1178 /* See if this is a call to a function that can return more than once
1179 or a call to longjmp or malloc. */
1180 special_function_p (name, fndecl, &returns_twice, &is_longjmp,
1181 &is_malloc, &may_be_alloca);
1182
1183 if (may_be_alloca)
1184 current_function_calls_alloca = 1;
1185
1186 /* Don't let pending stack adjusts add up to too much.
1187 Also, do all pending adjustments now
1188 if there is any chance this might be a call to alloca. */
1189
1190 if (pending_stack_adjust >= 32
1191 || (pending_stack_adjust > 0 && may_be_alloca))
1192 do_pending_stack_adjust ();
1193
1194 /* Operand 0 is a pointer-to-function; get the type of the function. */
1195 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1196 if (TREE_CODE (funtype) != POINTER_TYPE)
1197 abort ();
1198 funtype = TREE_TYPE (funtype);
1199
1200 /* Push the temporary stack slot level so that we can free any temporaries
1201 we make. */
1202 push_temp_slots ();
1203
1204 /* Start updating where the next arg would go.
1205
1206 On some machines (such as the PA) indirect calls have a different
1207 calling convention than normal calls. The last argument in
1208 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1209 or not. */
1210 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1211
1212 /* If struct_value_rtx is 0, it means pass the address
1213 as if it were an extra parameter. */
1214 if (structure_value_addr && struct_value_rtx == 0)
1215 {
1216 /* If structure_value_addr is a REG other than
1217 virtual_outgoing_args_rtx, we can always use it. If it
1218 is not a REG, we must always copy it into a register.
1219 If it is virtual_outgoing_args_rtx, we must copy it to another
1220 register in some cases. */
1221 rtx temp = (GET_CODE (structure_value_addr) != REG
1222 #ifdef ACCUMULATE_OUTGOING_ARGS
1223 || (stack_arg_under_construction
1224 && structure_value_addr == virtual_outgoing_args_rtx)
1225 #endif
1226 ? copy_addr_to_reg (structure_value_addr)
1227 : structure_value_addr);
1228
1229 actparms
1230 = tree_cons (error_mark_node,
1231 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1232 temp),
1233 actparms);
1234 structure_value_addr_parm = 1;
1235 }
1236
1237 /* Count the arguments and set NUM_ACTUALS. */
1238 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1239 num_actuals = i;
1240
1241 /* Compute number of named args.
1242 Normally, don't include the last named arg if anonymous args follow.
1243 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1244 (If no anonymous args follow, the result of list_length is actually
1245 one too large. This is harmless.)
1246
1247 If SETUP_INCOMING_VARARGS is defined and STRICT_ARGUMENT_NAMING is zero,
1248 this machine will be able to place unnamed args that were passed in
1249 registers into the stack. So treat all args as named. This allows the
1250 insns emitted for a specific argument list to be independent of the
1251 function declaration.
1252
1253 If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
1254 way to pass unnamed args in registers, so we must force them into
1255 memory. */
1256
1257 if ((STRICT_ARGUMENT_NAMING
1258 #ifndef SETUP_INCOMING_VARARGS
1259 || 1
1260 #endif
1261 )
1262 && TYPE_ARG_TYPES (funtype) != 0)
1263 n_named_args
1264 = (list_length (TYPE_ARG_TYPES (funtype))
1265 /* Don't include the last named arg. */
1266 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1267 /* Count the struct value address, if it is passed as a parm. */
1268 + structure_value_addr_parm);
1269 else
1270 /* If we know nothing, treat all args as named. */
1271 n_named_args = num_actuals;
1272
1273 /* Make a vector to hold all the information about each arg. */
1274 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1275 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1276
1277 args_size.constant = 0;
1278 args_size.var = 0;
1279
1280 /* In this loop, we consider args in the order they are written.
1281 We fill up ARGS from the front or from the back if necessary
1282 so that in any case the first arg to be pushed ends up at the front. */
1283
1284 #ifdef PUSH_ARGS_REVERSED
1285 i = num_actuals - 1, inc = -1;
1286 /* In this case, must reverse order of args
1287 so that we compute and push the last arg first. */
1288 #else
1289 i = 0, inc = 1;
1290 #endif
1291
1292 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1293 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1294 {
1295 tree type = TREE_TYPE (TREE_VALUE (p));
1296 int unsignedp;
1297 enum machine_mode mode;
1298
1299 args[i].tree_value = TREE_VALUE (p);
1300
1301 /* Replace erroneous argument with constant zero. */
1302 if (type == error_mark_node || TYPE_SIZE (type) == 0)
1303 args[i].tree_value = integer_zero_node, type = integer_type_node;
1304
1305 /* If TYPE is a transparent union, pass things the way we would
1306 pass the first field of the union. We have already verified that
1307 the modes are the same. */
1308 if (TYPE_TRANSPARENT_UNION (type))
1309 type = TREE_TYPE (TYPE_FIELDS (type));
1310
1311 /* Decide where to pass this arg.
1312
1313 args[i].reg is nonzero if all or part is passed in registers.
1314
1315 args[i].partial is nonzero if part but not all is passed in registers,
1316 and the exact value says how many words are passed in registers.
1317
1318 args[i].pass_on_stack is nonzero if the argument must at least be
1319 computed on the stack. It may then be loaded back into registers
1320 if args[i].reg is nonzero.
1321
1322 These decisions are driven by the FUNCTION_... macros and must agree
1323 with those made by function.c. */
1324
1325 /* See if this argument should be passed by invisible reference. */
1326 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1327 && contains_placeholder_p (TYPE_SIZE (type)))
1328 || TREE_ADDRESSABLE (type)
1329 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1330 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type),
1331 type, argpos < n_named_args)
1332 #endif
1333 )
1334 {
1335 /* If we're compiling a thunk, pass through invisible
1336 references instead of making a copy. */
1337 if (current_function_is_thunk
1338 #ifdef FUNCTION_ARG_CALLEE_COPIES
1339 || (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type),
1340 type, argpos < n_named_args)
1341 /* If it's in a register, we must make a copy of it too. */
1342 /* ??? Is this a sufficient test? Is there a better one? */
1343 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1344 && REG_P (DECL_RTL (args[i].tree_value)))
1345 && ! TREE_ADDRESSABLE (type))
1346 #endif
1347 )
1348 {
1349 args[i].tree_value = build1 (ADDR_EXPR,
1350 build_pointer_type (type),
1351 args[i].tree_value);
1352 type = build_pointer_type (type);
1353 }
1354 else
1355 {
1356 /* We make a copy of the object and pass the address to the
1357 function being called. */
1358 rtx copy;
1359
1360 if (TYPE_SIZE (type) == 0
1361 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1362 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1363 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1364 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1365 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1366 {
1367 /* This is a variable-sized object. Make space on the stack
1368 for it. */
1369 rtx size_rtx = expr_size (TREE_VALUE (p));
1370
1371 if (old_stack_level == 0)
1372 {
1373 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1374 old_pending_adj = pending_stack_adjust;
1375 pending_stack_adjust = 0;
1376 }
1377
1378 copy = gen_rtx_MEM (BLKmode,
1379 allocate_dynamic_stack_space (size_rtx,
1380 NULL_RTX,
1381 TYPE_ALIGN (type)));
1382 }
1383 else
1384 {
1385 int size = int_size_in_bytes (type);
1386 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1387 }
1388
1389 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
1390
1391 store_expr (args[i].tree_value, copy, 0);
1392 is_const = 0;
1393
1394 args[i].tree_value = build1 (ADDR_EXPR,
1395 build_pointer_type (type),
1396 make_tree (type, copy));
1397 type = build_pointer_type (type);
1398 }
1399 }
1400
1401 mode = TYPE_MODE (type);
1402 unsignedp = TREE_UNSIGNED (type);
1403
1404 #ifdef PROMOTE_FUNCTION_ARGS
1405 mode = promote_mode (type, mode, &unsignedp, 1);
1406 #endif
1407
1408 args[i].unsignedp = unsignedp;
1409 args[i].mode = mode;
1410 args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
1411 argpos < n_named_args);
1412 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1413 if (args[i].reg)
1414 args[i].partial
1415 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
1416 argpos < n_named_args);
1417 #endif
1418
1419 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1420
1421 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1422 it means that we are to pass this arg in the register(s) designated
1423 by the PARALLEL, but also to pass it in the stack. */
1424 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1425 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1426 args[i].pass_on_stack = 1;
1427
1428 /* If this is an addressable type, we must preallocate the stack
1429 since we must evaluate the object into its final location.
1430
1431 If this is to be passed in both registers and the stack, it is simpler
1432 to preallocate. */
1433 if (TREE_ADDRESSABLE (type)
1434 || (args[i].pass_on_stack && args[i].reg != 0))
1435 must_preallocate = 1;
1436
1437 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1438 we cannot consider this function call constant. */
1439 if (TREE_ADDRESSABLE (type))
1440 is_const = 0;
1441
1442 /* Compute the stack-size of this argument. */
1443 if (args[i].reg == 0 || args[i].partial != 0
1444 || reg_parm_stack_space > 0
1445 || args[i].pass_on_stack)
1446 locate_and_pad_parm (mode, type,
1447 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1448 1,
1449 #else
1450 args[i].reg != 0,
1451 #endif
1452 fndecl, &args_size, &args[i].offset,
1453 &args[i].size);
1454
1455 #ifndef ARGS_GROW_DOWNWARD
1456 args[i].slot_offset = args_size;
1457 #endif
1458
1459 /* If a part of the arg was put into registers,
1460 don't include that part in the amount pushed. */
1461 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1462 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1463 / (PARM_BOUNDARY / BITS_PER_UNIT)
1464 * (PARM_BOUNDARY / BITS_PER_UNIT));
1465
1466 /* Update ARGS_SIZE, the total stack space for args so far. */
1467
1468 args_size.constant += args[i].size.constant;
1469 if (args[i].size.var)
1470 {
1471 ADD_PARM_SIZE (args_size, args[i].size.var);
1472 }
1473
1474 /* Since the slot offset points to the bottom of the slot,
1475 we must record it after incrementing if the args grow down. */
1476 #ifdef ARGS_GROW_DOWNWARD
1477 args[i].slot_offset = args_size;
1478
1479 args[i].slot_offset.constant = -args_size.constant;
1480 if (args_size.var)
1481 {
1482 SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
1483 }
1484 #endif
1485
1486 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1487 have been used, etc. */
1488
1489 FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
1490 argpos < n_named_args);
1491 }
1492
1493 #ifdef FINAL_REG_PARM_STACK_SPACE
1494 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1495 args_size.var);
1496 #endif
1497
1498 /* Compute the actual size of the argument block required. The variable
1499 and constant sizes must be combined, the size may have to be rounded,
1500 and there may be a minimum required size. */
1501
1502 original_args_size = args_size;
1503 if (args_size.var)
1504 {
1505 /* If this function requires a variable-sized argument list, don't try to
1506 make a cse'able block for this call. We may be able to do this
1507 eventually, but it is too complicated to keep track of what insns go
1508 in the cse'able block and which don't. */
1509
1510 is_const = 0;
1511 must_preallocate = 1;
1512
1513 args_size.var = ARGS_SIZE_TREE (args_size);
1514 args_size.constant = 0;
1515
1516 #ifdef PREFERRED_STACK_BOUNDARY
1517 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1518 args_size.var = round_up (args_size.var, STACK_BYTES);
1519 #endif
1520
1521 if (reg_parm_stack_space > 0)
1522 {
1523 args_size.var
1524 = size_binop (MAX_EXPR, args_size.var,
1525 size_int (reg_parm_stack_space));
1526
1527 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1528 /* The area corresponding to register parameters is not to count in
1529 the size of the block we need. So make the adjustment. */
1530 args_size.var
1531 = size_binop (MINUS_EXPR, args_size.var,
1532 size_int (reg_parm_stack_space));
1533 #endif
1534 }
1535 }
1536 else
1537 {
1538 #ifdef PREFERRED_STACK_BOUNDARY
1539 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1540 / STACK_BYTES) * STACK_BYTES);
1541 #endif
1542
1543 args_size.constant = MAX (args_size.constant,
1544 reg_parm_stack_space);
1545
1546 #ifdef MAYBE_REG_PARM_STACK_SPACE
1547 if (reg_parm_stack_space == 0)
1548 args_size.constant = 0;
1549 #endif
1550
1551 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1552 args_size.constant -= reg_parm_stack_space;
1553 #endif
1554 }
1555
1556 /* See if we have or want to preallocate stack space.
1557
1558 If we would have to push a partially-in-regs parm
1559 before other stack parms, preallocate stack space instead.
1560
1561 If the size of some parm is not a multiple of the required stack
1562 alignment, we must preallocate.
1563
1564 If the total size of arguments that would otherwise create a copy in
1565 a temporary (such as a CALL) is more than half the total argument list
1566 size, preallocation is faster.
1567
1568 Another reason to preallocate is if we have a machine (like the m88k)
1569 where stack alignment is required to be maintained between every
1570 pair of insns, not just when the call is made. However, we assume here
1571 that such machines either do not have push insns (and hence preallocation
1572 would occur anyway) or the problem is taken care of with
1573 PUSH_ROUNDING. */
1574
1575 if (! must_preallocate)
1576 {
1577 int partial_seen = 0;
1578 int copy_to_evaluate_size = 0;
1579
1580 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1581 {
1582 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1583 partial_seen = 1;
1584 else if (partial_seen && args[i].reg == 0)
1585 must_preallocate = 1;
1586
1587 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1588 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1589 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1590 || TREE_CODE (args[i].tree_value) == COND_EXPR
1591 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1592 copy_to_evaluate_size
1593 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1594 }
1595
1596 if (copy_to_evaluate_size * 2 >= args_size.constant
1597 && args_size.constant > 0)
1598 must_preallocate = 1;
1599 }
1600
1601 /* If the structure value address will reference the stack pointer, we must
1602 stabilize it. We don't need to do this if we know that we are not going
1603 to adjust the stack pointer in processing this call. */
1604
1605 if (structure_value_addr
1606 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1607 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1608 && (args_size.var
1609 #ifndef ACCUMULATE_OUTGOING_ARGS
1610 || args_size.constant
1611 #endif
1612 ))
1613 structure_value_addr = copy_to_reg (structure_value_addr);
1614
1615 /* If this function call is cse'able, precompute all the parameters.
1616 Note that if the parameter is constructed into a temporary, this will
1617 cause an additional copy because the parameter will be constructed
1618 into a temporary location and then copied into the outgoing arguments.
1619 If a parameter contains a call to alloca and this function uses the
1620 stack, precompute the parameter. */
1621
1622 /* If we preallocated the stack space, and some arguments must be passed
1623 on the stack, then we must precompute any parameter which contains a
1624 function call which will store arguments on the stack.
1625 Otherwise, evaluating the parameter may clobber previous parameters
1626 which have already been stored into the stack. */
1627
1628 for (i = 0; i < num_actuals; i++)
1629 if (is_const
1630 || ((args_size.var != 0 || args_size.constant != 0)
1631 && calls_function (args[i].tree_value, 1))
1632 || (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
1633 && calls_function (args[i].tree_value, 0)))
1634 {
1635 /* If this is an addressable type, we cannot pre-evaluate it. */
1636 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1637 abort ();
1638
1639 push_temp_slots ();
1640
1641 args[i].initial_value = args[i].value
1642 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1643
1644 preserve_temp_slots (args[i].value);
1645 pop_temp_slots ();
1646
1647 /* ANSI doesn't require a sequence point here,
1648 but PCC has one, so this will avoid some problems. */
1649 emit_queue ();
1650
1651 args[i].initial_value = args[i].value
1652 = protect_from_queue (args[i].initial_value, 0);
1653
1654 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1655 args[i].value
1656 = convert_modes (args[i].mode,
1657 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1658 args[i].value, args[i].unsignedp);
1659 }
1660
1661 /* Now we are about to start emitting insns that can be deleted
1662 if a libcall is deleted. */
1663 if (is_const || is_malloc)
1664 start_sequence ();
1665
1666 /* If we have no actual push instructions, or shouldn't use them,
1667 make space for all args right now. */
1668
1669 if (args_size.var != 0)
1670 {
1671 if (old_stack_level == 0)
1672 {
1673 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1674 old_pending_adj = pending_stack_adjust;
1675 pending_stack_adjust = 0;
1676 #ifdef ACCUMULATE_OUTGOING_ARGS
1677 /* stack_arg_under_construction says whether a stack arg is
1678 being constructed at the old stack level. Pushing the stack
1679 gets a clean outgoing argument block. */
1680 old_stack_arg_under_construction = stack_arg_under_construction;
1681 stack_arg_under_construction = 0;
1682 #endif
1683 }
1684 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1685 }
1686 else
1687 {
1688 /* Note that we must go through the motions of allocating an argument
1689 block even if the size is zero because we may be storing args
1690 in the area reserved for register arguments, which may be part of
1691 the stack frame. */
1692
1693 int needed = args_size.constant;
1694
1695 /* Store the maximum argument space used. It will be pushed by
1696 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
1697 checking). */
1698
1699 if (needed > current_function_outgoing_args_size)
1700 current_function_outgoing_args_size = needed;
1701
1702 if (must_preallocate)
1703 {
1704 #ifdef ACCUMULATE_OUTGOING_ARGS
1705 /* Since the stack pointer will never be pushed, it is possible for
1706 the evaluation of a parm to clobber something we have already
1707 written to the stack. Since most function calls on RISC machines
1708 do not use the stack, this is uncommon, but must work correctly.
1709
1710 Therefore, we save any area of the stack that was already written
1711 and that we are using. Here we set up to do this by making a new
1712 stack usage map from the old one. The actual save will be done
1713 by store_one_arg.
1714
1715 Another approach might be to try to reorder the argument
1716 evaluations to avoid this conflicting stack usage. */
1717
1718 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1719 /* Since we will be writing into the entire argument area, the
1720 map must be allocated for its entire size, not just the part that
1721 is the responsibility of the caller. */
1722 needed += reg_parm_stack_space;
1723 #endif
1724
1725 #ifdef ARGS_GROW_DOWNWARD
1726 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1727 needed + 1);
1728 #else
1729 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1730 needed);
1731 #endif
1732 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
1733
1734 if (initial_highest_arg_in_use)
1735 bcopy (initial_stack_usage_map, stack_usage_map,
1736 initial_highest_arg_in_use);
1737
1738 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1739 bzero (&stack_usage_map[initial_highest_arg_in_use],
1740 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1741 needed = 0;
1742
1743 /* The address of the outgoing argument list must not be copied to a
1744 register here, because argblock would be left pointing to the
1745 wrong place after the call to allocate_dynamic_stack_space below.
1746 */
1747
1748 argblock = virtual_outgoing_args_rtx;
1749
1750 #else /* not ACCUMULATE_OUTGOING_ARGS */
1751 if (inhibit_defer_pop == 0)
1752 {
1753 /* Try to reuse some or all of the pending_stack_adjust
1754 to get this space. Maybe we can avoid any pushing. */
1755 if (needed > pending_stack_adjust)
1756 {
1757 needed -= pending_stack_adjust;
1758 pending_stack_adjust = 0;
1759 }
1760 else
1761 {
1762 pending_stack_adjust -= needed;
1763 needed = 0;
1764 }
1765 }
1766 /* Special case this because overhead of `push_block' in this
1767 case is non-trivial. */
1768 if (needed == 0)
1769 argblock = virtual_outgoing_args_rtx;
1770 else
1771 argblock = push_block (GEN_INT (needed), 0, 0);
1772
1773 /* We only really need to call `copy_to_reg' in the case where push
1774 insns are going to be used to pass ARGBLOCK to a function
1775 call in ARGS. In that case, the stack pointer changes value
1776 from the allocation point to the call point, and hence
1777 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1778 But might as well always do it. */
1779 argblock = copy_to_reg (argblock);
1780 #endif /* not ACCUMULATE_OUTGOING_ARGS */
1781 }
1782 }
1783
1784 #ifdef ACCUMULATE_OUTGOING_ARGS
1785 /* The save/restore code in store_one_arg handles all cases except one:
1786 a constructor call (including a C function returning a BLKmode struct)
1787 to initialize an argument. */
1788 if (stack_arg_under_construction)
1789 {
1790 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1791 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
1792 #else
1793 rtx push_size = GEN_INT (args_size.constant);
1794 #endif
1795 if (old_stack_level == 0)
1796 {
1797 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1798 old_pending_adj = pending_stack_adjust;
1799 pending_stack_adjust = 0;
1800 /* stack_arg_under_construction says whether a stack arg is
1801 being constructed at the old stack level. Pushing the stack
1802 gets a clean outgoing argument block. */
1803 old_stack_arg_under_construction = stack_arg_under_construction;
1804 stack_arg_under_construction = 0;
1805 /* Make a new map for the new argument list. */
1806 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
1807 bzero (stack_usage_map, highest_outgoing_arg_in_use);
1808 highest_outgoing_arg_in_use = 0;
1809 }
1810 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
1811 }
1812 /* If argument evaluation might modify the stack pointer, copy the
1813 address of the argument list to a register. */
1814 for (i = 0; i < num_actuals; i++)
1815 if (args[i].pass_on_stack)
1816 {
1817 argblock = copy_addr_to_reg (argblock);
1818 break;
1819 }
1820 #endif
1821
1822
1823 /* If we preallocated stack space, compute the address of each argument.
1824 We need not ensure it is a valid memory address here; it will be
1825 validized when it is used. */
1826 if (argblock)
1827 {
1828 rtx arg_reg = argblock;
1829 int arg_offset = 0;
1830
1831 if (GET_CODE (argblock) == PLUS)
1832 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1833
1834 for (i = 0; i < num_actuals; i++)
1835 {
1836 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1837 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1838 rtx addr;
1839
1840 /* Skip this parm if it will not be passed on the stack. */
1841 if (! args[i].pass_on_stack && args[i].reg != 0)
1842 continue;
1843
1844 if (GET_CODE (offset) == CONST_INT)
1845 addr = plus_constant (arg_reg, INTVAL (offset));
1846 else
1847 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1848
1849 addr = plus_constant (addr, arg_offset);
1850 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1851 MEM_IN_STRUCT_P (args[i].stack)
1852 = AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));
1853
1854 if (GET_CODE (slot_offset) == CONST_INT)
1855 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1856 else
1857 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1858
1859 addr = plus_constant (addr, arg_offset);
1860 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1861 }
1862 }
1863
1864 #ifdef PUSH_ARGS_REVERSED
1865 #ifdef PREFERRED_STACK_BOUNDARY
1866 /* If we push args individually in reverse order, perform stack alignment
1867 before the first push (the last arg). */
1868 if (argblock == 0)
1869 anti_adjust_stack (GEN_INT (args_size.constant
1870 - original_args_size.constant));
1871 #endif
1872 #endif
1873
1874 /* Don't try to defer pops if preallocating, not even from the first arg,
1875 since ARGBLOCK probably refers to the SP. */
1876 if (argblock)
1877 NO_DEFER_POP;
1878
1879 /* Get the function to call, in the form of RTL. */
1880 if (fndecl)
1881 {
1882 /* If this is the first use of the function, see if we need to
1883 make an external definition for it. */
1884 if (! TREE_USED (fndecl))
1885 {
1886 assemble_external (fndecl);
1887 TREE_USED (fndecl) = 1;
1888 }
1889
1890 /* Get a SYMBOL_REF rtx for the function address. */
1891 funexp = XEXP (DECL_RTL (fndecl), 0);
1892 }
1893 else
1894 /* Generate an rtx (probably a pseudo-register) for the address. */
1895 {
1896 push_temp_slots ();
1897 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1898 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1899
1900 /* Check the function is executable. */
1901 if (current_function_check_memory_usage)
1902 emit_library_call (chkr_check_exec_libfunc, 1,
1903 VOIDmode, 1,
1904 funexp, ptr_mode);
1905 emit_queue ();
1906 }
1907
1908 /* Figure out the register where the value, if any, will come back. */
1909 valreg = 0;
1910 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1911 && ! structure_value_addr)
1912 {
1913 if (pcc_struct_value)
1914 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1915 fndecl);
1916 else
1917 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1918 }
1919
1920 /* Precompute all register parameters. It isn't safe to compute anything
1921 once we have started filling any specific hard regs. */
1922 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
1923
1924 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1925
1926 /* Save the fixed argument area if it's part of the caller's frame and
1927 is clobbered by argument setup for this call. */
1928 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
1929 &low_to_save, &high_to_save);
1930 #endif
1931
1932
1933 /* Now store (and compute if necessary) all non-register parms.
1934 These come before register parms, since they can require block-moves,
1935 which could clobber the registers used for register parms.
1936 Parms which have partial registers are not stored here,
1937 but we do preallocate space here if they want that. */
1938
1939 for (i = 0; i < num_actuals; i++)
1940 if (args[i].reg == 0 || args[i].pass_on_stack)
1941 store_one_arg (&args[i], argblock, may_be_alloca,
1942 args_size.var != 0, reg_parm_stack_space);
1943
1944 /* If we have a parm that is passed in registers but not in memory
1945 and whose alignment does not permit a direct copy into registers,
1946 make a group of pseudos that correspond to each register that we
1947 will later fill. */
1948 if (STRICT_ALIGNMENT)
1949 store_unaligned_arguments_into_pseudos (args, num_actuals);
1950
1951 /* Now store any partially-in-registers parm.
1952 This is the last place a block-move can happen. */
1953 if (reg_parm_seen)
1954 for (i = 0; i < num_actuals; i++)
1955 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1956 store_one_arg (&args[i], argblock, may_be_alloca,
1957 args_size.var != 0, reg_parm_stack_space);
1958
1959 #ifndef PUSH_ARGS_REVERSED
1960 #ifdef PREFERRED_STACK_BOUNDARY
1961 /* If we pushed args in forward order, perform stack alignment
1962 after pushing the last arg. */
1963 if (argblock == 0)
1964 anti_adjust_stack (GEN_INT (args_size.constant
1965 - original_args_size.constant));
1966 #endif
1967 #endif
1968
1969 /* If register arguments require space on the stack and stack space
1970 was not preallocated, allocate stack space here for arguments
1971 passed in registers. */
1972 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
1973 if (must_preallocate == 0 && reg_parm_stack_space > 0)
1974 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
1975 #endif
1976
1977 /* Pass the function the address in which to return a structure value. */
1978 if (structure_value_addr && ! structure_value_addr_parm)
1979 {
1980 emit_move_insn (struct_value_rtx,
1981 force_reg (Pmode,
1982 force_operand (structure_value_addr,
1983 NULL_RTX)));
1984
1985 /* Mark the memory for the aggregate as write-only. */
1986 if (current_function_check_memory_usage)
1987 emit_library_call (chkr_set_right_libfunc, 1,
1988 VOIDmode, 3,
1989 structure_value_addr, ptr_mode,
1990 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
1991 GEN_INT (MEMORY_USE_WO),
1992 TYPE_MODE (integer_type_node));
1993
1994 if (GET_CODE (struct_value_rtx) == REG)
1995 use_reg (&call_fusage, struct_value_rtx);
1996 }
1997
1998 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
1999
2000 /* Now do the register loads required for any wholly-register parms or any
2001 parms which are passed both on the stack and in a register. Their
2002 expressions were already evaluated.
2003
2004 Mark all register-parms as living through the call, putting these USE
2005 insns in the CALL_INSN_FUNCTION_USAGE field. */
2006
2007 #ifdef LOAD_ARGS_REVERSED
2008 for (i = num_actuals - 1; i >= 0; i--)
2009 #else
2010 for (i = 0; i < num_actuals; i++)
2011 #endif
2012 {
2013 rtx reg = args[i].reg;
2014 int partial = args[i].partial;
2015 int nregs;
2016
2017 if (reg)
2018 {
2019 /* Set to non-negative if must move a word at a time, even if just
2020 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
2021 we just use a normal move insn. This value can be zero if the
2022 argument is a zero size structure with no fields. */
2023 nregs = (partial ? partial
2024 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2025 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
2026 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
2027 : -1));
2028
2029 /* Handle calls that pass values in multiple non-contiguous
2030 locations. The Irix 6 ABI has examples of this. */
2031
2032 if (GET_CODE (reg) == PARALLEL)
2033 {
2034 emit_group_load (reg, args[i].value,
2035 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
2036 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
2037 / BITS_PER_UNIT));
2038 }
2039
2040 /* If simple case, just do move. If normal partial, store_one_arg
2041 has already loaded the register for us. In all other cases,
2042 load the register(s) from memory. */
2043
2044 else if (nregs == -1)
2045 emit_move_insn (reg, args[i].value);
2046
2047 /* If we have pre-computed the values to put in the registers in
2048 the case of non-aligned structures, copy them in now. */
2049
2050 else if (args[i].n_aligned_regs != 0)
2051 for (j = 0; j < args[i].n_aligned_regs; j++)
2052 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2053 args[i].aligned_regs[j]);
2054
2055 else if (partial == 0 || args[i].pass_on_stack)
2056 move_block_to_reg (REGNO (reg),
2057 validize_mem (args[i].value), nregs,
2058 args[i].mode);
2059
2060 /* Handle calls that pass values in multiple non-contiguous
2061 locations. The Irix 6 ABI has examples of this. */
2062 if (GET_CODE (reg) == PARALLEL)
2063 use_group_regs (&call_fusage, reg);
2064 else if (nregs == -1)
2065 use_reg (&call_fusage, reg);
2066 else
2067 use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
2068 }
2069 }
2070
2071 /* Perform postincrements before actually calling the function. */
2072 emit_queue ();
2073
2074 /* All arguments and registers used for the call must be set up by now! */
2075
2076 /* Generate the actual call instruction. */
2077 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
2078 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2079 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2080
2081 /* If call is cse'able, make appropriate pair of reg-notes around it.
2082 Test valreg so we don't crash; may safely ignore `const'
2083 if return type is void. Disable for PARALLEL return values, because
2084 we have no way to move such values into a pseudo register. */
2085 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2086 {
2087 rtx note = 0;
2088 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2089 rtx insns;
2090
2091 /* Mark the return value as a pointer if needed. */
2092 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2093 {
2094 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2095 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2096 }
2097
2098 /* Construct an "equal form" for the value which mentions all the
2099 arguments in order as well as the function name. */
2100 #ifdef PUSH_ARGS_REVERSED
2101 for (i = 0; i < num_actuals; i++)
2102 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2103 #else
2104 for (i = num_actuals - 1; i >= 0; i--)
2105 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2106 #endif
2107 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2108
2109 insns = get_insns ();
2110 end_sequence ();
2111
2112 emit_libcall_block (insns, temp, valreg, note);
2113
2114 valreg = temp;
2115 }
2116 else if (is_const)
2117 {
2118 /* Otherwise, just write out the sequence without a note. */
2119 rtx insns = get_insns ();
2120
2121 end_sequence ();
2122 emit_insns (insns);
2123 }
2124 else if (is_malloc)
2125 {
2126 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2127 rtx last, insns;
2128
2129 /* The return value from a malloc-like function is a pointer. */
2130 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2131 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2132
2133 emit_move_insn (temp, valreg);
2134
2135 /* The return value from a malloc-like function can not alias
2136 anything else. */
2137 last = get_last_insn ();
2138 REG_NOTES (last) =
2139 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2140
2141 /* Write out the sequence. */
2142 insns = get_insns ();
2143 end_sequence ();
2144 emit_insns (insns);
2145 valreg = temp;
2146 }
2147
2148 /* For calls to `setjmp', etc., inform flow.c it should complain
2149 if nonvolatile values are live. */
2150
2151 if (returns_twice)
2152 {
2153 emit_note (name, NOTE_INSN_SETJMP);
2154 current_function_calls_setjmp = 1;
2155 }
2156
2157 if (is_longjmp)
2158 current_function_calls_longjmp = 1;
2159
2160 /* Notice functions that cannot return.
2161 If optimizing, insns emitted below will be dead.
2162 If not optimizing, they will exist, which is useful
2163 if the user uses the `return' command in the debugger. */
2164
2165 if (is_volatile || is_longjmp)
2166 emit_barrier ();
2167
2168 /* If value type not void, return an rtx for the value. */
2169
2170 /* If there are cleanups to be called, don't use a hard reg as target.
2171 We need to double check this and see if it matters anymore. */
2172 if (any_pending_cleanups (1)
2173 && target && REG_P (target)
2174 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2175 target = 0;
2176
2177 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2178 || ignore)
2179 {
2180 target = const0_rtx;
2181 }
2182 else if (structure_value_addr)
2183 {
2184 if (target == 0 || GET_CODE (target) != MEM)
2185 {
2186 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2187 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2188 structure_value_addr));
2189 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2190 }
2191 }
2192 else if (pcc_struct_value)
2193 {
2194 /* This is the special C++ case where we need to
2195 know what the true target was. We take care to
2196 never use this value more than once in one expression. */
2197 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2198 copy_to_reg (valreg));
2199 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2200 }
2201 /* Handle calls that return values in multiple non-contiguous locations.
2202 The Irix 6 ABI has examples of this. */
2203 else if (GET_CODE (valreg) == PARALLEL)
2204 {
2205 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2206
2207 if (target == 0)
2208 {
2209 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2210 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
2211 preserve_temp_slots (target);
2212 }
2213
2214 emit_group_store (target, valreg, bytes,
2215 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2216 }
2217 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2218 && GET_MODE (target) == GET_MODE (valreg))
2219 /* TARGET and VALREG cannot be equal at this point because the latter
2220 would not have REG_FUNCTION_VALUE_P true, while the former would if
2221 it were referring to the same register.
2222
2223 If they refer to the same register, this move will be a no-op, except
2224 when function inlining is being done. */
2225 emit_move_insn (target, valreg);
2226 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2227 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2228 else
2229 target = copy_to_reg (valreg);
2230
2231 #ifdef PROMOTE_FUNCTION_RETURN
2232 /* If we promoted this return value, make the proper SUBREG. TARGET
2233 might be const0_rtx here, so be careful. */
2234 if (GET_CODE (target) == REG
2235 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2236 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2237 {
2238 tree type = TREE_TYPE (exp);
2239 int unsignedp = TREE_UNSIGNED (type);
2240
2241 /* If we don't promote as expected, something is wrong. */
2242 if (GET_MODE (target)
2243 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2244 abort ();
2245
2246 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2247 SUBREG_PROMOTED_VAR_P (target) = 1;
2248 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2249 }
2250 #endif
2251
2252 /* If size of args is variable or this was a constructor call for a stack
2253 argument, restore saved stack-pointer value. */
2254
2255 if (old_stack_level)
2256 {
2257 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2258 pending_stack_adjust = old_pending_adj;
2259 #ifdef ACCUMULATE_OUTGOING_ARGS
2260 stack_arg_under_construction = old_stack_arg_under_construction;
2261 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2262 stack_usage_map = initial_stack_usage_map;
2263 #endif
2264 }
2265 #ifdef ACCUMULATE_OUTGOING_ARGS
2266 else
2267 {
2268 #ifdef REG_PARM_STACK_SPACE
2269 if (save_area)
2270 restore_fixed_argument_area (save_area, argblock,
2271 high_to_save, low_to_save);
2272 #endif
2273
2274 /* If we saved any argument areas, restore them. */
2275 for (i = 0; i < num_actuals; i++)
2276 if (args[i].save_area)
2277 {
2278 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2279 rtx stack_area
2280 = gen_rtx_MEM (save_mode,
2281 memory_address (save_mode,
2282 XEXP (args[i].stack_slot, 0)));
2283
2284 if (save_mode != BLKmode)
2285 emit_move_insn (stack_area, args[i].save_area);
2286 else
2287 emit_block_move (stack_area, validize_mem (args[i].save_area),
2288 GEN_INT (args[i].size.constant),
2289 PARM_BOUNDARY / BITS_PER_UNIT);
2290 }
2291
2292 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2293 stack_usage_map = initial_stack_usage_map;
2294 }
2295 #endif
2296
2297 /* If this was alloca, record the new stack level for nonlocal gotos.
2298 Check for the handler slots since we might not have a save area
2299 for non-local gotos. */
2300
2301 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2302 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2303
2304 pop_temp_slots ();
2305
2306 /* Free up storage we no longer need. */
2307 for (i = 0; i < num_actuals; ++i)
2308 if (args[i].aligned_regs)
2309 free (args[i].aligned_regs);
2310
2311 return target;
2312 }
2313 \f
2314 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2315 (emitting the queue unless NO_QUEUE is nonzero),
2316 for a value of mode OUTMODE,
2317 with NARGS different arguments, passed as alternating rtx values
2318 and machine_modes to convert them to.
2319 The rtx values should have been passed through protect_from_queue already.
2320
2321 NO_QUEUE will be true if and only if the library call is a `const' call
2322 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2323 to the variable is_const in expand_call.
2324
2325 NO_QUEUE must be true for const calls, because if it isn't, then
2326 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2327 and will be lost if the libcall sequence is optimized away.
2328
2329 NO_QUEUE must be false for non-const calls, because if it isn't, the
2330 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2331 optimized. For instance, the instruction scheduler may incorrectly
2332 move memory references across the non-const call. */
2333
2334 void
2335 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2336 int nargs, ...))
2337 {
2338 #ifndef ANSI_PROTOTYPES
2339 rtx orgfun;
2340 int no_queue;
2341 enum machine_mode outmode;
2342 int nargs;
2343 #endif
2344 va_list p;
2345 /* Total size in bytes of all the stack-parms scanned so far. */
2346 struct args_size args_size;
2347 /* Size of arguments before any adjustments (such as rounding). */
2348 struct args_size original_args_size;
2349 register int argnum;
2350 rtx fun;
2351 int inc;
2352 int count;
2353 rtx argblock = 0;
2354 CUMULATIVE_ARGS args_so_far;
2355 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2356 struct args_size offset; struct args_size size; rtx save_area; };
2357 struct arg *argvec;
2358 int old_inhibit_defer_pop = inhibit_defer_pop;
2359 rtx call_fusage = 0;
2360 int reg_parm_stack_space = 0;
2361 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2362 /* Define the boundary of the register parm stack space that needs to be
2363 save, if any. */
2364 int low_to_save = -1, high_to_save;
2365 rtx save_area = 0; /* Place that it is saved */
2366 #endif
2367
2368 #ifdef ACCUMULATE_OUTGOING_ARGS
2369 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2370 char *initial_stack_usage_map = stack_usage_map;
2371 int needed;
2372 #endif
2373
2374 #ifdef REG_PARM_STACK_SPACE
2375 /* Size of the stack reserved for parameter registers. */
2376 #ifdef MAYBE_REG_PARM_STACK_SPACE
2377 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2378 #else
2379 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2380 #endif
2381 #endif
2382
2383 VA_START (p, nargs);
2384
2385 #ifndef ANSI_PROTOTYPES
2386 orgfun = va_arg (p, rtx);
2387 no_queue = va_arg (p, int);
2388 outmode = va_arg (p, enum machine_mode);
2389 nargs = va_arg (p, int);
2390 #endif
2391
2392 fun = orgfun;
2393
2394 /* Copy all the libcall-arguments out of the varargs data
2395 and into a vector ARGVEC.
2396
2397 Compute how to pass each argument. We only support a very small subset
2398 of the full argument passing conventions to limit complexity here since
2399 library functions shouldn't have many args. */
2400
2401 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2402 bzero ((char *) argvec, nargs * sizeof (struct arg));
2403
2404
2405 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2406
2407 args_size.constant = 0;
2408 args_size.var = 0;
2409
2410 push_temp_slots ();
2411
2412 for (count = 0; count < nargs; count++)
2413 {
2414 rtx val = va_arg (p, rtx);
2415 enum machine_mode mode = va_arg (p, enum machine_mode);
2416
2417 /* We cannot convert the arg value to the mode the library wants here;
2418 must do it earlier where we know the signedness of the arg. */
2419 if (mode == BLKmode
2420 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2421 abort ();
2422
2423 /* On some machines, there's no way to pass a float to a library fcn.
2424 Pass it as a double instead. */
2425 #ifdef LIBGCC_NEEDS_DOUBLE
2426 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2427 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2428 #endif
2429
2430 /* There's no need to call protect_from_queue, because
2431 either emit_move_insn or emit_push_insn will do that. */
2432
2433 /* Make sure it is a reasonable operand for a move or push insn. */
2434 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2435 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2436 val = force_operand (val, NULL_RTX);
2437
2438 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2439 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2440 {
2441 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2442 be viewed as just an efficiency improvement. */
2443 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2444 emit_move_insn (slot, val);
2445 val = force_operand (XEXP (slot, 0), NULL_RTX);
2446 mode = Pmode;
2447 }
2448 #endif
2449
2450 argvec[count].value = val;
2451 argvec[count].mode = mode;
2452
2453 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2454 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2455 abort ();
2456 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2457 argvec[count].partial
2458 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2459 #else
2460 argvec[count].partial = 0;
2461 #endif
2462
2463 locate_and_pad_parm (mode, NULL_TREE,
2464 argvec[count].reg && argvec[count].partial == 0,
2465 NULL_TREE, &args_size, &argvec[count].offset,
2466 &argvec[count].size);
2467
2468 if (argvec[count].size.var)
2469 abort ();
2470
2471 if (reg_parm_stack_space == 0 && argvec[count].partial)
2472 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2473
2474 if (argvec[count].reg == 0 || argvec[count].partial != 0
2475 || reg_parm_stack_space > 0)
2476 args_size.constant += argvec[count].size.constant;
2477
2478 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2479 }
2480 va_end (p);
2481
2482 #ifdef FINAL_REG_PARM_STACK_SPACE
2483 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2484 args_size.var);
2485 #endif
2486
2487 /* If this machine requires an external definition for library
2488 functions, write one out. */
2489 assemble_external_libcall (fun);
2490
2491 original_args_size = args_size;
2492 #ifdef PREFERRED_STACK_BOUNDARY
2493 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2494 / STACK_BYTES) * STACK_BYTES);
2495 #endif
2496
2497 args_size.constant = MAX (args_size.constant,
2498 reg_parm_stack_space);
2499
2500 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2501 args_size.constant -= reg_parm_stack_space;
2502 #endif
2503
2504 if (args_size.constant > current_function_outgoing_args_size)
2505 current_function_outgoing_args_size = args_size.constant;
2506
2507 #ifdef ACCUMULATE_OUTGOING_ARGS
2508 /* Since the stack pointer will never be pushed, it is possible for
2509 the evaluation of a parm to clobber something we have already
2510 written to the stack. Since most function calls on RISC machines
2511 do not use the stack, this is uncommon, but must work correctly.
2512
2513 Therefore, we save any area of the stack that was already written
2514 and that we are using. Here we set up to do this by making a new
2515 stack usage map from the old one.
2516
2517 Another approach might be to try to reorder the argument
2518 evaluations to avoid this conflicting stack usage. */
2519
2520 needed = args_size.constant;
2521
2522 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2523 /* Since we will be writing into the entire argument area, the
2524 map must be allocated for its entire size, not just the part that
2525 is the responsibility of the caller. */
2526 needed += reg_parm_stack_space;
2527 #endif
2528
2529 #ifdef ARGS_GROW_DOWNWARD
2530 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2531 needed + 1);
2532 #else
2533 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2534 needed);
2535 #endif
2536 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2537
2538 if (initial_highest_arg_in_use)
2539 bcopy (initial_stack_usage_map, stack_usage_map,
2540 initial_highest_arg_in_use);
2541
2542 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2543 bzero (&stack_usage_map[initial_highest_arg_in_use],
2544 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2545 needed = 0;
2546
2547 /* The address of the outgoing argument list must not be copied to a
2548 register here, because argblock would be left pointing to the
2549 wrong place after the call to allocate_dynamic_stack_space below.
2550 */
2551
2552 argblock = virtual_outgoing_args_rtx;
2553 #else /* not ACCUMULATE_OUTGOING_ARGS */
2554 #ifndef PUSH_ROUNDING
2555 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2556 #endif
2557 #endif
2558
2559 #ifdef PUSH_ARGS_REVERSED
2560 #ifdef PREFERRED_STACK_BOUNDARY
2561 /* If we push args individually in reverse order, perform stack alignment
2562 before the first push (the last arg). */
2563 if (argblock == 0)
2564 anti_adjust_stack (GEN_INT (args_size.constant
2565 - original_args_size.constant));
2566 #endif
2567 #endif
2568
2569 #ifdef PUSH_ARGS_REVERSED
2570 inc = -1;
2571 argnum = nargs - 1;
2572 #else
2573 inc = 1;
2574 argnum = 0;
2575 #endif
2576
2577 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2578 /* The argument list is the property of the called routine and it
2579 may clobber it. If the fixed area has been used for previous
2580 parameters, we must save and restore it.
2581
2582 Here we compute the boundary of the that needs to be saved, if any. */
2583
2584 #ifdef ARGS_GROW_DOWNWARD
2585 for (count = 0; count < reg_parm_stack_space + 1; count++)
2586 #else
2587 for (count = 0; count < reg_parm_stack_space; count++)
2588 #endif
2589 {
2590 if (count >= highest_outgoing_arg_in_use
2591 || stack_usage_map[count] == 0)
2592 continue;
2593
2594 if (low_to_save == -1)
2595 low_to_save = count;
2596
2597 high_to_save = count;
2598 }
2599
2600 if (low_to_save >= 0)
2601 {
2602 int num_to_save = high_to_save - low_to_save + 1;
2603 enum machine_mode save_mode
2604 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2605 rtx stack_area;
2606
2607 /* If we don't have the required alignment, must do this in BLKmode. */
2608 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2609 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2610 save_mode = BLKmode;
2611
2612 #ifdef ARGS_GROW_DOWNWARD
2613 stack_area = gen_rtx_MEM (save_mode,
2614 memory_address (save_mode,
2615 plus_constant (argblock,
2616 - high_to_save)));
2617 #else
2618 stack_area = gen_rtx_MEM (save_mode,
2619 memory_address (save_mode,
2620 plus_constant (argblock,
2621 low_to_save)));
2622 #endif
2623 if (save_mode == BLKmode)
2624 {
2625 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2626 MEM_IN_STRUCT_P (save_area) = 0;
2627 emit_block_move (validize_mem (save_area), stack_area,
2628 GEN_INT (num_to_save),
2629 PARM_BOUNDARY / BITS_PER_UNIT);
2630 }
2631 else
2632 {
2633 save_area = gen_reg_rtx (save_mode);
2634 emit_move_insn (save_area, stack_area);
2635 }
2636 }
2637 #endif
2638
2639 /* Push the args that need to be pushed. */
2640
2641 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2642 are to be pushed. */
2643 for (count = 0; count < nargs; count++, argnum += inc)
2644 {
2645 register enum machine_mode mode = argvec[argnum].mode;
2646 register rtx val = argvec[argnum].value;
2647 rtx reg = argvec[argnum].reg;
2648 int partial = argvec[argnum].partial;
2649 #ifdef ACCUMULATE_OUTGOING_ARGS
2650 int lower_bound, upper_bound, i;
2651 #endif
2652
2653 if (! (reg != 0 && partial == 0))
2654 {
2655 #ifdef ACCUMULATE_OUTGOING_ARGS
2656 /* If this is being stored into a pre-allocated, fixed-size, stack
2657 area, save any previous data at that location. */
2658
2659 #ifdef ARGS_GROW_DOWNWARD
2660 /* stack_slot is negative, but we want to index stack_usage_map
2661 with positive values. */
2662 upper_bound = -argvec[argnum].offset.constant + 1;
2663 lower_bound = upper_bound - argvec[argnum].size.constant;
2664 #else
2665 lower_bound = argvec[argnum].offset.constant;
2666 upper_bound = lower_bound + argvec[argnum].size.constant;
2667 #endif
2668
2669 for (i = lower_bound; i < upper_bound; i++)
2670 if (stack_usage_map[i]
2671 /* Don't store things in the fixed argument area at this point;
2672 it has already been saved. */
2673 && i > reg_parm_stack_space)
2674 break;
2675
2676 if (i != upper_bound)
2677 {
2678 /* We need to make a save area. See what mode we can make it. */
2679 enum machine_mode save_mode
2680 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2681 MODE_INT, 1);
2682 rtx stack_area
2683 = gen_rtx_MEM (save_mode,
2684 memory_address (save_mode,
2685 plus_constant (argblock, argvec[argnum].offset.constant)));
2686 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2687 emit_move_insn (argvec[argnum].save_area, stack_area);
2688 }
2689 #endif
2690 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2691 argblock, GEN_INT (argvec[argnum].offset.constant),
2692 reg_parm_stack_space);
2693
2694 #ifdef ACCUMULATE_OUTGOING_ARGS
2695 /* Now mark the segment we just used. */
2696 for (i = lower_bound; i < upper_bound; i++)
2697 stack_usage_map[i] = 1;
2698 #endif
2699
2700 NO_DEFER_POP;
2701 }
2702 }
2703
2704 #ifndef PUSH_ARGS_REVERSED
2705 #ifdef PREFERRED_STACK_BOUNDARY
2706 /* If we pushed args in forward order, perform stack alignment
2707 after pushing the last arg. */
2708 if (argblock == 0)
2709 anti_adjust_stack (GEN_INT (args_size.constant
2710 - original_args_size.constant));
2711 #endif
2712 #endif
2713
2714 #ifdef PUSH_ARGS_REVERSED
2715 argnum = nargs - 1;
2716 #else
2717 argnum = 0;
2718 #endif
2719
2720 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2721
2722 /* Now load any reg parms into their regs. */
2723
2724 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2725 are to be pushed. */
2726 for (count = 0; count < nargs; count++, argnum += inc)
2727 {
2728 register rtx val = argvec[argnum].value;
2729 rtx reg = argvec[argnum].reg;
2730 int partial = argvec[argnum].partial;
2731
2732 if (reg != 0 && partial == 0)
2733 emit_move_insn (reg, val);
2734 NO_DEFER_POP;
2735 }
2736
2737 /* For version 1.37, try deleting this entirely. */
2738 if (! no_queue)
2739 emit_queue ();
2740
2741 /* Any regs containing parms remain in use through the call. */
2742 for (count = 0; count < nargs; count++)
2743 if (argvec[count].reg != 0)
2744 use_reg (&call_fusage, argvec[count].reg);
2745
2746 /* Don't allow popping to be deferred, since then
2747 cse'ing of library calls could delete a call and leave the pop. */
2748 NO_DEFER_POP;
2749
2750 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2751 will set inhibit_defer_pop to that value. */
2752
2753 /* The return type is needed to decide how many bytes the function pops.
2754 Signedness plays no role in that, so for simplicity, we pretend it's
2755 always signed. We also assume that the list of arguments passed has
2756 no impact, so we pretend it is unknown. */
2757
2758 emit_call_1 (fun,
2759 get_identifier (XSTR (orgfun, 0)),
2760 build_function_type (outmode == VOIDmode ? void_type_node
2761 : type_for_mode (outmode, 0), NULL_TREE),
2762 args_size.constant, 0,
2763 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2764 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2765 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2766
2767 pop_temp_slots ();
2768
2769 /* Now restore inhibit_defer_pop to its actual original value. */
2770 OK_DEFER_POP;
2771
2772 #ifdef ACCUMULATE_OUTGOING_ARGS
2773 #ifdef REG_PARM_STACK_SPACE
2774 if (save_area)
2775 {
2776 enum machine_mode save_mode = GET_MODE (save_area);
2777 #ifdef ARGS_GROW_DOWNWARD
2778 rtx stack_area
2779 = gen_rtx_MEM (save_mode,
2780 memory_address (save_mode,
2781 plus_constant (argblock,
2782 - high_to_save)));
2783 #else
2784 rtx stack_area
2785 = gen_rtx_MEM (save_mode,
2786 memory_address (save_mode,
2787 plus_constant (argblock, low_to_save)));
2788 #endif
2789
2790 if (save_mode != BLKmode)
2791 emit_move_insn (stack_area, save_area);
2792 else
2793 emit_block_move (stack_area, validize_mem (save_area),
2794 GEN_INT (high_to_save - low_to_save + 1),
2795 PARM_BOUNDARY / BITS_PER_UNIT);
2796 }
2797 #endif
2798
2799 /* If we saved any argument areas, restore them. */
2800 for (count = 0; count < nargs; count++)
2801 if (argvec[count].save_area)
2802 {
2803 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
2804 rtx stack_area
2805 = gen_rtx_MEM (save_mode,
2806 memory_address (save_mode,
2807 plus_constant (argblock, argvec[count].offset.constant)));
2808
2809 emit_move_insn (stack_area, argvec[count].save_area);
2810 }
2811
2812 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2813 stack_usage_map = initial_stack_usage_map;
2814 #endif
2815 }
2816 \f
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */
2825 rtx
2826 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
2827 enum machine_mode outmode, int nargs, ...))
2828 {
2829 #ifndef ANSI_PROTOTYPES
2830 rtx orgfun;
2831 rtx value;
2832 int no_queue;
2833 enum machine_mode outmode;
2834 int nargs;
2835 #endif
2836 va_list p;
2837 /* Total size in bytes of all the stack-parms scanned so far. */
2838 struct args_size args_size;
2839 /* Size of arguments before any adjustments (such as rounding). */
2840 struct args_size original_args_size;
2841 register int argnum;
2842 rtx fun;
2843 int inc;
2844 int count;
2845 rtx argblock = 0;
2846 CUMULATIVE_ARGS args_so_far;
2847 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2848 struct args_size offset; struct args_size size; rtx save_area; };
2849 struct arg *argvec;
2850 int old_inhibit_defer_pop = inhibit_defer_pop;
2851 rtx call_fusage = 0;
2852 rtx mem_value = 0;
2853 int pcc_struct_value = 0;
2854 int struct_value_size = 0;
2855 int is_const;
2856 int reg_parm_stack_space = 0;
2857 #ifdef ACCUMULATE_OUTGOING_ARGS
2858 int needed;
2859 #endif
2860
2861 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2862 /* Define the boundary of the register parm stack space that needs to be
2863 save, if any. */
2864 int low_to_save = -1, high_to_save;
2865 rtx save_area = 0; /* Place that it is saved */
2866 #endif
2867
2868 #ifdef ACCUMULATE_OUTGOING_ARGS
2869 /* Size of the stack reserved for parameter registers. */
2870 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2871 char *initial_stack_usage_map = stack_usage_map;
2872 #endif
2873
2874 #ifdef REG_PARM_STACK_SPACE
2875 #ifdef MAYBE_REG_PARM_STACK_SPACE
2876 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2877 #else
2878 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2879 #endif
2880 #endif
2881
2882 VA_START (p, nargs);
2883
2884 #ifndef ANSI_PROTOTYPES
2885 orgfun = va_arg (p, rtx);
2886 value = va_arg (p, rtx);
2887 no_queue = va_arg (p, int);
2888 outmode = va_arg (p, enum machine_mode);
2889 nargs = va_arg (p, int);
2890 #endif
2891
2892 is_const = no_queue;
2893 fun = orgfun;
2894
2895 /* If this kind of value comes back in memory,
2896 decide where in memory it should come back. */
2897 if (aggregate_value_p (type_for_mode (outmode, 0)))
2898 {
2899 #ifdef PCC_STATIC_STRUCT_RETURN
2900 rtx pointer_reg
2901 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2902 0);
2903 mem_value = gen_rtx_MEM (outmode, pointer_reg);
2904 pcc_struct_value = 1;
2905 if (value == 0)
2906 value = gen_reg_rtx (outmode);
2907 #else /* not PCC_STATIC_STRUCT_RETURN */
2908 struct_value_size = GET_MODE_SIZE (outmode);
2909 if (value != 0 && GET_CODE (value) == MEM)
2910 mem_value = value;
2911 else
2912 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2913 #endif
2914
2915 /* This call returns a big structure. */
2916 is_const = 0;
2917 }
2918
2919 /* ??? Unfinished: must pass the memory address as an argument. */
2920
2921 /* Copy all the libcall-arguments out of the varargs data
2922 and into a vector ARGVEC.
2923
2924 Compute how to pass each argument. We only support a very small subset
2925 of the full argument passing conventions to limit complexity here since
2926 library functions shouldn't have many args. */
2927
2928 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2929 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
2930
2931 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2932
2933 args_size.constant = 0;
2934 args_size.var = 0;
2935
2936 count = 0;
2937
2938 push_temp_slots ();
2939
2940 /* If there's a structure value address to be passed,
2941 either pass it in the special place, or pass it as an extra argument. */
2942 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
2943 {
2944 rtx addr = XEXP (mem_value, 0);
2945 nargs++;
2946
2947 /* Make sure it is a reasonable operand for a move or push insn. */
2948 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2949 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2950 addr = force_operand (addr, NULL_RTX);
2951
2952 argvec[count].value = addr;
2953 argvec[count].mode = Pmode;
2954 argvec[count].partial = 0;
2955
2956 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
2957 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2958 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
2959 abort ();
2960 #endif
2961
2962 locate_and_pad_parm (Pmode, NULL_TREE,
2963 argvec[count].reg && argvec[count].partial == 0,
2964 NULL_TREE, &args_size, &argvec[count].offset,
2965 &argvec[count].size);
2966
2967
2968 if (argvec[count].reg == 0 || argvec[count].partial != 0
2969 || reg_parm_stack_space > 0)
2970 args_size.constant += argvec[count].size.constant;
2971
2972 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
2973
2974 count++;
2975 }
2976
2977 for (; count < nargs; count++)
2978 {
2979 rtx val = va_arg (p, rtx);
2980 enum machine_mode mode = va_arg (p, enum machine_mode);
2981
2982 /* We cannot convert the arg value to the mode the library wants here;
2983 must do it earlier where we know the signedness of the arg. */
2984 if (mode == BLKmode
2985 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2986 abort ();
2987
2988 /* On some machines, there's no way to pass a float to a library fcn.
2989 Pass it as a double instead. */
2990 #ifdef LIBGCC_NEEDS_DOUBLE
2991 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2992 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2993 #endif
2994
2995 /* There's no need to call protect_from_queue, because
2996 either emit_move_insn or emit_push_insn will do that. */
2997
2998 /* Make sure it is a reasonable operand for a move or push insn. */
2999 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3000 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3001 val = force_operand (val, NULL_RTX);
3002
3003 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3004 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3005 {
3006 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3007 be viewed as just an efficiency improvement. */
3008 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3009 emit_move_insn (slot, val);
3010 val = XEXP (slot, 0);
3011 mode = Pmode;
3012 }
3013 #endif
3014
3015 argvec[count].value = val;
3016 argvec[count].mode = mode;
3017
3018 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3019 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3020 abort ();
3021 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3022 argvec[count].partial
3023 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3024 #else
3025 argvec[count].partial = 0;
3026 #endif
3027
3028 locate_and_pad_parm (mode, NULL_TREE,
3029 argvec[count].reg && argvec[count].partial == 0,
3030 NULL_TREE, &args_size, &argvec[count].offset,
3031 &argvec[count].size);
3032
3033 if (argvec[count].size.var)
3034 abort ();
3035
3036 if (reg_parm_stack_space == 0 && argvec[count].partial)
3037 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3038
3039 if (argvec[count].reg == 0 || argvec[count].partial != 0
3040 || reg_parm_stack_space > 0)
3041 args_size.constant += argvec[count].size.constant;
3042
3043 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3044 }
3045 va_end (p);
3046
3047 #ifdef FINAL_REG_PARM_STACK_SPACE
3048 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3049 args_size.var);
3050 #endif
3051 /* If this machine requires an external definition for library
3052 functions, write one out. */
3053 assemble_external_libcall (fun);
3054
3055 original_args_size = args_size;
3056 #ifdef PREFERRED_STACK_BOUNDARY
3057 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3058 / STACK_BYTES) * STACK_BYTES);
3059 #endif
3060
3061 args_size.constant = MAX (args_size.constant,
3062 reg_parm_stack_space);
3063
3064 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3065 args_size.constant -= reg_parm_stack_space;
3066 #endif
3067
3068 if (args_size.constant > current_function_outgoing_args_size)
3069 current_function_outgoing_args_size = args_size.constant;
3070
3071 #ifdef ACCUMULATE_OUTGOING_ARGS
3072 /* Since the stack pointer will never be pushed, it is possible for
3073 the evaluation of a parm to clobber something we have already
3074 written to the stack. Since most function calls on RISC machines
3075 do not use the stack, this is uncommon, but must work correctly.
3076
3077 Therefore, we save any area of the stack that was already written
3078 and that we are using. Here we set up to do this by making a new
3079 stack usage map from the old one.
3080
3081 Another approach might be to try to reorder the argument
3082 evaluations to avoid this conflicting stack usage. */
3083
3084 needed = args_size.constant;
3085
3086 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3087 /* Since we will be writing into the entire argument area, the
3088 map must be allocated for its entire size, not just the part that
3089 is the responsibility of the caller. */
3090 needed += reg_parm_stack_space;
3091 #endif
3092
3093 #ifdef ARGS_GROW_DOWNWARD
3094 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3095 needed + 1);
3096 #else
3097 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3098 needed);
3099 #endif
3100 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3101
3102 if (initial_highest_arg_in_use)
3103 bcopy (initial_stack_usage_map, stack_usage_map,
3104 initial_highest_arg_in_use);
3105
3106 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3107 bzero (&stack_usage_map[initial_highest_arg_in_use],
3108 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3109 needed = 0;
3110
3111 /* The address of the outgoing argument list must not be copied to a
3112 register here, because argblock would be left pointing to the
3113 wrong place after the call to allocate_dynamic_stack_space below.
3114 */
3115
3116 argblock = virtual_outgoing_args_rtx;
3117 #else /* not ACCUMULATE_OUTGOING_ARGS */
3118 #ifndef PUSH_ROUNDING
3119 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3120 #endif
3121 #endif
3122
3123 #ifdef PUSH_ARGS_REVERSED
3124 #ifdef PREFERRED_STACK_BOUNDARY
3125 /* If we push args individually in reverse order, perform stack alignment
3126 before the first push (the last arg). */
3127 if (argblock == 0)
3128 anti_adjust_stack (GEN_INT (args_size.constant
3129 - original_args_size.constant));
3130 #endif
3131 #endif
3132
3133 #ifdef PUSH_ARGS_REVERSED
3134 inc = -1;
3135 argnum = nargs - 1;
3136 #else
3137 inc = 1;
3138 argnum = 0;
3139 #endif
3140
3141 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3142 /* The argument list is the property of the called routine and it
3143 may clobber it. If the fixed area has been used for previous
3144 parameters, we must save and restore it.
3145
3146 Here we compute the boundary of the that needs to be saved, if any. */
3147
3148 #ifdef ARGS_GROW_DOWNWARD
3149 for (count = 0; count < reg_parm_stack_space + 1; count++)
3150 #else
3151 for (count = 0; count < reg_parm_stack_space; count++)
3152 #endif
3153 {
3154 if (count >= highest_outgoing_arg_in_use
3155 || stack_usage_map[count] == 0)
3156 continue;
3157
3158 if (low_to_save == -1)
3159 low_to_save = count;
3160
3161 high_to_save = count;
3162 }
3163
3164 if (low_to_save >= 0)
3165 {
3166 int num_to_save = high_to_save - low_to_save + 1;
3167 enum machine_mode save_mode
3168 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3169 rtx stack_area;
3170
3171 /* If we don't have the required alignment, must do this in BLKmode. */
3172 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3173 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3174 save_mode = BLKmode;
3175
3176 #ifdef ARGS_GROW_DOWNWARD
3177 stack_area = gen_rtx_MEM (save_mode,
3178 memory_address (save_mode,
3179 plus_constant (argblock,
3180 - high_to_save)));
3181 #else
3182 stack_area = gen_rtx_MEM (save_mode,
3183 memory_address (save_mode,
3184 plus_constant (argblock,
3185 low_to_save)));
3186 #endif
3187 if (save_mode == BLKmode)
3188 {
3189 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3190 MEM_IN_STRUCT_P (save_area) = 0;
3191 emit_block_move (validize_mem (save_area), stack_area,
3192 GEN_INT (num_to_save),
3193 PARM_BOUNDARY / BITS_PER_UNIT);
3194 }
3195 else
3196 {
3197 save_area = gen_reg_rtx (save_mode);
3198 emit_move_insn (save_area, stack_area);
3199 }
3200 }
3201 #endif
3202
3203 /* Push the args that need to be pushed. */
3204
3205 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3206 are to be pushed. */
3207 for (count = 0; count < nargs; count++, argnum += inc)
3208 {
3209 register enum machine_mode mode = argvec[argnum].mode;
3210 register rtx val = argvec[argnum].value;
3211 rtx reg = argvec[argnum].reg;
3212 int partial = argvec[argnum].partial;
3213 #ifdef ACCUMULATE_OUTGOING_ARGS
3214 int lower_bound, upper_bound, i;
3215 #endif
3216
3217 if (! (reg != 0 && partial == 0))
3218 {
3219 #ifdef ACCUMULATE_OUTGOING_ARGS
3220 /* If this is being stored into a pre-allocated, fixed-size, stack
3221 area, save any previous data at that location. */
3222
3223 #ifdef ARGS_GROW_DOWNWARD
3224 /* stack_slot is negative, but we want to index stack_usage_map
3225 with positive values. */
3226 upper_bound = -argvec[argnum].offset.constant + 1;
3227 lower_bound = upper_bound - argvec[argnum].size.constant;
3228 #else
3229 lower_bound = argvec[argnum].offset.constant;
3230 upper_bound = lower_bound + argvec[argnum].size.constant;
3231 #endif
3232
3233 for (i = lower_bound; i < upper_bound; i++)
3234 if (stack_usage_map[i]
3235 /* Don't store things in the fixed argument area at this point;
3236 it has already been saved. */
3237 && i > reg_parm_stack_space)
3238 break;
3239
3240 if (i != upper_bound)
3241 {
3242 /* We need to make a save area. See what mode we can make it. */
3243 enum machine_mode save_mode
3244 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3245 MODE_INT, 1);
3246 rtx stack_area
3247 = gen_rtx_MEM (save_mode,
3248 memory_address (save_mode,
3249 plus_constant (argblock,
3250 argvec[argnum].offset.constant)));
3251 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3252 emit_move_insn (argvec[argnum].save_area, stack_area);
3253 }
3254 #endif
3255 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3256 argblock, GEN_INT (argvec[argnum].offset.constant),
3257 reg_parm_stack_space);
3258
3259 #ifdef ACCUMULATE_OUTGOING_ARGS
3260 /* Now mark the segment we just used. */
3261 for (i = lower_bound; i < upper_bound; i++)
3262 stack_usage_map[i] = 1;
3263 #endif
3264
3265 NO_DEFER_POP;
3266 }
3267 }
3268
3269 #ifndef PUSH_ARGS_REVERSED
3270 #ifdef PREFERRED_STACK_BOUNDARY
3271 /* If we pushed args in forward order, perform stack alignment
3272 after pushing the last arg. */
3273 if (argblock == 0)
3274 anti_adjust_stack (GEN_INT (args_size.constant
3275 - original_args_size.constant));
3276 #endif
3277 #endif
3278
3279 #ifdef PUSH_ARGS_REVERSED
3280 argnum = nargs - 1;
3281 #else
3282 argnum = 0;
3283 #endif
3284
3285 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3286
3287 /* Now load any reg parms into their regs. */
3288
3289 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3290 are to be pushed. */
3291 for (count = 0; count < nargs; count++, argnum += inc)
3292 {
3293 register rtx val = argvec[argnum].value;
3294 rtx reg = argvec[argnum].reg;
3295 int partial = argvec[argnum].partial;
3296
3297 if (reg != 0 && partial == 0)
3298 emit_move_insn (reg, val);
3299 NO_DEFER_POP;
3300 }
3301
3302 #if 0
3303 /* For version 1.37, try deleting this entirely. */
3304 if (! no_queue)
3305 emit_queue ();
3306 #endif
3307
3308 /* Any regs containing parms remain in use through the call. */
3309 for (count = 0; count < nargs; count++)
3310 if (argvec[count].reg != 0)
3311 use_reg (&call_fusage, argvec[count].reg);
3312
3313 /* Pass the function the address in which to return a structure value. */
3314 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3315 {
3316 emit_move_insn (struct_value_rtx,
3317 force_reg (Pmode,
3318 force_operand (XEXP (mem_value, 0),
3319 NULL_RTX)));
3320 if (GET_CODE (struct_value_rtx) == REG)
3321 use_reg (&call_fusage, struct_value_rtx);
3322 }
3323
3324 /* Don't allow popping to be deferred, since then
3325 cse'ing of library calls could delete a call and leave the pop. */
3326 NO_DEFER_POP;
3327
3328 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3329 will set inhibit_defer_pop to that value. */
3330 /* See the comment in emit_library_call about the function type we build
3331 and pass here. */
3332
3333 emit_call_1 (fun,
3334 get_identifier (XSTR (orgfun, 0)),
3335 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3336 args_size.constant, struct_value_size,
3337 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3338 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3339 old_inhibit_defer_pop + 1, call_fusage, is_const);
3340
3341 /* Now restore inhibit_defer_pop to its actual original value. */
3342 OK_DEFER_POP;
3343
3344 pop_temp_slots ();
3345
3346 /* Copy the value to the right place. */
3347 if (outmode != VOIDmode)
3348 {
3349 if (mem_value)
3350 {
3351 if (value == 0)
3352 value = mem_value;
3353 if (value != mem_value)
3354 emit_move_insn (value, mem_value);
3355 }
3356 else if (value != 0)
3357 emit_move_insn (value, hard_libcall_value (outmode));
3358 else
3359 value = hard_libcall_value (outmode);
3360 }
3361
3362 #ifdef ACCUMULATE_OUTGOING_ARGS
3363 #ifdef REG_PARM_STACK_SPACE
3364 if (save_area)
3365 {
3366 enum machine_mode save_mode = GET_MODE (save_area);
3367 #ifdef ARGS_GROW_DOWNWARD
3368 rtx stack_area
3369 = gen_rtx_MEM (save_mode,
3370 memory_address (save_mode,
3371 plus_constant (argblock,
3372 - high_to_save)));
3373 #else
3374 rtx stack_area
3375 = gen_rtx_MEM (save_mode,
3376 memory_address (save_mode,
3377 plus_constant (argblock, low_to_save)));
3378 #endif
3379 if (save_mode != BLKmode)
3380 emit_move_insn (stack_area, save_area);
3381 else
3382 emit_block_move (stack_area, validize_mem (save_area),
3383 GEN_INT (high_to_save - low_to_save + 1),
3384 PARM_BOUNDARY / BITS_PER_UNIT);
3385 }
3386 #endif
3387
3388 /* If we saved any argument areas, restore them. */
3389 for (count = 0; count < nargs; count++)
3390 if (argvec[count].save_area)
3391 {
3392 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3393 rtx stack_area
3394 = gen_rtx_MEM (save_mode,
3395 memory_address (save_mode, plus_constant (argblock,
3396 argvec[count].offset.constant)));
3397
3398 emit_move_insn (stack_area, argvec[count].save_area);
3399 }
3400
3401 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3402 stack_usage_map = initial_stack_usage_map;
3403 #endif
3404
3405 return value;
3406 }
3407 \f
#if 0
/* NOTE(review): this entire function is compiled out by the enclosing
   `#if 0' and is kept only for reference; it has no effect on the build.  */

/* Return an rtx which represents a suitable home on the stack
   given TYPE, the type of the argument looking for a home.
   This is called only for BLKmode arguments.

   SIZE is the size needed for this target.
   ARGS_ADDR is the address of the bottom of the argument block for this call.
   OFFSET describes this parameter's offset into ARGS_ADDR.  It is meaningless
   if this machine uses push insns.  */

static rtx
target_for_arg (type, size, args_addr, offset)
     tree type;
     rtx size;
     rtx args_addr;
     struct args_size offset;
{
  rtx target;
  /* Constant part (and, if present, variable part) of OFFSET as an rtx.  */
  rtx offset_rtx = ARGS_SIZE_RTX (offset);

  /* We do not call memory_address if possible,
     because we want to address as close to the stack
     as possible.  For non-variable sized arguments,
     this will be stack-pointer relative addressing.  */
  if (GET_CODE (offset_rtx) == CONST_INT)
    target = plus_constant (args_addr, INTVAL (offset_rtx));
  else
    {
      /* I have no idea how to guarantee that this
	 will work in the presence of register parameters.  */
      target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
      target = memory_address (QImode, target);
    }

  return gen_rtx_MEM (BLKmode, target);
}
#endif
3445 \f
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
   so must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the size of the fixed area of the stack that
   holds arguments passed in registers; stack slots below that offset
   have already been saved by our caller and are not touched here.  */

static void
store_one_arg (arg, argblock, may_be_alloca, variable_size,
	       reg_parm_stack_space)
     struct arg_data *arg;
     rtx argblock;
     int may_be_alloca;
     int variable_size;
     int reg_parm_stack_space;
{
  register tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
#ifdef ACCUMULATE_OUTGOING_ARGS
  /* [LOWER_BOUND, UPPER_BOUND) is the range of stack_usage_map indices
     occupied by this argument's stack slot.  Set in the save-area code
     below and reused at the end of the function to mark the slot used;
     both places are guarded by the same condition, so the values are
     initialized whenever they are read.  */
  int i, lower_bound, upper_bound;
#endif

  if (TREE_CODE (pval) == ERROR_MARK)
    return;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

#ifdef ACCUMULATE_OUTGOING_ARGS
  /* If this is being stored into a pre-allocated, fixed-size, stack area,
     save any previous data at that location.  */
  if (argblock && ! variable_size && arg->stack)
    {
#ifdef ARGS_GROW_DOWNWARD
      /* stack_slot is negative, but we want to index stack_usage_map
	 with positive values.  */
      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
      else
	upper_bound = 0;

      lower_bound = upper_bound - arg->size.constant;
#else
      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
      else
	lower_bound = 0;

      upper_bound = lower_bound + arg->size.constant;
#endif

      /* Scan the slot for any byte that an earlier argument already
	 wrote; if one is found we must preserve the old contents.  */
      for (i = lower_bound; i < upper_bound; i++)
	if (stack_usage_map[i]
	    /* Don't store things in the fixed argument area at this point;
	       it has already been saved.  */
	    && i > reg_parm_stack_space)
	  break;

      if (i != upper_bound)
	{
	  /* We need to make a save area.  See what mode we can make it.  */
	  enum machine_mode save_mode
	    = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
	  rtx stack_area
	    = gen_rtx_MEM (save_mode,
			   memory_address (save_mode,
					   XEXP (arg->stack_slot, 0)));

	  if (save_mode == BLKmode)
	    {
	      /* No integer mode is wide enough; save through a stack
		 temporary via a block move instead.  */
	      arg->save_area = assign_stack_temp (BLKmode,
						  arg->size.constant, 0);
	      MEM_IN_STRUCT_P (arg->save_area)
		= AGGREGATE_TYPE_P (TREE_TYPE (arg->tree_value));
	      preserve_temp_slots (arg->save_area);
	      emit_block_move (validize_mem (arg->save_area), stack_area,
			       GEN_INT (arg->size.constant),
			       PARM_BOUNDARY / BITS_PER_UNIT);
	    }
	  else
	    {
	      arg->save_area = gen_reg_rtx (save_mode);
	      emit_move_insn (arg->save_area, stack_area);
	    }
	}
    }
#endif

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    reg = arg->reg, partial = arg->partial;

  if (reg != 0 && partial == 0)
    /* Being passed entirely in a register.  We shouldn't be called in
       this case.   */
    abort ();

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
#ifdef ACCUMULATE_OUTGOING_ARGS
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == 0.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == 1.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;
#endif
      /* Evaluate directly into the stack slot only when the value will
	 go entirely on the stack and no mode conversion is needed.  */
      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, 0);

      /* If we are promoting object (or for any other reason) the mode
	 doesn't agree, convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

#ifdef ACCUMULATE_OUTGOING_ARGS
      if (arg->pass_on_stack)
	stack_arg_under_construction--;
#endif
    }

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (may_be_alloca)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    {
      /* If the value is already in the stack slot, we are done moving
	 data.  */
      if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
	{
	  /* Tell the memory-checking runtime that this slot is now
	     readable/writable (-fcheck-memory-usage support).  */
	  emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
			     XEXP (arg->stack, 0), ptr_mode,
			     ARGS_SIZE_RTX (arg->size),
			     TYPE_MODE (sizetype),
			     GEN_INT (MEMORY_USE_RW),
			     TYPE_MODE (integer_type_node));
	}
    }
  else if (arg->mode != BLKmode)
    {
      register int size;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
		 / (PARM_BOUNDARY / BITS_PER_UNIT))
		* (PARM_BOUNDARY / BITS_PER_UNIT));

      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
		      partial, reg, used - size, argblock,
		      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      register int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because
	     emit_push_insn for BLKmode is careful to avoid it.  */
	  excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
		    + partial * UNITS_PER_WORD);
	  size_rtx = expr_size (pval);
	}

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
		      TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
		      reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
		      reg_parm_stack_space);
    }


  /* Unless this is a partially-in-register argument, the argument is now
     in the stack.

     ??? Note that this can change arg->value from arg->stack to
     arg->stack_slot and it matters when they are not the same.
     It isn't totally clear that this is correct in all cases.  */
  if (partial == 0)
    arg->value = arg->stack_slot;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* ANSI doesn't require a sequence point here,
     but PCC has one, so this will avoid some problems.  */
  emit_queue ();

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

#ifdef ACCUMULATE_OUTGOING_ARGS
  /* Now mark the segment we just used.  Same guard as the save-area code
     above, so LOWER_BOUND/UPPER_BOUND are initialized when read.  */
  if (argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;
#endif
}