[gcc.git] / gcc / calls.c
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "optabs.h"
31 #include "libfuncs.h"
32 #include "function.h"
33 #include "regs.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "tm_p.h"
37 #include "timevar.h"
38 #include "sbitmap.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "cgraph.h"
42 #include "except.h"
43
44 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
45 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
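/* For instance, with PREFERRED_STACK_BOUNDARY of 128 bits and BITS_PER_UNIT
   of 8 (illustrative values only, not a statement about any particular
   port), STACK_BYTES works out to 128 / 8 = 16 bytes. */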
46
47 /* Data structure and subroutines used within expand_call. */
48
49 struct arg_data
50 {
51 /* Tree node for this argument. */
52 tree tree_value;
53 /* Mode for value; TYPE_MODE unless promoted. */
54 enum machine_mode mode;
55 /* Current RTL value for argument, or 0 if it isn't precomputed. */
56 rtx value;
58 /* Initially-computed RTL value for argument; only for const functions. */
58 rtx initial_value;
59 /* Register to pass this argument in, 0 if passed on stack, or a
60 PARALLEL if the arg is to be copied into multiple non-contiguous
61 registers. */
62 rtx reg;
63 /* Register to pass this argument in when generating tail call sequence.
64 This is not the same register as for normal calls on machines with
65 register windows. */
66 rtx tail_call_reg;
67 /* If REG was promoted from the actual mode of the argument expression,
68 indicates whether the promotion is sign- or zero-extended. */
69 int unsignedp;
70 /* Number of registers to use. 0 means put the whole arg in registers.
71 Also 0 if not passed in registers. */
72 int partial;
73 /* Nonzero if argument must be passed on stack.
74 Note that some arguments may be passed on the stack
75 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
76 pass_on_stack identifies arguments that *cannot* go in registers. */
77 int pass_on_stack;
78 /* Some fields packaged up for locate_and_pad_parm. */
79 struct locate_and_pad_arg_data locate;
80 /* Location on the stack at which parameter should be stored. The store
81 has already been done if STACK == VALUE. */
82 rtx stack;
83 /* Location on the stack of the start of this argument slot. This can
84 differ from STACK if this arg pads downward. This location is known
85 to be aligned to FUNCTION_ARG_BOUNDARY. */
86 rtx stack_slot;
87 /* Place that this stack area has been saved, if needed. */
88 rtx save_area;
89 /* If an argument's alignment does not permit direct copying into registers,
90 copy in smaller-sized pieces into pseudos. These are stored in a
91 block pointed to by this field. The next field says how many
92 word-sized pseudos we made. */
93 rtx *aligned_regs;
94 int n_aligned_regs;
95 };
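/* As an illustrative sketch only (not taken from the original sources):
   for a plain `int' argument passed entirely in one register with no
   promotion, the interesting fields of its arg_data entry end up roughly
   as follows:

     tree_value     the argument's tree node from the CALL_EXPR's arg list
     mode           SImode, i.e. TYPE_MODE of the argument type
     reg            the hard REG chosen by FUNCTION_ARG
     partial        0, meaning the whole argument goes in registers
     pass_on_stack  0, since nothing forces it onto the stack

   A large structure split between registers and the stack would instead
   have REG nonzero, PARTIAL equal to the number of registers actually
   used, and the stack part described by the `locate' field. */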
96
97 /* A vector of one char per byte of stack space. A byte is nonzero if
98 the corresponding stack location has been used.
99 This vector is used to prevent a function call within an argument from
100 clobbering any stack already set up. */
101 static char *stack_usage_map;
102
103 /* Size of STACK_USAGE_MAP. */
104 static int highest_outgoing_arg_in_use;
105
106 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
107 stack location's tail call argument has already been stored into the stack.
108 This bitmap is used to prevent sibling call optimization if the function tries
109 to use its parent's incoming argument slots when they have already been
110 overwritten with tail call arguments. */
111 static sbitmap stored_args_map;
112
113 /* stack_arg_under_construction is nonzero when an argument may be
114 initialized with a constructor call (including a C function that
115 returns a BLKmode struct) and expand_call must take special action
116 to make sure the object being constructed does not overlap the
117 argument list for the constructor call. */
118 int stack_arg_under_construction;
119
120 static int calls_function (tree, int);
121 static int calls_function_1 (tree, int);
122
123 static void emit_call_1 (rtx, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
124 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
125 CUMULATIVE_ARGS *);
126 static void precompute_register_parameters (int, struct arg_data *, int *);
127 static int store_one_arg (struct arg_data *, rtx, int, int, int);
128 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
129 static int finalize_must_preallocate (int, int, struct arg_data *,
130 struct args_size *);
131 static void precompute_arguments (int, int, struct arg_data *);
132 static int compute_argument_block_size (int, struct args_size *, int);
133 static void initialize_argument_information (int, struct arg_data *,
134 struct args_size *, int, tree,
135 tree, CUMULATIVE_ARGS *, int,
136 rtx *, int *, int *, int *,
137 bool);
138 static void compute_argument_addresses (struct arg_data *, rtx, int);
139 static rtx rtx_for_function_call (tree, tree);
140 static void load_register_parameters (struct arg_data *, int, rtx *, int,
141 int, int *);
142 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
143 enum machine_mode, int, va_list);
144 static int special_function_p (tree, int);
145 static rtx try_to_integrate (tree, tree, rtx, int, tree, rtx);
146 static int check_sibcall_argument_overlap_1 (rtx);
147 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
148
149 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
150 int);
151 static tree fix_unsafe_tree (tree);
152 static bool shift_returned_value (tree, rtx *);
153
154 #ifdef REG_PARM_STACK_SPACE
155 static rtx save_fixed_argument_area (int, rtx, int *, int *);
156 static void restore_fixed_argument_area (rtx, rtx, int, int);
157 #endif
158 \f
159 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
160 `alloca'.
161
162 If WHICH is 0, return 1 if EXP contains a call to any function.
163 Actually, we only need to return 1 if evaluating EXP would require pushing
164 arguments on the stack, but that is too difficult to compute, so we just
165 assume any function call might require the stack. */
166
167 static tree calls_function_save_exprs;
168
169 static int
170 calls_function (tree exp, int which)
171 {
172 int val;
173
174 calls_function_save_exprs = 0;
175 val = calls_function_1 (exp, which);
176 calls_function_save_exprs = 0;
177 return val;
178 }
179
180 /* Recursive function to do the work of the above function. */
181
182 static int
183 calls_function_1 (tree exp, int which)
184 {
185 int i;
186 enum tree_code code = TREE_CODE (exp);
187 int class = TREE_CODE_CLASS (code);
188 int length = first_rtl_op (code);
189
190 /* If this code is language-specific, we don't know what it will do. */
191 if ((int) code >= NUM_TREE_CODES)
192 return 1;
193
194 switch (code)
195 {
196 case CALL_EXPR:
197 if (which == 0)
198 return 1;
199 else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
200 == FUNCTION_TYPE)
201 && (TYPE_RETURNS_STACK_DEPRESSED
202 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
203 return 1;
204 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
205 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
206 == FUNCTION_DECL)
207 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
208 0)
209 & ECF_MAY_BE_ALLOCA))
210 return 1;
211
212 break;
213
214 case CONSTRUCTOR:
215 {
216 tree tem;
217
218 for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
219 if (calls_function_1 (TREE_VALUE (tem), which))
220 return 1;
221 }
222
223 return 0;
224
225 case SAVE_EXPR:
226 if (SAVE_EXPR_RTL (exp) != 0)
227 return 0;
228 if (value_member (exp, calls_function_save_exprs))
229 return 0;
230 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
231 calls_function_save_exprs);
232 return (TREE_OPERAND (exp, 0) != 0
233 && calls_function_1 (TREE_OPERAND (exp, 0), which));
234
235 case BLOCK:
236 {
237 tree local;
238 tree subblock;
239
240 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
241 if (DECL_INITIAL (local) != 0
242 && calls_function_1 (DECL_INITIAL (local), which))
243 return 1;
244
245 for (subblock = BLOCK_SUBBLOCKS (exp);
246 subblock;
247 subblock = TREE_CHAIN (subblock))
248 if (calls_function_1 (subblock, which))
249 return 1;
250 }
251 return 0;
252
253 case TREE_LIST:
254 for (; exp != 0; exp = TREE_CHAIN (exp))
255 if (calls_function_1 (TREE_VALUE (exp), which))
256 return 1;
257 return 0;
258
259 default:
260 break;
261 }
262
263 /* Only expressions and blocks can contain calls. */
264 if (! IS_EXPR_CODE_CLASS (class) && class != 'b')
265 return 0;
266
267 for (i = 0; i < length; i++)
268 if (TREE_OPERAND (exp, i) != 0
269 && calls_function_1 (TREE_OPERAND (exp, i), which))
270 return 1;
271
272 return 0;
273 }
274 \f
275 /* Force FUNEXP into a form suitable for the address of a CALL,
276 and return that as an rtx. Also load the static chain register
277 if FNDECL is a nested function.
278
279 CALL_FUSAGE points to a variable holding the prospective
280 CALL_INSN_FUNCTION_USAGE information. */
281
282 rtx
283 prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
284 int reg_parm_seen, int sibcallp)
285 {
286 rtx static_chain_value = 0;
287
288 funexp = protect_from_queue (funexp, 0);
289
290 if (fndecl != 0)
291 /* Get possible static chain value for nested function in C. */
292 static_chain_value = lookup_static_chain (fndecl);
293
294 /* Make a valid memory address and copy constants through pseudo-regs,
295 but not for a constant address if -fno-function-cse. */
296 if (GET_CODE (funexp) != SYMBOL_REF)
297 /* If we are using registers for parameters, force the
298 function address into a register now. */
299 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
300 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
301 : memory_address (FUNCTION_MODE, funexp));
302 else if (! sibcallp)
303 {
304 #ifndef NO_FUNCTION_CSE
305 if (optimize && ! flag_no_function_cse)
306 #ifdef NO_RECURSIVE_FUNCTION_CSE
307 if (fndecl != current_function_decl)
308 #endif
309 funexp = force_reg (Pmode, funexp);
310 #endif
311 }
312
313 if (static_chain_value != 0)
314 {
315 emit_move_insn (static_chain_rtx, static_chain_value);
316
317 if (GET_CODE (static_chain_rtx) == REG)
318 use_reg (call_fusage, static_chain_rtx);
319 }
320
321 return funexp;
322 }
323
324 /* Generate instructions to call function FUNEXP,
325 and optionally pop the results.
326 The CALL_INSN is the first insn generated.
327
328 FNDECL is the declaration node of the function. This is given to the
329 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
330
331 FUNTYPE is the data type of the function. This is given to the macro
332 RETURN_POPS_ARGS to determine whether this function pops its own args.
333 We used to allow an identifier for library functions, but that doesn't
334 work when the return type is an aggregate type and the calling convention
335 says that the pointer to this aggregate is to be popped by the callee.
336
337 STACK_SIZE is the number of bytes of arguments on the stack,
338 ROUNDED_STACK_SIZE is that number rounded up to
339 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
340 both to put into the call insn and to generate explicit popping
341 code if necessary.
342
343 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
344 It is zero if this call doesn't want a structure value.
345
346 NEXT_ARG_REG is the rtx that results from executing
347 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
348 just after all the args have had their registers assigned.
349 This could be whatever you like, but normally it is the first
350 arg-register beyond those used for args in this call,
351 or 0 if all the arg-registers are used in this call.
352 It is passed on to `gen_call' so you can put this info in the call insn.
353
354 VALREG is a hard register in which a value is returned,
355 or 0 if the call does not return a value.
356
357 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
358 the args to this call were processed.
359 We restore `inhibit_defer_pop' to that value.
360
361 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
362 denote registers used by the called function. */
363
364 static void
365 emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UNUSED,
366 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
367 HOST_WIDE_INT rounded_stack_size,
368 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
369 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
370 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
371 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
372 {
373 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
374 rtx call_insn;
375 int already_popped = 0;
376 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
377 #if defined (HAVE_call) && defined (HAVE_call_value)
378 rtx struct_value_size_rtx;
379 struct_value_size_rtx = GEN_INT (struct_value_size);
380 #endif
381
382 #ifdef CALL_POPS_ARGS
383 n_popped += CALL_POPS_ARGS (* args_so_far);
384 #endif
385
386 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
387 and we don't want to load it into a register as an optimization,
388 because prepare_call_address already did it if it should be done. */
389 if (GET_CODE (funexp) != SYMBOL_REF)
390 funexp = memory_address (FUNCTION_MODE, funexp);
391
392 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
393 if ((ecf_flags & ECF_SIBCALL)
394 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
395 && (n_popped > 0 || stack_size == 0))
396 {
397 rtx n_pop = GEN_INT (n_popped);
398 rtx pat;
399
400 /* If this subroutine pops its own args, record that in the call insn
401 if possible, for the sake of frame pointer elimination. */
402
403 if (valreg)
404 pat = GEN_SIBCALL_VALUE_POP (valreg,
405 gen_rtx_MEM (FUNCTION_MODE, funexp),
406 rounded_stack_size_rtx, next_arg_reg,
407 n_pop);
408 else
409 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
410 rounded_stack_size_rtx, next_arg_reg, n_pop);
411
412 emit_call_insn (pat);
413 already_popped = 1;
414 }
415 else
416 #endif
417
418 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
419 /* If the target has "call" or "call_value" insns, then prefer them
420 if no arguments are actually popped. If the target does not have
421 "call" or "call_value" insns, then we must use the popping versions
422 even if the call has no arguments to pop. */
423 #if defined (HAVE_call) && defined (HAVE_call_value)
424 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
425 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
426 #else
427 if (HAVE_call_pop && HAVE_call_value_pop)
428 #endif
429 {
430 rtx n_pop = GEN_INT (n_popped);
431 rtx pat;
432
433 /* If this subroutine pops its own args, record that in the call insn
434 if possible, for the sake of frame pointer elimination. */
435
436 if (valreg)
437 pat = GEN_CALL_VALUE_POP (valreg,
438 gen_rtx_MEM (FUNCTION_MODE, funexp),
439 rounded_stack_size_rtx, next_arg_reg, n_pop);
440 else
441 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
442 rounded_stack_size_rtx, next_arg_reg, n_pop);
443
444 emit_call_insn (pat);
445 already_popped = 1;
446 }
447 else
448 #endif
449
450 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
451 if ((ecf_flags & ECF_SIBCALL)
452 && HAVE_sibcall && HAVE_sibcall_value)
453 {
454 if (valreg)
455 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
456 gen_rtx_MEM (FUNCTION_MODE, funexp),
457 rounded_stack_size_rtx,
458 next_arg_reg, NULL_RTX));
459 else
460 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
461 rounded_stack_size_rtx, next_arg_reg,
462 struct_value_size_rtx));
463 }
464 else
465 #endif
466
467 #if defined (HAVE_call) && defined (HAVE_call_value)
468 if (HAVE_call && HAVE_call_value)
469 {
470 if (valreg)
471 emit_call_insn (GEN_CALL_VALUE (valreg,
472 gen_rtx_MEM (FUNCTION_MODE, funexp),
473 rounded_stack_size_rtx, next_arg_reg,
474 NULL_RTX));
475 else
476 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
477 rounded_stack_size_rtx, next_arg_reg,
478 struct_value_size_rtx));
479 }
480 else
481 #endif
482 abort ();
483
484 /* Find the call we just emitted. */
485 call_insn = last_call_insn ();
486
487 /* Mark memory as used for "pure" function call. */
488 if (ecf_flags & ECF_PURE)
489 call_fusage
490 = gen_rtx_EXPR_LIST
491 (VOIDmode,
492 gen_rtx_USE (VOIDmode,
493 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
494 call_fusage);
495
496 /* Put the register usage information there. */
497 add_function_usage_to (call_insn, call_fusage);
498
499 /* If this is a const call, then set the insn's unchanging bit. */
500 if (ecf_flags & (ECF_CONST | ECF_PURE))
501 CONST_OR_PURE_CALL_P (call_insn) = 1;
502
503 /* If this call can't throw, attach a REG_EH_REGION reg note to that
504 effect. */
505 if (ecf_flags & ECF_NOTHROW)
506 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
507 REG_NOTES (call_insn));
508 else
509 note_eh_region_may_contain_throw ();
510
511 if (ecf_flags & ECF_NORETURN)
512 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
513 REG_NOTES (call_insn));
514 if (ecf_flags & ECF_ALWAYS_RETURN)
515 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
516 REG_NOTES (call_insn));
517
518 if (ecf_flags & ECF_RETURNS_TWICE)
519 {
520 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
521 REG_NOTES (call_insn));
522 current_function_calls_setjmp = 1;
523 }
524
525 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
526
527 /* Restore this now, so that we do defer pops for this call's args
528 if the context of the call as a whole permits. */
529 inhibit_defer_pop = old_inhibit_defer_pop;
530
531 if (n_popped > 0)
532 {
533 if (!already_popped)
534 CALL_INSN_FUNCTION_USAGE (call_insn)
535 = gen_rtx_EXPR_LIST (VOIDmode,
536 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
537 CALL_INSN_FUNCTION_USAGE (call_insn));
538 rounded_stack_size -= n_popped;
539 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
540 stack_pointer_delta -= n_popped;
541 }
542
543 if (!ACCUMULATE_OUTGOING_ARGS)
544 {
545 /* If returning from the subroutine does not automatically pop the args,
546 we need an instruction to pop them sooner or later.
547 Perhaps do it now; perhaps just record how much space to pop later.
548
549 If returning from the subroutine does pop the args, indicate that the
550 stack pointer will be changed. */
551
552 if (rounded_stack_size != 0)
553 {
554 if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN | ECF_LONGJMP))
555 /* Just pretend we did the pop. */
556 stack_pointer_delta -= rounded_stack_size;
557 else if (flag_defer_pop && inhibit_defer_pop == 0
558 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
559 pending_stack_adjust += rounded_stack_size;
560 else
561 adjust_stack (rounded_stack_size_rtx);
562 }
563 }
564 /* When we accumulate outgoing args, we must avoid any stack manipulations.
565 Restore the stack pointer to its original value now. Usually
566 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
567 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
568 popping variants of functions exist as well.
569
570 ??? We may optimize similar to defer_pop above, but it is
571 probably not worthwhile.
572
573 ??? It will be worthwhile to enable combine_stack_adjustments even for
574 such machines. */
575 else if (n_popped)
576 anti_adjust_stack (GEN_INT (n_popped));
577 }
578
579 /* Determine if the function identified by FNDECL is one with
580 special properties we wish to know about.
581
582 For example, if the function might return more than one time (setjmp), then
583 set ECF_RETURNS_TWICE in the returned flags.
584
585 Similarly set ECF_LONGJMP if the function is in the longjmp family.
586
587 Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
588 space from the stack, such as alloca. */
589
590 static int
591 special_function_p (tree fndecl, int flags)
592 {
593 if (! (flags & ECF_MALLOC)
594 && fndecl && DECL_NAME (fndecl)
595 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
596 /* Exclude functions not at the file scope, or not `extern',
597 since they are not the magic functions we would otherwise
598 think they are.
599 FIXME: this should be handled with attributes, not with this
600 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
601 because you can declare fork() inside a function if you
602 wish. */
603 && (DECL_CONTEXT (fndecl) == NULL_TREE
604 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
605 && TREE_PUBLIC (fndecl))
606 {
607 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
608 const char *tname = name;
609
610 /* We assume that alloca will always be called by name. It
611 makes no sense to pass it as a pointer-to-function to
612 anything that does not understand its behavior. */
613 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
614 && name[0] == 'a'
615 && ! strcmp (name, "alloca"))
616 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
617 && name[0] == '_'
618 && ! strcmp (name, "__builtin_alloca"))))
619 flags |= ECF_MAY_BE_ALLOCA;
620
621 /* Disregard prefix _, __ or __x. */
622 if (name[0] == '_')
623 {
624 if (name[1] == '_' && name[2] == 'x')
625 tname += 3;
626 else if (name[1] == '_')
627 tname += 2;
628 else
629 tname += 1;
630 }
631
632 if (tname[0] == 's')
633 {
634 if ((tname[1] == 'e'
635 && (! strcmp (tname, "setjmp")
636 || ! strcmp (tname, "setjmp_syscall")))
637 || (tname[1] == 'i'
638 && ! strcmp (tname, "sigsetjmp"))
639 || (tname[1] == 'a'
640 && ! strcmp (tname, "savectx")))
641 flags |= ECF_RETURNS_TWICE;
642
643 if (tname[1] == 'i'
644 && ! strcmp (tname, "siglongjmp"))
645 flags |= ECF_LONGJMP;
646 }
647 else if ((tname[0] == 'q' && tname[1] == 's'
648 && ! strcmp (tname, "qsetjmp"))
649 || (tname[0] == 'v' && tname[1] == 'f'
650 && ! strcmp (tname, "vfork")))
651 flags |= ECF_RETURNS_TWICE;
652
653 else if (tname[0] == 'l' && tname[1] == 'o'
654 && ! strcmp (tname, "longjmp"))
655 flags |= ECF_LONGJMP;
656
657 else if ((tname[0] == 'f' && tname[1] == 'o'
658 && ! strcmp (tname, "fork"))
659 /* Linux specific: __clone. Check NAME to insist on the
660 leading underscores, to avoid polluting the ISO / POSIX
661 namespace. */
662 || (name[0] == '_' && name[1] == '_'
663 && ! strcmp (tname, "clone"))
664 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
665 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
666 && (tname[5] == '\0'
667 || ((tname[5] == 'p' || tname[5] == 'e')
668 && tname[6] == '\0'))))
669 flags |= ECF_FORK_OR_EXEC;
670 }
671 return flags;
672 }
673
674 /* Return nonzero when FNDECL represents a call to setjmp. */
675
676 int
677 setjmp_call_p (tree fndecl)
678 {
679 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
680 }
681
682 /* Return true when EXP is an alloca call. */
683 bool
684 alloca_call_p (tree exp)
685 {
686 if (TREE_CODE (exp) == CALL_EXPR
687 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
688 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
689 == FUNCTION_DECL)
690 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
691 0) & ECF_MAY_BE_ALLOCA))
692 return true;
693 return false;
694 }
695
696 /* Detect flags (function attributes) from the function decl or type node. */
697
698 int
699 flags_from_decl_or_type (tree exp)
700 {
701 int flags = 0;
702 tree type = exp;
703
704 if (DECL_P (exp))
705 {
706 struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
707 type = TREE_TYPE (exp);
708
709 if (i)
710 {
711 if (i->pure_function)
712 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
713 if (i->const_function)
714 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
715 }
716
717 /* The function exp may have the `malloc' attribute. */
718 if (DECL_IS_MALLOC (exp))
719 flags |= ECF_MALLOC;
720
721 /* The function exp may have the `pure' attribute. */
722 if (DECL_IS_PURE (exp))
723 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
724
725 if (TREE_NOTHROW (exp))
726 flags |= ECF_NOTHROW;
727
728 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
729 flags |= ECF_LIBCALL_BLOCK;
730 }
731
732 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
733 flags |= ECF_CONST;
734
735 if (TREE_THIS_VOLATILE (exp))
736 flags |= ECF_NORETURN;
737
738 /* Mark if the function returns with the stack pointer depressed. We
739 cannot consider it pure or constant in that case. */
740 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
741 {
742 flags |= ECF_SP_DEPRESSED;
743 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
744 }
745
746 return flags;
747 }
748
749 /* Detect flags from a CALL_EXPR. */
750
751 int
752 call_expr_flags (tree t)
753 {
754 int flags;
755 tree decl = get_callee_fndecl (t);
756
757 if (decl)
758 flags = flags_from_decl_or_type (decl);
759 else
760 {
761 t = TREE_TYPE (TREE_OPERAND (t, 0));
762 if (t && TREE_CODE (t) == POINTER_TYPE)
763 flags = flags_from_decl_or_type (TREE_TYPE (t));
764 else
765 flags = 0;
766 }
767
768 return flags;
769 }
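/* The following fragment is purely illustrative and is not part of the
   original file; it sketches how a caller might use call_expr_flags to ask
   whether a given CALL_EXPR is known not to throw. */
#if 0
static bool
example_call_nothrow_p (tree call_exp)
{
  /* call_expr_flags works for both direct and indirect calls; ECF_NOTHROW
     is set when the callee (or its type) is marked as non-throwing.  */
  return (call_expr_flags (call_exp) & ECF_NOTHROW) != 0;
}
#endif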
770
771 /* Precompute all register parameters as described by ARGS, storing values
772 into fields within the ARGS array.
773
774 NUM_ACTUALS indicates the total number of elements in the ARGS array.
775
776 Set REG_PARM_SEEN if we encounter a register parameter. */
777
778 static void
779 precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
780 {
781 int i;
782
783 *reg_parm_seen = 0;
784
785 for (i = 0; i < num_actuals; i++)
786 if (args[i].reg != 0 && ! args[i].pass_on_stack)
787 {
788 *reg_parm_seen = 1;
789
790 if (args[i].value == 0)
791 {
792 push_temp_slots ();
793 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
794 VOIDmode, 0);
795 preserve_temp_slots (args[i].value);
796 pop_temp_slots ();
797
798 /* ANSI doesn't require a sequence point here,
799 but PCC has one, so this will avoid some problems. */
800 emit_queue ();
801 }
802
803 /* If the value is a non-legitimate constant, force it into a
804 pseudo now. TLS symbols sometimes need a call to resolve. */
805 if (CONSTANT_P (args[i].value)
806 && !LEGITIMATE_CONSTANT_P (args[i].value))
807 args[i].value = force_reg (args[i].mode, args[i].value);
808
809 /* If we are to promote the function arg to a wider mode,
810 do it now. */
811
812 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
813 args[i].value
814 = convert_modes (args[i].mode,
815 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
816 args[i].value, args[i].unsignedp);
817
818 /* If the value is expensive, and we are inside an appropriately
819 short loop, put the value into a pseudo and then put the pseudo
820 into the hard reg.
821
822 For small register classes, also do this if this call uses
823 register parameters. This is to avoid reload conflicts while
824 loading the parameter registers. */
825
826 if ((! (GET_CODE (args[i].value) == REG
827 || (GET_CODE (args[i].value) == SUBREG
828 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
829 && args[i].mode != BLKmode
830 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
831 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
832 || preserve_subexpressions_p ()))
833 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
834 }
835 }
836
837 #ifdef REG_PARM_STACK_SPACE
838
839 /* The argument list is the property of the called routine and it
840 may clobber it. If the fixed area has been used for previous
841 parameters, we must save and restore it. */
842
843 static rtx
844 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
845 {
846 int low;
847 int high;
848
849 /* Compute the boundary of the area that needs to be saved, if any. */
850 high = reg_parm_stack_space;
851 #ifdef ARGS_GROW_DOWNWARD
852 high += 1;
853 #endif
854 if (high > highest_outgoing_arg_in_use)
855 high = highest_outgoing_arg_in_use;
856
857 for (low = 0; low < high; low++)
858 if (stack_usage_map[low] != 0)
859 {
860 int num_to_save;
861 enum machine_mode save_mode;
862 int delta;
863 rtx stack_area;
864 rtx save_area;
865
866 while (stack_usage_map[--high] == 0)
867 ;
868
869 *low_to_save = low;
870 *high_to_save = high;
871
872 num_to_save = high - low + 1;
873 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
874
875 /* If we don't have the required alignment, must do this
876 in BLKmode. */
877 if ((low & (MIN (GET_MODE_SIZE (save_mode),
878 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
879 save_mode = BLKmode;
880
881 #ifdef ARGS_GROW_DOWNWARD
882 delta = -high;
883 #else
884 delta = low;
885 #endif
886 stack_area = gen_rtx_MEM (save_mode,
887 memory_address (save_mode,
888 plus_constant (argblock,
889 delta)));
890
891 set_mem_align (stack_area, PARM_BOUNDARY);
892 if (save_mode == BLKmode)
893 {
894 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
895 emit_block_move (validize_mem (save_area), stack_area,
896 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
897 }
898 else
899 {
900 save_area = gen_reg_rtx (save_mode);
901 emit_move_insn (save_area, stack_area);
902 }
903
904 return save_area;
905 }
906
907 return NULL_RTX;
908 }
909
910 static void
911 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
912 {
913 enum machine_mode save_mode = GET_MODE (save_area);
914 int delta;
915 rtx stack_area;
916
917 #ifdef ARGS_GROW_DOWNWARD
918 delta = -high_to_save;
919 #else
920 delta = low_to_save;
921 #endif
922 stack_area = gen_rtx_MEM (save_mode,
923 memory_address (save_mode,
924 plus_constant (argblock, delta)));
925 set_mem_align (stack_area, PARM_BOUNDARY);
926
927 if (save_mode != BLKmode)
928 emit_move_insn (stack_area, save_area);
929 else
930 emit_block_move (stack_area, validize_mem (save_area),
931 GEN_INT (high_to_save - low_to_save + 1),
932 BLOCK_OP_CALL_PARM);
933 }
934 #endif /* REG_PARM_STACK_SPACE */
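/* Sketch of the intended pairing (illustrative; the real callers live in
   the parts of this file handling expand_call and store_one_arg):

     save = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                      &low_to_save, &high_to_save);
     ... emit the nested call that may clobber the fixed area ...
     if (save)
       restore_fixed_argument_area (save, argblock,
                                    high_to_save, low_to_save);

   save_fixed_argument_area returns NULL_RTX when nothing in the fixed
   register-parameter area was in use, in which case no restore is needed. */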
935
936 /* If any elements in ARGS refer to parameters that are to be passed in
937 registers, but not in memory, and whose alignment does not permit a
938 direct copy into registers, copy the values into a group of pseudos
939 which we will later copy into the appropriate hard registers.
940
941 Pseudos for each unaligned argument will be stored into the array
942 args[argnum].aligned_regs. The caller is responsible for deallocating
943 the aligned_regs array if it is nonzero. */
944
945 static void
946 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
947 {
948 int i, j;
949
950 for (i = 0; i < num_actuals; i++)
951 if (args[i].reg != 0 && ! args[i].pass_on_stack
952 && args[i].mode == BLKmode
953 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
954 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
955 {
956 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
957 int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
958 int endian_correction = 0;
959
960 args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
961 args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
962
963 /* Structures smaller than a word are normally aligned to the
964 least significant byte. On a BYTES_BIG_ENDIAN machine,
965 this means we must skip the empty high order bytes when
966 calculating the bit offset. */
967 if (bytes < UNITS_PER_WORD
968 #ifdef BLOCK_REG_PADDING
969 && (BLOCK_REG_PADDING (args[i].mode,
970 TREE_TYPE (args[i].tree_value), 1)
971 == downward)
972 #else
973 && BYTES_BIG_ENDIAN
974 #endif
975 )
976 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
977
978 for (j = 0; j < args[i].n_aligned_regs; j++)
979 {
980 rtx reg = gen_reg_rtx (word_mode);
981 rtx word = operand_subword_force (args[i].value, j, BLKmode);
982 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
983
984 args[i].aligned_regs[j] = reg;
985 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
986 word_mode, word_mode, BITS_PER_WORD);
987
988 /* There is no need to restrict this code to loading items
989 in TYPE_ALIGN sized hunks. The bitfield instructions can
990 load up entire word sized registers efficiently.
991
992 ??? This may not be needed anymore.
993 We used to emit a clobber here, but that doesn't let later
994 passes optimize the instructions we emit. By storing 0 into
995 the register, later passes know that the first AND to zero out the
996 bitfield being set in the register is unnecessary. The store
997 of 0 will be deleted, as will at least the first AND. */
998
999 emit_move_insn (reg, const0_rtx);
1000
1001 bytes -= bitsize / BITS_PER_UNIT;
1002 store_bit_field (reg, bitsize, endian_correction, word_mode,
1003 word, BITS_PER_WORD);
1004 }
1005 }
1006 }
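/* Worked example with assumed numbers (not tied to any particular target):
   a 3-byte BLKmode structure on a 32-bit BYTES_BIG_ENDIAN machine with
   UNITS_PER_WORD == 4 gives nregs == 1, bitsize == 24 and
   endian_correction == 32 - 24 == 8, so the 24 significant bits are stored
   into the pseudo after skipping the 8 empty high-order bits. */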
1007
1008 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1009 ACTPARMS.
1010
1011 NUM_ACTUALS is the total number of parameters.
1012
1013 N_NAMED_ARGS is the total number of named arguments.
1014
1015 FNDECL is the tree code for the target of this call (if known)
1016
1017 ARGS_SO_FAR holds state needed by the target to know where to place
1018 the next argument.
1019
1020 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1021 for arguments which are passed in registers.
1022
1023 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1024 and may be modified by this routine.
1025
1026 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1027 flags which may be modified by this routine.
1028
1029 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1030 the thunked-to function. */
1031
1032 static void
1033 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1034 struct arg_data *args,
1035 struct args_size *args_size,
1036 int n_named_args ATTRIBUTE_UNUSED,
1037 tree actparms, tree fndecl,
1038 CUMULATIVE_ARGS *args_so_far,
1039 int reg_parm_stack_space,
1040 rtx *old_stack_level, int *old_pending_adj,
1041 int *must_preallocate, int *ecf_flags,
1042 bool call_from_thunk_p)
1043 {
1044 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1045 int inc;
1046
1047 /* Count arg position in order args appear. */
1048 int argpos;
1049
1050 int i;
1051 tree p;
1052
1053 args_size->constant = 0;
1054 args_size->var = 0;
1055
1056 /* In this loop, we consider args in the order they are written.
1057 We fill up ARGS from the front or from the back if necessary
1058 so that in any case the first arg to be pushed ends up at the front. */
1059
1060 if (PUSH_ARGS_REVERSED)
1061 {
1062 i = num_actuals - 1, inc = -1;
1063 /* In this case, must reverse order of args
1064 so that we compute and push the last arg first. */
1065 }
1066 else
1067 {
1068 i = 0, inc = 1;
1069 }
1070
1071 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1072 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1073 {
1074 tree type = TREE_TYPE (TREE_VALUE (p));
1075 int unsignedp;
1076 enum machine_mode mode;
1077
1078 args[i].tree_value = TREE_VALUE (p);
1079
1080 /* Replace erroneous argument with constant zero. */
1081 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1082 args[i].tree_value = integer_zero_node, type = integer_type_node;
1083
1084 /* If TYPE is a transparent union, pass things the way we would
1085 pass the first field of the union. We have already verified that
1086 the modes are the same. */
1087 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1088 type = TREE_TYPE (TYPE_FIELDS (type));
1089
1090 /* Decide where to pass this arg.
1091
1092 args[i].reg is nonzero if all or part is passed in registers.
1093
1094 args[i].partial is nonzero if part but not all is passed in registers,
1095 and the exact value says how many words are passed in registers.
1096
1097 args[i].pass_on_stack is nonzero if the argument must at least be
1098 computed on the stack. It may then be loaded back into registers
1099 if args[i].reg is nonzero.
1100
1101 These decisions are driven by the FUNCTION_... macros and must agree
1102 with those made by function.c. */
1103
1104 /* See if this argument should be passed by invisible reference. */
1105 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
1106 || TREE_ADDRESSABLE (type)
1107 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1108 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1109 type, argpos < n_named_args)
1110 #endif
1111 )
1112 {
1113 /* If we're compiling a thunk, pass through invisible
1114 references instead of making a copy. */
1115 if (call_from_thunk_p
1116 #ifdef FUNCTION_ARG_CALLEE_COPIES
1117 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1118 type, argpos < n_named_args)
1119 /* If it's in a register, we must make a copy of it too. */
1120 /* ??? Is this a sufficient test? Is there a better one? */
1121 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1122 && REG_P (DECL_RTL (args[i].tree_value)))
1123 && ! TREE_ADDRESSABLE (type))
1124 #endif
1125 )
1126 {
1127 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1128 new object from the argument. If we are passing by
1129 invisible reference, the callee will do that for us, so we
1130 can strip off the TARGET_EXPR. This is not always safe,
1131 but it is safe in the only case where this is a useful
1132 optimization; namely, when the argument is a plain object.
1133 In that case, the frontend is just asking the backend to
1134 make a bitwise copy of the argument. */
1135
1136 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1137 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1138 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1139 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1140
1141 args[i].tree_value = build1 (ADDR_EXPR,
1142 build_pointer_type (type),
1143 args[i].tree_value);
1144 type = build_pointer_type (type);
1145 }
1146 else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
1147 {
1148 /* In the V3 C++ ABI, parameters are destroyed in the caller.
1149 We implement this by passing the address of the temporary
1150 rather than expanding it into another allocated slot. */
1151 args[i].tree_value = build1 (ADDR_EXPR,
1152 build_pointer_type (type),
1153 args[i].tree_value);
1154 type = build_pointer_type (type);
1155 }
1156 else
1157 {
1158 /* We make a copy of the object and pass the address to the
1159 function being called. */
1160 rtx copy;
1161
1162 if (!COMPLETE_TYPE_P (type)
1163 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1164 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1165 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1166 STACK_CHECK_MAX_VAR_SIZE))))
1167 {
1168 /* This is a variable-sized object. Make space on the stack
1169 for it. */
1170 rtx size_rtx = expr_size (TREE_VALUE (p));
1171
1172 if (*old_stack_level == 0)
1173 {
1174 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1175 *old_pending_adj = pending_stack_adjust;
1176 pending_stack_adjust = 0;
1177 }
1178
1179 copy = gen_rtx_MEM (BLKmode,
1180 allocate_dynamic_stack_space
1181 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1182 set_mem_attributes (copy, type, 1);
1183 }
1184 else
1185 copy = assign_temp (type, 0, 1, 0);
1186
1187 store_expr (args[i].tree_value, copy, 0);
1188 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1189
1190 args[i].tree_value = build1 (ADDR_EXPR,
1191 build_pointer_type (type),
1192 make_tree (type, copy));
1193 type = build_pointer_type (type);
1194 }
1195 }
1196
1197 mode = TYPE_MODE (type);
1198 unsignedp = TREE_UNSIGNED (type);
1199
1200 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1201 mode = promote_mode (type, mode, &unsignedp, 1);
1202
1203 args[i].unsignedp = unsignedp;
1204 args[i].mode = mode;
1205
1206 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1207 argpos < n_named_args);
1208 #ifdef FUNCTION_INCOMING_ARG
1209 /* If this is a sibling call and the machine has register windows, the
1210 register window has to be unwound before calling the routine, so
1211 arguments have to go into the incoming registers. */
1212 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1213 argpos < n_named_args);
1214 #else
1215 args[i].tail_call_reg = args[i].reg;
1216 #endif
1217
1218 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1219 if (args[i].reg)
1220 args[i].partial
1221 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1222 argpos < n_named_args);
1223 #endif
1224
1225 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1226
1227 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1228 it means that we are to pass this arg in the register(s) designated
1229 by the PARALLEL, but also to pass it in the stack. */
1230 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1231 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1232 args[i].pass_on_stack = 1;
1233
1234 /* If this is an addressable type, we must preallocate the stack
1235 since we must evaluate the object into its final location.
1236
1237 If this is to be passed in both registers and the stack, it is simpler
1238 to preallocate. */
1239 if (TREE_ADDRESSABLE (type)
1240 || (args[i].pass_on_stack && args[i].reg != 0))
1241 *must_preallocate = 1;
1242
1243 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1244 we cannot consider this function call constant. */
1245 if (TREE_ADDRESSABLE (type))
1246 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1247
1248 /* Compute the stack-size of this argument. */
1249 if (args[i].reg == 0 || args[i].partial != 0
1250 || reg_parm_stack_space > 0
1251 || args[i].pass_on_stack)
1252 locate_and_pad_parm (mode, type,
1253 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1254 1,
1255 #else
1256 args[i].reg != 0,
1257 #endif
1258 args[i].pass_on_stack ? 0 : args[i].partial,
1259 fndecl, args_size, &args[i].locate);
1260 #ifdef BLOCK_REG_PADDING
1261 else
1262 /* The argument is passed entirely in registers. See at which
1263 end it should be padded. */
1264 args[i].locate.where_pad =
1265 BLOCK_REG_PADDING (mode, type,
1266 int_size_in_bytes (type) <= UNITS_PER_WORD);
1267 #endif
1268
1269 /* Update ARGS_SIZE, the total stack space for args so far. */
1270
1271 args_size->constant += args[i].locate.size.constant;
1272 if (args[i].locate.size.var)
1273 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1274
1275 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1276 have been used, etc. */
1277
1278 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1279 argpos < n_named_args);
1280 }
1281 }
1282
1283 /* Update ARGS_SIZE to contain the total size for the argument block.
1284 Return the original constant component of the argument block's size.
1285
1286 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1287 for arguments passed in registers. */
1288
1289 static int
1290 compute_argument_block_size (int reg_parm_stack_space,
1291 struct args_size *args_size,
1292 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1293 {
1294 int unadjusted_args_size = args_size->constant;
1295
1296 /* For accumulate outgoing args mode we don't need to align, since the frame
1297 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1298 backends from generating misaligned frame sizes. */
1299 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1300 preferred_stack_boundary = STACK_BOUNDARY;
1301
1302 /* Compute the actual size of the argument block required. The variable
1303 and constant sizes must be combined, the size may have to be rounded,
1304 and there may be a minimum required size. */
1305
1306 if (args_size->var)
1307 {
1308 args_size->var = ARGS_SIZE_TREE (*args_size);
1309 args_size->constant = 0;
1310
1311 preferred_stack_boundary /= BITS_PER_UNIT;
1312 if (preferred_stack_boundary > 1)
1313 {
1314 /* We don't handle this case yet. To handle it correctly we have
1315 to add the delta, round and subtract the delta.
1316 Currently no machine description requires this support. */
1317 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1318 abort ();
1319 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1320 }
1321
1322 if (reg_parm_stack_space > 0)
1323 {
1324 args_size->var
1325 = size_binop (MAX_EXPR, args_size->var,
1326 ssize_int (reg_parm_stack_space));
1327
1328 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1329 /* The area corresponding to register parameters is not to count in
1330 the size of the block we need. So make the adjustment. */
1331 args_size->var
1332 = size_binop (MINUS_EXPR, args_size->var,
1333 ssize_int (reg_parm_stack_space));
1334 #endif
1335 }
1336 }
1337 else
1338 {
1339 preferred_stack_boundary /= BITS_PER_UNIT;
1340 if (preferred_stack_boundary < 1)
1341 preferred_stack_boundary = 1;
1342 args_size->constant = (((args_size->constant
1343 + stack_pointer_delta
1344 + preferred_stack_boundary - 1)
1345 / preferred_stack_boundary
1346 * preferred_stack_boundary)
1347 - stack_pointer_delta);
1348
1349 args_size->constant = MAX (args_size->constant,
1350 reg_parm_stack_space);
1351
1352 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1353 args_size->constant -= reg_parm_stack_space;
1354 #endif
1355 }
1356 return unadjusted_args_size;
1357 }
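/* Purely illustrative arithmetic (assumed values, no particular target):
   with args_size->constant == 14, stack_pointer_delta == 4 and a preferred
   boundary of 16 bytes, the rounding above computes
   (14 + 4 + 15) / 16 * 16 - 4 == 32 - 4 == 28, so the constant part grows
   from 14 to 28 and the stack pointer ends up 16-byte aligned after the
   block is pushed. */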
1358
1359 /* Precompute parameters as needed for a function call.
1360
1361 FLAGS is mask of ECF_* constants.
1362
1363 NUM_ACTUALS is the number of arguments.
1364
1365 ARGS is an array containing information for each argument; this
1366 routine fills in the INITIAL_VALUE and VALUE fields for each
1367 precomputed argument. */
1368
1369 static void
1370 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1371 {
1372 int i;
1373
1374 /* If this is a libcall, then precompute all arguments so that we do not
1375 get extraneous instructions emitted as part of the libcall sequence.
1376
1377 If this target defines ACCUMULATE_OUTGOING_ARGS to true, then we must
1378 precompute all arguments that contain function calls. Otherwise,
1379 computing arguments for a subcall may clobber arguments for this call.
1380
1381 If this target defines ACCUMULATE_OUTGOING_ARGS to false, then we only
1382 need to precompute arguments that change the stack pointer, such as calls
1383 to alloca, and calls that do not pop all of their arguments. */
1384
1385 for (i = 0; i < num_actuals; i++)
1386 if ((flags & ECF_LIBCALL_BLOCK)
1387 || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1388 {
1389 enum machine_mode mode;
1390
1391 /* If this is an addressable type, we cannot pre-evaluate it. */
1392 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1393 abort ();
1394
1395 args[i].value
1396 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1397
1398 /* ANSI doesn't require a sequence point here,
1399 but PCC has one, so this will avoid some problems. */
1400 emit_queue ();
1401
1402 args[i].initial_value = args[i].value
1403 = protect_from_queue (args[i].value, 0);
1404
1405 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1406 if (mode != args[i].mode)
1407 {
1408 args[i].value
1409 = convert_modes (args[i].mode, mode,
1410 args[i].value, args[i].unsignedp);
1411 #ifdef PROMOTE_FOR_CALL_ONLY
1412 /* CSE will replace this only if it contains args[i].value
1413 pseudo, so convert it down to the declared mode using
1414 a SUBREG. */
1415 if (GET_CODE (args[i].value) == REG
1416 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1417 {
1418 args[i].initial_value
1419 = gen_lowpart_SUBREG (mode, args[i].value);
1420 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1421 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1422 args[i].unsignedp);
1423 }
1424 #endif
1425 }
1426 }
1427 }
1428
1429 /* Given the current state of MUST_PREALLOCATE and information about
1430 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1431 compute and return the final value for MUST_PREALLOCATE. */
1432
1433 static int
1434 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1435 {
1436 /* See if we have or want to preallocate stack space.
1437
1438 If we would have to push a partially-in-regs parm
1439 before other stack parms, preallocate stack space instead.
1440
1441 If the size of some parm is not a multiple of the required stack
1442 alignment, we must preallocate.
1443
1444 If the total size of arguments that would otherwise create a copy in
1445 a temporary (such as a CALL) is more than half the total argument list
1446 size, preallocation is faster.
1447
1448 Another reason to preallocate is if we have a machine (like the m88k)
1449 where stack alignment is required to be maintained between every
1450 pair of insns, not just when the call is made. However, we assume here
1451 that such machines either do not have push insns (and hence preallocation
1452 would occur anyway) or the problem is taken care of with
1453 PUSH_ROUNDING. */
1454
1455 if (! must_preallocate)
1456 {
1457 int partial_seen = 0;
1458 int copy_to_evaluate_size = 0;
1459 int i;
1460
1461 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1462 {
1463 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1464 partial_seen = 1;
1465 else if (partial_seen && args[i].reg == 0)
1466 must_preallocate = 1;
1467
1468 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1469 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1470 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1471 || TREE_CODE (args[i].tree_value) == COND_EXPR
1472 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1473 copy_to_evaluate_size
1474 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1475 }
1476
1477 if (copy_to_evaluate_size * 2 >= args_size->constant
1478 && args_size->constant > 0)
1479 must_preallocate = 1;
1480 }
1481 return must_preallocate;
1482 }
1483
1484 /* If we preallocated stack space, compute the address of each argument
1485 and store it into the ARGS array.
1486
1487 We need not ensure it is a valid memory address here; it will be
1488 validized when it is used.
1489
1490 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1491
1492 static void
1493 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1494 {
1495 if (argblock)
1496 {
1497 rtx arg_reg = argblock;
1498 int i, arg_offset = 0;
1499
1500 if (GET_CODE (argblock) == PLUS)
1501 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1502
1503 for (i = 0; i < num_actuals; i++)
1504 {
1505 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1506 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1507 rtx addr;
1508
1509 /* Skip this parm if it will not be passed on the stack. */
1510 if (! args[i].pass_on_stack && args[i].reg != 0)
1511 continue;
1512
1513 if (GET_CODE (offset) == CONST_INT)
1514 addr = plus_constant (arg_reg, INTVAL (offset));
1515 else
1516 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1517
1518 addr = plus_constant (addr, arg_offset);
1519 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1520 set_mem_align (args[i].stack, PARM_BOUNDARY);
1521 set_mem_attributes (args[i].stack,
1522 TREE_TYPE (args[i].tree_value), 1);
1523
1524 if (GET_CODE (slot_offset) == CONST_INT)
1525 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1526 else
1527 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1528
1529 addr = plus_constant (addr, arg_offset);
1530 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1531 set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
1532 set_mem_attributes (args[i].stack_slot,
1533 TREE_TYPE (args[i].tree_value), 1);
1534
1535 /* Function incoming arguments may overlap with sibling call
1536 outgoing arguments and we cannot allow reordering of reads
1537 from function arguments with stores to outgoing arguments
1538 of sibling calls. */
1539 set_mem_alias_set (args[i].stack, 0);
1540 set_mem_alias_set (args[i].stack_slot, 0);
1541 }
1542 }
1543 }
1544
1545 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1546 in a call instruction.
1547
1548 FNDECL is the tree node for the target function. For an indirect call
1549 FNDECL will be NULL_TREE.
1550
1551 ADDR is the operand 0 of CALL_EXPR for this call. */
1552
1553 static rtx
1554 rtx_for_function_call (tree fndecl, tree addr)
1555 {
1556 rtx funexp;
1557
1558 /* Get the function to call, in the form of RTL. */
1559 if (fndecl)
1560 {
1561 /* If this is the first use of the function, see if we need to
1562 make an external definition for it. */
1563 if (! TREE_USED (fndecl))
1564 {
1565 assemble_external (fndecl);
1566 TREE_USED (fndecl) = 1;
1567 }
1568
1569 /* Get a SYMBOL_REF rtx for the function address. */
1570 funexp = XEXP (DECL_RTL (fndecl), 0);
1571 }
1572 else
1573 /* Generate an rtx (probably a pseudo-register) for the address. */
1574 {
1575 push_temp_slots ();
1576 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1577 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1578 emit_queue ();
1579 }
1580 return funexp;
1581 }
1582
1583 /* Do the register loads required for any wholly-register parms or any
1584 parms which are passed both on the stack and in a register. Their
1585 expressions were already evaluated.
1586
1587 Mark all register-parms as living through the call, putting these USE
1588 insns in the CALL_INSN_FUNCTION_USAGE field.
1589
1590 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1591 checking, setting *SIBCALL_FAILURE if appropriate. */
1592
1593 static void
1594 load_register_parameters (struct arg_data *args, int num_actuals,
1595 rtx *call_fusage, int flags, int is_sibcall,
1596 int *sibcall_failure)
1597 {
1598 int i, j;
1599
1600 for (i = 0; i < num_actuals; i++)
1601 {
1602 rtx reg = ((flags & ECF_SIBCALL)
1603 ? args[i].tail_call_reg : args[i].reg);
1604 if (reg)
1605 {
1606 int partial = args[i].partial;
1607 int nregs;
1608 int size = 0;
1609 rtx before_arg = get_last_insn ();
1610 /* Set to non-negative if we must move a word at a time, even if just
1611 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1612 we just use a normal move insn. This value can be zero if the
1613 argument is a zero size structure with no fields. */
1614 nregs = -1;
1615 if (partial)
1616 nregs = partial;
1617 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1618 {
1619 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1620 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1621 }
1622 else
1623 size = GET_MODE_SIZE (args[i].mode);
1624
1625 /* Handle calls that pass values in multiple non-contiguous
1626 locations. The Irix 6 ABI has examples of this. */
1627
1628 if (GET_CODE (reg) == PARALLEL)
1629 {
1630 tree type = TREE_TYPE (args[i].tree_value);
1631 emit_group_load (reg, args[i].value, type,
1632 int_size_in_bytes (type));
1633 }
1634
1635 /* If simple case, just do move. If normal partial, store_one_arg
1636 has already loaded the register for us. In all other cases,
1637 load the register(s) from memory. */
1638
1639 else if (nregs == -1)
1640 {
1641 emit_move_insn (reg, args[i].value);
1642 #ifdef BLOCK_REG_PADDING
1643 /* Handle the case where we have a value that needs shifting
1644 up to the msb, e.g. a QImode value and we're padding
1645 upward on a BYTES_BIG_ENDIAN machine. */
1646 if (size < UNITS_PER_WORD
1647 && (args[i].locate.where_pad
1648 == (BYTES_BIG_ENDIAN ? upward : downward)))
1649 {
1650 rtx x;
1651 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1652
1653 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1654 report the whole reg as used. Strictly speaking, the
1655 call only uses SIZE bytes at the msb end, but it doesn't
1656 seem worth generating rtl to say that. */
1657 reg = gen_rtx_REG (word_mode, REGNO (reg));
1658 x = expand_binop (word_mode, ashl_optab, reg,
1659 GEN_INT (shift), reg, 1, OPTAB_WIDEN);
1660 if (x != reg)
1661 emit_move_insn (reg, x);
1662 }
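/* Worked example (added; not from the original sources): assuming a
   big-endian target with UNITS_PER_WORD == 4 and BITS_PER_UNIT == 8,
   a QImode argument padded upward has size == 1, so
   shift == (4 - 1) * 8 == 24 and the byte is moved up to the most
   significant end of the word register. */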
1663 #endif
1664 }
1665
1666 /* If we have pre-computed the values to put in the registers in
1667 the case of non-aligned structures, copy them in now. */
1668
1669 else if (args[i].n_aligned_regs != 0)
1670 for (j = 0; j < args[i].n_aligned_regs; j++)
1671 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1672 args[i].aligned_regs[j]);
1673
1674 else if (partial == 0 || args[i].pass_on_stack)
1675 {
1676 rtx mem = validize_mem (args[i].value);
1677
1678 #ifdef BLOCK_REG_PADDING
1679 /* Handle a BLKmode that needs shifting. */
1680 if (nregs == 1 && size < UNITS_PER_WORD
1681 && args[i].locate.where_pad == downward)
1682 {
1683 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1684 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1685 rtx x = gen_reg_rtx (word_mode);
1686 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1687 optab dir = BYTES_BIG_ENDIAN ? lshr_optab : ashl_optab;
1688
1689 emit_move_insn (x, tem);
1690 x = expand_binop (word_mode, dir, x, GEN_INT (shift),
1691 ri, 1, OPTAB_WIDEN);
1692 if (x != ri)
1693 emit_move_insn (ri, x);
1694 }
1695 else
1696 #endif
1697 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1698 }
1699
1700 /* When a parameter is a block, and perhaps in other cases, it is
1701 possible that it did a load from an argument slot that was
1702 already clobbered. */
1703 if (is_sibcall
1704 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1705 *sibcall_failure = 1;
1706
1707 /* Handle calls that pass values in multiple non-contiguous
1708 locations. The Irix 6 ABI has examples of this. */
1709 if (GET_CODE (reg) == PARALLEL)
1710 use_group_regs (call_fusage, reg);
1711 else if (nregs == -1)
1712 use_reg (call_fusage, reg);
1713 else
1714 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1715 }
1716 }
1717 }
1718
1719 /* Try to integrate the function. See expand_inline_function for
1720 documentation about the parameters. */
1721
1722 static rtx
1723 try_to_integrate (tree fndecl, tree actparms, rtx target, int ignore,
1724 tree type, rtx structure_value_addr)
1725 {
1726 rtx temp;
1727 rtx before_call;
1728 int i;
1729 rtx old_stack_level = 0;
1730 int reg_parm_stack_space = 0;
1731
1732 #ifdef REG_PARM_STACK_SPACE
1733 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1734 #endif
1735
1736 before_call = get_last_insn ();
1737
1738 timevar_push (TV_INTEGRATION);
1739
1740 temp = expand_inline_function (fndecl, actparms, target,
1741 ignore, type,
1742 structure_value_addr);
1743
1744 timevar_pop (TV_INTEGRATION);
1745
1746 /* If inlining succeeded, return. */
1747 if (temp != (rtx) (size_t) - 1)
1748 {
1749 if (ACCUMULATE_OUTGOING_ARGS)
1750 {
1751 /* If the outgoing argument list must be preserved, push
1752 the stack before executing the inlined function if it
1753 makes any calls. */
1754
1755 i = reg_parm_stack_space;
1756 if (i > highest_outgoing_arg_in_use)
1757 i = highest_outgoing_arg_in_use;
1758 while (--i >= 0 && stack_usage_map[i] == 0)
1759 ;
1760
1761 if (stack_arg_under_construction || i >= 0)
1762 {
1763 rtx first_insn
1764 = before_call ? NEXT_INSN (before_call) : get_insns ();
1765 rtx insn = NULL_RTX, seq;
1766
1767 /* Look for a call in the inline function code.
1768 If DECL_STRUCT_FUNCTION (fndecl)->outgoing_args_size is
1769 nonzero then there is a call and it is not necessary
1770 to scan the insns. */
1771
1772 if (DECL_STRUCT_FUNCTION (fndecl)->outgoing_args_size == 0)
1773 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1774 if (GET_CODE (insn) == CALL_INSN)
1775 break;
1776
1777 if (insn)
1778 {
1779 /* Reserve enough stack space so that the largest
1780 argument list of any function call in the inline
1781 function does not overlap the argument list being
1782 evaluated. This is usually an overestimate because
1783 allocate_dynamic_stack_space reserves space for an
1784 outgoing argument list in addition to the requested
1785 space, but there is no way to ask for stack space such
1786 that an argument list of a certain length can be
1787 safely constructed.
1788
1789 Add the stack space reserved for register arguments, if
1790 any, in the inline function. What is really needed is the
1791 largest value of reg_parm_stack_space in the inline
1792 function, but that is not available. Using the current
1793 value of reg_parm_stack_space is wrong, but gives
1794 correct results on all supported machines. */
1795
1796 int adjust =
1797 (DECL_STRUCT_FUNCTION (fndecl)->outgoing_args_size
1798 + reg_parm_stack_space);
1799
1800 start_sequence ();
1801 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1802 allocate_dynamic_stack_space (GEN_INT (adjust),
1803 NULL_RTX, BITS_PER_UNIT);
1804 seq = get_insns ();
1805 end_sequence ();
1806 emit_insn_before (seq, first_insn);
1807 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1808 }
1809 }
1810 }
1811
1812 /* If the result is equivalent to TARGET, return TARGET to simplify
1813 checks in store_expr. They can be equivalent but not equal in the
1814 case of a function that returns BLKmode. */
1815 if (temp != target && rtx_equal_p (temp, target))
1816 return target;
1817 return temp;
1818 }
1819
1820 /* If inlining failed, mark FNDECL as needing to be compiled
1821 separately after all. If function was declared inline,
1822 give a warning. */
1823 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1824 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1825 {
1826 warning ("%Jinlining failed in call to '%F'", fndecl, fndecl);
1827 warning ("called from here");
1828 }
1829 (*lang_hooks.mark_addressable) (fndecl);
1830 return (rtx) (size_t) - 1;
1831 }
1832
1833 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1834 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1835 bytes, then we would need to push some additional bytes to pad the
1836 arguments. So, we compute an adjustment to the stack pointer for an
1837 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1838 bytes. Then, when the arguments are pushed the stack will be perfectly
1839 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1840 be popped after the call. Returns the adjustment. */
1841
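/* Worked example (added; not from the original sources): assume a
   16-byte preferred unit boundary, STACK_POINTER_DELTA == 0,
   UNADJUSTED_ARGS_SIZE == 12 and PENDING_STACK_ADJUST == 20.
   Then unadjusted_alignment = (0 + 12) % 16 = 12, reduced by
   20 % 16 = 4 to 8, so the returned adjustment is 20 - (16 - 8) = 12.
   Popping 12 bytes leaves the stack under-aligned by exactly 12
   bytes, pushing the 12 argument bytes restores 16-byte alignment,
   and ARGS_SIZE->CONSTANT becomes 20 - 12 + 12 = 20. */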
1842 static int
1843 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1844 struct args_size *args_size,
1845 int preferred_unit_stack_boundary)
1846 {
1847 /* The number of bytes to pop so that the stack will be
1848 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1849 HOST_WIDE_INT adjustment;
1850 /* The alignment of the stack after the arguments are pushed, if we
1851 just pushed the arguments without adjusting the stack here. */
1852 HOST_WIDE_INT unadjusted_alignment;
1853
1854 unadjusted_alignment
1855 = ((stack_pointer_delta + unadjusted_args_size)
1856 % preferred_unit_stack_boundary);
1857
1858 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1859 as possible -- leaving just enough left to cancel out the
1860 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1861 PENDING_STACK_ADJUST is non-negative, and congruent to
1862 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1863
1864 /* Begin by trying to pop all the bytes. */
1865 unadjusted_alignment
1866 = (unadjusted_alignment
1867 - (pending_stack_adjust % preferred_unit_stack_boundary));
1868 adjustment = pending_stack_adjust;
1869 /* Push enough additional bytes that the stack will be aligned
1870 after the arguments are pushed. */
1871 if (preferred_unit_stack_boundary > 1)
1872 {
1873 if (unadjusted_alignment > 0)
1874 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1875 else
1876 adjustment += unadjusted_alignment;
1877 }
1878
1879 /* Now set ARGS_SIZE->CONSTANT so that we pop the right number of
1880 bytes after the call. The right number is the entire
1881 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1882 by the arguments in the first place. */
1883 args_size->constant
1884 = pending_stack_adjust - adjustment + unadjusted_args_size;
1885
1886 return adjustment;
1887 }
1888
1889 /* Scan expression X to see whether it dereferences any argument slots
1890 that have already been clobbered by tail call arguments (as noted in
1891 the stored_args_map bitmap).
1892 Return nonzero if X dereferences such an argument slot,
1893 zero otherwise. */
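/* Illustrative note (added; not from the original sources): a
   dereference of an incoming argument slot typically looks like
   (mem:SI (plus (reg internal_arg_pointer) (const_int 8))), for which
   I becomes 8 and, e.g. with a 4-byte SImode access, bits 8..11 of
   stored_args_map are tested (on ARGS_GROW_DOWNWARD targets I is
   first remapped to -I minus the access size). */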
1894
1895 static int
1896 check_sibcall_argument_overlap_1 (rtx x)
1897 {
1898 RTX_CODE code;
1899 int i, j;
1900 unsigned int k;
1901 const char *fmt;
1902
1903 if (x == NULL_RTX)
1904 return 0;
1905
1906 code = GET_CODE (x);
1907
1908 if (code == MEM)
1909 {
1910 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1911 i = 0;
1912 else if (GET_CODE (XEXP (x, 0)) == PLUS
1913 && XEXP (XEXP (x, 0), 0) ==
1914 current_function_internal_arg_pointer
1915 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1916 i = INTVAL (XEXP (XEXP (x, 0), 1));
1917 else
1918 return 0;
1919
1920 #ifdef ARGS_GROW_DOWNWARD
1921 i = -i - GET_MODE_SIZE (GET_MODE (x));
1922 #endif
1923
1924 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1925 if (i + k < stored_args_map->n_bits
1926 && TEST_BIT (stored_args_map, i + k))
1927 return 1;
1928
1929 return 0;
1930 }
1931
1932 /* Scan all subexpressions. */
1933 fmt = GET_RTX_FORMAT (code);
1934 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1935 {
1936 if (*fmt == 'e')
1937 {
1938 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1939 return 1;
1940 }
1941 else if (*fmt == 'E')
1942 {
1943 for (j = 0; j < XVECLEN (x, i); j++)
1944 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1945 return 1;
1946 }
1947 }
1948 return 0;
1949 }
1950
1951 /* Scan the sequence after INSN to see whether it dereferences any
1952 argument slots that have already been clobbered by tail call arguments
1953 (as noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP is
1954 nonzero, add the stack slots for ARG to the stored_args_map bitmap
1955 afterwards (when ARG is passed in a register, MARK_STORED_ARGS_MAP
1956 should be 0). Return nonzero if the sequence after INSN dereferences
1957 such argument slots, zero otherwise. */
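/* Illustrative note (added; not from the original sources): when
   MARK_STORED_ARGS_MAP is nonzero, the slot bytes are recorded one bit
   per byte; e.g. an argument with slot_offset.constant == 16 and
   size.constant == 8 sets bits 16..23 of stored_args_map (the offset
   is mirrored first on ARGS_GROW_DOWNWARD targets). */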
1957
1958 static int
1959 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1960 {
1961 int low, high;
1962
1963 if (insn == NULL_RTX)
1964 insn = get_insns ();
1965 else
1966 insn = NEXT_INSN (insn);
1967
1968 for (; insn; insn = NEXT_INSN (insn))
1969 if (INSN_P (insn)
1970 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1971 break;
1972
1973 if (mark_stored_args_map)
1974 {
1975 #ifdef ARGS_GROW_DOWNWARD
1976 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1977 #else
1978 low = arg->locate.slot_offset.constant;
1979 #endif
1980
1981 for (high = low + arg->locate.size.constant; low < high; low++)
1982 SET_BIT (stored_args_map, low);
1983 }
1984 return insn != NULL_RTX;
1985 }
1986
1987 static tree
1988 fix_unsafe_tree (tree t)
1989 {
1990 switch (unsafe_for_reeval (t))
1991 {
1992 case 0: /* Safe. */
1993 break;
1994
1995 case 1: /* Mildly unsafe. */
1996 t = unsave_expr (t);
1997 break;
1998
1999 case 2: /* Wildly unsafe. */
2000 {
2001 tree var = build_decl (VAR_DECL, NULL_TREE,
2002 TREE_TYPE (t));
2003 SET_DECL_RTL (var,
2004 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
2005 t = var;
2006 }
2007 break;
2008
2009 default:
2010 abort ();
2011 }
2012 return t;
2013 }
2014
2015
2016 /* If function value *VALUE was returned at the most significant end of a
2017 register, shift it towards the least significant end and convert it to
2018 TYPE's mode. Return true and update *VALUE if some action was needed.
2019
2020 TYPE is the type of the function's return value, which is known not
2021 to have mode BLKmode. */
2022
2023 static bool
2024 shift_returned_value (tree type, rtx *value)
2025 {
2026 if (targetm.calls.return_in_msb (type))
2027 {
2028 HOST_WIDE_INT shift;
2029
2030 shift = (GET_MODE_BITSIZE (GET_MODE (*value))
2031 - BITS_PER_UNIT * int_size_in_bytes (type));
2032 if (shift > 0)
2033 {
2034 *value = expand_binop (GET_MODE (*value), lshr_optab, *value,
2035 GEN_INT (shift), 0, 1, OPTAB_WIDEN);
2036 *value = convert_to_mode (TYPE_MODE (type), *value, 0);
2037 return true;
2038 }
2039 }
2040 return false;
2041 }
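/* Worked example (added; not from the original sources): assume a
   target where return_in_msb is true, the return register is SImode
   (32 bits) and TYPE is a 1-byte type (typically QImode). Then
   shift = 32 - 8 * 1 = 24, so *VALUE is logically shifted right by 24
   bits and then converted to TYPE's mode, leaving the byte at the
   least significant end as the function's result. */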
2042
2043 /* Generate all the code for a function call
2044 and return an rtx for its value.
2045 Store the value in TARGET (specified as an rtx) if convenient.
2046 If the value is stored in TARGET then TARGET is returned.
2047 If IGNORE is nonzero, then we ignore the value of the function call. */
2048
2049 rtx
2050 expand_call (tree exp, rtx target, int ignore)
2051 {
2052 /* Nonzero if we are currently expanding a call. */
2053 static int currently_expanding_call = 0;
2054
2055 /* List of actual parameters. */
2056 tree actparms = TREE_OPERAND (exp, 1);
2057 /* RTX for the function to be called. */
2058 rtx funexp;
2059 /* Sequence of insns to perform a tail recursive "call". */
2060 rtx tail_recursion_insns = NULL_RTX;
2061 /* Sequence of insns to perform a normal "call". */
2062 rtx normal_call_insns = NULL_RTX;
2063 /* Sequence of insns to perform a tail "call". */
2064 rtx tail_call_insns = NULL_RTX;
2065 /* Data type of the function. */
2066 tree funtype;
2067 tree type_arg_types;
2068 /* Declaration of the function being called,
2069 or 0 if the function is computed (not known by name). */
2070 tree fndecl = 0;
2071 /* The type of the function being called. */
2072 tree fntype;
2073 rtx insn;
2074 int try_tail_call = 1;
2075 int try_tail_recursion = 1;
2076 int pass;
2077
2078 /* Register in which non-BLKmode value will be returned,
2079 or 0 if no value or if value is BLKmode. */
2080 rtx valreg;
2081 /* Address where we should return a BLKmode value;
2082 0 if value not BLKmode. */
2083 rtx structure_value_addr = 0;
2084 /* Nonzero if that address is being passed by treating it as
2085 an extra, implicit first parameter. Otherwise,
2086 it is passed by being copied directly into struct_value_rtx. */
2087 int structure_value_addr_parm = 0;
2088 /* Size of aggregate value wanted, or zero if none wanted
2089 or if we are using the non-reentrant PCC calling convention
2090 or expecting the value in registers. */
2091 HOST_WIDE_INT struct_value_size = 0;
2092 /* Nonzero if called function returns an aggregate in memory PCC style,
2093 by returning the address of where to find it. */
2094 int pcc_struct_value = 0;
2095 rtx struct_value = 0;
2096
2097 /* Number of actual parameters in this call, including struct value addr. */
2098 int num_actuals;
2099 /* Number of named args. Args after this are anonymous ones
2100 and they must all go on the stack. */
2101 int n_named_args;
2102
2103 /* Vector of information about each argument.
2104 Arguments are numbered in the order they will be pushed,
2105 not the order they are written. */
2106 struct arg_data *args;
2107
2108 /* Total size in bytes of all the stack-parms scanned so far. */
2109 struct args_size args_size;
2110 struct args_size adjusted_args_size;
2111 /* Size of arguments before any adjustments (such as rounding). */
2112 int unadjusted_args_size;
2113 /* Data on reg parms scanned so far. */
2114 CUMULATIVE_ARGS args_so_far;
2115 /* Nonzero if a reg parm has been scanned. */
2116 int reg_parm_seen;
2117 /* Nonzero if this is an indirect function call. */
2118
2119 /* Nonzero if we must avoid push-insns in the args for this call.
2120 If stack space is allocated for register parameters, but not by the
2121 caller, then it is preallocated in the fixed part of the stack frame.
2122 So the entire argument block must then be preallocated (i.e., we
2123 ignore PUSH_ROUNDING in that case). */
2124
2125 int must_preallocate = !PUSH_ARGS;
2126
2127 /* Size of the stack reserved for parameter registers. */
2128 int reg_parm_stack_space = 0;
2129
2130 /* Address of space preallocated for stack parms
2131 (on machines that lack push insns), or 0 if space not preallocated. */
2132 rtx argblock = 0;
2133
2134 /* Mask of ECF_ flags. */
2135 int flags = 0;
2136 /* Nonzero if this is a call to an inline function. */
2137 int is_integrable = 0;
2138 #ifdef REG_PARM_STACK_SPACE
2139 /* Define the boundary of the register parm stack space that needs to be
2140 saved, if any. */
2141 int low_to_save, high_to_save;
2142 rtx save_area = 0; /* Place that it is saved */
2143 #endif
2144
2145 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2146 rtx temp_target = 0;
2147 char *initial_stack_usage_map = stack_usage_map;
2148
2149 int old_stack_allocated;
2150
2151 /* State variables to track stack modifications. */
2152 rtx old_stack_level = 0;
2153 int old_stack_arg_under_construction = 0;
2154 int old_pending_adj = 0;
2155 int old_inhibit_defer_pop = inhibit_defer_pop;
2156
2157 /* Some stack pointer alterations we make are performed via
2158 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2159 which we then also need to save/restore along the way. */
2160 int old_stack_pointer_delta = 0;
2161
2162 rtx call_fusage;
2163 tree p = TREE_OPERAND (exp, 0);
2164 tree addr = TREE_OPERAND (exp, 0);
2165 int i;
2166 /* The alignment of the stack, in bits. */
2167 HOST_WIDE_INT preferred_stack_boundary;
2168 /* The alignment of the stack, in bytes. */
2169 HOST_WIDE_INT preferred_unit_stack_boundary;
2170
2171 /* See if this is "nothrow" function call. */
2172 if (TREE_NOTHROW (exp))
2173 flags |= ECF_NOTHROW;
2174
2175 /* See if we can find a DECL-node for the actual function.
2176 As a result, decide whether this is a call to an integrable function. */
2177
2178 fndecl = get_callee_fndecl (exp);
2179 if (fndecl)
2180 {
2181 fntype = TREE_TYPE (fndecl);
2182 if (!flag_no_inline
2183 && fndecl != current_function_decl
2184 && DECL_INLINE (fndecl)
2185 && DECL_STRUCT_FUNCTION (fndecl)
2186 && DECL_STRUCT_FUNCTION (fndecl)->inlinable)
2187 is_integrable = 1;
2188 else if (! TREE_ADDRESSABLE (fndecl))
2189 {
2190 /* In case this function later becomes inlinable,
2191 record that there was already a non-inline call to it.
2192
2193 Use abstraction instead of setting TREE_ADDRESSABLE
2194 directly. */
2195 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2196 && optimize > 0)
2197 {
2198 warning ("%Jcan't inline call to '%F'", fndecl, fndecl);
2199 warning ("called from here");
2200 }
2201 (*lang_hooks.mark_addressable) (fndecl);
2202 }
2203
2204 if (ignore
2205 && lookup_attribute ("warn_unused_result",
2206 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
2207 warning ("ignoring return value of `%D', "
2208 "declared with attribute warn_unused_result", fndecl);
2209
2210 flags |= flags_from_decl_or_type (fndecl);
2211 }
2212
2213 /* If we don't have a specific function to call, see if we have
2214 attributes set in the type. */
2215 else
2216 {
2217 fntype = TREE_TYPE (TREE_TYPE (p));
2218 if (ignore
2219 && lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (fntype)))
2220 warning ("ignoring return value of function "
2221 "declared with attribute warn_unused_result");
2222 flags |= flags_from_decl_or_type (fntype);
2223 }
2224
2225 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2226
2227 /* Warn if this value is an aggregate type,
2228 regardless of which calling convention we are using for it. */
2229 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2230 warning ("function call has aggregate value");
2231
2232 /* If the result of a pure or const function call is ignored (or void),
2233 and none of its arguments are volatile, we can avoid expanding the
2234 call and just evaluate the arguments for side-effects. */
2235 if ((flags & (ECF_CONST | ECF_PURE))
2236 && (ignore || target == const0_rtx
2237 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2238 {
2239 bool volatilep = false;
2240 tree arg;
2241
2242 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2243 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
2244 {
2245 volatilep = true;
2246 break;
2247 }
2248
2249 if (! volatilep)
2250 {
2251 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2252 expand_expr (TREE_VALUE (arg), const0_rtx,
2253 VOIDmode, EXPAND_NORMAL);
2254 return const0_rtx;
2255 }
2256 }
2257
2258 #ifdef REG_PARM_STACK_SPACE
2259 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2260 #endif
2261
2262 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2263 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2264 must_preallocate = 1;
2265 #endif
2266
2267 /* Set up a place to return a structure. */
2268
2269 /* Cater to broken compilers. */
2270 if (aggregate_value_p (exp, fndecl))
2271 {
2272 /* This call returns a big structure. */
2273 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2274
2275 #ifdef PCC_STATIC_STRUCT_RETURN
2276 {
2277 pcc_struct_value = 1;
2278 /* Easier than making that case work right. */
2279 if (is_integrable)
2280 {
2281 /* In case this is a static function, note that it has been
2282 used. */
2283 if (! TREE_ADDRESSABLE (fndecl))
2284 (*lang_hooks.mark_addressable) (fndecl);
2285 is_integrable = 0;
2286 }
2287 }
2288 #else /* not PCC_STATIC_STRUCT_RETURN */
2289 {
2290 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2291
2292 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2293 {
2294 /* The structure value address arg is already in actparms.
2295 Pull it out. It might be nice to just leave it there, but
2296 we need to set structure_value_addr. */
2297 tree return_arg = TREE_VALUE (actparms);
2298 actparms = TREE_CHAIN (actparms);
2299 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2300 VOIDmode, EXPAND_NORMAL);
2301 }
2302 else if (target && GET_CODE (target) == MEM)
2303 structure_value_addr = XEXP (target, 0);
2304 else
2305 {
2306 /* For variable-sized objects, we must be called with a target
2307 specified. If we were to allocate space on the stack here,
2308 we would have no way of knowing when to free it. */
2309 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2310
2311 mark_temp_addr_taken (d);
2312 structure_value_addr = XEXP (d, 0);
2313 target = 0;
2314 }
2315 }
2316 #endif /* not PCC_STATIC_STRUCT_RETURN */
2317 }
2318
2319 /* If called function is inline, try to integrate it. */
2320
2321 if (is_integrable)
2322 {
2323 rtx temp = try_to_integrate (fndecl, actparms, target,
2324 ignore, TREE_TYPE (exp),
2325 structure_value_addr);
2326 if (temp != (rtx) (size_t) - 1)
2327 return temp;
2328 }
2329
2330 /* Figure out the amount to which the stack should be aligned. */
2331 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2332 if (fndecl)
2333 {
2334 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2335 if (i && i->preferred_incoming_stack_boundary)
2336 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2337 }
2338
2339 /* Operand 0 is a pointer-to-function; get the type of the function. */
2340 funtype = TREE_TYPE (addr);
2341 if (! POINTER_TYPE_P (funtype))
2342 abort ();
2343 funtype = TREE_TYPE (funtype);
2344
2345 /* Munge the tree to split complex arguments into their imaginary
2346 and real parts. */
2347 if (SPLIT_COMPLEX_ARGS)
2348 {
2349 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2350 actparms = split_complex_values (actparms);
2351 }
2352 else
2353 type_arg_types = TYPE_ARG_TYPES (funtype);
2354
2355 /* See if this is a call to a function that can return more than once
2356 or a call to longjmp or malloc. */
2357 flags |= special_function_p (fndecl, flags);
2358
2359 if (flags & ECF_MAY_BE_ALLOCA)
2360 current_function_calls_alloca = 1;
2361
2362 /* If struct_value_rtx is 0, it means pass the address
2363 as if it were an extra parameter. */
2364 if (structure_value_addr && struct_value == 0)
2365 {
2366 /* If structure_value_addr is a REG other than
2367 virtual_outgoing_args_rtx, we can always use it. If it
2368 is not a REG, we must always copy it into a register.
2369 If it is virtual_outgoing_args_rtx, we must copy it to another
2370 register in some cases. */
2371 rtx temp = (GET_CODE (structure_value_addr) != REG
2372 || (ACCUMULATE_OUTGOING_ARGS
2373 && stack_arg_under_construction
2374 && structure_value_addr == virtual_outgoing_args_rtx)
2375 ? copy_addr_to_reg (convert_memory_address
2376 (Pmode, structure_value_addr))
2377 : structure_value_addr);
2378
2379 actparms
2380 = tree_cons (error_mark_node,
2381 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2382 temp),
2383 actparms);
2384 structure_value_addr_parm = 1;
2385 }
2386
2387 /* Count the arguments and set NUM_ACTUALS. */
2388 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2389 num_actuals++;
2390
2391 /* Compute number of named args.
2392 Normally, don't include the last named arg if anonymous args follow.
2393 We do include the last named arg if
2394 targetm.calls.strict_argument_naming() returns nonzero.
2395 (If no anonymous args follow, the result of list_length is actually
2396 one too large. This is harmless.)
2397
2398 If targetm.calls.pretend_outgoing_varargs_named() returns
2399 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2400 this machine will be able to place unnamed args that were passed
2401 in registers into the stack. So treat all args as named. This
2402 allows the insns emitted for a specific argument list to be
2403 independent of the function declaration.
2404
2405 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2406 we do not have any reliable way to pass unnamed args in
2407 registers, so we must force them into memory. */
2408
2409 if ((targetm.calls.strict_argument_naming (&args_so_far)
2410 || ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2411 && type_arg_types != 0)
2412 n_named_args
2413 = (list_length (type_arg_types)
2414 /* Don't include the last named arg. */
2415 - (targetm.calls.strict_argument_naming (&args_so_far) ? 0 : 1)
2416 /* Count the struct value address, if it is passed as a parm. */
2417 + structure_value_addr_parm);
2418 else
2419 /* If we know nothing, treat all args as named. */
2420 n_named_args = num_actuals;
2421
2422 /* Start updating where the next arg would go.
2423
2424 On some machines (such as the PA) indirect calls have a different
2425 calling convention than normal calls. The fourth argument in
2426 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2427 or not. */
2428 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2429
2430 /* Make a vector to hold all the information about each arg. */
2431 args = alloca (num_actuals * sizeof (struct arg_data));
2432 memset (args, 0, num_actuals * sizeof (struct arg_data));
2433
2434 /* Build up entries in the ARGS array, compute the size of the
2435 arguments into ARGS_SIZE, etc. */
2436 initialize_argument_information (num_actuals, args, &args_size,
2437 n_named_args, actparms, fndecl,
2438 &args_so_far, reg_parm_stack_space,
2439 &old_stack_level, &old_pending_adj,
2440 &must_preallocate, &flags,
2441 CALL_FROM_THUNK_P (exp));
2442
2443 if (args_size.var)
2444 {
2445 /* If this function requires a variable-sized argument list, don't
2446 try to make a cse'able block for this call. We may be able to
2447 do this eventually, but it is too complicated to keep track of
2448 what insns go in the cse'able block and which don't. */
2449
2450 flags &= ~ECF_LIBCALL_BLOCK;
2451 must_preallocate = 1;
2452 }
2453
2454 /* Now make final decision about preallocating stack space. */
2455 must_preallocate = finalize_must_preallocate (must_preallocate,
2456 num_actuals, args,
2457 &args_size);
2458
2459 /* If the structure value address will reference the stack pointer, we
2460 must stabilize it. We don't need to do this if we know that we are
2461 not going to adjust the stack pointer in processing this call. */
2462
2463 if (structure_value_addr
2464 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2465 || reg_mentioned_p (virtual_outgoing_args_rtx,
2466 structure_value_addr))
2467 && (args_size.var
2468 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2469 structure_value_addr = copy_to_reg (structure_value_addr);
2470
2471 /* Tail calls can make things harder to debug, and we've traditionally
2472 pushed these optimizations into -O2. Don't try if we're already
2473 expanding a call, as that means we're an argument. Don't try if
2474 there are cleanups, as we know there's code to follow the call.
2475
2476 If rtx_equal_function_value_matters is false, that means we've
2477 finished with regular parsing. Which means that some of the
2478 machinery we use to generate tail-calls is no longer in place.
2479 This is most often true of sjlj-exceptions, which we couldn't
2480 tail-call to anyway.
2481
2482 If current_nesting_level () == 0, we're being called after
2483 the function body has been expanded. This can happen when
2484 setting up trampolines in expand_function_end. */
2485 if (currently_expanding_call++ != 0
2486 || !flag_optimize_sibling_calls
2487 || !rtx_equal_function_value_matters
2488 || current_nesting_level () == 0
2489 || any_pending_cleanups ()
2490 || args_size.var)
2491 try_tail_call = try_tail_recursion = 0;
2492
2493 /* Tail recursion fails when we are not dealing with a recursive call. */
2494 if (!try_tail_recursion
2495 || TREE_CODE (addr) != ADDR_EXPR
2496 || TREE_OPERAND (addr, 0) != current_function_decl)
2497 try_tail_recursion = 0;
2498
2499 /* Other reasons for tail call optimization to fail. */
2500 if (
2501 #ifdef HAVE_sibcall_epilogue
2502 !HAVE_sibcall_epilogue
2503 #else
2504 1
2505 #endif
2506 || !try_tail_call
2507 /* Doing sibling call optimization needs some work, since
2508 structure_value_addr can be allocated on the stack.
2509 It does not seem worth the effort since few optimizable
2510 sibling calls will return a structure. */
2511 || structure_value_addr != NULL_RTX
2512 /* Check whether the target is able to optimize the call
2513 into a sibcall. */
2514 || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
2515 /* Functions that do not return exactly once may not be sibcall
2516 optimized. */
2517 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2518 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2519 /* If the called function is nested in the current one, it might access
2520 some of the caller's arguments, but could clobber them beforehand if
2521 the argument areas are shared. */
2522 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2523 /* If this function requires more stack slots than the current
2524 function, we cannot change it into a sibling call. */
2525 || args_size.constant > current_function_args_size
2526 /* If the callee pops its own arguments, then it must pop exactly
2527 the same number of arguments as the current function. */
2528 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2529 != RETURN_POPS_ARGS (current_function_decl,
2530 TREE_TYPE (current_function_decl),
2531 current_function_args_size))
2532 || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
2533 try_tail_call = 0;
2534
2535 if (try_tail_call || try_tail_recursion)
2536 {
2537 int end, inc;
2538 actparms = NULL_TREE;
2539 /* Ok, we're going to give the tail call the old college try.
2540 This means we're going to evaluate the function arguments
2541 up to three times. There are two degrees of badness we can
2542 encounter, those that can be unsaved and those that can't.
2543 (See unsafe_for_reeval commentary for details.)
2544
2545 Generate a new argument list. Pass safe arguments through
2546 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2547 For hard badness, evaluate them now and put their resulting
2548 rtx in a temporary VAR_DECL.
2549
2550 initialize_argument_information has ordered the array for the
2551 order to be pushed, and we must remember this when reconstructing
2552 the original argument order. */
2553
2554 if (PUSH_ARGS_REVERSED)
2555 {
2556 inc = 1;
2557 i = 0;
2558 end = num_actuals;
2559 }
2560 else
2561 {
2562 inc = -1;
2563 i = num_actuals - 1;
2564 end = -1;
2565 }
2566
2567 for (; i != end; i += inc)
2568 {
2569 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2570 /* We need to build actparms for optimize_tail_recursion. We can
2571 safely trash away TREE_PURPOSE, since it is unused by this
2572 function. */
2573 if (try_tail_recursion)
2574 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2575 }
2576 /* Do the same for the function address if it is an expression. */
2577 if (!fndecl)
2578 addr = fix_unsafe_tree (addr);
2579 /* Expanding one of those dangerous arguments could have added
2580 cleanups, but otherwise give it a whirl. */
2581 if (any_pending_cleanups ())
2582 try_tail_call = try_tail_recursion = 0;
2583 }
2584
2585 /* Generate a tail recursion sequence when calling ourselves. */
2586
2587 if (try_tail_recursion)
2588 {
2589 /* We want to emit any pending stack adjustments before the tail
2590 recursion "call". That way we know any adjustment after the tail
2591 recursion call can be ignored if we indeed use the tail recursion
2592 call expansion. */
2593 int save_pending_stack_adjust = pending_stack_adjust;
2594 int save_stack_pointer_delta = stack_pointer_delta;
2595
2596 /* Emit any queued insns now; otherwise they would end up in
2597 only one of the alternates. */
2598 emit_queue ();
2599
2600 /* Use a new sequence to hold any RTL we generate. We do not even
2601 know if we will use this RTL yet. The final decision can not be
2602 made until after RTL generation for the entire function is
2603 complete. */
2604 start_sequence ();
2605 /* If expanding any of the arguments creates cleanups, we can't
2606 do a tailcall. So, we'll need to pop the pending cleanups
2607 list. If, however, all goes well, and there are no cleanups
2608 then the call to expand_start_target_temps will have no
2609 effect. */
2610 expand_start_target_temps ();
2611 if (optimize_tail_recursion (actparms, get_last_insn ()))
2612 {
2613 if (any_pending_cleanups ())
2614 try_tail_call = try_tail_recursion = 0;
2615 else
2616 tail_recursion_insns = get_insns ();
2617 }
2618 expand_end_target_temps ();
2619 end_sequence ();
2620
2621 /* Restore the original pending stack adjustment for the sibling and
2622 normal call cases below. */
2623 pending_stack_adjust = save_pending_stack_adjust;
2624 stack_pointer_delta = save_stack_pointer_delta;
2625 }
2626
2627 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2628 {
2629 /* A fork duplicates the profile information, and an exec discards
2630 it. We can't rely on fork/exec to be paired. So write out the
2631 profile information we have gathered so far, and clear it. */
2632 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2633 is subject to race conditions, just as with multithreaded
2634 programs. */
2635
2636 emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0);
2637 }
2638
2639 /* Ensure current function's preferred stack boundary is at least
2640 what we need. We don't have to increase alignment for recursive
2641 functions. */
2642 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2643 && fndecl != current_function_decl)
2644 cfun->preferred_stack_boundary = preferred_stack_boundary;
2645 if (fndecl == current_function_decl)
2646 cfun->recursive_call_emit = true;
2647
2648 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2649
2650 function_call_count++;
2651
2652 /* We want to make two insn chains; one for a sibling call, the other
2653 for a normal call. We will select one of the two chains after
2654 initial RTL generation is complete. */
2655 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2656 {
2657 int sibcall_failure = 0;
2658 /* We want to emit any pending stack adjustments before the tail
2659 recursion "call". That way we know any adjustment after the tail
2660 recursion call can be ignored if we indeed use the tail recursion
2661 call expansion. */
2662 int save_pending_stack_adjust = 0;
2663 int save_stack_pointer_delta = 0;
2664 rtx insns;
2665 rtx before_call, next_arg_reg;
2666
2667 if (pass == 0)
2668 {
2669 /* Emit any queued insns now; otherwise they would end up in
2670 only one of the alternates. */
2671 emit_queue ();
2672
2673 /* State variables we need to save and restore between
2674 iterations. */
2675 save_pending_stack_adjust = pending_stack_adjust;
2676 save_stack_pointer_delta = stack_pointer_delta;
2677 }
2678 if (pass)
2679 flags &= ~ECF_SIBCALL;
2680 else
2681 flags |= ECF_SIBCALL;
2682
2683 /* Other state variables that we must reinitialize each time
2684 through the loop (that are not initialized by the loop itself). */
2685 argblock = 0;
2686 call_fusage = 0;
2687
2688 /* Start a new sequence for the normal call case.
2689
2690 From this point on, if the sibling call fails, we want to set
2691 sibcall_failure instead of continuing the loop. */
2692 start_sequence ();
2693
2694 if (pass == 0)
2695 {
2696 /* We know at this point that there are not currently any
2697 pending cleanups. If, however, in the process of evaluating
2698 the arguments we were to create some, we'll need to be
2699 able to get rid of them. */
2700 expand_start_target_temps ();
2701 }
2702
2703 /* Don't let pending stack adjusts add up to too much.
2704 Also, do all pending adjustments now if there is any chance
2705 this might be a call to alloca or if we are expanding a sibling
2706 call sequence or if we are calling a function that is to return
2707 with stack pointer depressed. */
2708 if (pending_stack_adjust >= 32
2709 || (pending_stack_adjust > 0
2710 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2711 || pass == 0)
2712 do_pending_stack_adjust ();
2713
2714 /* When calling a const function, we must pop the stack args right away,
2715 so that the pop is deleted or moved with the call. */
2716 if (pass && (flags & ECF_LIBCALL_BLOCK))
2717 NO_DEFER_POP;
2718
2719 /* Precompute any arguments as needed. */
2720 if (pass)
2721 precompute_arguments (flags, num_actuals, args);
2722
2723 /* Now we are about to start emitting insns that can be deleted
2724 if a libcall is deleted. */
2725 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2726 start_sequence ();
2727
2728 adjusted_args_size = args_size;
2729 /* Compute the actual size of the argument block required. The variable
2730 and constant sizes must be combined, the size may have to be rounded,
2731 and there may be a minimum required size. When generating a sibcall
2732 pattern, do not round up, since we'll be re-using whatever space our
2733 caller provided. */
2734 unadjusted_args_size
2735 = compute_argument_block_size (reg_parm_stack_space,
2736 &adjusted_args_size,
2737 (pass == 0 ? 0
2738 : preferred_stack_boundary));
2739
2740 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2741
2742 /* The argument block when performing a sibling call is the
2743 incoming argument block. */
2744 if (pass == 0)
2745 {
2746 argblock = virtual_incoming_args_rtx;
2747 argblock
2748 #ifdef STACK_GROWS_DOWNWARD
2749 = plus_constant (argblock, current_function_pretend_args_size);
2750 #else
2751 = plus_constant (argblock, -current_function_pretend_args_size);
2752 #endif
2753 stored_args_map = sbitmap_alloc (args_size.constant);
2754 sbitmap_zero (stored_args_map);
2755 }
2756
2757 /* If we have no actual push instructions, or shouldn't use them,
2758 make space for all args right now. */
2759 else if (adjusted_args_size.var != 0)
2760 {
2761 if (old_stack_level == 0)
2762 {
2763 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2764 old_stack_pointer_delta = stack_pointer_delta;
2765 old_pending_adj = pending_stack_adjust;
2766 pending_stack_adjust = 0;
2767 /* stack_arg_under_construction says whether a stack arg is
2768 being constructed at the old stack level. Pushing the stack
2769 gets a clean outgoing argument block. */
2770 old_stack_arg_under_construction = stack_arg_under_construction;
2771 stack_arg_under_construction = 0;
2772 }
2773 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2774 }
2775 else
2776 {
2777 /* Note that we must go through the motions of allocating an argument
2778 block even if the size is zero because we may be storing args
2779 in the area reserved for register arguments, which may be part of
2780 the stack frame. */
2781
2782 int needed = adjusted_args_size.constant;
2783
2784 /* Store the maximum argument space used. It will be pushed by
2785 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2786 checking). */
2787
2788 if (needed > current_function_outgoing_args_size)
2789 current_function_outgoing_args_size = needed;
2790
2791 if (must_preallocate)
2792 {
2793 if (ACCUMULATE_OUTGOING_ARGS)
2794 {
2795 /* Since the stack pointer will never be pushed, it is
2796 possible for the evaluation of a parm to clobber
2797 something we have already written to the stack.
2798 Since most function calls on RISC machines do not use
2799 the stack, this is uncommon, but must work correctly.
2800
2801 Therefore, we save any area of the stack that was already
2802 written and that we are using. Here we set up to do this
2803 by making a new stack usage map from the old one. The
2804 actual save will be done by store_one_arg.
2805
2806 Another approach might be to try to reorder the argument
2807 evaluations to avoid this conflicting stack usage. */
2808
2809 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2810 /* Since we will be writing into the entire argument area,
2811 the map must be allocated for its entire size, not just
2812 the part that is the responsibility of the caller. */
2813 needed += reg_parm_stack_space;
2814 #endif
2815
2816 #ifdef ARGS_GROW_DOWNWARD
2817 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2818 needed + 1);
2819 #else
2820 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2821 needed);
2822 #endif
2823 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2824
2825 if (initial_highest_arg_in_use)
2826 memcpy (stack_usage_map, initial_stack_usage_map,
2827 initial_highest_arg_in_use);
2828
2829 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2830 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2831 (highest_outgoing_arg_in_use
2832 - initial_highest_arg_in_use));
2833 needed = 0;
2834
2835 /* The address of the outgoing argument list must not be
2836 copied to a register here, because argblock would be left
2837 pointing to the wrong place after the call to
2838 allocate_dynamic_stack_space below. */
2839
2840 argblock = virtual_outgoing_args_rtx;
2841 }
2842 else
2843 {
2844 if (inhibit_defer_pop == 0)
2845 {
2846 /* Try to reuse some or all of the pending_stack_adjust
2847 to get this space. */
2848 needed
2849 = (combine_pending_stack_adjustment_and_call
2850 (unadjusted_args_size,
2851 &adjusted_args_size,
2852 preferred_unit_stack_boundary));
2853
2854 /* combine_pending_stack_adjustment_and_call computes
2855 an adjustment before the arguments are allocated.
2856 Account for them and see whether or not the stack
2857 needs to go up or down. */
2858 needed = unadjusted_args_size - needed;
2859
2860 if (needed < 0)
2861 {
2862 /* We're releasing stack space. */
2863 /* ??? We can avoid any adjustment at all if we're
2864 already aligned. FIXME. */
2865 pending_stack_adjust = -needed;
2866 do_pending_stack_adjust ();
2867 needed = 0;
2868 }
2869 else
2870 /* We need to allocate space. We'll do that in
2871 push_block below. */
2872 pending_stack_adjust = 0;
2873 }
2874
2875 /* Special case this because overhead of `push_block' in
2876 this case is non-trivial. */
2877 if (needed == 0)
2878 argblock = virtual_outgoing_args_rtx;
2879 else
2880 {
2881 argblock = push_block (GEN_INT (needed), 0, 0);
2882 #ifdef ARGS_GROW_DOWNWARD
2883 argblock = plus_constant (argblock, needed);
2884 #endif
2885 }
2886
2887 /* We only really need to call `copy_to_reg' in the case
2888 where push insns are going to be used to pass ARGBLOCK
2889 to a function call in ARGS. In that case, the stack
2890 pointer changes value from the allocation point to the
2891 call point, and hence the value of
2892 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2893 as well always do it. */
2894 argblock = copy_to_reg (argblock);
2895 }
2896 }
2897 }
2898
2899 if (ACCUMULATE_OUTGOING_ARGS)
2900 {
2901 /* The save/restore code in store_one_arg handles all
2902 cases except one: a constructor call (including a C
2903 function returning a BLKmode struct) to initialize
2904 an argument. */
2905 if (stack_arg_under_construction)
2906 {
2907 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2908 rtx push_size = GEN_INT (reg_parm_stack_space
2909 + adjusted_args_size.constant);
2910 #else
2911 rtx push_size = GEN_INT (adjusted_args_size.constant);
2912 #endif
2913 if (old_stack_level == 0)
2914 {
2915 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2916 NULL_RTX);
2917 old_stack_pointer_delta = stack_pointer_delta;
2918 old_pending_adj = pending_stack_adjust;
2919 pending_stack_adjust = 0;
2920 /* stack_arg_under_construction says whether a stack
2921 arg is being constructed at the old stack level.
2922 Pushing the stack gets a clean outgoing argument
2923 block. */
2924 old_stack_arg_under_construction
2925 = stack_arg_under_construction;
2926 stack_arg_under_construction = 0;
2927 /* Make a new map for the new argument list. */
2928 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2929 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2930 highest_outgoing_arg_in_use = 0;
2931 }
2932 allocate_dynamic_stack_space (push_size, NULL_RTX,
2933 BITS_PER_UNIT);
2934 }
2935
2936 /* If argument evaluation might modify the stack pointer,
2937 copy the address of the argument list to a register. */
2938 for (i = 0; i < num_actuals; i++)
2939 if (args[i].pass_on_stack)
2940 {
2941 argblock = copy_addr_to_reg (argblock);
2942 break;
2943 }
2944 }
2945
2946 compute_argument_addresses (args, argblock, num_actuals);
2947
2948 /* If we push args individually in reverse order, perform stack alignment
2949 before the first push (the last arg). */
2950 if (PUSH_ARGS_REVERSED && argblock == 0
2951 && adjusted_args_size.constant != unadjusted_args_size)
2952 {
2953 /* When the stack adjustment is pending, we get better code
2954 by combining the adjustments. */
2955 if (pending_stack_adjust
2956 && ! (flags & ECF_LIBCALL_BLOCK)
2957 && ! inhibit_defer_pop)
2958 {
2959 pending_stack_adjust
2960 = (combine_pending_stack_adjustment_and_call
2961 (unadjusted_args_size,
2962 &adjusted_args_size,
2963 preferred_unit_stack_boundary));
2964 do_pending_stack_adjust ();
2965 }
2966 else if (argblock == 0)
2967 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2968 - unadjusted_args_size));
2969 }
2970 /* Now that the stack is properly aligned, pops can't safely
2971 be deferred during the evaluation of the arguments. */
2972 NO_DEFER_POP;
2973
2974 funexp = rtx_for_function_call (fndecl, addr);
2975
2976 /* Figure out the register where the value, if any, will come back. */
2977 valreg = 0;
2978 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2979 && ! structure_value_addr)
2980 {
2981 if (pcc_struct_value)
2982 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2983 fndecl, (pass == 0));
2984 else
2985 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2986 }
2987
2988 /* Precompute all register parameters. It isn't safe to compute anything
2989 once we have started filling any specific hard regs. */
2990 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2991
2992 #ifdef REG_PARM_STACK_SPACE
2993 /* Save the fixed argument area if it's part of the caller's frame and
2994 is clobbered by argument setup for this call. */
2995 if (ACCUMULATE_OUTGOING_ARGS && pass)
2996 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2997 &low_to_save, &high_to_save);
2998 #endif
2999
3000 /* Now store (and compute if necessary) all non-register parms.
3001 These come before register parms, since they can require block-moves,
3002 which could clobber the registers used for register parms.
3003 Parms which have partial registers are not stored here,
3004 but we do preallocate space here if they want that. */
3005
3006 for (i = 0; i < num_actuals; i++)
3007 if (args[i].reg == 0 || args[i].pass_on_stack)
3008 {
3009 rtx before_arg = get_last_insn ();
3010
3011 if (store_one_arg (&args[i], argblock, flags,
3012 adjusted_args_size.var != 0,
3013 reg_parm_stack_space)
3014 || (pass == 0
3015 && check_sibcall_argument_overlap (before_arg,
3016 &args[i], 1)))
3017 sibcall_failure = 1;
3018
3019 if (flags & ECF_CONST
3020 && args[i].stack
3021 && args[i].value == args[i].stack)
3022 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3023 gen_rtx_USE (VOIDmode,
3024 args[i].value),
3025 call_fusage);
3026 }
3027
3028 /* If we have a parm that is passed in registers but not in memory
3029 and whose alignment does not permit a direct copy into registers,
3030 make a group of pseudos that correspond to each register that we
3031 will later fill. */
3032 if (STRICT_ALIGNMENT)
3033 store_unaligned_arguments_into_pseudos (args, num_actuals);
3034
3035 /* Now store any partially-in-registers parm.
3036 This is the last place a block-move can happen. */
3037 if (reg_parm_seen)
3038 for (i = 0; i < num_actuals; i++)
3039 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3040 {
3041 rtx before_arg = get_last_insn ();
3042
3043 if (store_one_arg (&args[i], argblock, flags,
3044 adjusted_args_size.var != 0,
3045 reg_parm_stack_space)
3046 || (pass == 0
3047 && check_sibcall_argument_overlap (before_arg,
3048 &args[i], 1)))
3049 sibcall_failure = 1;
3050 }
3051
3052 /* If we pushed args in forward order, perform stack alignment
3053 after pushing the last arg. */
3054 if (!PUSH_ARGS_REVERSED && argblock == 0)
3055 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3056 - unadjusted_args_size));
3057
3058 /* If register arguments require space on the stack and stack space
3059 was not preallocated, allocate stack space here for arguments
3060 passed in registers. */
3061 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3062 if (!ACCUMULATE_OUTGOING_ARGS
3063 && must_preallocate == 0 && reg_parm_stack_space > 0)
3064 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3065 #endif
3066
3067 /* Pass the function the address in which to return a
3068 structure value. */
3069 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3070 {
3071 structure_value_addr
3072 = convert_memory_address (Pmode, structure_value_addr);
3073 emit_move_insn (struct_value,
3074 force_reg (Pmode,
3075 force_operand (structure_value_addr,
3076 NULL_RTX)));
3077
3078 if (GET_CODE (struct_value) == REG)
3079 use_reg (&call_fusage, struct_value);
3080 }
3081
3082 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3083 reg_parm_seen, pass == 0);
3084
3085 load_register_parameters (args, num_actuals, &call_fusage, flags,
3086 pass == 0, &sibcall_failure);
3087
3088 /* Perform postincrements before actually calling the function. */
3089 emit_queue ();
3090
3091 /* Save a pointer to the last insn before the call, so that we can
3092 later safely search backwards to find the CALL_INSN. */
3093 before_call = get_last_insn ();
3094
3095 /* Set up next argument register. For sibling calls on machines
3096 with register windows this should be the incoming register. */
3097 #ifdef FUNCTION_INCOMING_ARG
3098 if (pass == 0)
3099 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3100 void_type_node, 1);
3101 else
3102 #endif
3103 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3104 void_type_node, 1);
3105
3106 /* All arguments and registers used for the call must be set up by
3107 now! */
3108
3109 /* Stack must be properly aligned now. */
3110 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3111 abort ();
3112
3113 /* Generate the actual call instruction. */
3114 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3115 adjusted_args_size.constant, struct_value_size,
3116 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3117 flags, & args_so_far);
3118
3119 /* If call is cse'able, make appropriate pair of reg-notes around it.
3120 Test valreg so we don't crash; may safely ignore `const'
3121 if return type is void. Disable for PARALLEL return values, because
3122 we have no way to move such values into a pseudo register. */
3123 if (pass && (flags & ECF_LIBCALL_BLOCK))
3124 {
3125 rtx insns;
3126 rtx insn;
3127 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
3128
3129 insns = get_insns ();
3130
3131 /* Expansion of block moves may have introduced a loop, which must
3132 not appear inside a libcall block. */
3133 for (insn = insns; insn; insn = NEXT_INSN (insn))
3134 if (GET_CODE (insn) == JUMP_INSN)
3135 failed = true;
3136
3137 if (failed)
3138 {
3139 end_sequence ();
3140 emit_insn (insns);
3141 }
3142 else
3143 {
3144 rtx note = 0;
3145 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3146
3147 /* Mark the return value as a pointer if needed. */
3148 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3149 mark_reg_pointer (temp,
3150 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3151
3152 end_sequence ();
3153 if (flag_unsafe_math_optimizations
3154 && fndecl
3155 && DECL_BUILT_IN (fndecl)
3156 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
3157 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
3158 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
3159 note = gen_rtx_fmt_e (SQRT,
3160 GET_MODE (temp),
3161 args[0].initial_value);
3162 else
3163 {
3164 /* Construct an "equal form" for the value which
3165 mentions all the arguments in order as well as
3166 the function name. */
3167 for (i = 0; i < num_actuals; i++)
3168 note = gen_rtx_EXPR_LIST (VOIDmode,
3169 args[i].initial_value, note);
3170 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3171
3172 if (flags & ECF_PURE)
3173 note = gen_rtx_EXPR_LIST (VOIDmode,
3174 gen_rtx_USE (VOIDmode,
3175 gen_rtx_MEM (BLKmode,
3176 gen_rtx_SCRATCH (VOIDmode))),
3177 note);
3178 }
3179 emit_libcall_block (insns, temp, valreg, note);
3180
3181 valreg = temp;
3182 }
3183 }
3184 else if (pass && (flags & ECF_MALLOC))
3185 {
3186 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3187 rtx last, insns;
3188
3189 /* The return value from a malloc-like function is a pointer. */
3190 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3191 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3192
3193 emit_move_insn (temp, valreg);
3194
3195 /* The return value from a malloc-like function cannot alias
3196 anything else. */
3197 last = get_last_insn ();
3198 REG_NOTES (last) =
3199 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3200
3201 /* Write out the sequence. */
3202 insns = get_insns ();
3203 end_sequence ();
3204 emit_insn (insns);
3205 valreg = temp;
3206 }
3207
3208 /* For calls to `setjmp', etc., inform flow.c it should complain
3209 if nonvolatile values are live. For functions that cannot return,
3210 inform flow that control does not fall through. */
3211
3212 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3213 {
3214 /* The barrier must be emitted
3215 immediately after the CALL_INSN. Some ports emit more
3216 than just a CALL_INSN above, so we must search for it here. */
3217
3218 rtx last = get_last_insn ();
3219 while (GET_CODE (last) != CALL_INSN)
3220 {
3221 last = PREV_INSN (last);
3222 /* There was no CALL_INSN? */
3223 if (last == before_call)
3224 abort ();
3225 }
3226
3227 emit_barrier_after (last);
3228
3229 /* Stack adjustments after a noreturn call are dead code.
3230 However when NO_DEFER_POP is in effect, we must preserve
3231 stack_pointer_delta. */
3232 if (inhibit_defer_pop == 0)
3233 {
3234 stack_pointer_delta = old_stack_allocated;
3235 pending_stack_adjust = 0;
3236 }
3237 }
3238
3239 if (flags & ECF_LONGJMP)
3240 current_function_calls_longjmp = 1;
3241
3242 /* If value type not void, return an rtx for the value. */
3243
3244 /* If there are cleanups to be called, don't use a hard reg as target.
3245 We need to double check this and see if it matters anymore. */
3246 if (any_pending_cleanups ())
3247 {
3248 if (target && REG_P (target)
3249 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3250 target = 0;
3251 sibcall_failure = 1;
3252 }
3253
3254 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3255 || ignore)
3256 target = const0_rtx;
3257 else if (structure_value_addr)
3258 {
3259 if (target == 0 || GET_CODE (target) != MEM)
3260 {
3261 target
3262 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3263 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3264 structure_value_addr));
3265 set_mem_attributes (target, exp, 1);
3266 }
3267 }
3268 else if (pcc_struct_value)
3269 {
3270 /* This is the special C++ case where we need to
3271 know what the true target was. We take care to
3272 never use this value more than once in one expression. */
3273 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3274 copy_to_reg (valreg));
3275 set_mem_attributes (target, exp, 1);
3276 }
3277 /* Handle calls that return values in multiple non-contiguous locations.
3278 The Irix 6 ABI has examples of this. */
3279 else if (GET_CODE (valreg) == PARALLEL)
3280 {
3281 /* The second condition is added because "target" is freed at the
3282 end of "pass0" for -O2 when a call is made to
3283 expand_end_target_temps (). Its "in_use" flag has been set
3284 to false, so allocate a new temp. */
3285 if (target == 0 || (pass == 1 && target == temp_target))
3286 {
3287 /* This will only be assigned once, so it can be readonly. */
3288 tree nt = build_qualified_type (TREE_TYPE (exp),
3289 (TYPE_QUALS (TREE_TYPE (exp))
3290 | TYPE_QUAL_CONST));
3291
3292 target = assign_temp (nt, 0, 1, 1);
3293 temp_target = target;
3294 preserve_temp_slots (target);
3295 }
3296
3297 if (! rtx_equal_p (target, valreg))
3298 emit_group_store (target, valreg, TREE_TYPE (exp),
3299 int_size_in_bytes (TREE_TYPE (exp)));
3300
3301 /* We cannot support sibling calls for this case. */
3302 sibcall_failure = 1;
3303 }
3304 else if (target
3305 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3306 && GET_MODE (target) == GET_MODE (valreg))
3307 {
3308 /* TARGET and VALREG cannot be equal at this point because the
3309 latter would not have REG_FUNCTION_VALUE_P true, while the
3310 former would if it were referring to the same register.
3311
3312 If they refer to the same register, this move will be a no-op,
3313 except when function inlining is being done. */
3314 emit_move_insn (target, valreg);
3315
3316 /* If we are setting a MEM, this code must be executed. Since it is
3317 emitted after the call insn, sibcall optimization cannot be
3318 performed in that case. */
3319 if (GET_CODE (target) == MEM)
3320 sibcall_failure = 1;
3321 }
3322 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3323 {
3324 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3325
3326 /* We cannot support sibling calls for this case. */
3327 sibcall_failure = 1;
3328 }
3329 else
3330 {
3331 if (shift_returned_value (TREE_TYPE (exp), &valreg))
3332 sibcall_failure = 1;
3333
3334 target = copy_to_reg (valreg);
3335 }
3336
3337 if (targetm.calls.promote_function_return(funtype))
3338 {
3339 /* If we promoted this return value, make the proper SUBREG. TARGET
3340 might be const0_rtx here, so be careful. */
3341 if (GET_CODE (target) == REG
3342 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3343 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3344 {
3345 tree type = TREE_TYPE (exp);
3346 int unsignedp = TREE_UNSIGNED (type);
3347 int offset = 0;
3348
3349 /* If we don't promote as expected, something is wrong. */
3350 if (GET_MODE (target)
3351 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3352 abort ();
3353
3354 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3355 && GET_MODE_SIZE (GET_MODE (target))
3356 > GET_MODE_SIZE (TYPE_MODE (type)))
3357 {
3358 offset = GET_MODE_SIZE (GET_MODE (target))
3359 - GET_MODE_SIZE (TYPE_MODE (type));
3360 if (! BYTES_BIG_ENDIAN)
3361 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3362 else if (! WORDS_BIG_ENDIAN)
3363 offset %= UNITS_PER_WORD;
3364 }
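/* A worked instance of the offset computation above (hypothetical target
   parameters): on a big-endian 64-bit word machine (UNITS_PER_WORD == 8,
   WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN both set), an SImode value
   promoted to DImode gives offset == 8 - 4 == 4, and neither adjustment
   applies, so the SUBREG below selects the low-order four bytes where
   the promoted value actually lives.  */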
3365 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3366 SUBREG_PROMOTED_VAR_P (target) = 1;
3367 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3368 }
3369 }
3370
3371 /* If size of args is variable or this was a constructor call for a stack
3372 argument, restore saved stack-pointer value. */
3373
3374 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3375 {
3376 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3377 stack_pointer_delta = old_stack_pointer_delta;
3378 pending_stack_adjust = old_pending_adj;
3379 stack_arg_under_construction = old_stack_arg_under_construction;
3380 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3381 stack_usage_map = initial_stack_usage_map;
3382 sibcall_failure = 1;
3383 }
3384 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3385 {
3386 #ifdef REG_PARM_STACK_SPACE
3387 if (save_area)
3388 restore_fixed_argument_area (save_area, argblock,
3389 high_to_save, low_to_save);
3390 #endif
3391
3392 /* If we saved any argument areas, restore them. */
3393 for (i = 0; i < num_actuals; i++)
3394 if (args[i].save_area)
3395 {
3396 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3397 rtx stack_area
3398 = gen_rtx_MEM (save_mode,
3399 memory_address (save_mode,
3400 XEXP (args[i].stack_slot, 0)));
3401
3402 if (save_mode != BLKmode)
3403 emit_move_insn (stack_area, args[i].save_area);
3404 else
3405 emit_block_move (stack_area, args[i].save_area,
3406 GEN_INT (args[i].locate.size.constant),
3407 BLOCK_OP_CALL_PARM);
3408 }
3409
3410 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3411 stack_usage_map = initial_stack_usage_map;
3412 }
3413
3414 /* If this was alloca, record the new stack level for nonlocal gotos.
3415 Check for the handler slots since we might not have a save area
3416 for non-local gotos. */
3417
3418 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3419 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3420
3421 /* Free up storage we no longer need. */
3422 for (i = 0; i < num_actuals; ++i)
3423 if (args[i].aligned_regs)
3424 free (args[i].aligned_regs);
3425
3426 if (pass == 0)
3427 {
3428 /* Undo the fake expand_start_target_temps we did earlier. If
3429 there had been any cleanups created, we've already set
3430 sibcall_failure. */
3431 expand_end_target_temps ();
3432 }
3433
3434 /* If this function is returning into a memory location marked as
3435 readonly, it means it is initializing that location. We normally treat
3436 functions as not clobbering such locations, so we need to specify that
3437 this one does. We do this by adding the appropriate CLOBBER to the
3438 CALL_INSN function usage list. This cannot be done by emitting a
3439 standalone CLOBBER after the call because the latter would be ignored
3440 by at least the delay slot scheduling pass. We do this now instead of
3441 adding to call_fusage before the call to emit_call_1 because TARGET
3442 may be modified in the meantime. */
3443 if (structure_value_addr != 0 && target != 0
3444 && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
3445 add_function_usage_to
3446 (last_call_insn (),
3447 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
3448 NULL_RTX));
3449
3450 insns = get_insns ();
3451 end_sequence ();
3452
3453 if (pass == 0)
3454 {
3455 tail_call_insns = insns;
3456
3457 /* Restore the pending stack adjustment now that we have
3458 finished generating the sibling call sequence. */
3459
3460 pending_stack_adjust = save_pending_stack_adjust;
3461 stack_pointer_delta = save_stack_pointer_delta;
3462
3463 /* Prepare arg structure for next iteration. */
3464 for (i = 0; i < num_actuals; i++)
3465 {
3466 args[i].value = 0;
3467 args[i].aligned_regs = 0;
3468 args[i].stack = 0;
3469 }
3470
3471 sbitmap_free (stored_args_map);
3472 }
3473 else
3474 {
3475 normal_call_insns = insns;
3476
3477 /* Verify that we've deallocated all the stack we used. */
3478 if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
3479 && old_stack_allocated != stack_pointer_delta
3480 - pending_stack_adjust)
3481 abort ();
3482 }
3483
3484 /* If something prevents making this a sibling call,
3485 zero out the sequence. */
3486 if (sibcall_failure)
3487 tail_call_insns = NULL_RTX;
3488 }
3489
3490 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3491 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3492 can happen if the arguments to this function call an inline
3493 function whose expansion contains another CALL_PLACEHOLDER.
3494
3495 If there are any C_Ps in any of these sequences, replace them
3496 with their normal call. */
3497
3498 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3499 if (GET_CODE (insn) == CALL_INSN
3500 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3501 replace_call_placeholder (insn, sibcall_use_normal);
3502
3503 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3504 if (GET_CODE (insn) == CALL_INSN
3505 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3506 replace_call_placeholder (insn, sibcall_use_normal);
3507
3508 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3509 if (GET_CODE (insn) == CALL_INSN
3510 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3511 replace_call_placeholder (insn, sibcall_use_normal);
3512
3513 /* If this was a potential tail recursion site, then emit a
3514 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3515 One of them will be selected later. */
3516 if (tail_recursion_insns || tail_call_insns)
3517 {
3518 /* The tail recursion label must be kept around. We could expose
3519 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3520 and makes determining true tail recursion sites difficult.
3521
3522 So we set LABEL_PRESERVE_P here, then clear it when we select
3523 one of the call sequences after rtl generation is complete. */
3524 if (tail_recursion_insns)
3525 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3526 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3527 tail_call_insns,
3528 tail_recursion_insns,
3529 tail_recursion_label));
3530 }
3531 else
3532 emit_insn (normal_call_insns);
3533
3534 currently_expanding_call--;
3535
3536 /* If this function returns with the stack pointer depressed, ensure
3537 this block saves and restores the stack pointer, show it was
3538 changed, and adjust for any outgoing arg space. */
3539 if (flags & ECF_SP_DEPRESSED)
3540 {
3541 clear_pending_stack_adjust ();
3542 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3543 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3544 save_stack_pointer ();
3545 }
3546
3547 return target;
3548 }
3549
3550 /* Traverse an argument list in VALUES and expand all complex
3551 arguments into their components. */
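/* Illustrative example (assuming a call F (A, Z, B) where Z has
   COMPLEX_TYPE): the argument list (A, Z, B) is rewritten as
   (A, REALPART_EXPR <Z'>, IMAGPART_EXPR <Z'>, B), where Z' is a
   SAVE_EXPR of Z so the complex operand is evaluated only once.  */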
3552 tree
3553 split_complex_values (tree values)
3554 {
3555 tree p;
3556
3557 values = copy_list (values);
3558
3559 for (p = values; p; p = TREE_CHAIN (p))
3560 {
3561 tree complex_value = TREE_VALUE (p);
3562 tree complex_type;
3563
3564 complex_type = TREE_TYPE (complex_value);
3565 if (!complex_type)
3566 continue;
3567
3568 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3569 {
3570 tree subtype;
3571 tree real, imag, next;
3572
3573 subtype = TREE_TYPE (complex_type);
3574 complex_value = save_expr (complex_value);
3575 real = build1 (REALPART_EXPR, subtype, complex_value);
3576 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3577
3578 TREE_VALUE (p) = real;
3579 next = TREE_CHAIN (p);
3580 imag = build_tree_list (NULL_TREE, imag);
3581 TREE_CHAIN (p) = imag;
3582 TREE_CHAIN (imag) = next;
3583
3584 /* Skip the newly created node. */
3585 p = TREE_CHAIN (p);
3586 }
3587 }
3588
3589 return values;
3590 }
3591
3592 /* Traverse a list of TYPES and expand all complex types into their
3593 components. */
3594 tree
3595 split_complex_types (tree types)
3596 {
3597 tree p;
3598
3599 types = copy_list (types);
3600
3601 for (p = types; p; p = TREE_CHAIN (p))
3602 {
3603 tree complex_type = TREE_VALUE (p);
3604
3605 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3606 {
3607 tree next, imag;
3608
3609 /* Rewrite complex type with component type. */
3610 TREE_VALUE (p) = TREE_TYPE (complex_type);
3611 next = TREE_CHAIN (p);
3612
3613 /* Add another component type for the imaginary part. */
3614 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3615 TREE_CHAIN (p) = imag;
3616 TREE_CHAIN (imag) = next;
3617
3618 /* Skip the newly created node. */
3619 p = TREE_CHAIN (p);
3620 }
3621 }
3622
3623 return types;
3624 }
3625 \f
3626 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3627 The RETVAL parameter specifies whether the return value needs to be saved;
3628 the other parameters are documented in the emit_library_call function below. */
3629
3630 static rtx
3631 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3632 enum libcall_type fn_type,
3633 enum machine_mode outmode, int nargs, va_list p)
3634 {
3635 /* Total size in bytes of all the stack-parms scanned so far. */
3636 struct args_size args_size;
3637 /* Size of arguments before any adjustments (such as rounding). */
3638 struct args_size original_args_size;
3639 int argnum;
3640 rtx fun;
3641 int inc;
3642 int count;
3643 rtx argblock = 0;
3644 CUMULATIVE_ARGS args_so_far;
3645 struct arg
3646 {
3647 rtx value;
3648 enum machine_mode mode;
3649 rtx reg;
3650 int partial;
3651 struct locate_and_pad_arg_data locate;
3652 rtx save_area;
3653 };
3654 struct arg *argvec;
3655 int old_inhibit_defer_pop = inhibit_defer_pop;
3656 rtx call_fusage = 0;
3657 rtx mem_value = 0;
3658 rtx valreg;
3659 int pcc_struct_value = 0;
3660 int struct_value_size = 0;
3661 int flags;
3662 int reg_parm_stack_space = 0;
3663 int needed;
3664 rtx before_call;
3665 tree tfom; /* type_for_mode (outmode, 0) */
3666
3667 #ifdef REG_PARM_STACK_SPACE
3668 /* Define the boundary of the register parm stack space that needs to be
3669 saved, if any. */
3670 int low_to_save, high_to_save;
3671 rtx save_area = 0; /* Place that it is saved. */
3672 #endif
3673
3674 /* Size of the stack reserved for parameter registers. */
3675 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3676 char *initial_stack_usage_map = stack_usage_map;
3677
3678 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3679
3680 #ifdef REG_PARM_STACK_SPACE
3681 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3682 #endif
3683
3684 /* By default, library functions cannot throw. */
3685 flags = ECF_NOTHROW;
3686
3687 switch (fn_type)
3688 {
3689 case LCT_NORMAL:
3690 break;
3691 case LCT_CONST:
3692 flags |= ECF_CONST;
3693 break;
3694 case LCT_PURE:
3695 flags |= ECF_PURE;
3696 break;
3697 case LCT_CONST_MAKE_BLOCK:
3698 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3699 break;
3700 case LCT_PURE_MAKE_BLOCK:
3701 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3702 break;
3703 case LCT_NORETURN:
3704 flags |= ECF_NORETURN;
3705 break;
3706 case LCT_THROW:
3707 flags = ECF_NORETURN;
3708 break;
3709 case LCT_ALWAYS_RETURN:
3710 flags = ECF_ALWAYS_RETURN;
3711 break;
3712 case LCT_RETURNS_TWICE:
3713 flags = ECF_RETURNS_TWICE;
3714 break;
3715 }
3716 fun = orgfun;
3717
3718 /* Ensure current function's preferred stack boundary is at least
3719 what we need. */
3720 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3721 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3722
3723 /* If this kind of value comes back in memory,
3724 decide where in memory it should come back. */
3725 if (outmode != VOIDmode)
3726 {
3727 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3728 if (aggregate_value_p (tfom, 0))
3729 {
3730 #ifdef PCC_STATIC_STRUCT_RETURN
3731 rtx pointer_reg
3732 = hard_function_value (build_pointer_type (tfom), 0, 0);
3733 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3734 pcc_struct_value = 1;
3735 if (value == 0)
3736 value = gen_reg_rtx (outmode);
3737 #else /* not PCC_STATIC_STRUCT_RETURN */
3738 struct_value_size = GET_MODE_SIZE (outmode);
3739 if (value != 0 && GET_CODE (value) == MEM)
3740 mem_value = value;
3741 else
3742 mem_value = assign_temp (tfom, 0, 1, 1);
3743 #endif
3744 /* This call returns a big structure. */
3745 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3746 }
3747 }
3748 else
3749 tfom = void_type_node;
3750
3751 /* ??? Unfinished: must pass the memory address as an argument. */
3752
3753 /* Copy all the libcall-arguments out of the varargs data
3754 and into a vector ARGVEC.
3755
3756 Compute how to pass each argument. We only support a very small subset
3757 of the full argument passing conventions to limit complexity here since
3758 library functions shouldn't have many args. */
3759
3760 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3761 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3762
3763 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3764 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3765 #else
3766 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3767 #endif
3768
3769 args_size.constant = 0;
3770 args_size.var = 0;
3771
3772 count = 0;
3773
3774 /* Now we are about to start emitting insns that can be deleted
3775 if a libcall is deleted. */
3776 if (flags & ECF_LIBCALL_BLOCK)
3777 start_sequence ();
3778
3779 push_temp_slots ();
3780
3781 /* If there's a structure value address to be passed,
3782 either pass it in the special place, or pass it as an extra argument. */
3783 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3784 {
3785 rtx addr = XEXP (mem_value, 0);
3786 nargs++;
3787
3788 /* Make sure it is a reasonable operand for a move or push insn. */
3789 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3790 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3791 addr = force_operand (addr, NULL_RTX);
3792
3793 argvec[count].value = addr;
3794 argvec[count].mode = Pmode;
3795 argvec[count].partial = 0;
3796
3797 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3798 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3799 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3800 abort ();
3801 #endif
3802
3803 locate_and_pad_parm (Pmode, NULL_TREE,
3804 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3805 1,
3806 #else
3807 argvec[count].reg != 0,
3808 #endif
3809 0, NULL_TREE, &args_size, &argvec[count].locate);
3810
3811 if (argvec[count].reg == 0 || argvec[count].partial != 0
3812 || reg_parm_stack_space > 0)
3813 args_size.constant += argvec[count].locate.size.constant;
3814
3815 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3816
3817 count++;
3818 }
3819
3820 for (; count < nargs; count++)
3821 {
3822 rtx val = va_arg (p, rtx);
3823 enum machine_mode mode = va_arg (p, enum machine_mode);
3824
3825 /* We cannot convert the arg value to the mode the library wants here;
3826 must do it earlier where we know the signedness of the arg. */
3827 if (mode == BLKmode
3828 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3829 abort ();
3830
3831 /* There's no need to call protect_from_queue, because
3832 either emit_move_insn or emit_push_insn will do that. */
3833
3834 /* Make sure it is a reasonable operand for a move or push insn. */
3835 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3836 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3837 val = force_operand (val, NULL_RTX);
3838
3839 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3840 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3841 {
3842 rtx slot;
3843 int must_copy = 1
3844 #ifdef FUNCTION_ARG_CALLEE_COPIES
3845 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3846 NULL_TREE, 1)
3847 #endif
3848 ;
3849
3850 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3851 functions, so we have to pretend this isn't such a function. */
3852 if (flags & ECF_LIBCALL_BLOCK)
3853 {
3854 rtx insns = get_insns ();
3855 end_sequence ();
3856 emit_insn (insns);
3857 }
3858 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3859
3860 /* If this was a CONST function, it is now PURE since
3861 it now reads memory. */
3862 if (flags & ECF_CONST)
3863 {
3864 flags &= ~ECF_CONST;
3865 flags |= ECF_PURE;
3866 }
3867
3868 if (GET_CODE (val) == MEM && ! must_copy)
3869 slot = val;
3870 else if (must_copy)
3871 {
3872 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3873 0, 1, 1);
3874 emit_move_insn (slot, val);
3875 }
3876 else
3877 {
3878 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3879
3880 slot
3881 = gen_rtx_MEM (mode,
3882 expand_expr (build1 (ADDR_EXPR,
3883 build_pointer_type (type),
3884 make_tree (type, val)),
3885 NULL_RTX, VOIDmode, 0));
3886 }
3887
3888 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3889 gen_rtx_USE (VOIDmode, slot),
3890 call_fusage);
3891 if (must_copy)
3892 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3893 gen_rtx_CLOBBER (VOIDmode,
3894 slot),
3895 call_fusage);
3896
3897 mode = Pmode;
3898 val = force_operand (XEXP (slot, 0), NULL_RTX);
3899 }
3900 #endif
3901
3902 argvec[count].value = val;
3903 argvec[count].mode = mode;
3904
3905 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3906
3907 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3908 argvec[count].partial
3909 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3910 #else
3911 argvec[count].partial = 0;
3912 #endif
3913
3914 locate_and_pad_parm (mode, NULL_TREE,
3915 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3916 1,
3917 #else
3918 argvec[count].reg != 0,
3919 #endif
3920 argvec[count].partial,
3921 NULL_TREE, &args_size, &argvec[count].locate);
3922
3923 if (argvec[count].locate.size.var)
3924 abort ();
3925
3926 if (argvec[count].reg == 0 || argvec[count].partial != 0
3927 || reg_parm_stack_space > 0)
3928 args_size.constant += argvec[count].locate.size.constant;
3929
3930 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3931 }
3932
3933 /* If this machine requires an external definition for library
3934 functions, write one out. */
3935 assemble_external_libcall (fun);
3936
3937 original_args_size = args_size;
3938 args_size.constant = (((args_size.constant
3939 + stack_pointer_delta
3940 + STACK_BYTES - 1)
3941 / STACK_BYTES
3942 * STACK_BYTES)
3943 - stack_pointer_delta);
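/* A worked instance of the rounding above, assuming STACK_BYTES == 16:
   with stack_pointer_delta == 4 and an initial args_size.constant of 20,
   (20 + 4 + 15) / 16 * 16 - 4 == 28, so delta plus argument size comes
   to 32 and the stack pointer stays aligned at the call.  */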
3944
3945 args_size.constant = MAX (args_size.constant,
3946 reg_parm_stack_space);
3947
3948 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3949 args_size.constant -= reg_parm_stack_space;
3950 #endif
3951
3952 if (args_size.constant > current_function_outgoing_args_size)
3953 current_function_outgoing_args_size = args_size.constant;
3954
3955 if (ACCUMULATE_OUTGOING_ARGS)
3956 {
3957 /* Since the stack pointer will never be pushed, it is possible for
3958 the evaluation of a parm to clobber something we have already
3959 written to the stack. Since most function calls on RISC machines
3960 do not use the stack, this is uncommon, but must work correctly.
3961
3962 Therefore, we save any area of the stack that was already written
3963 and that we are using. Here we set up to do this by making a new
3964 stack usage map from the old one.
3965
3966 Another approach might be to try to reorder the argument
3967 evaluations to avoid this conflicting stack usage. */
3968
3969 needed = args_size.constant;
3970
3971 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3972 /* Since we will be writing into the entire argument area, the
3973 map must be allocated for its entire size, not just the part that
3974 is the responsibility of the caller. */
3975 needed += reg_parm_stack_space;
3976 #endif
3977
3978 #ifdef ARGS_GROW_DOWNWARD
3979 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3980 needed + 1);
3981 #else
3982 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3983 needed);
3984 #endif
3985 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3986
3987 if (initial_highest_arg_in_use)
3988 memcpy (stack_usage_map, initial_stack_usage_map,
3989 initial_highest_arg_in_use);
3990
3991 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3992 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3993 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3994 needed = 0;
3995
3996 /* We must be careful to use virtual regs before they're instantiated,
3997 and real regs afterwards. Loop optimization, for example, can create
3998 new libcalls after we've instantiated the virtual regs, and if we
3999 use virtuals anyway, they won't match the rtl patterns. */
4000
4001 if (virtuals_instantiated)
4002 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
4003 else
4004 argblock = virtual_outgoing_args_rtx;
4005 }
4006 else
4007 {
4008 if (!PUSH_ARGS)
4009 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
4010 }
4011
4012 /* If we push args individually in reverse order, perform stack alignment
4013 before the first push (the last arg). */
4014 if (argblock == 0 && PUSH_ARGS_REVERSED)
4015 anti_adjust_stack (GEN_INT (args_size.constant
4016 - original_args_size.constant));
4017
4018 if (PUSH_ARGS_REVERSED)
4019 {
4020 inc = -1;
4021 argnum = nargs - 1;
4022 }
4023 else
4024 {
4025 inc = 1;
4026 argnum = 0;
4027 }
4028
4029 #ifdef REG_PARM_STACK_SPACE
4030 if (ACCUMULATE_OUTGOING_ARGS)
4031 {
4032 /* The argument list is the property of the called routine and it
4033 may clobber it. If the fixed area has been used for previous
4034 parameters, we must save and restore it. */
4035 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4036 &low_to_save, &high_to_save);
4037 }
4038 #endif
4039
4040 /* Push the args that need to be pushed. */
4041
4042 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4043 are to be pushed. */
4044 for (count = 0; count < nargs; count++, argnum += inc)
4045 {
4046 enum machine_mode mode = argvec[argnum].mode;
4047 rtx val = argvec[argnum].value;
4048 rtx reg = argvec[argnum].reg;
4049 int partial = argvec[argnum].partial;
4050 int lower_bound = 0, upper_bound = 0, i;
4051
4052 if (! (reg != 0 && partial == 0))
4053 {
4054 if (ACCUMULATE_OUTGOING_ARGS)
4055 {
4056 /* If this is being stored into a pre-allocated, fixed-size,
4057 stack area, save any previous data at that location. */
4058
4059 #ifdef ARGS_GROW_DOWNWARD
4060 /* stack_slot is negative, but we want to index stack_usage_map
4061 with positive values. */
4062 upper_bound = -argvec[argnum].locate.offset.constant + 1;
4063 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4064 #else
4065 lower_bound = argvec[argnum].locate.offset.constant;
4066 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4067 #endif
4068
4069 i = lower_bound;
4070 /* Don't worry about things in the fixed argument area;
4071 it has already been saved. */
4072 if (i < reg_parm_stack_space)
4073 i = reg_parm_stack_space;
4074 while (i < upper_bound && stack_usage_map[i] == 0)
4075 i++;
4076
4077 if (i < upper_bound)
4078 {
4079 /* We need to make a save area. */
4080 unsigned int size
4081 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4082 enum machine_mode save_mode
4083 = mode_for_size (size, MODE_INT, 1);
4084 rtx adr
4085 = plus_constant (argblock,
4086 argvec[argnum].locate.offset.constant);
4087 rtx stack_area
4088 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4089
4090 if (save_mode == BLKmode)
4091 {
4092 argvec[argnum].save_area
4093 = assign_stack_temp (BLKmode,
4094 argvec[argnum].locate.size.constant,
4095 0);
4096
4097 emit_block_move (validize_mem (argvec[argnum].save_area),
4098 stack_area,
4099 GEN_INT (argvec[argnum].locate.size.constant),
4100 BLOCK_OP_CALL_PARM);
4101 }
4102 else
4103 {
4104 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4105
4106 emit_move_insn (argvec[argnum].save_area, stack_area);
4107 }
4108 }
4109 }
4110
4111 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
4112 partial, reg, 0, argblock,
4113 GEN_INT (argvec[argnum].locate.offset.constant),
4114 reg_parm_stack_space,
4115 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
4116
4117 /* Now mark the segment we just used. */
4118 if (ACCUMULATE_OUTGOING_ARGS)
4119 for (i = lower_bound; i < upper_bound; i++)
4120 stack_usage_map[i] = 1;
4121
4122 NO_DEFER_POP;
4123 }
4124 }
4125
4126 /* If we pushed args in forward order, perform stack alignment
4127 after pushing the last arg. */
4128 if (argblock == 0 && !PUSH_ARGS_REVERSED)
4129 anti_adjust_stack (GEN_INT (args_size.constant
4130 - original_args_size.constant));
4131
4132 if (PUSH_ARGS_REVERSED)
4133 argnum = nargs - 1;
4134 else
4135 argnum = 0;
4136
4137 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
4138
4139 /* Now load any reg parms into their regs. */
4140
4141 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4142 are to be pushed. */
4143 for (count = 0; count < nargs; count++, argnum += inc)
4144 {
4145 rtx val = argvec[argnum].value;
4146 rtx reg = argvec[argnum].reg;
4147 int partial = argvec[argnum].partial;
4148
4149 /* Handle calls that pass values in multiple non-contiguous
4150 locations. The PA64 has examples of this for library calls. */
4151 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4152 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (GET_MODE (val)));
4153 else if (reg != 0 && partial == 0)
4154 emit_move_insn (reg, val);
4155
4156 NO_DEFER_POP;
4157 }
4158
4159 /* Any regs containing parms remain in use through the call. */
4160 for (count = 0; count < nargs; count++)
4161 {
4162 rtx reg = argvec[count].reg;
4163 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4164 use_group_regs (&call_fusage, reg);
4165 else if (reg != 0)
4166 use_reg (&call_fusage, reg);
4167 }
4168
4169 /* Pass the function the address in which to return a structure value. */
4170 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4171 {
4172 emit_move_insn (struct_value,
4173 force_reg (Pmode,
4174 force_operand (XEXP (mem_value, 0),
4175 NULL_RTX)));
4176 if (GET_CODE (struct_value) == REG)
4177 use_reg (&call_fusage, struct_value);
4178 }
4179
4180 /* Don't allow popping to be deferred, since then
4181 cse'ing of library calls could delete a call and leave the pop. */
4182 NO_DEFER_POP;
4183 valreg = (mem_value == 0 && outmode != VOIDmode
4184 ? hard_libcall_value (outmode) : NULL_RTX);
4185
4186 /* Stack must be properly aligned now. */
4187 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4188 abort ();
4189
4190 before_call = get_last_insn ();
4191
4192 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4193 will set inhibit_defer_pop to that value. */
4194 /* The return type is needed to decide how many bytes the function pops.
4195 Signedness plays no role in that, so for simplicity, we pretend it's
4196 always signed. We also assume that the list of arguments passed has
4197 no impact, so we pretend it is unknown. */
4198
4199 emit_call_1 (fun,
4200 get_identifier (XSTR (orgfun, 0)),
4201 build_function_type (tfom, NULL_TREE),
4202 original_args_size.constant, args_size.constant,
4203 struct_value_size,
4204 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4205 valreg,
4206 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4207
4208 /* For calls to `setjmp', etc., inform flow.c it should complain
4209 if nonvolatile values are live. For functions that cannot return,
4210 inform flow that control does not fall through. */
4211
4212 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4213 {
4214 /* The barrier note must be emitted
4215 immediately after the CALL_INSN. Some ports emit more than
4216 just a CALL_INSN above, so we must search for it here. */
4217
4218 rtx last = get_last_insn ();
4219 while (GET_CODE (last) != CALL_INSN)
4220 {
4221 last = PREV_INSN (last);
4222 /* There was no CALL_INSN? */
4223 if (last == before_call)
4224 abort ();
4225 }
4226
4227 emit_barrier_after (last);
4228 }
4229
4230 /* Now restore inhibit_defer_pop to its actual original value. */
4231 OK_DEFER_POP;
4232
4233 /* If call is cse'able, make appropriate pair of reg-notes around it.
4234 Test valreg so we don't crash; may safely ignore `const'
4235 if return type is void. Disable for PARALLEL return values, because
4236 we have no way to move such values into a pseudo register. */
4237 if (flags & ECF_LIBCALL_BLOCK)
4238 {
4239 rtx insns;
4240
4241 if (valreg == 0)
4242 {
4243 insns = get_insns ();
4244 end_sequence ();
4245 emit_insn (insns);
4246 }
4247 else
4248 {
4249 rtx note = 0;
4250 rtx temp;
4251 int i;
4252
4253 if (GET_CODE (valreg) == PARALLEL)
4254 {
4255 temp = gen_reg_rtx (outmode);
4256 emit_group_store (temp, valreg, NULL_TREE,
4257 GET_MODE_SIZE (outmode));
4258 valreg = temp;
4259 }
4260
4261 temp = gen_reg_rtx (GET_MODE (valreg));
4262
4263 /* Construct an "equal form" for the value which mentions all the
4264 arguments in order as well as the function name. */
4265 for (i = 0; i < nargs; i++)
4266 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4267 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4268
4269 insns = get_insns ();
4270 end_sequence ();
4271
4272 if (flags & ECF_PURE)
4273 note = gen_rtx_EXPR_LIST (VOIDmode,
4274 gen_rtx_USE (VOIDmode,
4275 gen_rtx_MEM (BLKmode,
4276 gen_rtx_SCRATCH (VOIDmode))),
4277 note);
4278
4279 emit_libcall_block (insns, temp, valreg, note);
4280
4281 valreg = temp;
4282 }
4283 }
4284 pop_temp_slots ();
4285
4286 /* Copy the value to the right place. */
4287 if (outmode != VOIDmode && retval)
4288 {
4289 if (mem_value)
4290 {
4291 if (value == 0)
4292 value = mem_value;
4293 if (value != mem_value)
4294 emit_move_insn (value, mem_value);
4295 }
4296 else if (GET_CODE (valreg) == PARALLEL)
4297 {
4298 if (value == 0)
4299 value = gen_reg_rtx (outmode);
4300 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4301 }
4302 else if (value != 0)
4303 emit_move_insn (value, valreg);
4304 else
4305 value = valreg;
4306 }
4307
4308 if (ACCUMULATE_OUTGOING_ARGS)
4309 {
4310 #ifdef REG_PARM_STACK_SPACE
4311 if (save_area)
4312 restore_fixed_argument_area (save_area, argblock,
4313 high_to_save, low_to_save);
4314 #endif
4315
4316 /* If we saved any argument areas, restore them. */
4317 for (count = 0; count < nargs; count++)
4318 if (argvec[count].save_area)
4319 {
4320 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4321 rtx adr = plus_constant (argblock,
4322 argvec[count].locate.offset.constant);
4323 rtx stack_area = gen_rtx_MEM (save_mode,
4324 memory_address (save_mode, adr));
4325
4326 if (save_mode == BLKmode)
4327 emit_block_move (stack_area,
4328 validize_mem (argvec[count].save_area),
4329 GEN_INT (argvec[count].locate.size.constant),
4330 BLOCK_OP_CALL_PARM);
4331 else
4332 emit_move_insn (stack_area, argvec[count].save_area);
4333 }
4334
4335 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4336 stack_usage_map = initial_stack_usage_map;
4337 }
4338
4339 return value;
4340
4341 }
4342 \f
4343 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4344 (emitting the queue unless NO_QUEUE is nonzero),
4345 for a value of mode OUTMODE,
4346 with NARGS different arguments, passed as alternating rtx values
4347 and machine_modes to convert them to.
4348 The rtx values should have been passed through protect_from_queue already.
4349
4350 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4351 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4352 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4353 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
4354 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
4355 or other LCT_ value for other types of library calls. */
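/* Illustrative only; the libfunc and operands below are placeholders, but
   they show the alternating value/mode convention of the variable part:

     emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
                        XEXP (object, 0), Pmode,
                        const0_rtx, TYPE_MODE (integer_type_node),
                        size, TYPE_MODE (sizetype));  */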
4356
4357 void
4358 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4359 enum machine_mode outmode, int nargs, ...)
4360 {
4361 va_list p;
4362
4363 va_start (p, nargs);
4364 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4365 va_end (p);
4366 }
4367 \f
4368 /* Like emit_library_call except that an extra argument, VALUE,
4369 comes second and says where to store the result.
4370 (If VALUE is zero, this function chooses a convenient way
4371 to return the value.)
4372
4373 This function returns an rtx for where the value is to be found.
4374 If VALUE is nonzero, VALUE is returned. */
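/* Illustrative use (LIBFUNC, X0, X1 and MODE are placeholders): expanders
   such as the optab code emit a two-operand libcall and fetch the result:

     rtx result = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
                                           mode, 2, x0, mode, x1, mode);

   RESULT is VALUE when VALUE is nonzero, otherwise a location chosen
   by this function.  */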
4375
4376 rtx
4377 emit_library_call_value (rtx orgfun, rtx value,
4378 enum libcall_type fn_type,
4379 enum machine_mode outmode, int nargs, ...)
4380 {
4381 rtx result;
4382 va_list p;
4383
4384 va_start (p, nargs);
4385 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4386 nargs, p);
4387 va_end (p);
4388
4389 return result;
4390 }
4391 \f
4392 /* Store a single argument for a function call
4393 into the register or memory area where it must be passed.
4394 *ARG describes the argument value and where to pass it.
4395
4396 ARGBLOCK is the address of the stack-block for all the arguments,
4397 or 0 on a machine where arguments are pushed individually.
4398
4399 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4400 so must be careful about how the stack is used.
4401
4402 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4403 argument stack. This is used when ACCUMULATE_OUTGOING_ARGS is in effect,
4404 to indicate that we need not worry about saving and restoring the stack.
4405
4406 FNDECL is the declaration of the function we are calling.
4407
4408 Return nonzero if this arg should cause sibcall failure,
4409 zero otherwise. */
4410
4411 static int
4412 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4413 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4414 {
4415 tree pval = arg->tree_value;
4416 rtx reg = 0;
4417 int partial = 0;
4418 int used = 0;
4419 int i, lower_bound = 0, upper_bound = 0;
4420 int sibcall_failure = 0;
4421
4422 if (TREE_CODE (pval) == ERROR_MARK)
4423 return 1;
4424
4425 /* Push a new temporary level for any temporaries we make for
4426 this argument. */
4427 push_temp_slots ();
4428
4429 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4430 {
4431 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4432 save any previous data at that location. */
4433 if (argblock && ! variable_size && arg->stack)
4434 {
4435 #ifdef ARGS_GROW_DOWNWARD
4436 /* stack_slot is negative, but we want to index stack_usage_map
4437 with positive values. */
4438 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4439 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4440 else
4441 upper_bound = 0;
4442
4443 lower_bound = upper_bound - arg->locate.size.constant;
4444 #else
4445 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4446 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4447 else
4448 lower_bound = 0;
4449
4450 upper_bound = lower_bound + arg->locate.size.constant;
4451 #endif
4452
4453 i = lower_bound;
4454 /* Don't worry about things in the fixed argument area;
4455 it has already been saved. */
4456 if (i < reg_parm_stack_space)
4457 i = reg_parm_stack_space;
4458 while (i < upper_bound && stack_usage_map[i] == 0)
4459 i++;
4460
4461 if (i < upper_bound)
4462 {
4463 /* We need to make a save area. */
4464 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4465 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4466 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4467 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4468
4469 if (save_mode == BLKmode)
4470 {
4471 tree ot = TREE_TYPE (arg->tree_value);
4472 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4473 | TYPE_QUAL_CONST));
4474
4475 arg->save_area = assign_temp (nt, 0, 1, 1);
4476 preserve_temp_slots (arg->save_area);
4477 emit_block_move (validize_mem (arg->save_area), stack_area,
4478 expr_size (arg->tree_value),
4479 BLOCK_OP_CALL_PARM);
4480 }
4481 else
4482 {
4483 arg->save_area = gen_reg_rtx (save_mode);
4484 emit_move_insn (arg->save_area, stack_area);
4485 }
4486 }
4487 }
4488 }
4489
4490 /* If this isn't going to be placed on both the stack and in registers,
4491 set up the register and number of words. */
4492 if (! arg->pass_on_stack)
4493 {
4494 if (flags & ECF_SIBCALL)
4495 reg = arg->tail_call_reg;
4496 else
4497 reg = arg->reg;
4498 partial = arg->partial;
4499 }
4500
4501 if (reg != 0 && partial == 0)
4502 /* Being passed entirely in a register. We shouldn't be called in
4503 this case. */
4504 abort ();
4505
4506 /* If this arg needs special alignment, don't load the registers
4507 here. */
4508 if (arg->n_aligned_regs != 0)
4509 reg = 0;
4510
4511 /* If this is being passed partially in a register, we can't evaluate
4512 it directly into its stack slot. Otherwise, we can. */
4513 if (arg->value == 0)
4514 {
4515 /* stack_arg_under_construction is nonzero if a function argument is
4516 being evaluated directly into the outgoing argument list and
4517 expand_call must take special action to preserve the argument list
4518 if it is called recursively.
4519
4520 For scalar function arguments stack_usage_map is sufficient to
4521 determine which stack slots must be saved and restored. Scalar
4522 arguments in general have pass_on_stack == 0.
4523
4524 If this argument is initialized by a function which takes the
4525 address of the argument (a C++ constructor or a C function
4526 returning a BLKmode structure), then stack_usage_map is
4527 insufficient and expand_call must push the stack around the
4528 function call. Such arguments have pass_on_stack == 1.
4529
4530 Note that it is always safe to set stack_arg_under_construction,
4531 but this generates suboptimal code if set when not needed. */
4532
4533 if (arg->pass_on_stack)
4534 stack_arg_under_construction++;
4535
4536 arg->value = expand_expr (pval,
4537 (partial
4538 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4539 ? NULL_RTX : arg->stack,
4540 VOIDmode, EXPAND_STACK_PARM);
4541
4542 /* If we are promoting the object (or if for any other reason the mode
4543 doesn't agree), convert the mode. */
4544
4545 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4546 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4547 arg->value, arg->unsignedp);
4548
4549 if (arg->pass_on_stack)
4550 stack_arg_under_construction--;
4551 }
4552
4553 /* Don't allow anything left on stack from computation
4554 of argument to alloca. */
4555 if (flags & ECF_MAY_BE_ALLOCA)
4556 do_pending_stack_adjust ();
4557
4558 if (arg->value == arg->stack)
4559 /* If the value is already in the stack slot, we are done. */
4560 ;
4561 else if (arg->mode != BLKmode)
4562 {
4563 int size;
4564
4565 /* Argument is a scalar, not entirely passed in registers.
4566 (If part is passed in registers, arg->partial says how much
4567 and emit_push_insn will take care of putting it there.)
4568
4569 Push it, and if its size is less than the
4570 amount of space allocated to it,
4571 also bump stack pointer by the additional space.
4572 Note that in C the default argument promotions
4573 will prevent such mismatches. */
4574
4575 size = GET_MODE_SIZE (arg->mode);
4576 /* Compute how much space the push instruction will push.
4577 On many machines, pushing a byte will advance the stack
4578 pointer by a halfword. */
4579 #ifdef PUSH_ROUNDING
4580 size = PUSH_ROUNDING (size);
4581 #endif
4582 used = size;
4583
4584 /* Compute how much space the argument should get:
4585 round up to a multiple of the alignment for arguments. */
4586 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4587 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4588 / (PARM_BOUNDARY / BITS_PER_UNIT))
4589 * (PARM_BOUNDARY / BITS_PER_UNIT));
4590
4591 /* This isn't already where we want it on the stack, so put it there.
4592 This can either be done with push or copy insns. */
4593 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4594 PARM_BOUNDARY, partial, reg, used - size, argblock,
4595 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4596 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4597
4598 /* Unless this is a partially-in-register argument, the argument is now
4599 in the stack. */
4600 if (partial == 0)
4601 arg->value = arg->stack;
4602 }
4603 else
4604 {
4605 /* BLKmode, at least partly to be pushed. */
4606
4607 unsigned int parm_align;
4608 int excess;
4609 rtx size_rtx;
4610
4611 /* Pushing a nonscalar.
4612 If part is passed in registers, PARTIAL says how much
4613 and emit_push_insn will take care of putting it there. */
4614
4615 /* Round its size up to a multiple
4616 of the allocation unit for arguments. */
4617
4618 if (arg->locate.size.var != 0)
4619 {
4620 excess = 0;
4621 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4622 }
4623 else
4624 {
4625 /* PUSH_ROUNDING has no effect on us, because
4626 emit_push_insn for BLKmode is careful to avoid it. */
4627 if (reg && GET_CODE (reg) == PARALLEL)
4628 {
4629 /* Use the size of the elt to compute excess. */
4630 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
4631 excess = (arg->locate.size.constant
4632 - int_size_in_bytes (TREE_TYPE (pval))
4633 + partial * GET_MODE_SIZE (GET_MODE (elt)));
4634 }
4635 else
4636 excess = (arg->locate.size.constant
4637 - int_size_in_bytes (TREE_TYPE (pval))
4638 + partial * UNITS_PER_WORD);
4639 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4640 NULL_RTX, TYPE_MODE (sizetype), 0);
4641 }
4642
4643 /* Some types will require stricter alignment, which will be
4644 provided for elsewhere in argument layout. */
4645 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4646
4647 /* When an argument is padded down, the block is aligned to
4648 PARM_BOUNDARY, but the actual argument isn't. */
4649 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4650 {
4651 if (arg->locate.size.var)
4652 parm_align = BITS_PER_UNIT;
4653 else if (excess)
4654 {
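/* EXCESS & -EXCESS isolates the lowest set bit, i.e. the largest
power-of-two number of bytes dividing EXCESS; scaled to bits, an
excess of 6 bytes gives a 2-byte (16-bit) bound, for example.  */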
4655 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4656 parm_align = MIN (parm_align, excess_align);
4657 }
4658 }
4659
4660 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4661 {
4662 /* emit_push_insn might not work properly if arg->value and
4663 argblock + arg->locate.offset areas overlap. */
4664 rtx x = arg->value;
4665 int i = 0;
4666
4667 if (XEXP (x, 0) == current_function_internal_arg_pointer
4668 || (GET_CODE (XEXP (x, 0)) == PLUS
4669 && XEXP (XEXP (x, 0), 0) ==
4670 current_function_internal_arg_pointer
4671 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4672 {
4673 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4674 i = INTVAL (XEXP (XEXP (x, 0), 1));
4675
4676 /* expand_call should ensure this. */
4677 if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
4678 abort ();
4679
4680 if (arg->locate.offset.constant > i)
4681 {
4682 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4683 sibcall_failure = 1;
4684 }
4685 else if (arg->locate.offset.constant < i)
4686 {
4687 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4688 sibcall_failure = 1;
4689 }
4690 }
4691 }
4692
4693 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4694 parm_align, partial, reg, excess, argblock,
4695 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4696 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4697
4698 /* Unless this is a partially-in-register argument, the argument is now
4699 in the stack.
4700
4701 ??? Unlike the case above, in which we want the actual
4702 address of the data, so that we can load it directly into a
4703 register, here we want the address of the stack slot, so that
4704 it's properly aligned for word-by-word copying or something
4705 like that. It's not clear that this is always correct. */
4706 if (partial == 0)
4707 arg->value = arg->stack_slot;
4708 }
4709
4710 /* Mark all slots this store used. */
4711 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4712 && argblock && ! variable_size && arg->stack)
4713 for (i = lower_bound; i < upper_bound; i++)
4714 stack_usage_map[i] = 1;
4715
4716 /* Once we have pushed something, pops can't safely
4717 be deferred during the rest of the arguments. */
4718 NO_DEFER_POP;
4719
4720 /* ANSI doesn't require a sequence point here,
4721 but PCC has one, so this will avoid some problems. */
4722 emit_queue ();
4723
4724 /* Free any temporary slots made in processing this argument. Show
4725 that we might have taken the address of something and pushed that
4726 as an operand. */
4727 preserve_temp_slots (NULL_RTX);
4728 free_temp_slots ();
4729 pop_temp_slots ();
4730
4731 return sibcall_failure;
4732 }
4733
4734 /* Nonzero if we do not know how to pass TYPE solely in registers.
4735 We cannot do so in the following cases:
4736
4737 - if the type has variable size
4738 - if the type is marked as addressable (it is required to be constructed
4739 into the stack)
4740 - if the padding and mode of the type is such that a copy into a register
4741 would put it into the wrong part of the register.
4742
4743 Which padding can't be supported depends on the byte endianness.
4744
4745 A value in a register is implicitly padded at the most significant end.
4746 On a big-endian machine, that is the lower end in memory.
4747 So a value padded in memory at the upper end can't go in a register.
4748 For a little-endian machine, the reverse is true. */
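/* For example (hypothetical target parameters): on a big-endian machine
   with PARM_BOUNDARY == 32, a 6-byte BLKmode structure whose padding is
   upward satisfies the final test below (6 % 4 != 0 and upward padding
   on a big-endian target), so it must be passed on the stack rather than
   copied into registers.  */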
4749
4750 bool
4751 default_must_pass_in_stack (enum machine_mode mode, tree type)
4752 {
4753 if (!type)
4754 return false;
4755
4756 /* If the type has variable size... */
4757 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4758 return true;
4759
4760 /* If the type is marked as addressable (it is required
4761 to be constructed into the stack)... */
4762 if (TREE_ADDRESSABLE (type))
4763 return true;
4764
4765 /* If the padding and mode of the type is such that a copy into
4766 a register would put it into the wrong part of the register. */
4767 if (mode == BLKmode
4768 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4769 && (FUNCTION_ARG_PADDING (mode, type)
4770 == (BYTES_BIG_ENDIAN ? upward : downward)))
4771 return true;
4772
4773 return false;
4774 }