gcc/function.c
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "toplev.h"
55 #include "hashtab.h"
56 #include "ggc.h"
57 #include "tm_p.h"
58 #include "integrate.h"
59 #include "langhooks.h"
60 #include "target.h"
61 #include "cfglayout.h"
62 #include "tree-gimple.h"
63
64 #ifndef LOCAL_ALIGNMENT
65 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
66 #endif
67
68 #ifndef STACK_ALIGNMENT_NEEDED
69 #define STACK_ALIGNMENT_NEEDED 1
70 #endif
71
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
73
74 /* Some systems use __main in a way incompatible with its use in gcc; in these
75 cases use the macro NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
76 give the same symbol without quotes for an alternative entry point. You
77 must define both, or neither. */
78 #ifndef NAME__MAIN
79 #define NAME__MAIN "__main"
80 #endif
81
82 /* Round a value down to the largest multiple of the required alignment
83 that does not exceed it. Avoid using division in case the value is
84 negative. Assume the alignment is a power of two. */
85 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
86
87 /* Similar, but round up to the smallest multiple of the alignment that
88 is not less than the value. */
89 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
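/* For illustration (values chosen arbitrarily), with ALIGN == 8:
     FLOOR_ROUND (19, 8)  == 16    since 19 & ~7 masks off the low bits
     CEIL_ROUND  (19, 8)  == 24    since (19 + 7) & ~7 == 26 & ~7
     FLOOR_ROUND (-13, 8) == -16   two's complement: -13 & ~7
     CEIL_ROUND  (-13, 8) == -8    (-13 + 7) & ~7 == -6 & ~7
   Both remain correct for negative values precisely because only the
   power-of-two mask is used and no division is involved.  */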
90
91 /* Nonzero if function being compiled doesn't contain any calls
92 (ignoring the prologue and epilogue). This is set prior to
93 local register allocation and is valid for the remaining
94 compiler passes. */
95 int current_function_is_leaf;
96
97 /* Nonzero if function being compiled doesn't modify the stack pointer
98 (ignoring the prologue and epilogue). This is only valid after
99 life_analysis has run. */
100 int current_function_sp_is_unchanging;
101
102 /* Nonzero if the function being compiled is a leaf function which only
103 uses leaf registers. This is valid after reload (specifically after
104 sched2) and is useful only if the port defines LEAF_REGISTERS. */
105 int current_function_uses_only_leaf_regs;
106
107 /* Nonzero once virtual register instantiation has been done.
108 assign_stack_local uses frame_pointer_rtx when this is nonzero.
109 calls.c:emit_library_call_value_1 uses it to set up
110 post-instantiation libcalls. */
111 int virtuals_instantiated;
112
113 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
114 static GTY(()) int funcdef_no;
115
116 /* These variables hold pointers to functions to create and destroy
117 target specific, per-function data structures. */
118 struct machine_function * (*init_machine_status) (void);
119
120 /* The currently compiled function. */
121 struct function *cfun = 0;
122
123 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
124 static GTY(()) varray_type prologue;
125 static GTY(()) varray_type epilogue;
126
127 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
128 in this function. */
129 static GTY(()) varray_type sibcall_epilogue;
130 \f
131 /* In order to evaluate some expressions, such as function calls returning
132 structures in memory, we need to temporarily allocate stack locations.
133 We record each allocated temporary in the following structure.
134
135 Associated with each temporary slot is a nesting level. When we pop up
136 one level, all temporaries associated with the previous level are freed.
137 Normally, all temporaries are freed after the execution of the statement
138 in which they were created. However, if we are inside a ({...}) grouping,
139 the result may be in a temporary and hence must be preserved. If the
140 result could be in a temporary, we preserve it if we can determine which
141 one it is in. If we cannot determine which temporary may contain the
142 result, all temporaries are preserved. A temporary is preserved by
143 pretending it was allocated at the previous nesting level.
144
145 Automatic variables are also assigned temporary slots, at the nesting
146 level where they are defined. They are marked as "kept" so that
147 free_temp_slots will not free them. */
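/* As a rough sketch (illustrative only; the real call sites live in the
   expanders), the level machinery is typically driven like this:

       push_temp_slots ();
       temp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
       ... emit RTL that uses TEMP ...
       preserve_temp_slots (result);    -- keep slots the result points into
       pop_temp_slots ();               -- every other slot becomes reusable

   free_temp_slots recycles the non-kept slots of the current level
   without leaving it.  */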
148
149 struct temp_slot GTY(())
150 {
151 /* Points to next temporary slot. */
152 struct temp_slot *next;
153 /* Points to previous temporary slot. */
154 struct temp_slot *prev;
155
156 /* The rtx used to reference the slot. */
157 rtx slot;
158 /* The rtx used to represent the address if not the address of the
159 slot above. May be an EXPR_LIST if multiple addresses exist. */
160 rtx address;
161 /* The alignment (in bits) of the slot. */
162 unsigned int align;
163 /* The size, in units, of the slot. */
164 HOST_WIDE_INT size;
165 /* The type of the object in the slot, or zero if it doesn't correspond
166 to a type. We use this to determine whether a slot can be reused.
167 It can be reused if objects of the type of the new slot will always
168 conflict with objects of the type of the old slot. */
169 tree type;
170 /* Nonzero if this temporary is currently in use. */
171 char in_use;
172 /* Nonzero if this temporary has its address taken. */
173 char addr_taken;
174 /* Nesting level at which this slot is being used. */
175 int level;
176 /* Nonzero if this should survive a call to free_temp_slots. */
177 int keep;
178 /* The offset of the slot from the frame_pointer, including extra space
179 for alignment. This info is for combine_temp_slots. */
180 HOST_WIDE_INT base_offset;
181 /* The size of the slot, including extra space for alignment. This
182 info is for combine_temp_slots. */
183 HOST_WIDE_INT full_size;
184 };
185 \f
186 /* Forward declarations. */
187
188 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
189 struct function *);
190 static struct temp_slot *find_temp_slot_from_address (rtx);
191 static void instantiate_decls (tree, int);
192 static void instantiate_decls_1 (tree, int);
193 static void instantiate_decl (rtx, HOST_WIDE_INT, int);
194 static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
195 static int instantiate_virtual_regs_1 (rtx *, rtx, int);
196 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
197 static void pad_below (struct args_size *, enum machine_mode, tree);
198 static void reorder_blocks_1 (rtx, tree, varray_type *);
199 static void reorder_fix_fragments (tree);
200 static int all_blocks (tree, tree *);
201 static tree *get_block_vector (tree, int *);
202 extern tree debug_find_var_in_block_tree (tree, tree);
203 /* We always define `record_insns' even if it's not used so that we
204 can always export `prologue_epilogue_contains'. */
205 static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
206 static int contains (rtx, varray_type);
207 #ifdef HAVE_return
208 static void emit_return_into_block (basic_block, rtx);
209 #endif
210 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
211 static rtx keep_stack_depressed (rtx);
212 #endif
213 static void prepare_function_start (tree);
214 static void do_clobber_return_reg (rtx, void *);
215 static void do_use_return_reg (rtx, void *);
216 static void instantiate_virtual_regs_lossage (rtx);
217 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
218 \f
219 /* Pointer to chain of `struct function' for containing functions. */
220 struct function *outer_function_chain;
221
222 /* Given a function decl for a containing function,
223 return the `struct function' for it. */
224
225 struct function *
226 find_function_data (tree decl)
227 {
228 struct function *p;
229
230 for (p = outer_function_chain; p; p = p->outer)
231 if (p->decl == decl)
232 return p;
233
234 gcc_unreachable ();
235 }
236
237 /* Save the current context for compilation of a nested function.
238 This is called from language-specific code. The caller should use
239 the enter_nested langhook to save any language-specific state,
240 since this function knows only about language-independent
241 variables. */
242
243 void
244 push_function_context_to (tree context)
245 {
246 struct function *p;
247
248 if (context)
249 {
250 if (context == current_function_decl)
251 cfun->contains_functions = 1;
252 else
253 {
254 struct function *containing = find_function_data (context);
255 containing->contains_functions = 1;
256 }
257 }
258
259 if (cfun == 0)
260 init_dummy_function_start ();
261 p = cfun;
262
263 p->outer = outer_function_chain;
264 outer_function_chain = p;
265
266 lang_hooks.function.enter_nested (p);
267
268 cfun = 0;
269 }
270
271 void
272 push_function_context (void)
273 {
274 push_function_context_to (current_function_decl);
275 }
276
277 /* Restore the last saved context, at the end of a nested function.
278 This function is called from language-specific code. */
279
280 void
281 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
282 {
283 struct function *p = outer_function_chain;
284
285 cfun = p;
286 outer_function_chain = p->outer;
287
288 current_function_decl = p->decl;
289 reg_renumber = 0;
290
291 lang_hooks.function.leave_nested (p);
292
293 /* Reset variables that have known state during rtx generation. */
294 virtuals_instantiated = 0;
295 generating_concat_p = 1;
296 }
297
298 void
299 pop_function_context (void)
300 {
301 pop_function_context_from (current_function_decl);
302 }
303
304 /* Clear out all parts of the state in F that can safely be discarded
305 after the function has been parsed, but not compiled, to let
306 garbage collection reclaim the memory. */
307
308 void
309 free_after_parsing (struct function *f)
310 {
311 /* f->expr->forced_labels is used by code generation. */
312 /* f->emit->regno_reg_rtx is used by code generation. */
313 /* f->varasm is used by code generation. */
314 /* f->eh->eh_return_stub_label is used by code generation. */
315
316 lang_hooks.function.final (f);
317 }
318
319 /* Clear out all parts of the state in F that can safely be discarded
320 after the function has been compiled, to let garbage collection
321 reclaim the memory. */
322
323 void
324 free_after_compilation (struct function *f)
325 {
326 f->eh = NULL;
327 f->expr = NULL;
328 f->emit = NULL;
329 f->varasm = NULL;
330 f->machine = NULL;
331
332 f->x_avail_temp_slots = NULL;
333 f->x_used_temp_slots = NULL;
334 f->arg_offset_rtx = NULL;
335 f->return_rtx = NULL;
336 f->internal_arg_pointer = NULL;
337 f->x_nonlocal_goto_handler_labels = NULL;
338 f->x_return_label = NULL;
339 f->x_naked_return_label = NULL;
340 f->x_stack_slot_list = NULL;
341 f->x_tail_recursion_reentry = NULL;
342 f->x_arg_pointer_save_area = NULL;
343 f->x_parm_birth_insn = NULL;
344 f->original_arg_vector = NULL;
345 f->original_decl_initial = NULL;
346 f->epilogue_delay_list = NULL;
347 }
348 \f
349 /* Allocate fixed slots in the stack frame of the current function. */
350
351 /* Return size needed for stack frame based on slots so far allocated in
352 function F.
353 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
354 the caller may have to do that. */
355
356 HOST_WIDE_INT
357 get_func_frame_size (struct function *f)
358 {
359 #ifdef FRAME_GROWS_DOWNWARD
360 return -f->x_frame_offset;
361 #else
362 return f->x_frame_offset;
363 #endif
364 }
365
366 /* Return size needed for stack frame based on slots so far allocated.
367 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
368 the caller may have to do that. */
369 HOST_WIDE_INT
370 get_frame_size (void)
371 {
372 return get_func_frame_size (cfun);
373 }
374
375 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
376 with machine mode MODE.
377
378 ALIGN controls the amount of alignment for the address of the slot:
379 0 means according to MODE,
380 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
381 -2 means use BITS_PER_UNIT,
382 positive specifies alignment boundary in bits.
383
384 We do not round to stack_boundary here.
385
386 FUNCTION specifies the function to allocate in. */
387
388 static rtx
389 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
390 struct function *function)
391 {
392 rtx x, addr;
393 int bigend_correction = 0;
394 unsigned int alignment;
395 int frame_off, frame_alignment, frame_phase;
396
397 if (align == 0)
398 {
399 tree type;
400
401 if (mode == BLKmode)
402 alignment = BIGGEST_ALIGNMENT;
403 else
404 alignment = GET_MODE_ALIGNMENT (mode);
405
406 /* Allow the target to (possibly) increase the alignment of this
407 stack slot. */
408 type = lang_hooks.types.type_for_mode (mode, 0);
409 if (type)
410 alignment = LOCAL_ALIGNMENT (type, alignment);
411
412 alignment /= BITS_PER_UNIT;
413 }
414 else if (align == -1)
415 {
416 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
417 size = CEIL_ROUND (size, alignment);
418 }
419 else if (align == -2)
420 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
421 else
422 alignment = align / BITS_PER_UNIT;
423
424 #ifdef FRAME_GROWS_DOWNWARD
425 function->x_frame_offset -= size;
426 #endif
427
428 /* Ignore any alignment request beyond the expected alignment of the stack boundary. */
429 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
430 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
431
432 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
433 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
434
435 /* Calculate how many bytes the start of local variables is off from
436 stack alignment. */
437 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
438 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
439 frame_phase = frame_off ? frame_alignment - frame_off : 0;
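/* For example (purely illustrative numbers): with a preferred stack
   boundary of 128 bits, frame_alignment is 16; if STARTING_FRAME_OFFSET
   were 8, frame_off would be 8 and frame_phase 8, i.e. the locals start
   at an offset of 8 modulo 16 and the rounding below must compensate by
   exactly that phase.  */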
440
441 /* Round the frame offset to the specified alignment. The default is
442 to always honor requests to align the stack but a port may choose to
443 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
444 if (STACK_ALIGNMENT_NEEDED
445 || mode != BLKmode
446 || size != 0)
447 {
448 /* We must be careful here, since FRAME_OFFSET might be negative and
449 division with a negative dividend isn't as well defined as we might
450 like. So we instead assume that ALIGNMENT is a power of two and
451 use logical operations which are unambiguous. */
452 #ifdef FRAME_GROWS_DOWNWARD
453 function->x_frame_offset
454 = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
455 (unsigned HOST_WIDE_INT) alignment)
456 + frame_phase);
457 #else
458 function->x_frame_offset
459 = (CEIL_ROUND (function->x_frame_offset - frame_phase,
460 (unsigned HOST_WIDE_INT) alignment)
461 + frame_phase);
462 #endif
463 }
464
465 /* On a big-endian machine, if we are allocating more space than we will use,
466 use the least significant bytes of those that are allocated. */
467 if (BYTES_BIG_ENDIAN && mode != BLKmode)
468 bigend_correction = size - GET_MODE_SIZE (mode);
469
470 /* If we have already instantiated virtual registers, return the actual
471 address relative to the frame pointer. */
472 if (function == cfun && virtuals_instantiated)
473 addr = plus_constant (frame_pointer_rtx,
474 trunc_int_for_mode
475 (frame_offset + bigend_correction
476 + STARTING_FRAME_OFFSET, Pmode));
477 else
478 addr = plus_constant (virtual_stack_vars_rtx,
479 trunc_int_for_mode
480 (function->x_frame_offset + bigend_correction,
481 Pmode));
482
483 #ifndef FRAME_GROWS_DOWNWARD
484 function->x_frame_offset += size;
485 #endif
486
487 x = gen_rtx_MEM (mode, addr);
488
489 function->x_stack_slot_list
490 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
491
492 return x;
493 }
494
495 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
496 current function. */
497
498 rtx
499 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
500 {
501 return assign_stack_local_1 (mode, size, align, cfun);
502 }
503
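/* Purely as an illustrative sketch: a caller that wants a word-sized
   slot with the natural alignment of its mode might write

       rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Until instantiate_virtual_regs has run, the returned MEM's address is
   expressed in terms of virtual_stack_vars_rtx rather than the real
   frame pointer.  */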
504 \f
505 /* Removes temporary slot TEMP from LIST. */
506
507 static void
508 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
509 {
510 if (temp->next)
511 temp->next->prev = temp->prev;
512 if (temp->prev)
513 temp->prev->next = temp->next;
514 else
515 *list = temp->next;
516
517 temp->prev = temp->next = NULL;
518 }
519
520 /* Inserts temporary slot TEMP into LIST. */
521
522 static void
523 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
524 {
525 temp->next = *list;
526 if (*list)
527 (*list)->prev = temp;
528 temp->prev = NULL;
529 *list = temp;
530 }
531
532 /* Returns the list of used temp slots at LEVEL. */
533
534 static struct temp_slot **
535 temp_slots_at_level (int level)
536 {
537
538 if (!used_temp_slots)
539 VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
540
541 while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
542 VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
543
544 return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
545 }
546
547 /* Returns the maximal temporary slot level. */
548
549 static int
550 max_slot_level (void)
551 {
552 if (!used_temp_slots)
553 return -1;
554
555 return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
556 }
557
558 /* Moves temporary slot TEMP to LEVEL. */
559
560 static void
561 move_slot_to_level (struct temp_slot *temp, int level)
562 {
563 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
564 insert_slot_to_list (temp, temp_slots_at_level (level));
565 temp->level = level;
566 }
567
568 /* Make temporary slot TEMP available. */
569
570 static void
571 make_slot_available (struct temp_slot *temp)
572 {
573 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
574 insert_slot_to_list (temp, &avail_temp_slots);
575 temp->in_use = 0;
576 temp->level = -1;
577 }
578 \f
579 /* Allocate a temporary stack slot and record it for possible later
580 reuse.
581
582 MODE is the machine mode to be given to the returned rtx.
583
584 SIZE is the size in units of the space required. We do no rounding here
585 since assign_stack_local will do any required rounding.
586
587 KEEP is 1 if this slot is to be retained after a call to
588 free_temp_slots. Automatic variables for a block are allocated
589 with this flag. KEEP values of 2 or 3 were needed respectively
590 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
591 or for SAVE_EXPRs, but they are now unused and will abort.
592
593 TYPE is the type that will be used for the stack slot. */
594
595 rtx
596 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, int keep,
597 tree type)
598 {
599 unsigned int align;
600 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
601 rtx slot;
602
603 /* If SIZE is -1 it means that somebody tried to allocate a temporary
604 of a variable size. */
605 gcc_assert (size != -1);
606
607 /* These are now unused. */
608 gcc_assert (keep <= 1);
609
610 if (mode == BLKmode)
611 align = BIGGEST_ALIGNMENT;
612 else
613 align = GET_MODE_ALIGNMENT (mode);
614
615 if (! type)
616 type = lang_hooks.types.type_for_mode (mode, 0);
617
618 if (type)
619 align = LOCAL_ALIGNMENT (type, align);
620
621 /* Try to find an available, already-allocated temporary of the proper
622 mode which meets the size and alignment requirements. Choose the
623 smallest one with the closest alignment. */
624 for (p = avail_temp_slots; p; p = p->next)
625 {
626 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
627 && objects_must_conflict_p (p->type, type)
628 && (best_p == 0 || best_p->size > p->size
629 || (best_p->size == p->size && best_p->align > p->align)))
630 {
631 if (p->align == align && p->size == size)
632 {
633 selected = p;
634 cut_slot_from_list (selected, &avail_temp_slots);
635 best_p = 0;
636 break;
637 }
638 best_p = p;
639 }
640 }
641
642 /* Make our best, if any, the one to use. */
643 if (best_p)
644 {
645 selected = best_p;
646 cut_slot_from_list (selected, &avail_temp_slots);
647
648 /* If there are enough aligned bytes left over, make them into a new
649 temp_slot so that the extra bytes don't get wasted. Do this only
650 for BLKmode slots, so that we can be sure of the alignment. */
651 if (GET_MODE (best_p->slot) == BLKmode)
652 {
653 int alignment = best_p->align / BITS_PER_UNIT;
654 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
655
656 if (best_p->size - rounded_size >= alignment)
657 {
658 p = ggc_alloc (sizeof (struct temp_slot));
659 p->in_use = p->addr_taken = 0;
660 p->size = best_p->size - rounded_size;
661 p->base_offset = best_p->base_offset + rounded_size;
662 p->full_size = best_p->full_size - rounded_size;
663 p->slot = gen_rtx_MEM (BLKmode,
664 plus_constant (XEXP (best_p->slot, 0),
665 rounded_size));
666 p->align = best_p->align;
667 p->address = 0;
668 p->type = best_p->type;
669 insert_slot_to_list (p, &avail_temp_slots);
670
671 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
672 stack_slot_list);
673
674 best_p->size = rounded_size;
675 best_p->full_size = rounded_size;
676 }
677 }
678 }
679
680 /* If we still didn't find one, make a new temporary. */
681 if (selected == 0)
682 {
683 HOST_WIDE_INT frame_offset_old = frame_offset;
684
685 p = ggc_alloc (sizeof (struct temp_slot));
686
687 /* We are passing an explicit alignment request to assign_stack_local.
688 One side effect of that is assign_stack_local will not round SIZE
689 to ensure the frame offset remains suitably aligned.
690
691 So for requests which depended on the rounding of SIZE, we go ahead
692 and round it now. We also make sure ALIGNMENT is at least
693 BIGGEST_ALIGNMENT. */
694 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
695 p->slot = assign_stack_local (mode,
696 (mode == BLKmode
697 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
698 : size),
699 align);
700
701 p->align = align;
702
703 /* The following slot size computation is necessary because we don't
704 know the actual size of the temporary slot until assign_stack_local
705 has performed all the frame alignment and size rounding for the
706 requested temporary. Note that extra space added for alignment
707 can be either above or below this stack slot depending on which
708 way the frame grows. We include the extra space if and only if it
709 is above this slot. */
710 #ifdef FRAME_GROWS_DOWNWARD
711 p->size = frame_offset_old - frame_offset;
712 #else
713 p->size = size;
714 #endif
715
716 /* Now define the fields used by combine_temp_slots. */
717 #ifdef FRAME_GROWS_DOWNWARD
718 p->base_offset = frame_offset;
719 p->full_size = frame_offset_old - frame_offset;
720 #else
721 p->base_offset = frame_offset_old;
722 p->full_size = frame_offset - frame_offset_old;
723 #endif
724 p->address = 0;
725
726 selected = p;
727 }
728
729 p = selected;
730 p->in_use = 1;
731 p->addr_taken = 0;
732 p->type = type;
733 p->level = temp_slot_level;
734 p->keep = keep;
735
736 pp = temp_slots_at_level (p->level);
737 insert_slot_to_list (p, pp);
738
739 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
740 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
741 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
742
743 /* If we know the alias set for the memory that will be used, use
744 it. If there's no TYPE, then we don't know anything about the
745 alias set for the memory. */
746 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
747 set_mem_align (slot, align);
748
749 /* If a type is specified, set the relevant flags. */
750 if (type != 0)
751 {
752 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
753 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
754 }
755
756 return slot;
757 }
758
759 /* Allocate a temporary stack slot and record it for possible later
760 reuse. First three arguments are same as in preceding function. */
761
762 rtx
763 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
764 {
765 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
766 }
767 \f
768 /* Assign a temporary.
769 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
770 and that decl should be used in error messages. In either case, we
771 allocate space of the given type.
772 KEEP is as for assign_stack_temp.
773 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
774 it is 0 if a register is OK.
775 DONT_PROMOTE is 1 if we should not promote values in register
776 to wider modes. */
777
778 rtx
779 assign_temp (tree type_or_decl, int keep, int memory_required,
780 int dont_promote ATTRIBUTE_UNUSED)
781 {
782 tree type, decl;
783 enum machine_mode mode;
784 #ifdef PROMOTE_MODE
785 int unsignedp;
786 #endif
787
788 if (DECL_P (type_or_decl))
789 decl = type_or_decl, type = TREE_TYPE (decl);
790 else
791 decl = NULL, type = type_or_decl;
792
793 mode = TYPE_MODE (type);
794 #ifdef PROMOTE_MODE
795 unsignedp = TYPE_UNSIGNED (type);
796 #endif
797
798 if (mode == BLKmode || memory_required)
799 {
800 HOST_WIDE_INT size = int_size_in_bytes (type);
801 tree size_tree;
802 rtx tmp;
803
804 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
805 problems with allocating the stack space. */
806 if (size == 0)
807 size = 1;
808
809 /* Unfortunately, we don't yet know how to allocate variable-sized
810 temporaries. However, sometimes we have a fixed upper limit on
811 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
812 instead. This is the case for Chill variable-sized strings. */
813 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
814 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
815 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
816 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
817
818 /* If we still haven't been able to get a size, see if the language
819 can compute a maximum size. */
820 if (size == -1
821 && (size_tree = lang_hooks.types.max_size (type)) != 0
822 && host_integerp (size_tree, 1))
823 size = tree_low_cst (size_tree, 1);
824
825 /* The size of the temporary may be too large to fit into an integer. */
826 /* ??? Not sure this should happen except for user silliness, so limit
827 this to things that aren't compiler-generated temporaries. The
828 rest of the time we'll abort in assign_stack_temp_for_type. */
829 if (decl && size == -1
830 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
831 {
832 error ("%Jsize of variable %qD is too large", decl, decl);
833 size = 1;
834 }
835
836 tmp = assign_stack_temp_for_type (mode, size, keep, type);
837 return tmp;
838 }
839
840 #ifdef PROMOTE_MODE
841 if (! dont_promote)
842 mode = promote_mode (type, mode, &unsignedp, 0);
843 #endif
844
845 return gen_reg_rtx (mode);
846 }
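/* An illustrative call: to force a stack home for an object of tree
   type TYPE (a name used here only for the example), where a register
   is not acceptable, one might write

       rtx mem = assign_temp (TYPE, 0, 1, 0);

   For a BLKmode type this falls through to assign_stack_temp_for_type;
   for a small scalar with MEMORY_REQUIRED of 0 it would instead return
   a (possibly promoted) pseudo register.  */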
847 \f
848 /* Combine temporary stack slots which are adjacent on the stack.
849
850 This allows for better use of already allocated stack space. This is only
851 done for BLKmode slots because we can be sure that we won't have alignment
852 problems in this case. */
853
854 static void
855 combine_temp_slots (void)
856 {
857 struct temp_slot *p, *q, *next, *next_q;
858 int num_slots;
859
860 /* We can't combine slots, because the information about which slot
861 is in which alias set will be lost. */
862 if (flag_strict_aliasing)
863 return;
864
865 /* If there are a lot of temp slots, don't do anything unless
866 high levels of optimization are enabled. */
867 if (! flag_expensive_optimizations)
868 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
869 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
870 return;
871
872 for (p = avail_temp_slots; p; p = next)
873 {
874 int delete_p = 0;
875
876 next = p->next;
877
878 if (GET_MODE (p->slot) != BLKmode)
879 continue;
880
881 for (q = p->next; q; q = next_q)
882 {
883 int delete_q = 0;
884
885 next_q = q->next;
886
887 if (GET_MODE (q->slot) != BLKmode)
888 continue;
889
890 if (p->base_offset + p->full_size == q->base_offset)
891 {
892 /* Q comes after P; combine Q into P. */
893 p->size += q->size;
894 p->full_size += q->full_size;
895 delete_q = 1;
896 }
897 else if (q->base_offset + q->full_size == p->base_offset)
898 {
899 /* P comes after Q; combine P into Q. */
900 q->size += p->size;
901 q->full_size += p->full_size;
902 delete_p = 1;
903 break;
904 }
905 if (delete_q)
906 cut_slot_from_list (q, &avail_temp_slots);
907 }
908
909 /* Either delete P or advance past it. */
910 if (delete_p)
911 cut_slot_from_list (p, &avail_temp_slots);
912 }
913 }
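/* For instance (made-up offsets): if one free BLKmode slot has
   base_offset 32 and full_size 16, and the next has base_offset 48 and
   full_size 16, the adjacency test above sees 32 + 16 == 48 and merges
   them into a single slot with full_size 32, which a later, larger
   temporary can then reuse in one piece.  */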
914 \f
915 /* Find the temp slot corresponding to the object at address X. */
916
917 static struct temp_slot *
918 find_temp_slot_from_address (rtx x)
919 {
920 struct temp_slot *p;
921 rtx next;
922 int i;
923
924 for (i = max_slot_level (); i >= 0; i--)
925 for (p = *temp_slots_at_level (i); p; p = p->next)
926 {
927 if (XEXP (p->slot, 0) == x
928 || p->address == x
929 || (GET_CODE (x) == PLUS
930 && XEXP (x, 0) == virtual_stack_vars_rtx
931 && GET_CODE (XEXP (x, 1)) == CONST_INT
932 && INTVAL (XEXP (x, 1)) >= p->base_offset
933 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
934 return p;
935
936 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
937 for (next = p->address; next; next = XEXP (next, 1))
938 if (XEXP (next, 0) == x)
939 return p;
940 }
941
942 /* If we have a sum involving a register, see if it points to a temp
943 slot. */
944 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
945 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
946 return p;
947 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
948 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
949 return p;
950
951 return 0;
952 }
953
954 /* Indicate that NEW is an alternate way of referring to the temp slot
955 that previously was known by OLD. */
956
957 void
958 update_temp_slot_address (rtx old, rtx new)
959 {
960 struct temp_slot *p;
961
962 if (rtx_equal_p (old, new))
963 return;
964
965 p = find_temp_slot_from_address (old);
966
967 /* If we didn't find one, see if OLD is a PLUS. If it is not, there is
968 nothing to do. If NEW is a register, one operand of the PLUS may point
969 into a temporary location, so recurse on both operands of OLD. Otherwise,
970 if both OLD and NEW are PLUS expressions and they have an operand in
971 common, try a recursive call on the remaining operands. */
972 if (p == 0)
973 {
974 if (GET_CODE (old) != PLUS)
975 return;
976
977 if (REG_P (new))
978 {
979 update_temp_slot_address (XEXP (old, 0), new);
980 update_temp_slot_address (XEXP (old, 1), new);
981 return;
982 }
983 else if (GET_CODE (new) != PLUS)
984 return;
985
986 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
987 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
988 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
989 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
990 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
991 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
992 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
993 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
994
995 return;
996 }
997
998 /* Otherwise add an alias for the temp's address. */
999 else if (p->address == 0)
1000 p->address = new;
1001 else
1002 {
1003 if (GET_CODE (p->address) != EXPR_LIST)
1004 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1005
1006 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1007 }
1008 }
1009
1010 /* If X could be a reference to a temporary slot, mark the fact that its
1011 address was taken. */
1012
1013 void
1014 mark_temp_addr_taken (rtx x)
1015 {
1016 struct temp_slot *p;
1017
1018 if (x == 0)
1019 return;
1020
1021 /* If X is not in memory or is at a constant address, it cannot be in
1022 a temporary slot. */
1023 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1024 return;
1025
1026 p = find_temp_slot_from_address (XEXP (x, 0));
1027 if (p != 0)
1028 p->addr_taken = 1;
1029 }
1030
1031 /* If X could be a reference to a temporary slot, mark that slot as
1032 belonging to the level one higher than the current level. If X
1033 matched one of our slots, just mark that one. Otherwise, we can't
1034 easily predict which it is, so upgrade all of them. Kept slots
1035 need not be touched.
1036
1037 This is called when an ({...}) construct occurs and a statement
1038 returns a value in memory. */
1039
1040 void
1041 preserve_temp_slots (rtx x)
1042 {
1043 struct temp_slot *p = 0, *next;
1044
1045 /* If there is no result, we still might have some objects whose addresses
1046 were taken, so we need to make sure they stay around. */
1047 if (x == 0)
1048 {
1049 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1050 {
1051 next = p->next;
1052
1053 if (p->addr_taken)
1054 move_slot_to_level (p, temp_slot_level - 1);
1055 }
1056
1057 return;
1058 }
1059
1060 /* If X is a register that is being used as a pointer, see if we have
1061 a temporary slot we know it points to. To be consistent with
1062 the code below, we really should preserve all non-kept slots
1063 if we can't find a match, but that seems to be much too costly. */
1064 if (REG_P (x) && REG_POINTER (x))
1065 p = find_temp_slot_from_address (x);
1066
1067 /* If X is not in memory or is at a constant address, it cannot be in
1068 a temporary slot, but it can contain something whose address was
1069 taken. */
1070 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1071 {
1072 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1073 {
1074 next = p->next;
1075
1076 if (p->addr_taken)
1077 move_slot_to_level (p, temp_slot_level - 1);
1078 }
1079
1080 return;
1081 }
1082
1083 /* First see if we can find a match. */
1084 if (p == 0)
1085 p = find_temp_slot_from_address (XEXP (x, 0));
1086
1087 if (p != 0)
1088 {
1089 /* Move everything at our level whose address was taken to our new
1090 level in case we used its address. */
1091 struct temp_slot *q;
1092
1093 if (p->level == temp_slot_level)
1094 {
1095 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1096 {
1097 next = q->next;
1098
1099 if (p != q && q->addr_taken)
1100 move_slot_to_level (q, temp_slot_level - 1);
1101 }
1102
1103 move_slot_to_level (p, temp_slot_level - 1);
1104 p->addr_taken = 0;
1105 }
1106 return;
1107 }
1108
1109 /* Otherwise, preserve all non-kept slots at this level. */
1110 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1111 {
1112 next = p->next;
1113
1114 if (!p->keep)
1115 move_slot_to_level (p, temp_slot_level - 1);
1116 }
1117 }
1118
1119 /* Free all temporaries used so far. This is normally called at the
1120 end of generating code for a statement. */
1121
1122 void
1123 free_temp_slots (void)
1124 {
1125 struct temp_slot *p, *next;
1126
1127 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1128 {
1129 next = p->next;
1130
1131 if (!p->keep)
1132 make_slot_available (p);
1133 }
1134
1135 combine_temp_slots ();
1136 }
1137
1138 /* Push deeper into the nesting level for stack temporaries. */
1139
1140 void
1141 push_temp_slots (void)
1142 {
1143 temp_slot_level++;
1144 }
1145
1146 /* Pop a temporary nesting level. All slots in use in the current level
1147 are freed. */
1148
1149 void
1150 pop_temp_slots (void)
1151 {
1152 struct temp_slot *p, *next;
1153
1154 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1155 {
1156 next = p->next;
1157 make_slot_available (p);
1158 }
1159
1160 combine_temp_slots ();
1161
1162 temp_slot_level--;
1163 }
1164
1165 /* Initialize temporary slots. */
1166
1167 void
1168 init_temp_slots (void)
1169 {
1170 /* We have not allocated any temporaries yet. */
1171 avail_temp_slots = 0;
1172 used_temp_slots = 0;
1173 temp_slot_level = 0;
1174 }
1175 \f
1176 /* These routines are responsible for converting virtual register references
1177 to the actual hard register references once RTL generation is complete.
1178
1179 The following four variables are used for communication between the
1180 routines. They contain the offsets of the virtual registers from their
1181 respective hard registers. */
1182
1183 static int in_arg_offset;
1184 static int var_offset;
1185 static int dynamic_offset;
1186 static int out_arg_offset;
1187 static int cfa_offset;
1188
1189 /* In most machines, the stack pointer register is equivalent to the bottom
1190 of the stack. */
1191
1192 #ifndef STACK_POINTER_OFFSET
1193 #define STACK_POINTER_OFFSET 0
1194 #endif
1195
1196 /* If not defined, pick an appropriate default for the offset of dynamically
1197 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1198 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1199
1200 #ifndef STACK_DYNAMIC_OFFSET
1201
1202 /* The bottom of the stack points to the actual arguments. If
1203 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1204 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1205 stack space for register parameters is not pushed by the caller, but
1206 is rather part of the fixed stack areas and hence not included in
1207 `current_function_outgoing_args_size'. Nevertheless, we must allow
1208 for it when allocating stack dynamic objects. */
1209
1210 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1211 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1212 ((ACCUMULATE_OUTGOING_ARGS \
1213 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1214 + (STACK_POINTER_OFFSET)) \
1215
1216 #else
1217 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1218 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1219 + (STACK_POINTER_OFFSET))
1220 #endif
1221 #endif
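/* Read concretely (for illustration): when outgoing arguments are not
   accumulated, the default offset is simply STACK_POINTER_OFFSET; with
   ACCUMULATE_OUTGOING_ARGS it additionally skips
   current_function_outgoing_args_size (and the register-parameter save
   area when REG_PARM_STACK_SPACE applies but no separate outgoing space
   is defined), so dynamic allocations do not overlap the outgoing
   argument area.  */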
1222
1223 /* On most machines, the CFA coincides with the first incoming parm. */
1224
1225 #ifndef ARG_POINTER_CFA_OFFSET
1226 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
1227 #endif
1228
1229 \f
1230 /* Pass through the insns of the current function and convert virtual
1231 register references to hard register references. */
1232
1233 void
1234 instantiate_virtual_regs (void)
1235 {
1236 rtx insn;
1237
1238 /* Compute the offsets to use for this function. */
1239 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1240 var_offset = STARTING_FRAME_OFFSET;
1241 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1242 out_arg_offset = STACK_POINTER_OFFSET;
1243 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1244
1245 /* Scan all variables and parameters of this function. For each that is
1246 in memory, instantiate all virtual registers if the result is a valid
1247 address. If not, we do it later. That will handle most uses of virtual
1248 regs on many machines. */
1249 instantiate_decls (current_function_decl, 1);
1250
1251 /* Initialize recognition, indicating that volatile is OK. */
1252 init_recog ();
1253
1254 /* Scan through all the insns, instantiating every virtual register still
1255 present. */
1256 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1257 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1258 || GET_CODE (insn) == CALL_INSN)
1259 {
1260 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
1261 if (INSN_DELETED_P (insn))
1262 continue;
1263 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
1264 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1265 if (GET_CODE (insn) == CALL_INSN)
1266 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
1267 NULL_RTX, 0);
1268
1269 /* Past this point all ASM statements should match. Verify that
1270 to avoid failures later in the compilation process. */
1271 if (asm_noperands (PATTERN (insn)) >= 0
1272 && ! check_asm_operands (PATTERN (insn)))
1273 instantiate_virtual_regs_lossage (insn);
1274 }
1275
1276 /* Now instantiate the remaining register equivalences for debugging info.
1277 These will not be valid addresses. */
1278 instantiate_decls (current_function_decl, 0);
1279
1280 /* Indicate that, from now on, assign_stack_local should use
1281 frame_pointer_rtx. */
1282 virtuals_instantiated = 1;
1283 }
1284
1285 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1286 all virtual registers in their DECL_RTL's.
1287
1288 If VALID_ONLY, do this only if the resulting address is still valid.
1289 Otherwise, always do it. */
1290
1291 static void
1292 instantiate_decls (tree fndecl, int valid_only)
1293 {
1294 tree decl;
1295
1296 /* Process all parameters of the function. */
1297 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1298 {
1299 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
1300 HOST_WIDE_INT size_rtl;
1301
1302 instantiate_decl (DECL_RTL (decl), size, valid_only);
1303
1304 /* If the parameter was promoted, then the incoming RTL mode may be
1305 larger than the declared type size. We must use the larger of
1306 the two sizes. */
1307 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
1308 size = MAX (size_rtl, size);
1309 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
1310 }
1311
1312 /* Now process all variables defined in the function or its subblocks. */
1313 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1314 }
1315
1316 /* Subroutine of instantiate_decls: Process all decls in the given
1317 BLOCK node and all its subblocks. */
1318
1319 static void
1320 instantiate_decls_1 (tree let, int valid_only)
1321 {
1322 tree t;
1323
1324 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1325 if (DECL_RTL_SET_P (t))
1326 instantiate_decl (DECL_RTL (t),
1327 int_size_in_bytes (TREE_TYPE (t)),
1328 valid_only);
1329
1330 /* Process all subblocks. */
1331 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1332 instantiate_decls_1 (t, valid_only);
1333 }
1334
1335 /* Subroutine of the preceding procedures: Given RTL representing a
1336 decl and the size of the object, do any instantiation required.
1337
1338 If VALID_ONLY is nonzero, it means that the RTL should only be
1339 changed if the new address is valid. */
1340
1341 static void
1342 instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
1343 {
1344 enum machine_mode mode;
1345 rtx addr;
1346
1347 if (x == 0)
1348 return;
1349
1350 /* If this is a CONCAT, recurse for the pieces. */
1351 if (GET_CODE (x) == CONCAT)
1352 {
1353 instantiate_decl (XEXP (x, 0), size / 2, valid_only);
1354 instantiate_decl (XEXP (x, 1), size / 2, valid_only);
1355 return;
1356 }
1357
1358 /* If this is not a MEM, no need to do anything. Similarly if the
1359 address is a constant or a register that is not a virtual register. */
1360 if (!MEM_P (x))
1361 return;
1362
1363 addr = XEXP (x, 0);
1364 if (CONSTANT_P (addr)
1365 || (REG_P (addr)
1366 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1367 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1368 return;
1369
1370 /* If we should only do this if the address is valid, copy the address.
1371 We need to do this so we can undo any changes that might make the
1372 address invalid. This copy is unfortunate, but probably can't be
1373 avoided. */
1374
1375 if (valid_only)
1376 addr = copy_rtx (addr);
1377
1378 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
1379
1380 if (valid_only && size >= 0)
1381 {
1382 unsigned HOST_WIDE_INT decl_size = size;
1383
1384 /* Now verify that the resulting address is valid for every integer or
1385 floating-point mode up to and including SIZE bytes long. We do this
1386 since the object might be accessed in any mode and frame addresses
1387 are shared. */
1388
1389 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1390 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
1391 mode = GET_MODE_WIDER_MODE (mode))
1392 if (! memory_address_p (mode, addr))
1393 return;
1394
1395 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1396 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
1397 mode = GET_MODE_WIDER_MODE (mode))
1398 if (! memory_address_p (mode, addr))
1399 return;
1400 }
1401
1402 /* Put back the address now that we have updated it and we either know
1403 it is valid or we don't care whether it is valid. */
1404
1405 XEXP (x, 0) = addr;
1406 }
1407 \f
1408 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1409 is a virtual register, return the equivalent hard register and set the
1410 offset indirectly through the pointer. Otherwise, return 0. */
1411
1412 static rtx
1413 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1414 {
1415 rtx new;
1416 HOST_WIDE_INT offset;
1417
1418 if (x == virtual_incoming_args_rtx)
1419 new = arg_pointer_rtx, offset = in_arg_offset;
1420 else if (x == virtual_stack_vars_rtx)
1421 new = frame_pointer_rtx, offset = var_offset;
1422 else if (x == virtual_stack_dynamic_rtx)
1423 new = stack_pointer_rtx, offset = dynamic_offset;
1424 else if (x == virtual_outgoing_args_rtx)
1425 new = stack_pointer_rtx, offset = out_arg_offset;
1426 else if (x == virtual_cfa_rtx)
1427 new = arg_pointer_rtx, offset = cfa_offset;
1428 else
1429 return 0;
1430
1431 *poffset = offset;
1432 return new;
1433 }
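/* Illustrative effect: if in_arg_offset were, say, 16, a reference such
   as (plus virtual_incoming_args_rtx (const_int 8)) is rewritten by the
   callers below into (plus arg_pointer_rtx (const_int 24)); each virtual
   register names a region, and the recorded offset relocates references
   onto the corresponding hard register.  */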
1434 \f
1435
1436 /* Called when instantiate_virtual_regs has failed to update the instruction.
1437 Usually this means that a non-matching instruction has been emitted;
1438 for asm statements, however, the problem may be in the constraints. */
1439 static void
1440 instantiate_virtual_regs_lossage (rtx insn)
1441 {
1442 gcc_assert (asm_noperands (PATTERN (insn)) >= 0);
1443 error_for_asm (insn, "impossible constraint in %<asm%>");
1444 delete_insn (insn);
1445 }
1446 /* Given a pointer to a piece of rtx and an optional pointer to the
1447 containing object, instantiate any virtual registers present in it.
1448
1449 If EXTRA_INSNS, we always do the replacement and generate
1450 any extra insns before OBJECT. If it is zero, we do nothing if replacement
1451 is not valid.
1452
1453 Return 1 if we either had nothing to do or if we were able to do the
1454 needed replacement. Return 0 otherwise; we only return zero if
1455 EXTRA_INSNS is zero.
1456
1457 We first try some simple transformations to avoid the creation of extra
1458 pseudos. */
1459
1460 static int
1461 instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
1462 {
1463 rtx x;
1464 RTX_CODE code;
1465 rtx new = 0;
1466 HOST_WIDE_INT offset = 0;
1467 rtx temp;
1468 rtx seq;
1469 int i, j;
1470 const char *fmt;
1471
1472 /* Re-start here to avoid recursion in common cases. */
1473 restart:
1474
1475 x = *loc;
1476 if (x == 0)
1477 return 1;
1478
1479 /* We may have detected and deleted invalid asm statements. */
1480 if (object && INSN_P (object) && INSN_DELETED_P (object))
1481 return 1;
1482
1483 code = GET_CODE (x);
1484
1485 /* Check for some special cases. */
1486 switch (code)
1487 {
1488 case CONST_INT:
1489 case CONST_DOUBLE:
1490 case CONST_VECTOR:
1491 case CONST:
1492 case SYMBOL_REF:
1493 case CODE_LABEL:
1494 case PC:
1495 case CC0:
1496 case ASM_INPUT:
1497 case ADDR_VEC:
1498 case ADDR_DIFF_VEC:
1499 case RETURN:
1500 return 1;
1501
1502 case SET:
1503 /* We are allowed to set the virtual registers. This means that
1504 the actual register should receive the source minus the
1505 appropriate offset. This is used, for example, in the handling
1506 of non-local gotos. */
1507 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
1508 {
1509 rtx src = SET_SRC (x);
1510
1511 /* We are setting the register, not using it, so the relevant
1512 offset is the negative of the offset to use were we using
1513 the register. */
1514 offset = - offset;
1515 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
1516
1517 /* The only valid sources here are PLUS or REG. Just do
1518 the simplest possible thing to handle them. */
1519 if (!REG_P (src) && GET_CODE (src) != PLUS)
1520 {
1521 instantiate_virtual_regs_lossage (object);
1522 return 1;
1523 }
1524
1525 start_sequence ();
1526 if (!REG_P (src))
1527 temp = force_operand (src, NULL_RTX);
1528 else
1529 temp = src;
1530 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
1531 seq = get_insns ();
1532 end_sequence ();
1533
1534 emit_insn_before (seq, object);
1535 SET_DEST (x) = new;
1536
1537 if (! validate_change (object, &SET_SRC (x), temp, 0)
1538 || ! extra_insns)
1539 instantiate_virtual_regs_lossage (object);
1540
1541 return 1;
1542 }
1543
1544 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
1545 loc = &SET_SRC (x);
1546 goto restart;
1547
1548 case PLUS:
1549 /* Handle special case of virtual register plus constant. */
1550 if (CONSTANT_P (XEXP (x, 1)))
1551 {
1552 rtx old, new_offset;
1553
1554 /* Check for (plus (plus VIRT foo) (const_int)) first. */
1555 if (GET_CODE (XEXP (x, 0)) == PLUS)
1556 {
1557 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
1558 {
1559 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
1560 extra_insns);
1561 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
1562 }
1563 else
1564 {
1565 loc = &XEXP (x, 0);
1566 goto restart;
1567 }
1568 }
1569
1570 #ifdef POINTERS_EXTEND_UNSIGNED
1571 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1572 we can commute the PLUS and SUBREG because pointers into the
1573 frame are well-behaved. */
1574 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
1575 && GET_CODE (XEXP (x, 1)) == CONST_INT
1576 && 0 != (new
1577 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
1578 &offset))
1579 && validate_change (object, loc,
1580 plus_constant (gen_lowpart (ptr_mode,
1581 new),
1582 offset
1583 + INTVAL (XEXP (x, 1))),
1584 0))
1585 return 1;
1586 #endif
1587 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
1588 {
1589 /* We know the second operand is a constant. Unless the
1590 first operand is a REG (which has already been checked),
1591 it needs to be checked. */
1592 if (!REG_P (XEXP (x, 0)))
1593 {
1594 loc = &XEXP (x, 0);
1595 goto restart;
1596 }
1597 return 1;
1598 }
1599
1600 new_offset = plus_constant (XEXP (x, 1), offset);
1601
1602 /* If the new constant is zero, try to replace the sum with just
1603 the register. */
1604 if (new_offset == const0_rtx
1605 && validate_change (object, loc, new, 0))
1606 return 1;
1607
1608 /* Next try to replace the register and new offset.
1609 There are two changes to validate here and we can't assume that,
1610 when the old offset equals the new one, just changing the register
1611 will yield a valid insn. In the interests of a little efficiency,
1612 however, we only call validate change once (we don't queue up the
1613 changes and then call apply_change_group). */
1614
1615 old = XEXP (x, 0);
1616 if (offset == 0
1617 ? ! validate_change (object, &XEXP (x, 0), new, 0)
1618 : (XEXP (x, 0) = new,
1619 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
1620 {
1621 if (! extra_insns)
1622 {
1623 XEXP (x, 0) = old;
1624 return 0;
1625 }
1626
1627 /* Otherwise copy the new constant into a register and replace
1628 constant with that register. */
1629 temp = gen_reg_rtx (Pmode);
1630 XEXP (x, 0) = new;
1631 if (validate_change (object, &XEXP (x, 1), temp, 0))
1632 emit_insn_before (gen_move_insn (temp, new_offset), object);
1633 else
1634 {
1635 /* If that didn't work, replace this expression with a
1636 register containing the sum. */
1637
1638 XEXP (x, 0) = old;
1639 new = gen_rtx_PLUS (Pmode, new, new_offset);
1640
1641 start_sequence ();
1642 temp = force_operand (new, NULL_RTX);
1643 seq = get_insns ();
1644 end_sequence ();
1645
1646 emit_insn_before (seq, object);
1647 if (! validate_change (object, loc, temp, 0)
1648 && ! validate_replace_rtx (x, temp, object))
1649 {
1650 instantiate_virtual_regs_lossage (object);
1651 return 1;
1652 }
1653 }
1654 }
1655
1656 return 1;
1657 }
1658
1659 /* Fall through to generic two-operand expression case. */
1660 case EXPR_LIST:
1661 case CALL:
1662 case COMPARE:
1663 case MINUS:
1664 case MULT:
1665 case DIV: case UDIV:
1666 case MOD: case UMOD:
1667 case AND: case IOR: case XOR:
1668 case ROTATERT: case ROTATE:
1669 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
1670 case NE: case EQ:
1671 case GE: case GT: case GEU: case GTU:
1672 case LE: case LT: case LEU: case LTU:
1673 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
1674 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
1675 loc = &XEXP (x, 0);
1676 goto restart;
1677
1678 case MEM:
1679 /* Most cases of MEM that convert to valid addresses have already been
1680 handled by our scan of decls. The only special handling we
1681 need here is to make a copy of the rtx to ensure it isn't being
1682 shared if we have to change it to a pseudo.
1683
1684 If the rtx is a simple reference to an address via a virtual register,
1685 it can potentially be shared. In such cases, first try to make it
1686 a valid address, which can also be shared. Otherwise, copy it and
1687 proceed normally.
1688
1689 First check for common cases that need no processing. These are
1690 usually due to instantiation already being done on a previous instance
1691 of a shared rtx. */
1692
1693 temp = XEXP (x, 0);
1694 if (CONSTANT_ADDRESS_P (temp)
1695 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1696 || temp == arg_pointer_rtx
1697 #endif
1698 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1699 || temp == hard_frame_pointer_rtx
1700 #endif
1701 || temp == frame_pointer_rtx)
1702 return 1;
1703
1704 if (GET_CODE (temp) == PLUS
1705 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1706 && (XEXP (temp, 0) == frame_pointer_rtx
1707 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1708 || XEXP (temp, 0) == hard_frame_pointer_rtx
1709 #endif
1710 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1711 || XEXP (temp, 0) == arg_pointer_rtx
1712 #endif
1713 ))
1714 return 1;
1715
1716 if (temp == virtual_stack_vars_rtx
1717 || temp == virtual_incoming_args_rtx
1718 || (GET_CODE (temp) == PLUS
1719 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1720 && (XEXP (temp, 0) == virtual_stack_vars_rtx
1721 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
1722 {
1723 /* This MEM may be shared. If the substitution can be done without
1724 the need to generate new pseudos, we want to do it in place
1725 so all copies of the shared rtx benefit. The call below will
1726 only make substitutions if the resulting address is still
1727 valid.
1728
1729 Note that we cannot pass X as the object in the recursive call
1730 since the insn being processed may not allow all valid
1731 addresses. However, if we were not passed an object, we can
1732 only modify X without copying it if X will have a valid
1733 address.
1734
1735 ??? Also note that this can still lose if OBJECT is an insn that
1736 has fewer restrictions on an address than some other insn.
1737 In that case, we will modify the shared address. This case
1738 doesn't seem very likely, though. One case where this could
1739 happen is in the case of a USE or CLOBBER reference, but we
1740 take care of that below. */
1741
1742 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
1743 object ? object : x, 0))
1744 return 1;
1745
1746 /* Otherwise make a copy and process that copy. We copy the entire
1747 RTL expression since it might be a PLUS which could also be
1748 shared. */
1749 *loc = x = copy_rtx (x);
1750 }
1751
1752 /* Fall through to generic unary operation case. */
1753 case PREFETCH:
1754 case SUBREG:
1755 case STRICT_LOW_PART:
1756 case NEG: case NOT:
1757 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
1758 case SIGN_EXTEND: case ZERO_EXTEND:
1759 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
1760 case FLOAT: case FIX:
1761 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
1762 case ABS:
1763 case SQRT:
1764 case FFS:
1765 case CLZ: case CTZ:
1766 case POPCOUNT: case PARITY:
1767       /* These cases either have just one operand or we know that we need not
1768 check the rest of the operands. */
1769 loc = &XEXP (x, 0);
1770 goto restart;
1771
1772 case USE:
1773 case CLOBBER:
1774 /* If the operand is a MEM, see if the change is a valid MEM. If not,
1775 go ahead and make the invalid one, but do it to a copy. For a REG,
1776 just make the recursive call, since there's no chance of a problem. */
1777
1778 if ((MEM_P (XEXP (x, 0))
1779 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
1780 0))
1781 || (REG_P (XEXP (x, 0))
1782 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
1783 return 1;
1784
1785 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
1786 loc = &XEXP (x, 0);
1787 goto restart;
1788
1789 case REG:
1790 /* Try to replace with a PLUS. If that doesn't work, compute the sum
1791 in front of this insn and substitute the temporary. */
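      /* For example, (reg virtual-stack-vars) typically becomes
	 (plus (reg frame-pointer) (const_int OFFSET)); if the insn will
	 not accept that form and EXTRA_INSNS is nonzero, the sum is
	 instead computed into a fresh pseudo emitted just before OBJECT.
	 This is an illustrative sketch of the common case.  */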
1792 if ((new = instantiate_new_reg (x, &offset)) != 0)
1793 {
1794 temp = plus_constant (new, offset);
1795 if (!validate_change (object, loc, temp, 0))
1796 {
1797 if (! extra_insns)
1798 return 0;
1799
1800 start_sequence ();
1801 temp = force_operand (temp, NULL_RTX);
1802 seq = get_insns ();
1803 end_sequence ();
1804
1805 emit_insn_before (seq, object);
1806 if (! validate_change (object, loc, temp, 0)
1807 && ! validate_replace_rtx (x, temp, object))
1808 instantiate_virtual_regs_lossage (object);
1809 }
1810 }
1811
1812 return 1;
1813
1814 default:
1815 break;
1816 }
1817
1818 /* Scan all subexpressions. */
1819 fmt = GET_RTX_FORMAT (code);
1820 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1821 if (*fmt == 'e')
1822 {
1823 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
1824 return 0;
1825 }
1826 else if (*fmt == 'E')
1827 for (j = 0; j < XVECLEN (x, i); j++)
1828 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
1829 extra_insns))
1830 return 0;
1831
1832 return 1;
1833 }
1834 \f
1835 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1836 This means a type for which function calls must pass an address to the
1837 function or get an address back from the function.
1838 EXP may be a type node or an expression (whose type is tested). */
1839
1840 int
1841 aggregate_value_p (tree exp, tree fntype)
1842 {
1843 int i, regno, nregs;
1844 rtx reg;
1845
1846 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1847
1848 if (fntype)
1849 switch (TREE_CODE (fntype))
1850 {
1851 case CALL_EXPR:
1852 fntype = get_callee_fndecl (fntype);
1853 fntype = fntype ? TREE_TYPE (fntype) : 0;
1854 break;
1855 case FUNCTION_DECL:
1856 fntype = TREE_TYPE (fntype);
1857 break;
1858 case FUNCTION_TYPE:
1859 case METHOD_TYPE:
1860 break;
1861 case IDENTIFIER_NODE:
1862 fntype = 0;
1863 break;
1864 default:
1865 	/* We don't expect other tree codes here.  */
1866 gcc_unreachable ();
1867 }
1868
1869 if (TREE_CODE (type) == VOID_TYPE)
1870 return 0;
1871 /* If the front end has decided that this needs to be passed by
1872 reference, do so. */
1873 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1874 && DECL_BY_REFERENCE (exp))
1875 return 1;
1876 if (targetm.calls.return_in_memory (type, fntype))
1877 return 1;
1878 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1879 and thus can't be returned in registers. */
1880 if (TREE_ADDRESSABLE (type))
1881 return 1;
1882 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1883 return 1;
1884 /* Make sure we have suitable call-clobbered regs to return
1885 the value in; if not, we must return it in memory. */
1886 reg = hard_function_value (type, 0, 0);
1887
1888 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1889 it is OK. */
1890 if (!REG_P (reg))
1891 return 0;
1892
1893 regno = REGNO (reg);
1894 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1895 for (i = 0; i < nregs; i++)
1896 if (! call_used_regs[regno + i])
1897 return 1;
1898 return 0;
1899 }
1900 \f
1901 /* Return true if we should assign DECL a pseudo register; false if it
1902 should live on the local stack. */
1903
1904 bool
1905 use_register_for_decl (tree decl)
1906 {
1907 /* Honor volatile. */
1908 if (TREE_SIDE_EFFECTS (decl))
1909 return false;
1910
1911 /* Honor addressability. */
1912 if (TREE_ADDRESSABLE (decl))
1913 return false;
1914
1915 /* Only register-like things go in registers. */
1916 if (DECL_MODE (decl) == BLKmode)
1917 return false;
1918
1919 /* If -ffloat-store specified, don't put explicit float variables
1920 into registers. */
1921 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1922 propagates values across these stores, and it probably shouldn't. */
1923 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1924 return false;
1925
1926 /* Compiler-generated temporaries can always go in registers. */
1927 if (DECL_ARTIFICIAL (decl))
1928 return true;
1929
1930 return (optimize || DECL_REGISTER (decl));
1931 }
1932
1933 /* Return true if TYPE should be passed by invisible reference. */
1934
1935 bool
1936 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1937 tree type, bool named_arg)
1938 {
1939 if (type)
1940 {
1941 /* If this type contains non-trivial constructors, then it is
1942 forbidden for the middle-end to create any new copies. */
1943 if (TREE_ADDRESSABLE (type))
1944 return true;
1945
1946 /* GCC post 3.4 passes *all* variable sized types by reference. */
1947 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1948 return true;
1949 }
1950
1951 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1952 }
1953
1954 /* Return true if TYPE, which is passed by reference, should be callee
1955 copied instead of caller copied. */
1956
1957 bool
1958 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1959 tree type, bool named_arg)
1960 {
1961 if (type && TREE_ADDRESSABLE (type))
1962 return false;
1963 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1964 }
1965
1966 /* Structures to communicate between the subroutines of assign_parms.
1967 The first holds data persistent across all parameters, the second
1968 is cleared out for each parameter. */
1969
1970 struct assign_parm_data_all
1971 {
1972 CUMULATIVE_ARGS args_so_far;
1973 struct args_size stack_args_size;
1974 tree function_result_decl;
1975 tree orig_fnargs;
1976 rtx conversion_insns;
1977 HOST_WIDE_INT pretend_args_size;
1978 HOST_WIDE_INT extra_pretend_bytes;
1979 int reg_parm_stack_space;
1980 };
1981
1982 struct assign_parm_data_one
1983 {
1984 tree nominal_type;
1985 tree passed_type;
1986 rtx entry_parm;
1987 rtx stack_parm;
1988 enum machine_mode nominal_mode;
1989 enum machine_mode passed_mode;
1990 enum machine_mode promoted_mode;
1991 struct locate_and_pad_arg_data locate;
1992 int partial;
1993 BOOL_BITFIELD named_arg : 1;
1994 BOOL_BITFIELD last_named : 1;
1995 BOOL_BITFIELD passed_pointer : 1;
1996 BOOL_BITFIELD on_stack : 1;
1997 BOOL_BITFIELD loaded_in_reg : 1;
1998 };
1999
2000 /* A subroutine of assign_parms. Initialize ALL. */
2001
2002 static void
2003 assign_parms_initialize_all (struct assign_parm_data_all *all)
2004 {
2005 tree fntype;
2006
2007 memset (all, 0, sizeof (*all));
2008
2009 fntype = TREE_TYPE (current_function_decl);
2010
2011 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2012 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2013 #else
2014 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2015 current_function_decl, -1);
2016 #endif
2017
2018 #ifdef REG_PARM_STACK_SPACE
2019 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2020 #endif
2021 }
2022
2023 /* If ARGS contains entries with complex types, split the entry into two
2024    entries of the component type.  Return a new list if substitutions are
2025 needed, else the old list. */
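/* For example, assuming a target that opts in via split_complex_arg,
   a single "_Complex double" PARM_DECL would be replaced by two DFmode
   PARM_DECLs, one for the real part and one for the imaginary part
   (illustrative; the component mode depends on the target).  */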
2026
2027 static tree
2028 split_complex_args (tree args)
2029 {
2030 tree p;
2031
2032 /* Before allocating memory, check for the common case of no complex. */
2033 for (p = args; p; p = TREE_CHAIN (p))
2034 {
2035 tree type = TREE_TYPE (p);
2036 if (TREE_CODE (type) == COMPLEX_TYPE
2037 && targetm.calls.split_complex_arg (type))
2038 goto found;
2039 }
2040 return args;
2041
2042 found:
2043 args = copy_list (args);
2044
2045 for (p = args; p; p = TREE_CHAIN (p))
2046 {
2047 tree type = TREE_TYPE (p);
2048 if (TREE_CODE (type) == COMPLEX_TYPE
2049 && targetm.calls.split_complex_arg (type))
2050 {
2051 tree decl;
2052 tree subtype = TREE_TYPE (type);
2053
2054 /* Rewrite the PARM_DECL's type with its component. */
2055 TREE_TYPE (p) = subtype;
2056 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2057 DECL_MODE (p) = VOIDmode;
2058 DECL_SIZE (p) = NULL;
2059 DECL_SIZE_UNIT (p) = NULL;
2060 layout_decl (p, 0);
2061
2062 /* Build a second synthetic decl. */
2063 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2064 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2065 layout_decl (decl, 0);
2066
2067 /* Splice it in; skip the new decl. */
2068 TREE_CHAIN (decl) = TREE_CHAIN (p);
2069 TREE_CHAIN (p) = decl;
2070 p = decl;
2071 }
2072 }
2073
2074 return args;
2075 }
2076
2077 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2078 the hidden struct return argument, and (abi willing) complex args.
2079 Return the new parameter list. */
2080
2081 static tree
2082 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2083 {
2084 tree fndecl = current_function_decl;
2085 tree fntype = TREE_TYPE (fndecl);
2086 tree fnargs = DECL_ARGUMENTS (fndecl);
2087
2088 /* If struct value address is treated as the first argument, make it so. */
2089 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2090 && ! current_function_returns_pcc_struct
2091 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2092 {
2093 tree type = build_pointer_type (TREE_TYPE (fntype));
2094 tree decl;
2095
2096 decl = build_decl (PARM_DECL, NULL_TREE, type);
2097 DECL_ARG_TYPE (decl) = type;
2098 DECL_ARTIFICIAL (decl) = 1;
2099
2100 TREE_CHAIN (decl) = fnargs;
2101 fnargs = decl;
2102 all->function_result_decl = decl;
2103 }
2104
2105 all->orig_fnargs = fnargs;
2106
2107 /* If the target wants to split complex arguments into scalars, do so. */
2108 if (targetm.calls.split_complex_arg)
2109 fnargs = split_complex_args (fnargs);
2110
2111 return fnargs;
2112 }
2113
2114 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2115 data for the parameter. Incorporate ABI specifics such as pass-by-
2116 reference and type promotion. */
2117
2118 static void
2119 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2120 struct assign_parm_data_one *data)
2121 {
2122 tree nominal_type, passed_type;
2123 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2124
2125 memset (data, 0, sizeof (*data));
2126
2127   /* Set LAST_NAMED if this is the last named arg before the last anonymous args.  */
2128 if (current_function_stdarg)
2129 {
2130 tree tem;
2131 for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
2132 if (DECL_NAME (tem))
2133 break;
2134 if (tem == 0)
2135 data->last_named = true;
2136 }
2137
2138 /* Set NAMED_ARG if this arg should be treated as a named arg. For
2139 most machines, if this is a varargs/stdarg function, then we treat
2140 the last named arg as if it were anonymous too. */
2141 if (targetm.calls.strict_argument_naming (&all->args_so_far))
2142 data->named_arg = 1;
2143 else
2144 data->named_arg = !data->last_named;
2145
2146 nominal_type = TREE_TYPE (parm);
2147 passed_type = DECL_ARG_TYPE (parm);
2148
2149 /* Look out for errors propagating this far. Also, if the parameter's
2150 type is void then its value doesn't matter. */
2151 if (TREE_TYPE (parm) == error_mark_node
2152 /* This can happen after weird syntax errors
2153 or if an enum type is defined among the parms. */
2154 || TREE_CODE (parm) != PARM_DECL
2155 || passed_type == NULL
2156 || VOID_TYPE_P (nominal_type))
2157 {
2158 nominal_type = passed_type = void_type_node;
2159 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2160 goto egress;
2161 }
2162
2163 /* Find mode of arg as it is passed, and mode of arg as it should be
2164 during execution of this function. */
2165 passed_mode = TYPE_MODE (passed_type);
2166 nominal_mode = TYPE_MODE (nominal_type);
2167
2168 /* If the parm is to be passed as a transparent union, use the type of
2169 the first field for the tests below. We have already verified that
2170 the modes are the same. */
2171 if (DECL_TRANSPARENT_UNION (parm)
2172 || (TREE_CODE (passed_type) == UNION_TYPE
2173 && TYPE_TRANSPARENT_UNION (passed_type)))
2174 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2175
2176 /* See if this arg was passed by invisible reference. */
2177 if (pass_by_reference (&all->args_so_far, passed_mode,
2178 passed_type, data->named_arg))
2179 {
2180 passed_type = nominal_type = build_pointer_type (passed_type);
2181 data->passed_pointer = true;
2182 passed_mode = nominal_mode = Pmode;
2183 }
2184
2185 /* Find mode as it is passed by the ABI. */
2186 promoted_mode = passed_mode;
2187 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2188 {
2189 int unsignedp = TYPE_UNSIGNED (passed_type);
2190 promoted_mode = promote_mode (passed_type, promoted_mode,
2191 &unsignedp, 1);
2192 }
2193
2194 egress:
2195 data->nominal_type = nominal_type;
2196 data->passed_type = passed_type;
2197 data->nominal_mode = nominal_mode;
2198 data->passed_mode = passed_mode;
2199 data->promoted_mode = promoted_mode;
2200 }
2201
2202 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2203
2204 static void
2205 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2206 struct assign_parm_data_one *data, bool no_rtl)
2207 {
2208 int varargs_pretend_bytes = 0;
2209
2210 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2211 data->promoted_mode,
2212 data->passed_type,
2213 &varargs_pretend_bytes, no_rtl);
2214
2215 /* If the back-end has requested extra stack space, record how much is
2216 needed. Do not change pretend_args_size otherwise since it may be
2217 nonzero from an earlier partial argument. */
2218 if (varargs_pretend_bytes > 0)
2219 all->pretend_args_size = varargs_pretend_bytes;
2220 }
2221
2222 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2223 the incoming location of the current parameter. */
2224
2225 static void
2226 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2227 struct assign_parm_data_one *data)
2228 {
2229 HOST_WIDE_INT pretend_bytes = 0;
2230 rtx entry_parm;
2231 bool in_regs;
2232
2233 if (data->promoted_mode == VOIDmode)
2234 {
2235 data->entry_parm = data->stack_parm = const0_rtx;
2236 return;
2237 }
2238
2239 #ifdef FUNCTION_INCOMING_ARG
2240 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2241 data->passed_type, data->named_arg);
2242 #else
2243 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2244 data->passed_type, data->named_arg);
2245 #endif
2246
2247 if (entry_parm == 0)
2248 data->promoted_mode = data->passed_mode;
2249
2250 /* Determine parm's home in the stack, in case it arrives in the stack
2251 or we should pretend it did. Compute the stack position and rtx where
2252 the argument arrives and its size.
2253
2254 There is one complexity here: If this was a parameter that would
2255 have been passed in registers, but wasn't only because it is
2256 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2257 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2258 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2259 as it was the previous time. */
2260 in_regs = entry_parm != 0;
2261 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2262 in_regs = true;
2263 #endif
2264 if (!in_regs && !data->named_arg)
2265 {
2266 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2267 {
2268 rtx tem;
2269 #ifdef FUNCTION_INCOMING_ARG
2270 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2271 data->passed_type, true);
2272 #else
2273 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2274 data->passed_type, true);
2275 #endif
2276 in_regs = tem != NULL;
2277 }
2278 }
2279
2280 /* If this parameter was passed both in registers and in the stack, use
2281 the copy on the stack. */
2282 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2283 data->passed_type))
2284 entry_parm = 0;
2285
2286 if (entry_parm)
2287 {
2288 int partial;
2289
2290 partial = FUNCTION_ARG_PARTIAL_NREGS (all->args_so_far,
2291 data->promoted_mode,
2292 data->passed_type,
2293 data->named_arg);
2294 data->partial = partial;
2295
2296 /* The caller might already have allocated stack space for the
2297 register parameters. */
2298 if (partial != 0 && all->reg_parm_stack_space == 0)
2299 {
2300 /* Part of this argument is passed in registers and part
2301 is passed on the stack. Ask the prologue code to extend
2302 the stack part so that we can recreate the full value.
2303
2304 PRETEND_BYTES is the size of the registers we need to store.
2305 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2306 stack space that the prologue should allocate.
2307
2308 Internally, gcc assumes that the argument pointer is aligned
2309 to STACK_BOUNDARY bits. This is used both for alignment
2310 optimizations (see init_emit) and to locate arguments that are
2311 aligned to more than PARM_BOUNDARY bits. We must preserve this
2312 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2313 a stack boundary. */
2314
2315 /* We assume at most one partial arg, and it must be the first
2316 argument on the stack. */
2317 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2318
2319 pretend_bytes = partial * UNITS_PER_WORD;
2320 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
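	  /* For example, assuming 4-byte words and a 16-byte stack
	     boundary (illustrative values only), a partial count of 3
	     gives PRETEND_BYTES == 12, which CEIL_ROUND pads up to a
	     pretend_args_size of 16.  */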
2321
2322 /* We want to align relative to the actual stack pointer, so
2323 don't include this in the stack size until later. */
2324 all->extra_pretend_bytes = all->pretend_args_size;
2325 }
2326 }
2327
2328 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2329 entry_parm ? data->partial : 0, current_function_decl,
2330 &all->stack_args_size, &data->locate);
2331
2332 /* Adjust offsets to include the pretend args. */
2333 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2334 data->locate.slot_offset.constant += pretend_bytes;
2335 data->locate.offset.constant += pretend_bytes;
2336
2337 data->entry_parm = entry_parm;
2338 }
2339
2340 /* A subroutine of assign_parms. If there is actually space on the stack
2341 for this parm, count it in stack_args_size and return true. */
2342
2343 static bool
2344 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2345 struct assign_parm_data_one *data)
2346 {
2347 /* Trivially true if we've no incoming register. */
2348 if (data->entry_parm == NULL)
2349 ;
2350 /* Also true if we're partially in registers and partially not,
2351 since we've arranged to drop the entire argument on the stack. */
2352 else if (data->partial != 0)
2353 ;
2354 /* Also true if the target says that it's passed in both registers
2355 and on the stack. */
2356 else if (GET_CODE (data->entry_parm) == PARALLEL
2357 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2358 ;
2359 /* Also true if the target says that there's stack allocated for
2360 all register parameters. */
2361 else if (all->reg_parm_stack_space > 0)
2362 ;
2363 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2364 else
2365 return false;
2366
2367 all->stack_args_size.constant += data->locate.size.constant;
2368 if (data->locate.size.var)
2369 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2370
2371 return true;
2372 }
2373
2374 /* A subroutine of assign_parms. Given that this parameter is allocated
2375 stack space by the ABI, find it. */
2376
2377 static void
2378 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2379 {
2380 rtx offset_rtx, stack_parm;
2381 unsigned int align, boundary;
2382
2383 /* If we're passing this arg using a reg, make its stack home the
2384 aligned stack slot. */
2385 if (data->entry_parm)
2386 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2387 else
2388 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2389
2390 stack_parm = current_function_internal_arg_pointer;
2391 if (offset_rtx != const0_rtx)
2392 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2393 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2394
2395 set_mem_attributes (stack_parm, parm, 1);
2396
2397 boundary = FUNCTION_ARG_BOUNDARY (data->promoted_mode, data->passed_type);
2398 align = 0;
2399
2400 /* If we're padding upward, we know that the alignment of the slot
2401 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2402 intentionally forcing upward padding. Otherwise we have to come
2403 up with a guess at the alignment based on OFFSET_RTX. */
2404 if (data->locate.where_pad == upward || data->entry_parm)
2405 align = boundary;
2406 else if (GET_CODE (offset_rtx) == CONST_INT)
2407 {
2408 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2409 align = align & -align;
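      /* ALIGN & -ALIGN isolates the lowest set bit, i.e. the largest
	 power of two dividing both the offset (in bits) and the slot
	 boundary.  For example, a 12-byte offset (96 bits) OR'd with a
	 64-bit boundary gives a guessed alignment of 32 bits
	 (illustrative values).  */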
2410 }
2411 if (align > 0)
2412 set_mem_align (stack_parm, align);
2413
2414 if (data->entry_parm)
2415 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2416
2417 data->stack_parm = stack_parm;
2418 }
2419
2420 /* A subroutine of assign_parms.  Adjust DATA->ENTRY_PARM such that it's
2421 always valid and contiguous. */
2422
2423 static void
2424 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2425 {
2426 rtx entry_parm = data->entry_parm;
2427 rtx stack_parm = data->stack_parm;
2428
2429 /* If this parm was passed part in regs and part in memory, pretend it
2430 arrived entirely in memory by pushing the register-part onto the stack.
2431 In the special case of a DImode or DFmode that is split, we could put
2432 it together in a pseudoreg directly, but for now that's not worth
2433 bothering with. */
2434 if (data->partial != 0)
2435 {
2436 /* Handle calls that pass values in multiple non-contiguous
2437 locations. The Irix 6 ABI has examples of this. */
2438 if (GET_CODE (entry_parm) == PARALLEL)
2439 emit_group_store (validize_mem (stack_parm), entry_parm,
2440 data->passed_type,
2441 int_size_in_bytes (data->passed_type));
2442 else
2443 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2444 data->partial);
2445
2446 entry_parm = stack_parm;
2447 }
2448
2449 /* If we didn't decide this parm came in a register, by default it came
2450 on the stack. */
2451 else if (entry_parm == NULL)
2452 entry_parm = stack_parm;
2453
2454 /* When an argument is passed in multiple locations, we can't make use
2455 of this information, but we can save some copying if the whole argument
2456 is passed in a single register. */
2457 else if (GET_CODE (entry_parm) == PARALLEL
2458 && data->nominal_mode != BLKmode
2459 && data->passed_mode != BLKmode)
2460 {
2461 size_t i, len = XVECLEN (entry_parm, 0);
2462
2463 for (i = 0; i < len; i++)
2464 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2465 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2466 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2467 == data->passed_mode)
2468 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2469 {
2470 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2471 break;
2472 }
2473 }
2474
2475 data->entry_parm = entry_parm;
2476 }
2477
2478 /* A subroutine of assign_parms.  Adjust DATA->STACK_PARM such that it's
2479 always valid and properly aligned. */
2480
2481
2482 static void
2483 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2484 {
2485 rtx stack_parm = data->stack_parm;
2486
2487 /* If we can't trust the parm stack slot to be aligned enough for its
2488 ultimate type, don't use that slot after entry. We'll make another
2489 stack slot, if we need one. */
2490 if (STRICT_ALIGNMENT && stack_parm
2491 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2492 stack_parm = NULL;
2493
2494 /* If parm was passed in memory, and we need to convert it on entry,
2495 don't store it back in that same slot. */
2496 else if (data->entry_parm == stack_parm
2497 && data->nominal_mode != BLKmode
2498 && data->nominal_mode != data->passed_mode)
2499 stack_parm = NULL;
2500
2501 data->stack_parm = stack_parm;
2502 }
2503
2504 /* A subroutine of assign_parms. Return true if the current parameter
2505 should be stored as a BLKmode in the current frame. */
2506
2507 static bool
2508 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2509 {
2510 if (data->nominal_mode == BLKmode)
2511 return true;
2512 if (GET_CODE (data->entry_parm) == PARALLEL)
2513 return true;
2514
2515 #ifdef BLOCK_REG_PADDING
2516 /* Only assign_parm_setup_block knows how to deal with register arguments
2517 that are padded at the least significant end. */
2518 if (REG_P (data->entry_parm)
2519 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2520 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2521 == (BYTES_BIG_ENDIAN ? upward : downward)))
2522 return true;
2523 #endif
2524
2525 return false;
2526 }
2527
2528 /* A subroutine of assign_parms. Arrange for the parameter to be
2529    present and valid in DATA->STACK_PARM.  */
2530
2531 static void
2532 assign_parm_setup_block (struct assign_parm_data_all *all,
2533 tree parm, struct assign_parm_data_one *data)
2534 {
2535 rtx entry_parm = data->entry_parm;
2536 rtx stack_parm = data->stack_parm;
2537
2538 if (GET_CODE (entry_parm) == PARALLEL)
2539 entry_parm = emit_group_move_into_temps (entry_parm);
2540
2541 /* If we've a non-block object that's nevertheless passed in parts,
2542 reconstitute it in register operations rather than on the stack. */
2543 if (GET_CODE (entry_parm) == PARALLEL
2544 && data->nominal_mode != BLKmode
2545 && XVECLEN (entry_parm, 0) > 1
2546 && use_register_for_decl (parm))
2547 {
2548 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2549
2550 push_to_sequence (all->conversion_insns);
2551
2552       /* For values returned in multiple registers, handle possibly
2553 incompatible calls to emit_group_store.
2554
2555 For example, the following would be invalid, and would have to
2556 be fixed by the conditional below:
2557
2558 emit_group_store ((reg:SF), (parallel:DF))
2559 emit_group_store ((reg:SI), (parallel:DI))
2560
2561 	 An example of this is doubles in e500 v2:
2562 (parallel:DF (expr_list (reg:SI) (const_int 0))
2563 (expr_list (reg:SI) (const_int 4))). */
2564 if (data->nominal_mode != data->passed_mode)
2565 {
2566 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2567 emit_group_store (t, entry_parm, NULL_TREE,
2568 GET_MODE_SIZE (GET_MODE (entry_parm)));
2569 convert_move (parmreg, t, 0);
2570 }
2571 else
2572 emit_group_store (parmreg, entry_parm, data->nominal_type,
2573 int_size_in_bytes (data->nominal_type));
2574
2575 all->conversion_insns = get_insns ();
2576 end_sequence ();
2577
2578 SET_DECL_RTL (parm, parmreg);
2579 return;
2580 }
2581
2582 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2583 calls that pass values in multiple non-contiguous locations. */
2584 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2585 {
2586 HOST_WIDE_INT size = int_size_in_bytes (data->passed_type);
2587 HOST_WIDE_INT size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2588 rtx mem;
2589
2590 /* Note that we will be storing an integral number of words.
2591 So we have to be careful to ensure that we allocate an
2592 integral number of words. We do this below in the
2593 assign_stack_local if space was not allocated in the argument
2594 list. If it was, this will not work if PARM_BOUNDARY is not
2595 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2596 	 if it becomes a problem.  The exception is when BLKmode arrives
2597 with arguments not conforming to word_mode. */
2598
2599 if (stack_parm == 0)
2600 {
2601 stack_parm = assign_stack_local (BLKmode, size_stored, 0);
2602 data->stack_parm = stack_parm;
2603 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2604 set_mem_attributes (stack_parm, parm, 1);
2605 }
2606 else if (GET_CODE (entry_parm) == PARALLEL)
2607 ;
2608 else
2609 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2610
2611 mem = validize_mem (stack_parm);
2612
2613 /* Handle values in multiple non-contiguous locations. */
2614 if (GET_CODE (entry_parm) == PARALLEL)
2615 {
2616 push_to_sequence (all->conversion_insns);
2617 emit_group_store (mem, entry_parm, data->passed_type, size);
2618 all->conversion_insns = get_insns ();
2619 end_sequence ();
2620 }
2621
2622 else if (size == 0)
2623 ;
2624
2625 /* If SIZE is that of a mode no bigger than a word, just use
2626 that mode's store operation. */
2627 else if (size <= UNITS_PER_WORD)
2628 {
2629 enum machine_mode mode
2630 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2631
2632 if (mode != BLKmode
2633 #ifdef BLOCK_REG_PADDING
2634 && (size == UNITS_PER_WORD
2635 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2636 != (BYTES_BIG_ENDIAN ? upward : downward)))
2637 #endif
2638 )
2639 {
2640 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2641 emit_move_insn (change_address (mem, mode, 0), reg);
2642 }
2643
2644 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2645 machine must be aligned to the left before storing
2646 to memory. Note that the previous test doesn't
2647 handle all cases (e.g. SIZE == 3). */
2648 else if (size != UNITS_PER_WORD
2649 #ifdef BLOCK_REG_PADDING
2650 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2651 == downward)
2652 #else
2653 && BYTES_BIG_ENDIAN
2654 #endif
2655 )
2656 {
2657 rtx tem, x;
2658 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2659 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2660
2661 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2662 build_int_cst (NULL_TREE, by),
2663 NULL_RTX, 1);
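	      /* For example, assuming 4-byte words and SIZE == 3
		 (illustrative values), BY is 8, so the three significant
		 bytes are shifted into the most significant positions
		 and thus land at the low addresses of the word slot
		 when the value is padded downward (the big-endian
		 convention).  */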
2664 tem = change_address (mem, word_mode, 0);
2665 emit_move_insn (tem, x);
2666 }
2667 else
2668 move_block_from_reg (REGNO (entry_parm), mem,
2669 size_stored / UNITS_PER_WORD);
2670 }
2671 else
2672 move_block_from_reg (REGNO (entry_parm), mem,
2673 size_stored / UNITS_PER_WORD);
2674 }
2675
2676 SET_DECL_RTL (parm, stack_parm);
2677 }
2678
2679 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2680 parameter. Get it there. Perform all ABI specified conversions. */
2681
2682 static void
2683 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2684 struct assign_parm_data_one *data)
2685 {
2686 rtx parmreg;
2687 enum machine_mode promoted_nominal_mode;
2688 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2689 bool did_conversion = false;
2690
2691 /* Store the parm in a pseudoregister during the function, but we may
2692 need to do it in a wider mode. */
2693
2694 promoted_nominal_mode
2695 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
2696
2697 parmreg = gen_reg_rtx (promoted_nominal_mode);
2698
2699 if (!DECL_ARTIFICIAL (parm))
2700 mark_user_reg (parmreg);
2701
2702 /* If this was an item that we received a pointer to,
2703 set DECL_RTL appropriately. */
2704 if (data->passed_pointer)
2705 {
2706 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2707 set_mem_attributes (x, parm, 1);
2708 SET_DECL_RTL (parm, x);
2709 }
2710 else
2711 SET_DECL_RTL (parm, parmreg);
2712
2713 /* Copy the value into the register. */
2714 if (data->nominal_mode != data->passed_mode
2715 || promoted_nominal_mode != data->promoted_mode)
2716 {
2717 int save_tree_used;
2718
2719 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2720 mode, by the caller. We now have to convert it to
2721 NOMINAL_MODE, if different. However, PARMREG may be in
2722 a different mode than NOMINAL_MODE if it is being stored
2723 promoted.
2724
2725 If ENTRY_PARM is a hard register, it might be in a register
2726 not valid for operating in its mode (e.g., an odd-numbered
2727 register for a DFmode). In that case, moves are the only
2728 thing valid, so we can't do a convert from there. This
2729 	 occurs when the calling sequence allows such misaligned
2730 usages.
2731
2732 In addition, the conversion may involve a call, which could
2733 clobber parameters which haven't been copied to pseudo
2734 registers yet. Therefore, we must first copy the parm to
2735 a pseudo reg here, and save the conversion until after all
2736 parameters have been moved. */
2737
2738 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2739
2740 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2741
2742 push_to_sequence (all->conversion_insns);
2743 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2744
2745 if (GET_CODE (tempreg) == SUBREG
2746 && GET_MODE (tempreg) == data->nominal_mode
2747 && REG_P (SUBREG_REG (tempreg))
2748 && data->nominal_mode == data->passed_mode
2749 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2750 && GET_MODE_SIZE (GET_MODE (tempreg))
2751 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2752 {
2753 /* The argument is already sign/zero extended, so note it
2754 into the subreg. */
2755 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2756 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2757 }
2758
2759 /* TREE_USED gets set erroneously during expand_assignment. */
2760 save_tree_used = TREE_USED (parm);
2761 expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2762 TREE_USED (parm) = save_tree_used;
2763 all->conversion_insns = get_insns ();
2764 end_sequence ();
2765
2766 did_conversion = true;
2767 }
2768 else
2769 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2770
2771 /* If we were passed a pointer but the actual value can safely live
2772 in a register, put it in one. */
2773 if (data->passed_pointer
2774 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2775 /* If by-reference argument was promoted, demote it. */
2776 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2777 || use_register_for_decl (parm)))
2778 {
2779 /* We can't use nominal_mode, because it will have been set to
2780 Pmode above. We must use the actual mode of the parm. */
2781 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2782 mark_user_reg (parmreg);
2783
2784 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2785 {
2786 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2787 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2788
2789 push_to_sequence (all->conversion_insns);
2790 emit_move_insn (tempreg, DECL_RTL (parm));
2791 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2792 emit_move_insn (parmreg, tempreg);
2793 all->conversion_insns = get_insns ();
2794 end_sequence ();
2795
2796 did_conversion = true;
2797 }
2798 else
2799 emit_move_insn (parmreg, DECL_RTL (parm));
2800
2801 SET_DECL_RTL (parm, parmreg);
2802
2803 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2804 now the parm. */
2805 data->stack_parm = NULL;
2806 }
2807
2808 /* Mark the register as eliminable if we did no conversion and it was
2809 copied from memory at a fixed offset, and the arg pointer was not
2810 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2811 offset formed an invalid address, such memory-equivalences as we
2812 make here would screw up life analysis for it. */
2813 if (data->nominal_mode == data->passed_mode
2814 && !did_conversion
2815 && data->stack_parm != 0
2816 && MEM_P (data->stack_parm)
2817 && data->locate.offset.var == 0
2818 && reg_mentioned_p (virtual_incoming_args_rtx,
2819 XEXP (data->stack_parm, 0)))
2820 {
2821 rtx linsn = get_last_insn ();
2822 rtx sinsn, set;
2823
2824 /* Mark complex types separately. */
2825 if (GET_CODE (parmreg) == CONCAT)
2826 {
2827 enum machine_mode submode
2828 = GET_MODE_INNER (GET_MODE (parmreg));
2829 int regnor = REGNO (XEXP (parmreg, 0));
2830 int regnoi = REGNO (XEXP (parmreg, 1));
2831 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2832 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2833 GET_MODE_SIZE (submode));
2834
2835 /* Scan backwards for the set of the real and
2836 imaginary parts. */
2837 for (sinsn = linsn; sinsn != 0;
2838 sinsn = prev_nonnote_insn (sinsn))
2839 {
2840 set = single_set (sinsn);
2841 if (set == 0)
2842 continue;
2843
2844 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2845 REG_NOTES (sinsn)
2846 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2847 REG_NOTES (sinsn));
2848 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2849 REG_NOTES (sinsn)
2850 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2851 REG_NOTES (sinsn));
2852 }
2853 }
2854 else if ((set = single_set (linsn)) != 0
2855 && SET_DEST (set) == parmreg)
2856 REG_NOTES (linsn)
2857 = gen_rtx_EXPR_LIST (REG_EQUIV,
2858 data->stack_parm, REG_NOTES (linsn));
2859 }
2860
2861 /* For pointer data type, suggest pointer register. */
2862 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2863 mark_reg_pointer (parmreg,
2864 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2865 }
2866
2867 /* A subroutine of assign_parms. Allocate stack space to hold the current
2868 parameter. Get it there. Perform all ABI specified conversions. */
2869
2870 static void
2871 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2872 struct assign_parm_data_one *data)
2873 {
2874 /* Value must be stored in the stack slot STACK_PARM during function
2875 execution. */
2876
2877 if (data->promoted_mode != data->nominal_mode)
2878 {
2879 /* Conversion is required. */
2880 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2881
2882 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2883
2884 push_to_sequence (all->conversion_insns);
2885 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2886 TYPE_UNSIGNED (TREE_TYPE (parm)));
2887
2888 if (data->stack_parm)
2889 /* ??? This may need a big-endian conversion on sparc64. */
2890 data->stack_parm
2891 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2892
2893 all->conversion_insns = get_insns ();
2894 end_sequence ();
2895 }
2896
2897 if (data->entry_parm != data->stack_parm)
2898 {
2899 if (data->stack_parm == 0)
2900 {
2901 data->stack_parm
2902 = assign_stack_local (GET_MODE (data->entry_parm),
2903 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2904 0);
2905 set_mem_attributes (data->stack_parm, parm, 1);
2906 }
2907
2908 if (data->promoted_mode != data->nominal_mode)
2909 {
2910 push_to_sequence (all->conversion_insns);
2911 emit_move_insn (validize_mem (data->stack_parm),
2912 validize_mem (data->entry_parm));
2913 all->conversion_insns = get_insns ();
2914 end_sequence ();
2915 }
2916 else
2917 emit_move_insn (validize_mem (data->stack_parm),
2918 validize_mem (data->entry_parm));
2919 }
2920
2921 SET_DECL_RTL (parm, data->stack_parm);
2922 }
2923
2924 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2925 undo the frobbing that we did in assign_parms_augmented_arg_list. */
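/* For example, the two scalar decls created by split_complex_args for
   a "_Complex double" argument are recombined here into a single
   (concat:DC real imag) rtx for the original PARM_DECL (an
   illustrative sketch of the common case).  */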
2926
2927 static void
2928 assign_parms_unsplit_complex (tree orig_fnargs, tree fnargs)
2929 {
2930 tree parm;
2931
2932 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2933 {
2934 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2935 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2936 {
2937 rtx tmp, real, imag;
2938 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2939
2940 real = DECL_RTL (fnargs);
2941 imag = DECL_RTL (TREE_CHAIN (fnargs));
2942 if (inner != GET_MODE (real))
2943 {
2944 real = gen_lowpart_SUBREG (inner, real);
2945 imag = gen_lowpart_SUBREG (inner, imag);
2946 }
2947 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2948 SET_DECL_RTL (parm, tmp);
2949
2950 real = DECL_INCOMING_RTL (fnargs);
2951 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2952 if (inner != GET_MODE (real))
2953 {
2954 real = gen_lowpart_SUBREG (inner, real);
2955 imag = gen_lowpart_SUBREG (inner, imag);
2956 }
2957 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2958 set_decl_incoming_rtl (parm, tmp);
2959 fnargs = TREE_CHAIN (fnargs);
2960 }
2961 else
2962 {
2963 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2964 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2965
2966 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2967 instead of the copy of decl, i.e. FNARGS. */
2968 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2969 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2970 }
2971
2972 fnargs = TREE_CHAIN (fnargs);
2973 }
2974 }
2975
2976 /* Assign RTL expressions to the function's parameters. This may involve
2977 copying them into registers and using those registers as the DECL_RTL. */
2978
2979 static void
2980 assign_parms (tree fndecl)
2981 {
2982 struct assign_parm_data_all all;
2983 tree fnargs, parm;
2984 rtx internal_arg_pointer;
2985 int varargs_setup = 0;
2986
2987 /* If the reg that the virtual arg pointer will be translated into is
2988 not a fixed reg or is the stack pointer, make a copy of the virtual
2989 arg pointer, and address parms via the copy. The frame pointer is
2990 considered fixed even though it is not marked as such.
2991
2992 The second time through, simply use ap to avoid generating rtx. */
2993
2994 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
2995 || ! (fixed_regs[ARG_POINTER_REGNUM]
2996 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
2997 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
2998 else
2999 internal_arg_pointer = virtual_incoming_args_rtx;
3000 current_function_internal_arg_pointer = internal_arg_pointer;
3001
3002 assign_parms_initialize_all (&all);
3003 fnargs = assign_parms_augmented_arg_list (&all);
3004
3005 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3006 {
3007 struct assign_parm_data_one data;
3008
3009 /* Extract the type of PARM; adjust it according to ABI. */
3010 assign_parm_find_data_types (&all, parm, &data);
3011
3012 /* Early out for errors and void parameters. */
3013 if (data.passed_mode == VOIDmode)
3014 {
3015 SET_DECL_RTL (parm, const0_rtx);
3016 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3017 continue;
3018 }
3019
3020       /* Handle stdargs.  LAST_NAMED is a slight misnomer; it's also true
3021 for the unnamed dummy argument following the last named argument.
3022 See ABI silliness wrt strict_argument_naming and NAMED_ARG. So
3023 we only want to do this when we get to the actual last named
3024 argument, which will be the first time LAST_NAMED gets set. */
3025 if (data.last_named && !varargs_setup)
3026 {
3027 varargs_setup = true;
3028 assign_parms_setup_varargs (&all, &data, false);
3029 }
3030
3031 /* Find out where the parameter arrives in this function. */
3032 assign_parm_find_entry_rtl (&all, &data);
3033
3034 /* Find out where stack space for this parameter might be. */
3035 if (assign_parm_is_stack_parm (&all, &data))
3036 {
3037 assign_parm_find_stack_rtl (parm, &data);
3038 assign_parm_adjust_entry_rtl (&data);
3039 }
3040
3041 /* Record permanently how this parm was passed. */
3042 set_decl_incoming_rtl (parm, data.entry_parm);
3043
3044 /* Update info on where next arg arrives in registers. */
3045 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3046 data.passed_type, data.named_arg);
3047
3048 assign_parm_adjust_stack_rtl (&data);
3049
3050 if (assign_parm_setup_block_p (&data))
3051 assign_parm_setup_block (&all, parm, &data);
3052 else if (data.passed_pointer || use_register_for_decl (parm))
3053 assign_parm_setup_reg (&all, parm, &data);
3054 else
3055 assign_parm_setup_stack (&all, parm, &data);
3056 }
3057
3058 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3059 assign_parms_unsplit_complex (all.orig_fnargs, fnargs);
3060
3061 /* Output all parameter conversion instructions (possibly including calls)
3062 now that all parameters have been copied out of hard registers. */
3063 emit_insn (all.conversion_insns);
3064
3065 /* If we are receiving a struct value address as the first argument, set up
3066 the RTL for the function result. As this might require code to convert
3067 the transmitted address to Pmode, we do this here to ensure that possible
3068 preliminary conversions of the address have been emitted already. */
3069 if (all.function_result_decl)
3070 {
3071 tree result = DECL_RESULT (current_function_decl);
3072 rtx addr = DECL_RTL (all.function_result_decl);
3073 rtx x;
3074
3075 if (DECL_BY_REFERENCE (result))
3076 x = addr;
3077 else
3078 {
3079 addr = convert_memory_address (Pmode, addr);
3080 x = gen_rtx_MEM (DECL_MODE (result), addr);
3081 set_mem_attributes (x, result, 1);
3082 }
3083 SET_DECL_RTL (result, x);
3084 }
3085
3086 /* We have aligned all the args, so add space for the pretend args. */
3087 current_function_pretend_args_size = all.pretend_args_size;
3088 all.stack_args_size.constant += all.extra_pretend_bytes;
3089 current_function_args_size = all.stack_args_size.constant;
3090
3091 /* Adjust function incoming argument size for alignment and
3092 minimum length. */
3093
3094 #ifdef REG_PARM_STACK_SPACE
3095 current_function_args_size = MAX (current_function_args_size,
3096 REG_PARM_STACK_SPACE (fndecl));
3097 #endif
3098
3099 current_function_args_size
3100 = ((current_function_args_size + STACK_BYTES - 1)
3101 / STACK_BYTES) * STACK_BYTES;
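  /* For example, assuming a 16-byte STACK_BYTES (illustrative), an
     incoming argument area of 20 bytes is rounded up to 32 here.  */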
3102
3103 #ifdef ARGS_GROW_DOWNWARD
3104 current_function_arg_offset_rtx
3105 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3106 : expand_expr (size_diffop (all.stack_args_size.var,
3107 size_int (-all.stack_args_size.constant)),
3108 NULL_RTX, VOIDmode, 0));
3109 #else
3110 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3111 #endif
3112
3113 /* See how many bytes, if any, of its args a function should try to pop
3114 on return. */
3115
3116 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3117 current_function_args_size);
3118
3119   /* For a stdarg.h function, save info about
3120 regs and stack space used by the named args. */
3121
3122 current_function_args_info = all.args_so_far;
3123
3124 /* Set the rtx used for the function return value. Put this in its
3125 own variable so any optimizers that need this information don't have
3126 to include tree.h. Do this here so it gets done when an inlined
3127 function gets output. */
3128
3129 current_function_return_rtx
3130 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3131 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3132
3133 /* If scalar return value was computed in a pseudo-reg, or was a named
3134 return value that got dumped to the stack, copy that to the hard
3135 return register. */
3136 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3137 {
3138 tree decl_result = DECL_RESULT (fndecl);
3139 rtx decl_rtl = DECL_RTL (decl_result);
3140
3141 if (REG_P (decl_rtl)
3142 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3143 : DECL_REGISTER (decl_result))
3144 {
3145 rtx real_decl_rtl;
3146
3147 #ifdef FUNCTION_OUTGOING_VALUE
3148 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
3149 fndecl);
3150 #else
3151 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
3152 fndecl);
3153 #endif
3154 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3155 /* The delay slot scheduler assumes that current_function_return_rtx
3156 holds the hard register containing the return value, not a
3157 temporary pseudo. */
3158 current_function_return_rtx = real_decl_rtl;
3159 }
3160 }
3161 }
3162
3163 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3164 For all seen types, gimplify their sizes. */
3165
3166 static tree
3167 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3168 {
3169 tree t = *tp;
3170
3171 *walk_subtrees = 0;
3172 if (TYPE_P (t))
3173 {
3174 if (POINTER_TYPE_P (t))
3175 *walk_subtrees = 1;
3176 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t)))
3177 {
3178 gimplify_type_sizes (t, (tree *) data);
3179 *walk_subtrees = 1;
3180 }
3181 }
3182
3183 return NULL;
3184 }
3185
3186 /* Gimplify the parameter list for current_function_decl. This involves
3187 evaluating SAVE_EXPRs of variable sized parameters and generating code
3188 to implement callee-copies reference parameters. Returns a list of
3189 statements to add to the beginning of the function, or NULL if nothing
3190 to do. */
3191
3192 tree
3193 gimplify_parameters (void)
3194 {
3195 struct assign_parm_data_all all;
3196 tree fnargs, parm, stmts = NULL;
3197
3198 assign_parms_initialize_all (&all);
3199 fnargs = assign_parms_augmented_arg_list (&all);
3200
3201 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3202 {
3203 struct assign_parm_data_one data;
3204
3205 /* Extract the type of PARM; adjust it according to ABI. */
3206 assign_parm_find_data_types (&all, parm, &data);
3207
3208 /* Early out for errors and void parameters. */
3209 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3210 continue;
3211
3212 /* Update info on where next arg arrives in registers. */
3213 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3214 data.passed_type, data.named_arg);
3215
3216 /* ??? Once upon a time variable_size stuffed parameter list
3217 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3218 turned out to be less than manageable in the gimple world.
3219 Now we have to hunt them down ourselves. */
3220 walk_tree_without_duplicates (&data.passed_type,
3221 gimplify_parm_type, &stmts);
3222
3223 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3224 {
3225 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3226 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3227 }
3228
3229 if (data.passed_pointer)
3230 {
3231 tree type = TREE_TYPE (data.passed_type);
3232 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3233 type, data.named_arg))
3234 {
3235 tree local, t;
3236
3237 /* For constant sized objects, this is trivial; for
3238 variable-sized objects, we have to play games. */
3239 if (TREE_CONSTANT (DECL_SIZE (parm)))
3240 {
3241 local = create_tmp_var (type, get_name (parm));
3242 DECL_IGNORED_P (local) = 0;
3243 }
3244 else
3245 {
3246 tree ptr_type, addr, args;
3247
3248 ptr_type = build_pointer_type (type);
3249 addr = create_tmp_var (ptr_type, get_name (parm));
3250 DECL_IGNORED_P (addr) = 0;
3251 local = build_fold_indirect_ref (addr);
3252
3253 args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3254 t = built_in_decls[BUILT_IN_ALLOCA];
3255 t = build_function_call_expr (t, args);
3256 t = fold_convert (ptr_type, t);
3257 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3258 gimplify_and_add (t, &stmts);
3259 }
3260
3261 t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3262 gimplify_and_add (t, &stmts);
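	      /* As a rough sketch, the statements generated above amount to
		   addr = __builtin_alloca (size_of_parm);  (variable-sized case only)
		   local = parm;
		 where LOCAL is *ADDR in the variable-sized case and a
		 plain temporary otherwise.  */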
3263
3264 DECL_VALUE_EXPR (parm) = local;
3265 }
3266 }
3267 }
3268
3269 return stmts;
3270 }
3271 \f
3272 /* Indicate whether REGNO is an incoming argument to the current function
3273 that was promoted to a wider mode. If so, return the RTX for the
3274 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3275 that REGNO is promoted from and whether the promotion was signed or
3276 unsigned. */
3277
3278 rtx
3279 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3280 {
3281 tree arg;
3282
3283 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3284 arg = TREE_CHAIN (arg))
3285 if (REG_P (DECL_INCOMING_RTL (arg))
3286 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3287 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3288 {
3289 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3290 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3291
3292 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3293 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3294 && mode != DECL_MODE (arg))
3295 {
3296 *pmode = DECL_MODE (arg);
3297 *punsignedp = unsignedp;
3298 return DECL_INCOMING_RTL (arg);
3299 }
3300 }
3301
3302 return 0;
3303 }
3304
3305 \f
3306 /* Compute the size and offset from the start of the stacked arguments for a
3307 parm passed in mode PASSED_MODE and with type TYPE.
3308
3309 INITIAL_OFFSET_PTR points to the current offset into the stacked
3310 arguments.
3311
3312 The starting offset and size for this parm are returned in
3313 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3314    nonzero, the offset is that of the stack slot, which is returned in
3315 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3316 padding required from the initial offset ptr to the stack slot.
3317
3318 IN_REGS is nonzero if the argument will be passed in registers. It will
3319 never be set if REG_PARM_STACK_SPACE is not defined.
3320
3321 FNDECL is the function in which the argument was defined.
3322
3323 There are two types of rounding that are done. The first, controlled by
3324 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3325 list to be aligned to the specific boundary (in bits). This rounding
3326 affects the initial and starting offsets, but not the argument size.
3327
3328 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3329 optionally rounds the size of the parm to PARM_BOUNDARY. The
3330 initial offset is not affected by this rounding, while the size always
3331 is and the starting offset may be. */
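/* For example, assuming a 32-bit PARM_BOUNDARY and a 64-bit
   FUNCTION_ARG_BOUNDARY (illustrative values), a 6-byte argument whose
   initial offset is 4 bytes is placed at offset 8, and its recorded
   size is rounded up to 8 bytes.  */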
3332
3333 /* LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
3334 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3335 callers pass in the total size of args so far as
3336 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3337
3338 void
3339 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3340 int partial, tree fndecl ATTRIBUTE_UNUSED,
3341 struct args_size *initial_offset_ptr,
3342 struct locate_and_pad_arg_data *locate)
3343 {
3344 tree sizetree;
3345 enum direction where_pad;
3346 int boundary;
3347 int reg_parm_stack_space = 0;
3348 int part_size_in_regs;
3349
3350 #ifdef REG_PARM_STACK_SPACE
3351 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3352
3353 /* If we have found a stack parm before we reach the end of the
3354 area reserved for registers, skip that area. */
3355 if (! in_regs)
3356 {
3357 if (reg_parm_stack_space > 0)
3358 {
3359 if (initial_offset_ptr->var)
3360 {
3361 initial_offset_ptr->var
3362 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3363 ssize_int (reg_parm_stack_space));
3364 initial_offset_ptr->constant = 0;
3365 }
3366 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3367 initial_offset_ptr->constant = reg_parm_stack_space;
3368 }
3369 }
3370 #endif /* REG_PARM_STACK_SPACE */
3371
3372 part_size_in_regs = 0;
3373 if (reg_parm_stack_space == 0)
3374 part_size_in_regs = ((partial * UNITS_PER_WORD)
3375 / (PARM_BOUNDARY / BITS_PER_UNIT)
3376 * (PARM_BOUNDARY / BITS_PER_UNIT));
3377
3378 sizetree
3379 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3380 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3381 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3382 locate->where_pad = where_pad;
3383
3384 #ifdef ARGS_GROW_DOWNWARD
3385 locate->slot_offset.constant = -initial_offset_ptr->constant;
3386 if (initial_offset_ptr->var)
3387 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3388 initial_offset_ptr->var);
3389
3390 {
3391 tree s2 = sizetree;
3392 if (where_pad != none
3393 && (!host_integerp (sizetree, 1)
3394 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3395 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3396 SUB_PARM_SIZE (locate->slot_offset, s2);
3397 }
3398
3399 locate->slot_offset.constant += part_size_in_regs;
3400
3401 if (!in_regs
3402 #ifdef REG_PARM_STACK_SPACE
3403 || REG_PARM_STACK_SPACE (fndecl) > 0
3404 #endif
3405 )
3406 pad_to_arg_alignment (&locate->slot_offset, boundary,
3407 &locate->alignment_pad);
3408
3409 locate->size.constant = (-initial_offset_ptr->constant
3410 - locate->slot_offset.constant);
3411 if (initial_offset_ptr->var)
3412 locate->size.var = size_binop (MINUS_EXPR,
3413 size_binop (MINUS_EXPR,
3414 ssize_int (0),
3415 initial_offset_ptr->var),
3416 locate->slot_offset.var);
3417
3418 /* Pad_below needs the pre-rounded size to know how much to pad
3419 below. */
3420 locate->offset = locate->slot_offset;
3421 if (where_pad == downward)
3422 pad_below (&locate->offset, passed_mode, sizetree);
3423
3424 #else /* !ARGS_GROW_DOWNWARD */
3425 if (!in_regs
3426 #ifdef REG_PARM_STACK_SPACE
3427 || REG_PARM_STACK_SPACE (fndecl) > 0
3428 #endif
3429 )
3430 pad_to_arg_alignment (initial_offset_ptr, boundary,
3431 &locate->alignment_pad);
3432 locate->slot_offset = *initial_offset_ptr;
3433
3434 #ifdef PUSH_ROUNDING
3435 if (passed_mode != BLKmode)
3436 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3437 #endif
3438
3439 /* Pad_below needs the pre-rounded size to know how much to pad below
3440 so this must be done before rounding up. */
3441 locate->offset = locate->slot_offset;
3442 if (where_pad == downward)
3443 pad_below (&locate->offset, passed_mode, sizetree);
3444
3445 if (where_pad != none
3446 && (!host_integerp (sizetree, 1)
3447 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3448 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3449
3450 ADD_PARM_SIZE (locate->size, sizetree);
3451
3452 locate->size.constant -= part_size_in_regs;
3453 #endif /* ARGS_GROW_DOWNWARD */
3454 }
3455
3456 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3457 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
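
/* For example (values assumed): with BOUNDARY == 128 bits (16 bytes),
   STACK_POINTER_OFFSET == 0 and a constant offset of 20 bytes, the offset
   is rounded up to CEIL_ROUND (20, 16) == 32 here (or down to
   FLOOR_ROUND (20, 16) == 16 when ARGS_GROW_DOWNWARD).  If BOUNDARY also
   exceeds both PARM_BOUNDARY and STACK_BOUNDARY, the difference (12 or -4
   bytes in this example) is recorded in *ALIGNMENT_PAD.  */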
3458
3459 static void
3460 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3461 struct args_size *alignment_pad)
3462 {
3463 tree save_var = NULL_TREE;
3464 HOST_WIDE_INT save_constant = 0;
3465 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3466 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3467
3468 #ifdef SPARC_STACK_BOUNDARY_HACK
3469 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
3470 higher than the real alignment of %sp. However, when it does this,
3471 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
3472 This is a temporary hack while the sparc port is fixed. */
3473 if (SPARC_STACK_BOUNDARY_HACK)
3474 sp_offset = 0;
3475 #endif
3476
3477 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3478 {
3479 save_var = offset_ptr->var;
3480 save_constant = offset_ptr->constant;
3481 }
3482
3483 alignment_pad->var = NULL_TREE;
3484 alignment_pad->constant = 0;
3485
3486 if (boundary > BITS_PER_UNIT)
3487 {
3488 if (offset_ptr->var)
3489 {
3490 tree sp_offset_tree = ssize_int (sp_offset);
3491 tree offset = size_binop (PLUS_EXPR,
3492 ARGS_SIZE_TREE (*offset_ptr),
3493 sp_offset_tree);
3494 #ifdef ARGS_GROW_DOWNWARD
3495 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3496 #else
3497 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3498 #endif
3499
3500 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3501 /* ARGS_SIZE_TREE includes constant term. */
3502 offset_ptr->constant = 0;
3503 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3504 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3505 save_var);
3506 }
3507 else
3508 {
3509 offset_ptr->constant = -sp_offset +
3510 #ifdef ARGS_GROW_DOWNWARD
3511 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3512 #else
3513 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3514 #endif
3515 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3516 alignment_pad->constant = offset_ptr->constant - save_constant;
3517 }
3518 }
3519 }
3520
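/* Adjust *OFFSET_PTR to account for any padding placed below an argument:
   add the difference between the PARM_BOUNDARY-rounded size of the argument
   (of mode PASSED_MODE, or of size SIZETREE for BLKmode) and its actual
   size.  For instance, a 24-bit mode with a 32-bit PARM_BOUNDARY adds
   4 - 3 == 1 byte to the offset.  */
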
3521 static void
3522 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3523 {
3524 if (passed_mode != BLKmode)
3525 {
3526 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3527 offset_ptr->constant
3528 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3529 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3530 - GET_MODE_SIZE (passed_mode));
3531 }
3532 else
3533 {
3534 if (TREE_CODE (sizetree) != INTEGER_CST
3535 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3536 {
3537 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3538 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3539 /* Add it in. */
3540 ADD_PARM_SIZE (*offset_ptr, s2);
3541 SUB_PARM_SIZE (*offset_ptr, sizetree);
3542 }
3543 }
3544 }
3545 \f
3546 /* Walk the tree of blocks describing the binding levels within a function
3547 and warn about variables that might be killed by setjmp or vfork.
3548 This is done after calling flow_analysis and before global_alloc
3549 clobbers the pseudo-regs to hard regs. */
3550
3551 void
3552 setjmp_vars_warning (tree block)
3553 {
3554 tree decl, sub;
3555
3556 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3557 {
3558 if (TREE_CODE (decl) == VAR_DECL
3559 && DECL_RTL_SET_P (decl)
3560 && REG_P (DECL_RTL (decl))
3561 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3562 warning ("%Jvariable %qD might be clobbered by %<longjmp%>"
3563 " or %<vfork%>",
3564 decl, decl);
3565 }
3566
3567 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3568 setjmp_vars_warning (sub);
3569 }
3570
3571 /* Do the appropriate part of setjmp_vars_warning
3572 but for arguments instead of local variables. */
3573
3574 void
3575 setjmp_args_warning (void)
3576 {
3577 tree decl;
3578 for (decl = DECL_ARGUMENTS (current_function_decl);
3579 decl; decl = TREE_CHAIN (decl))
3580 if (DECL_RTL (decl) != 0
3581 && REG_P (DECL_RTL (decl))
3582 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3583 warning ("%Jargument %qD might be clobbered by %<longjmp%> or %<vfork%>",
3584 decl, decl);
3585 }
3586
3587 \f
3588 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3589 and create duplicate blocks. */
3590 /* ??? Need an option to either create block fragments or to create
3591 abstract origin duplicates of a source block. It really depends
3592 on what optimization has been performed. */
3593
3594 void
3595 reorder_blocks (void)
3596 {
3597 tree block = DECL_INITIAL (current_function_decl);
3598 varray_type block_stack;
3599
3600 if (block == NULL_TREE)
3601 return;
3602
3603 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
3604
3605 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3606 clear_block_marks (block);
3607
3608 /* Prune the old trees away, so that they don't get in the way. */
3609 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3610 BLOCK_CHAIN (block) = NULL_TREE;
3611
3612 /* Recreate the block tree from the note nesting. */
3613 reorder_blocks_1 (get_insns (), block, &block_stack);
3614 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3615
3616 /* Remove deleted blocks from the block fragment chains. */
3617 reorder_fix_fragments (block);
3618 }
3619
3620 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3621
3622 void
3623 clear_block_marks (tree block)
3624 {
3625 while (block)
3626 {
3627 TREE_ASM_WRITTEN (block) = 0;
3628 clear_block_marks (BLOCK_SUBBLOCKS (block));
3629 block = BLOCK_CHAIN (block);
3630 }
3631 }
3632
3633 static void
3634 reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
3635 {
3636 rtx insn;
3637
3638 for (insn = insns; insn; insn = NEXT_INSN (insn))
3639 {
3640 if (NOTE_P (insn))
3641 {
3642 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3643 {
3644 tree block = NOTE_BLOCK (insn);
3645
3646 /* If we have seen this block before, that means it now
3647 spans multiple address regions. Create a new fragment. */
3648 if (TREE_ASM_WRITTEN (block))
3649 {
3650 tree new_block = copy_node (block);
3651 tree origin;
3652
3653 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3654 ? BLOCK_FRAGMENT_ORIGIN (block)
3655 : block);
3656 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3657 BLOCK_FRAGMENT_CHAIN (new_block)
3658 = BLOCK_FRAGMENT_CHAIN (origin);
3659 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3660
3661 NOTE_BLOCK (insn) = new_block;
3662 block = new_block;
3663 }
3664
3665 BLOCK_SUBBLOCKS (block) = 0;
3666 TREE_ASM_WRITTEN (block) = 1;
3667 /* When there's only one block for the entire function,
3668 current_block == block, and we mustn't do this; it
3669 would cause infinite recursion. */
3670 if (block != current_block)
3671 {
3672 BLOCK_SUPERCONTEXT (block) = current_block;
3673 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3674 BLOCK_SUBBLOCKS (current_block) = block;
3675 current_block = block;
3676 }
3677 VARRAY_PUSH_TREE (*p_block_stack, block);
3678 }
3679 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3680 {
3681 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
3682 VARRAY_POP (*p_block_stack);
3683 BLOCK_SUBBLOCKS (current_block)
3684 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3685 current_block = BLOCK_SUPERCONTEXT (current_block);
3686 }
3687 }
3688 }
3689 }
3690
3691 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3692 appears in the block tree, select one of the fragments to become
3693 the new origin block. */
3694
3695 static void
3696 reorder_fix_fragments (tree block)
3697 {
3698 while (block)
3699 {
3700 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3701 tree new_origin = NULL_TREE;
3702
3703 if (dup_origin)
3704 {
3705 if (! TREE_ASM_WRITTEN (dup_origin))
3706 {
3707 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3708
3709 /* Find the first of the remaining fragments. There must
3710 be at least one -- the current block. */
3711 while (! TREE_ASM_WRITTEN (new_origin))
3712 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3713 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3714 }
3715 }
3716 else if (! dup_origin)
3717 new_origin = block;
3718
3719 /* Re-root the rest of the fragments to the new origin. In the
3720 case that DUP_ORIGIN was null, that means BLOCK was the origin
3721 of a chain of fragments and we want to remove those fragments
3722 that didn't make it to the output. */
3723 if (new_origin)
3724 {
3725 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3726 tree chain = *pp;
3727
3728 while (chain)
3729 {
3730 if (TREE_ASM_WRITTEN (chain))
3731 {
3732 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3733 *pp = chain;
3734 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3735 }
3736 chain = BLOCK_FRAGMENT_CHAIN (chain);
3737 }
3738 *pp = NULL_TREE;
3739 }
3740
3741 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3742 block = BLOCK_CHAIN (block);
3743 }
3744 }
3745
3746 /* Reverse the order of elements in the chain T of blocks,
3747 and return the new head of the chain (old last element). */
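
/* For example, the chain A -> B -> C becomes C -> B -> A, and C is
   returned.  */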
3748
3749 tree
3750 blocks_nreverse (tree t)
3751 {
3752 tree prev = 0, decl, next;
3753 for (decl = t; decl; decl = next)
3754 {
3755 next = BLOCK_CHAIN (decl);
3756 BLOCK_CHAIN (decl) = prev;
3757 prev = decl;
3758 }
3759 return prev;
3760 }
3761
3762 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3763 non-NULL, list them all into VECTOR, in a depth-first preorder
3764 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3765 blocks. */
3766
3767 static int
3768 all_blocks (tree block, tree *vector)
3769 {
3770 int n_blocks = 0;
3771
3772 while (block)
3773 {
3774 TREE_ASM_WRITTEN (block) = 0;
3775
3776 /* Record this block. */
3777 if (vector)
3778 vector[n_blocks] = block;
3779
3780 ++n_blocks;
3781
3782 /* Record the subblocks, and their subblocks... */
3783 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3784 vector ? vector + n_blocks : 0);
3785 block = BLOCK_CHAIN (block);
3786 }
3787
3788 return n_blocks;
3789 }
3790
3791 /* Return a vector containing all the blocks rooted at BLOCK. The
3792 number of elements in the vector is stored in N_BLOCKS_P. The
3793 vector is dynamically allocated; it is the caller's responsibility
3794 to call `free' on the pointer returned. */
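
/* A minimal caller-side sketch (illustrative only):

     int n_blocks;
     tree *vec = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
     ... walk vec[0] .. vec[n_blocks - 1] ...
     free (vec);  */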
3795
3796 static tree *
3797 get_block_vector (tree block, int *n_blocks_p)
3798 {
3799 tree *block_vector;
3800
3801 *n_blocks_p = all_blocks (block, NULL);
3802 block_vector = xmalloc (*n_blocks_p * sizeof (tree));
3803 all_blocks (block, block_vector);
3804
3805 return block_vector;
3806 }
3807
3808 static GTY(()) int next_block_index = 2;
3809
3810 /* Set BLOCK_NUMBER for all the blocks in FN. */
3811
3812 void
3813 number_blocks (tree fn)
3814 {
3815 int i;
3816 int n_blocks;
3817 tree *block_vector;
3818
3819 /* For SDB and XCOFF debugging output, we start numbering the blocks
3820 from 1 within each function, rather than keeping a running
3821 count. */
3822 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3823 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3824 next_block_index = 1;
3825 #endif
3826
3827 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3828
3829 /* The top-level BLOCK isn't numbered at all. */
3830 for (i = 1; i < n_blocks; ++i)
3831 /* We number the blocks from two. */
3832 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3833
3834 free (block_vector);
3835
3836 return;
3837 }
3838
3839 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3840
3841 tree
3842 debug_find_var_in_block_tree (tree var, tree block)
3843 {
3844 tree t;
3845
3846 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3847 if (t == var)
3848 return block;
3849
3850 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3851 {
3852 tree ret = debug_find_var_in_block_tree (var, t);
3853 if (ret)
3854 return ret;
3855 }
3856
3857 return NULL_TREE;
3858 }
3859 \f
3860 /* Allocate a function structure for FNDECL and set its contents
3861 to the defaults. */
3862
3863 void
3864 allocate_struct_function (tree fndecl)
3865 {
3866 tree result;
3867 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3868
3869 cfun = ggc_alloc_cleared (sizeof (struct function));
3870
3871 cfun->stack_alignment_needed = STACK_BOUNDARY;
3872 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3873
3874 current_function_funcdef_no = funcdef_no++;
3875
3876 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3877
3878 init_eh_for_function ();
3879
3880 lang_hooks.function.init (cfun);
3881 if (init_machine_status)
3882 cfun->machine = (*init_machine_status) ();
3883
3884 if (fndecl == NULL)
3885 return;
3886
3887 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3888 cfun->decl = fndecl;
3889
3890 result = DECL_RESULT (fndecl);
3891 if (aggregate_value_p (result, fndecl))
3892 {
3893 #ifdef PCC_STATIC_STRUCT_RETURN
3894 current_function_returns_pcc_struct = 1;
3895 #endif
3896 current_function_returns_struct = 1;
3897 }
3898
3899 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3900
3901 current_function_stdarg
3902 = (fntype
3903 && TYPE_ARG_TYPES (fntype) != 0
3904 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3905 != void_type_node));
3906 }
3907
3908 /* Reset cfun and other non-struct-function variables to defaults as
3909 appropriate for emitting rtl at the start of a function. */
3910
3911 static void
3912 prepare_function_start (tree fndecl)
3913 {
3914 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3915 cfun = DECL_STRUCT_FUNCTION (fndecl);
3916 else
3917 allocate_struct_function (fndecl);
3918 init_emit ();
3919 init_varasm_status (cfun);
3920 init_expr ();
3921
3922 cse_not_expected = ! optimize;
3923
3924 /* Caller save not needed yet. */
3925 caller_save_needed = 0;
3926
3927 /* We haven't done register allocation yet. */
3928 reg_renumber = 0;
3929
3930 /* Indicate that we have not instantiated virtual registers yet. */
3931 virtuals_instantiated = 0;
3932
3933 /* Indicate that we want CONCATs now. */
3934 generating_concat_p = 1;
3935
3936 /* Indicate we have no need of a frame pointer yet. */
3937 frame_pointer_needed = 0;
3938 }
3939
3940 /* Initialize the rtl expansion mechanism so that we can do simple things
3941 like generate sequences. This is used to provide a context during global
3942 initialization of some passes. */
3943 void
3944 init_dummy_function_start (void)
3945 {
3946 prepare_function_start (NULL);
3947 }
3948
3949 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3950 and initialize static variables for generating RTL for the statements
3951 of the function. */
3952
3953 void
3954 init_function_start (tree subr)
3955 {
3956 prepare_function_start (subr);
3957
3958 /* Prevent ever trying to delete the first instruction of a
3959 function. Also tell final how to output a linenum before the
3960 function prologue. Note linenums could be missing, e.g. when
3961 compiling a Java .class file. */
3962 if (! DECL_IS_BUILTIN (subr))
3963 emit_line_note (DECL_SOURCE_LOCATION (subr));
3964
3965 /* Make sure first insn is a note even if we don't want linenums.
3966 This makes sure the first insn will never be deleted.
3967 Also, final expects a note to appear there. */
3968 emit_note (NOTE_INSN_DELETED);
3969
3970 /* Warn if this value is an aggregate type,
3971 regardless of which calling convention we are using for it. */
3972 if (warn_aggregate_return
3973 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3974 warning ("function returns an aggregate");
3975 }
3976
3977 /* Make sure all values used by the optimization passes have sane
3978 defaults. */
3979 void
3980 init_function_for_compilation (void)
3981 {
3982 reg_renumber = 0;
3983
3984 /* No prologue/epilogue insns yet. */
3985 VARRAY_GROW (prologue, 0);
3986 VARRAY_GROW (epilogue, 0);
3987 VARRAY_GROW (sibcall_epilogue, 0);
3988 }
3989
3990 /* Expand a call to __main at the beginning of a possible main function. */
3991
3992 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
3993 #undef HAS_INIT_SECTION
3994 #define HAS_INIT_SECTION
3995 #endif
3996
3997 void
3998 expand_main_function (void)
3999 {
4000 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
4001 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
4002 {
4003 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
4004 rtx tmp, seq;
4005
4006 start_sequence ();
4007 /* Forcibly align the stack. */
4008 #ifdef STACK_GROWS_DOWNWARD
4009 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
4010 stack_pointer_rtx, 1, OPTAB_WIDEN);
4011 #else
4012 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
4013 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
4014 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
4015 stack_pointer_rtx, 1, OPTAB_WIDEN);
4016 #endif
4017 if (tmp != stack_pointer_rtx)
4018 emit_move_insn (stack_pointer_rtx, tmp);
4019
4020 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
4021 tmp = force_reg (Pmode, const0_rtx);
4022 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
4023 seq = get_insns ();
4024 end_sequence ();
4025
4026 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
4027 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
4028 break;
4029 if (tmp)
4030 emit_insn_before (seq, tmp);
4031 else
4032 emit_insn (seq);
4033 }
4034 #endif
4035
4036 #ifndef HAS_INIT_SECTION
4037 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4038 #endif
4039 }
4040 \f
4041 /* Start the RTL for a new function, and set variables used for
4042 emitting RTL.
4043 SUBR is the FUNCTION_DECL node. */
4046
4047 void
4048 expand_function_start (tree subr)
4049 {
4050 /* Make sure volatile mem refs aren't considered
4051 valid operands of arithmetic insns. */
4052 init_recog_no_volatile ();
4053
4054 current_function_profile
4055 = (profile_flag
4056 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4057
4058 current_function_limit_stack
4059 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4060
4061 /* Make the label for return statements to jump to. Do not special
4062 case machines with special return instructions -- they will be
4063 handled later during jump, ifcvt, or epilogue creation. */
4064 return_label = gen_label_rtx ();
4065
4066 /* Initialize rtx used to return the value. */
4067 /* Do this before assign_parms so that we copy the struct value address
4068 before any library calls that assign parms might generate. */
4069
4070 /* Decide whether to return the value in memory or in a register. */
4071 if (aggregate_value_p (DECL_RESULT (subr), subr))
4072 {
4073 /* Returning something that won't go in a register. */
4074 rtx value_address = 0;
4075
4076 #ifdef PCC_STATIC_STRUCT_RETURN
4077 if (current_function_returns_pcc_struct)
4078 {
4079 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4080 value_address = assemble_static_space (size);
4081 }
4082 else
4083 #endif
4084 {
4085 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
4086 /* Expect to be passed the address of a place to store the value.
4087 If it is passed as an argument, assign_parms will take care of
4088 it. */
4089 if (sv)
4090 {
4091 value_address = gen_reg_rtx (Pmode);
4092 emit_move_insn (value_address, sv);
4093 }
4094 }
4095 if (value_address)
4096 {
4097 rtx x = value_address;
4098 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4099 {
4100 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4101 set_mem_attributes (x, DECL_RESULT (subr), 1);
4102 }
4103 SET_DECL_RTL (DECL_RESULT (subr), x);
4104 }
4105 }
4106 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4107 /* If return mode is void, this decl rtl should not be used. */
4108 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4109 else
4110 {
4111 /* Compute the return values into a pseudo reg, which we will copy
4112 into the true return register after the cleanups are done. */
4113 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4114 if (TYPE_MODE (return_type) != BLKmode
4115 && targetm.calls.return_in_msb (return_type))
4116 /* expand_function_end will insert the appropriate padding in
4117 this case. Use the return value's natural (unpadded) mode
4118 within the function proper. */
4119 SET_DECL_RTL (DECL_RESULT (subr),
4120 gen_reg_rtx (TYPE_MODE (return_type)));
4121 else
4122 {
4123 /* In order to figure out what mode to use for the pseudo, we
4124 figure out what the mode of the eventual return register will
4125 actually be, and use that. */
4126 rtx hard_reg = hard_function_value (return_type, subr, 1);
4127
4128 /* Structures that are returned in registers are not
4129 aggregate_value_p, so we may see a PARALLEL or a REG. */
4130 if (REG_P (hard_reg))
4131 SET_DECL_RTL (DECL_RESULT (subr),
4132 gen_reg_rtx (GET_MODE (hard_reg)));
4133 else
4134 {
4135 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4136 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4137 }
4138 }
4139
4140 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4141 result to the real return register(s). */
4142 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4143 }
4144
4145 /* Initialize rtx for parameters and local variables.
4146 In some cases this requires emitting insns. */
4147 assign_parms (subr);
4148
4149 /* If function gets a static chain arg, store it. */
4150 if (cfun->static_chain_decl)
4151 {
4152 tree parm = cfun->static_chain_decl;
4153 rtx local = gen_reg_rtx (Pmode);
4154
4155 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4156 SET_DECL_RTL (parm, local);
4157 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4158
4159 emit_move_insn (local, static_chain_incoming_rtx);
4160 }
4161
4162 /* If the function receives a non-local goto, then store the
4163 bits we need to restore the frame pointer. */
4164 if (cfun->nonlocal_goto_save_area)
4165 {
4166 tree t_save;
4167 rtx r_save;
4168
4169 /* ??? We need to do this save early. Unfortunately, at this point
4170 the frame variable has not yet been declared. Help out... */
4171 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4172
4173 t_save = build4 (ARRAY_REF, ptr_type_node,
4174 cfun->nonlocal_goto_save_area,
4175 integer_zero_node, NULL_TREE, NULL_TREE);
4176 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4177 r_save = convert_memory_address (Pmode, r_save);
4178
4179 emit_move_insn (r_save, virtual_stack_vars_rtx);
4180 update_nonlocal_goto_save_area ();
4181 }
4182
4183 /* The following was moved from init_function_start.
4184 The move is supposed to make sdb output more accurate. */
4185 /* Indicate the beginning of the function body,
4186 as opposed to parm setup. */
4187 emit_note (NOTE_INSN_FUNCTION_BEG);
4188
4189 if (!NOTE_P (get_last_insn ()))
4190 emit_note (NOTE_INSN_DELETED);
4191 parm_birth_insn = get_last_insn ();
4192
4193 if (current_function_profile)
4194 {
4195 #ifdef PROFILE_HOOK
4196 PROFILE_HOOK (current_function_funcdef_no);
4197 #endif
4198 }
4199
4200 /* After the display initializations is where the tail-recursion label
4201 should go, if we end up needing one. Ensure we have a NOTE here
4202 since some things (like trampolines) get placed before this. */
4203 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
4204
4205 /* Make sure there is a line number after the function entry setup code. */
4206 force_next_line_note ();
4207 }
4208 \f
4209 /* Undo the effects of init_dummy_function_start. */
4210 void
4211 expand_dummy_function_end (void)
4212 {
4213 /* End any sequences that failed to be closed due to syntax errors. */
4214 while (in_sequence_p ())
4215 end_sequence ();
4216
4217 /* Outside function body, can't compute type's actual size
4218 until next function's body starts. */
4219
4220 free_after_parsing (cfun);
4221 free_after_compilation (cfun);
4222 cfun = 0;
4223 }
4224
4225 /* Call DOIT for each hard register used as a return value from
4226 the current function. */
4227
4228 void
4229 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4230 {
4231 rtx outgoing = current_function_return_rtx;
4232
4233 if (! outgoing)
4234 return;
4235
4236 if (REG_P (outgoing))
4237 (*doit) (outgoing, arg);
4238 else if (GET_CODE (outgoing) == PARALLEL)
4239 {
4240 int i;
4241
4242 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4243 {
4244 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4245
4246 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4247 (*doit) (x, arg);
4248 }
4249 }
4250 }
4251
4252 static void
4253 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4254 {
4255 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4256 }
4257
4258 void
4259 clobber_return_register (void)
4260 {
4261 diddle_return_value (do_clobber_return_reg, NULL);
4262
4263 /* In case we do use pseudo to return value, clobber it too. */
4264 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4265 {
4266 tree decl_result = DECL_RESULT (current_function_decl);
4267 rtx decl_rtl = DECL_RTL (decl_result);
4268 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4269 {
4270 do_clobber_return_reg (decl_rtl, NULL);
4271 }
4272 }
4273 }
4274
4275 static void
4276 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4277 {
4278 emit_insn (gen_rtx_USE (VOIDmode, reg));
4279 }
4280
4281 void
4282 use_return_register (void)
4283 {
4284 diddle_return_value (do_use_return_reg, NULL);
4285 }
4286
4287 /* Possibly warn about unused parameters. */
4288 void
4289 do_warn_unused_parameter (tree fn)
4290 {
4291 tree decl;
4292
4293 for (decl = DECL_ARGUMENTS (fn);
4294 decl; decl = TREE_CHAIN (decl))
4295 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4296 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4297 warning ("%Junused parameter %qD", decl, decl);
4298 }
4299
4300 static GTY(()) rtx initial_trampoline;
4301
4302 /* Generate RTL for the end of the current function. */
4303
4304 void
4305 expand_function_end (void)
4306 {
4307 rtx clobber_after;
4308
4309 /* If arg_pointer_save_area was referenced only from a nested
4310 function, we will not have initialized it yet. Do that now. */
4311 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4312 get_arg_pointer_save_area (cfun);
4313
4314 /* If we are doing stack checking and this function makes calls,
4315 do a stack probe at the start of the function to ensure we have enough
4316 space for another stack frame. */
4317 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4318 {
4319 rtx insn, seq;
4320
4321 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4322 if (CALL_P (insn))
4323 {
4324 start_sequence ();
4325 probe_stack_range (STACK_CHECK_PROTECT,
4326 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4327 seq = get_insns ();
4328 end_sequence ();
4329 emit_insn_before (seq, tail_recursion_reentry);
4330 break;
4331 }
4332 }
4333
4334 /* Possibly warn about unused parameters.
4335 When the front end does unit-at-a-time compilation, the warning is already
4336 issued at finalization time. */
4337 if (warn_unused_parameter
4338 && !lang_hooks.callgraph.expand_function)
4339 do_warn_unused_parameter (current_function_decl);
4340
4341 /* End any sequences that failed to be closed due to syntax errors. */
4342 while (in_sequence_p ())
4343 end_sequence ();
4344
4345 clear_pending_stack_adjust ();
4346 do_pending_stack_adjust ();
4347
4348 /* @@@ This is a kludge. We want to ensure that instructions that
4349 may trap are not moved into the epilogue by scheduling, because
4350 we don't always emit unwind information for the epilogue.
4351 However, not all machine descriptions define a blockage insn, so
4352 emit an ASM_INPUT to act as one. */
4353 if (flag_non_call_exceptions)
4354 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4355
4356 /* Mark the end of the function body.
4357 If control reaches this insn, the function can drop through
4358 without returning a value. */
4359 emit_note (NOTE_INSN_FUNCTION_END);
4360
4361 /* Must mark the last line number note in the function, so that the test
4362 coverage code can avoid counting the last line twice. This just tells
4363 the code to ignore the immediately following line note, since there
4364 already exists a copy of this note somewhere above. This line number
4365 note is still needed for debugging though, so we can't delete it. */
4366 if (flag_test_coverage)
4367 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4368
4369 /* Output a linenumber for the end of the function.
4370 SDB depends on this. */
4371 force_next_line_note ();
4372 emit_line_note (input_location);
4373
4374 /* Before the return label (if any), clobber the return
4375 registers so that they are not propagated live to the rest of
4376 the function. This can only happen with functions that drop
4377 through; if there had been a return statement, there would
4378 have either been a return rtx, or a jump to the return label.
4379
4380 We delay actual code generation until after the current_function_value_rtx
4381 is computed. */
4382 clobber_after = get_last_insn ();
4383
4384 /* Output the label for the actual return from the function. */
4385 emit_label (return_label);
4386
4387 /* Let except.c know where it should emit the call to unregister
4388 the function context for sjlj exceptions. */
4389 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
4390 sjlj_emit_function_exit_after (get_last_insn ());
4391
4392 /* If we had calls to alloca, and this machine needs
4393 an accurate stack pointer to exit the function,
4394 insert some code to save and restore the stack pointer. */
4395 if (! EXIT_IGNORE_STACK
4396 && current_function_calls_alloca)
4397 {
4398 rtx tem = 0;
4399
4400 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4401 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4402 }
4403
4404 /* If scalar return value was computed in a pseudo-reg, or was a named
4405 return value that got dumped to the stack, copy that to the hard
4406 return register. */
4407 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4408 {
4409 tree decl_result = DECL_RESULT (current_function_decl);
4410 rtx decl_rtl = DECL_RTL (decl_result);
4411
4412 if (REG_P (decl_rtl)
4413 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4414 : DECL_REGISTER (decl_result))
4415 {
4416 rtx real_decl_rtl = current_function_return_rtx;
4417
4418 /* This should be set in assign_parms. */
4419 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4420
4421 /* If this is a BLKmode structure being returned in registers,
4422 then use the mode computed in expand_return. Note that if
4423 decl_rtl is memory, then its mode may have been changed,
4424 but that current_function_return_rtx has not. */
4425 if (GET_MODE (real_decl_rtl) == BLKmode)
4426 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4427
4428 /* If a non-BLKmode return value should be padded at the least
4429 significant end of the register, shift it left by the appropriate
4430 amount. BLKmode results are handled using the group load/store
4431 machinery. */
4432 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4433 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4434 {
4435 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4436 REGNO (real_decl_rtl)),
4437 decl_rtl);
4438 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4439 }
4440 /* If a named return value dumped decl_return to memory, then
4441 we may need to re-do the PROMOTE_MODE signed/unsigned
4442 extension. */
4443 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4444 {
4445 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4446
4447 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4448 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4449 &unsignedp, 1);
4450
4451 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4452 }
4453 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4454 {
4455 /* If expand_function_start has created a PARALLEL for decl_rtl,
4456 move the result to the real return registers. Otherwise, do
4457 a group load from decl_rtl for a named return. */
4458 if (GET_CODE (decl_rtl) == PARALLEL)
4459 emit_group_move (real_decl_rtl, decl_rtl);
4460 else
4461 emit_group_load (real_decl_rtl, decl_rtl,
4462 TREE_TYPE (decl_result),
4463 int_size_in_bytes (TREE_TYPE (decl_result)));
4464 }
4465 else
4466 emit_move_insn (real_decl_rtl, decl_rtl);
4467 }
4468 }
4469
4470 /* If returning a structure, arrange to return the address of the value
4471 in a place where debuggers expect to find it.
4472
4473 If returning a structure PCC style,
4474 the caller also depends on this value.
4475 And current_function_returns_pcc_struct is not necessarily set. */
4476 if (current_function_returns_struct
4477 || current_function_returns_pcc_struct)
4478 {
4479 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4480 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4481 rtx outgoing;
4482
4483 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4484 type = TREE_TYPE (type);
4485 else
4486 value_address = XEXP (value_address, 0);
4487
4488 #ifdef FUNCTION_OUTGOING_VALUE
4489 outgoing = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4490 current_function_decl);
4491 #else
4492 outgoing = FUNCTION_VALUE (build_pointer_type (type),
4493 current_function_decl);
4494 #endif
4495
4496 /* Mark this as a function return value so integrate will delete the
4497 assignment and USE below when inlining this function. */
4498 REG_FUNCTION_VALUE_P (outgoing) = 1;
4499
4500 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4501 value_address = convert_memory_address (GET_MODE (outgoing),
4502 value_address);
4503
4504 emit_move_insn (outgoing, value_address);
4505
4506 /* Show return register used to hold result (in this case the address
4507 of the result). */
4508 current_function_return_rtx = outgoing;
4509 }
4510
4511 /* If this is an implementation of throw, do what's necessary to
4512 communicate between __builtin_eh_return and the epilogue. */
4513 expand_eh_return ();
4514
4515 /* Emit the actual code to clobber return register. */
4516 {
4517 rtx seq;
4518
4519 start_sequence ();
4520 clobber_return_register ();
4521 expand_naked_return ();
4522 seq = get_insns ();
4523 end_sequence ();
4524
4525 emit_insn_after (seq, clobber_after);
4526 }
4527
4528 /* Output the label for the naked return from the function. */
4529 emit_label (naked_return_label);
4530
4531 /* ??? This should no longer be necessary since stupid is no longer with
4532 us, but there are some parts of the compiler (e.g. reload_combine and
4533 sh mach_dep_reorg) that still try to compute their own lifetime info
4534 instead of using the general framework. */
4535 use_return_register ();
4536 }
4537
4538 rtx
4539 get_arg_pointer_save_area (struct function *f)
4540 {
4541 rtx ret = f->x_arg_pointer_save_area;
4542
4543 if (! ret)
4544 {
4545 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4546 f->x_arg_pointer_save_area = ret;
4547 }
4548
4549 if (f == cfun && ! f->arg_pointer_save_area_init)
4550 {
4551 rtx seq;
4552
4553 /* Save the arg pointer at the beginning of the function. The
4554 generated stack slot may not be a valid memory address, so we
4555 have to check it and fix it if necessary. */
4556 start_sequence ();
4557 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4558 seq = get_insns ();
4559 end_sequence ();
4560
4561 push_topmost_sequence ();
4562 emit_insn_after (seq, get_insns ());
4563 pop_topmost_sequence ();
4564 }
4565
4566 return ret;
4567 }
4568 \f
4569 /* Extend a vector that records the INSN_UIDs of INSNS
4570 (a list of one or more insns). */
4571
4572 static void
4573 record_insns (rtx insns, varray_type *vecp)
4574 {
4575 int i, len;
4576 rtx tmp;
4577
4578 tmp = insns;
4579 len = 0;
4580 while (tmp != NULL_RTX)
4581 {
4582 len++;
4583 tmp = NEXT_INSN (tmp);
4584 }
4585
4586 i = VARRAY_SIZE (*vecp);
4587 VARRAY_GROW (*vecp, i + len);
4588 tmp = insns;
4589 while (tmp != NULL_RTX)
4590 {
4591 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
4592 i++;
4593 tmp = NEXT_INSN (tmp);
4594 }
4595 }
4596
4597 /* Set the locator of the insn chain starting at INSN to LOC. */
4598 static void
4599 set_insn_locators (rtx insn, int loc)
4600 {
4601 while (insn != NULL_RTX)
4602 {
4603 if (INSN_P (insn))
4604 INSN_LOCATOR (insn) = loc;
4605 insn = NEXT_INSN (insn);
4606 }
4607 }
4608
4609 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4610 be running after reorg, SEQUENCE rtl is possible. */
4611
4612 static int
4613 contains (rtx insn, varray_type vec)
4614 {
4615 int i, j;
4616
4617 if (NONJUMP_INSN_P (insn)
4618 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4619 {
4620 int count = 0;
4621 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4622 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4623 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
4624 count++;
4625 return count;
4626 }
4627 else
4628 {
4629 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4630 if (INSN_UID (insn) == VARRAY_INT (vec, j))
4631 return 1;
4632 }
4633 return 0;
4634 }
4635
4636 int
4637 prologue_epilogue_contains (rtx insn)
4638 {
4639 if (contains (insn, prologue))
4640 return 1;
4641 if (contains (insn, epilogue))
4642 return 1;
4643 return 0;
4644 }
4645
4646 int
4647 sibcall_epilogue_contains (rtx insn)
4648 {
4649 if (sibcall_epilogue)
4650 return contains (insn, sibcall_epilogue);
4651 return 0;
4652 }
4653
4654 #ifdef HAVE_return
4655 /* Insert gen_return at the end of block BB. This also means updating
4656 block_for_insn appropriately. */
4657
4658 static void
4659 emit_return_into_block (basic_block bb, rtx line_note)
4660 {
4661 emit_jump_insn_after (gen_return (), BB_END (bb));
4662 if (line_note)
4663 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4664 }
4665 #endif /* HAVE_return */
4666
4667 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4668
4669 /* These functions convert the epilogue into a variant that does not modify the
4670 stack pointer. This is used in cases where a function returns an object
4671 whose size is not known until it is computed. The called function leaves the
4672 object on the stack, leaves the stack depressed, and returns a pointer to
4673 the object.
4674
4675 What we need to do is track all modifications and references to the stack
4676 pointer, deleting the modifications and changing the references to point to
4677 the location the stack pointer would have pointed to had the modifications
4678 taken place.
4679
4680 These functions need to be portable so we need to make as few assumptions
4681 about the epilogue as we can. However, the epilogue basically contains
4682 three things: instructions to reset the stack pointer, instructions to
4683 reload registers, possibly including the frame pointer, and an
4684 instruction to return to the caller.
4685
4686 If we can't be sure of what a relevant epilogue insn is doing, we abort.
4687 We also make no attempt to validate the insns we make since if they are
4688 invalid, we probably can't do anything valid. The intent is that these
4689 routines get "smarter" as more and more machines start to use them and
4690 they try operating on different epilogues.
4691
4692 We use the following structure to track what the part of the epilogue that
4693 we've already processed has done. We keep two copies of the SP equivalence,
4694 one for use during the insn we are processing and one for use in the next
4695 insn. The difference is because one part of a PARALLEL may adjust SP
4696 and the other may use it. */
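
/* An illustrative transformation (assuming INCOMING_RETURN_ADDR_RTX is
   (mem (reg sp))): an epilogue of the form

	(set (reg sp) (plus (reg sp) (const_int 16)))
	(return)

   is rewritten so that the stack pointer adjustment is merely recorded
   (sp_equiv_reg == sp, sp_offset == 16) instead of emitted, and the return
   becomes an indirect jump through (mem (plus (reg sp) (const_int 16))),
   leaving the stack depressed.  */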
4697
4698 struct epi_info
4699 {
4700 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4701 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4702 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4703 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4704 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4705 should be set to once we no longer need
4706 its value. */
4707 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4708 for registers. */
4709 };
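
/* For example (illustrative), a single PARALLEL such as

	(parallel [(set (reg sp) (plus (reg fp) (const_int 16)))
		   (set (reg r1) (mem (plus (reg sp) (const_int 8))))])

   adjusts SP in one SET and uses the old SP in another: the second SET is
   rewritten using SP_EQUIV_REG and SP_OFFSET, while the first only updates
   NEW_SP_EQUIV_REG and NEW_SP_OFFSET, which are copied back once the whole
   insn has been processed.  */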
4710
4711 static void handle_epilogue_set (rtx, struct epi_info *);
4712 static void update_epilogue_consts (rtx, rtx, void *);
4713 static void emit_equiv_load (struct epi_info *);
4714
4715 /* Modify INSN, a list of one or more insns that is part of the epilogue, so
4716 that it makes no modifications to the stack pointer. Return the new list of insns. */
4717
4718 static rtx
4719 keep_stack_depressed (rtx insns)
4720 {
4721 int j;
4722 struct epi_info info;
4723 rtx insn, next;
4724
4725 /* If the epilogue is just a single instruction, it must be OK as is. */
4726 if (NEXT_INSN (insns) == NULL_RTX)
4727 return insns;
4728
4729 /* Otherwise, start a sequence, initialize the information we have, and
4730 process all the insns we were given. */
4731 start_sequence ();
4732
4733 info.sp_equiv_reg = stack_pointer_rtx;
4734 info.sp_offset = 0;
4735 info.equiv_reg_src = 0;
4736
4737 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4738 info.const_equiv[j] = 0;
4739
4740 insn = insns;
4741 next = NULL_RTX;
4742 while (insn != NULL_RTX)
4743 {
4744 next = NEXT_INSN (insn);
4745
4746 if (!INSN_P (insn))
4747 {
4748 add_insn (insn);
4749 insn = next;
4750 continue;
4751 }
4752
4753 /* If this insn references the register that SP is equivalent to and
4754 we have a pending load to that register, we must force out the load
4755 first and then indicate we no longer know what SP's equivalent is. */
4756 if (info.equiv_reg_src != 0
4757 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4758 {
4759 emit_equiv_load (&info);
4760 info.sp_equiv_reg = 0;
4761 }
4762
4763 info.new_sp_equiv_reg = info.sp_equiv_reg;
4764 info.new_sp_offset = info.sp_offset;
4765
4766 /* If this is a (RETURN) and the return address is on the stack,
4767 update the address and change to an indirect jump. */
4768 if (GET_CODE (PATTERN (insn)) == RETURN
4769 || (GET_CODE (PATTERN (insn)) == PARALLEL
4770 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4771 {
4772 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4773 rtx base = 0;
4774 HOST_WIDE_INT offset = 0;
4775 rtx jump_insn, jump_set;
4776
4777 /* If the return address is in a register, we can emit the insn
4778 unchanged. Otherwise, it must be a MEM and we see what the
4779 base register and offset are. In any case, we have to emit any
4780 pending load to the equivalent reg of SP, if any. */
4781 if (REG_P (retaddr))
4782 {
4783 emit_equiv_load (&info);
4784 add_insn (insn);
4785 insn = next;
4786 continue;
4787 }
4788 else
4789 {
4790 rtx ret_ptr;
4791 gcc_assert (MEM_P (retaddr));
4792
4793 ret_ptr = XEXP (retaddr, 0);
4794
4795 if (REG_P (ret_ptr))
4796 {
4797 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4798 offset = 0;
4799 }
4800 else
4801 {
4802 gcc_assert (GET_CODE (ret_ptr) == PLUS
4803 && REG_P (XEXP (ret_ptr, 0))
4804 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4805 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4806 offset = INTVAL (XEXP (ret_ptr, 1));
4807 }
4808 }
4809
4810 /* If the base of the location containing the return pointer
4811 is SP, we must update it with the replacement address. Otherwise,
4812 just build the necessary MEM. */
4813 retaddr = plus_constant (base, offset);
4814 if (base == stack_pointer_rtx)
4815 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4816 plus_constant (info.sp_equiv_reg,
4817 info.sp_offset));
4818
4819 retaddr = gen_rtx_MEM (Pmode, retaddr);
4820
4821 /* If there is a pending load to the equivalent register for SP
4822 and we reference that register, we must load our address into
4823 a scratch register and then do that load. */
4824 if (info.equiv_reg_src
4825 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4826 {
4827 unsigned int regno;
4828 rtx reg;
4829
4830 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4831 if (HARD_REGNO_MODE_OK (regno, Pmode)
4832 && !fixed_regs[regno]
4833 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4834 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
4835 regno)
4836 && !refers_to_regno_p (regno,
4837 regno + hard_regno_nregs[regno]
4838 [Pmode],
4839 info.equiv_reg_src, NULL)
4840 && info.const_equiv[regno] == 0)
4841 break;
4842
4843 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4844
4845 reg = gen_rtx_REG (Pmode, regno);
4846 emit_move_insn (reg, retaddr);
4847 retaddr = reg;
4848 }
4849
4850 emit_equiv_load (&info);
4851 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4852
4853 /* Show the SET in the above insn is a RETURN. */
4854 jump_set = single_set (jump_insn);
4855 gcc_assert (jump_set);
4856 SET_IS_RETURN_P (jump_set) = 1;
4857 }
4858
4859 /* If SP is not mentioned in the pattern and its equivalent register, if
4860 any, is not modified, just emit it. Otherwise, if neither is set,
4861 replace the reference to SP and emit the insn. If none of those are
4862 true, handle each SET individually. */
4863 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4864 && (info.sp_equiv_reg == stack_pointer_rtx
4865 || !reg_set_p (info.sp_equiv_reg, insn)))
4866 add_insn (insn);
4867 else if (! reg_set_p (stack_pointer_rtx, insn)
4868 && (info.sp_equiv_reg == stack_pointer_rtx
4869 || !reg_set_p (info.sp_equiv_reg, insn)))
4870 {
4871 int changed;
4872
4873 changed = validate_replace_rtx (stack_pointer_rtx,
4874 plus_constant (info.sp_equiv_reg,
4875 info.sp_offset),
4876 insn);
4877 gcc_assert (changed);
4878
4879 add_insn (insn);
4880 }
4881 else if (GET_CODE (PATTERN (insn)) == SET)
4882 handle_epilogue_set (PATTERN (insn), &info);
4883 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4884 {
4885 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4886 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4887 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4888 }
4889 else
4890 add_insn (insn);
4891
4892 info.sp_equiv_reg = info.new_sp_equiv_reg;
4893 info.sp_offset = info.new_sp_offset;
4894
4895 /* Now update any constants this insn sets. */
4896 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4897 insn = next;
4898 }
4899
4900 insns = get_insns ();
4901 end_sequence ();
4902 return insns;
4903 }
4904
4905 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4906 structure that contains information about what we've seen so far. We
4907 process this SET by either updating that data or by emitting one or
4908 more insns. */
4909
4910 static void
4911 handle_epilogue_set (rtx set, struct epi_info *p)
4912 {
4913 /* First handle the case where we are setting SP. Record what it is being
4914 set from. If unknown, abort. */
4915 if (reg_set_p (stack_pointer_rtx, set))
4916 {
4917 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4918
4919 if (GET_CODE (SET_SRC (set)) == PLUS)
4920 {
4921 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4922 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4923 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4924 else
4925 {
4926 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4927 && (REGNO (XEXP (SET_SRC (set), 1))
4928 < FIRST_PSEUDO_REGISTER)
4929 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4930 p->new_sp_offset
4931 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4932 }
4933 }
4934 else
4935 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4936
4937 /* If we are adjusting SP, we adjust from the old data. */
4938 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4939 {
4940 p->new_sp_equiv_reg = p->sp_equiv_reg;
4941 p->new_sp_offset += p->sp_offset;
4942 }
4943
4944 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4945
4946 return;
4947 }
4948
4949 /* Next handle the case where we are setting SP's equivalent register.
4950 If we already have a value to set it to, abort. We could update, but
4951 there seems little point in handling that case. Note that we have
4952 to allow for the case where we are setting the register set in
4953 the previous part of a PARALLEL inside a single insn. But use the
4954 old offset for any updates within this insn. We must allow for the case
4955 where the register is being set in a different (usually wider) mode than
4956 Pmode. */
4957 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4958 {
4959 gcc_assert (!p->equiv_reg_src
4960 && REG_P (p->new_sp_equiv_reg)
4961 && REG_P (SET_DEST (set))
4962 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4963 <= BITS_PER_WORD)
4964 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
4965 p->equiv_reg_src
4966 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4967 plus_constant (p->sp_equiv_reg,
4968 p->sp_offset));
4969 }
4970
4971 /* Otherwise, replace any references to SP in the insn to its new value
4972 and emit the insn. */
4973 else
4974 {
4975 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4976 plus_constant (p->sp_equiv_reg,
4977 p->sp_offset));
4978 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
4979 plus_constant (p->sp_equiv_reg,
4980 p->sp_offset));
4981 emit_insn (set);
4982 }
4983 }
4984
4985 /* Update the tracking information for registers set to constants. */
4986
4987 static void
4988 update_epilogue_consts (rtx dest, rtx x, void *data)
4989 {
4990 struct epi_info *p = (struct epi_info *) data;
4991 rtx new;
4992
4993 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4994 return;
4995
4996 /* If we are either clobbering a register or doing a partial set,
4997 show we don't know the value. */
4998 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
4999 p->const_equiv[REGNO (dest)] = 0;
5000
5001 /* If we are setting it to a constant, record that constant. */
5002 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5003 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5004
5005 /* If this is a binary operation between a register we have been tracking
5006 and a constant, see if we can compute a new constant value. */
5007 else if (ARITHMETIC_P (SET_SRC (x))
5008 && REG_P (XEXP (SET_SRC (x), 0))
5009 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5010 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5011 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5012 && 0 != (new = simplify_binary_operation
5013 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5014 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5015 XEXP (SET_SRC (x), 1)))
5016 && GET_CODE (new) == CONST_INT)
5017 p->const_equiv[REGNO (dest)] = new;
5018
5019 /* Otherwise, we can't do anything with this value. */
5020 else
5021 p->const_equiv[REGNO (dest)] = 0;
5022 }
5023
5024 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5025
5026 static void
5027 emit_equiv_load (struct epi_info *p)
5028 {
5029 if (p->equiv_reg_src != 0)
5030 {
5031 rtx dest = p->sp_equiv_reg;
5032
5033 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5034 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5035 REGNO (p->sp_equiv_reg));
5036
5037 emit_move_insn (dest, p->equiv_reg_src);
5038 p->equiv_reg_src = 0;
5039 }
5040 }
5041 #endif
5042
5043 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5044 this into place with notes indicating where the prologue ends and where
5045 the epilogue begins. Update the basic block information when possible. */
5046
5047 void
5048 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5049 {
5050 int inserted = 0;
5051 edge e;
5052 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5053 rtx seq;
5054 #endif
5055 #ifdef HAVE_prologue
5056 rtx prologue_end = NULL_RTX;
5057 #endif
5058 #if defined (HAVE_epilogue) || defined(HAVE_return)
5059 rtx epilogue_end = NULL_RTX;
5060 #endif
5061 edge_iterator ei;
5062
5063 #ifdef HAVE_prologue
5064 if (HAVE_prologue)
5065 {
5066 start_sequence ();
5067 seq = gen_prologue ();
5068 emit_insn (seq);
5069
5070 /* Retain a map of the prologue insns. */
5071 record_insns (seq, &prologue);
5072 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5073
5074 seq = get_insns ();
5075 end_sequence ();
5076 set_insn_locators (seq, prologue_locator);
5077
5078 /* Can't deal with multiple successors of the entry block
5079 at the moment. Function should always have at least one
5080 entry point. */
5081 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5082
5083 insert_insn_on_edge (seq, EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
5084 inserted = 1;
5085 }
5086 #endif
5087
5088 /* If the exit block has no non-fake predecessors, we don't need
5089 an epilogue. */
5090 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5091 if ((e->flags & EDGE_FAKE) == 0)
5092 break;
5093 if (e == NULL)
5094 goto epilogue_done;
5095
5096 #ifdef HAVE_return
5097 if (optimize && HAVE_return)
5098 {
5099 /* If we're allowed to generate a simple return instruction,
5100 then by definition we don't need a full epilogue. Examine
5101 the block that falls through to EXIT. If it does not
5102 contain any code, examine its predecessors and try to
5103 emit (conditional) return instructions. */
5104
5105 basic_block last;
5106 rtx label;
5107
5108 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5109 if (e->flags & EDGE_FALLTHRU)
5110 break;
5111 if (e == NULL)
5112 goto epilogue_done;
5113 last = e->src;
5114
5115 /* Verify that there are no active instructions in the last block. */
5116 label = BB_END (last);
5117 while (label && !LABEL_P (label))
5118 {
5119 if (active_insn_p (label))
5120 break;
5121 label = PREV_INSN (label);
5122 }
5123
5124 if (BB_HEAD (last) == label && LABEL_P (label))
5125 {
5126 edge_iterator ei2;
5127 rtx epilogue_line_note = NULL_RTX;
5128
5129 /* Locate the line number associated with the closing brace,
5130 if we can find one. */
5131 for (seq = get_last_insn ();
5132 seq && ! active_insn_p (seq);
5133 seq = PREV_INSN (seq))
5134 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5135 {
5136 epilogue_line_note = seq;
5137 break;
5138 }
5139
5140 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5141 {
5142 basic_block bb = e->src;
5143 rtx jump;
5144
5145 if (bb == ENTRY_BLOCK_PTR)
5146 {
5147 ei_next (&ei2);
5148 continue;
5149 }
5150
5151 jump = BB_END (bb);
5152 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5153 {
5154 ei_next (&ei2);
5155 continue;
5156 }
5157
5158 /* If we have an unconditional jump, we can replace that
5159 with a simple return instruction. */
5160 if (simplejump_p (jump))
5161 {
5162 emit_return_into_block (bb, epilogue_line_note);
5163 delete_insn (jump);
5164 }
5165
5166 /* If we have a conditional jump, we can try to replace
5167 that with a conditional return instruction. */
5168 else if (condjump_p (jump))
5169 {
5170 if (! redirect_jump (jump, 0, 0))
5171 {
5172 ei_next (&ei2);
5173 continue;
5174 }
5175
5176 /* If this block has only one successor, it both jumps
5177 and falls through to the fallthru block, so we can't
5178 delete the edge. */
5179 if (EDGE_COUNT (bb->succs) == 1)
5180 {
5181 ei_next (&ei2);
5182 continue;
5183 }
5184 }
5185 else
5186 {
5187 ei_next (&ei2);
5188 continue;
5189 }
5190
5191 /* Fix up the CFG for the successful change we just made. */
5192 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5193 }
5194
5195 /* Emit a return insn for the exit fallthru block. Whether
5196 this is still reachable will be determined later. */
5197
5198 emit_barrier_after (BB_END (last));
5199 emit_return_into_block (last, epilogue_line_note);
5200 epilogue_end = BB_END (last);
5201 EDGE_SUCC (last, 0)->flags &= ~EDGE_FALLTHRU;
5202 goto epilogue_done;
5203 }
5204 }
5205 #endif
5206 /* Find the edge that falls through to EXIT. Other edges may exist
5207 due to RETURN instructions, but those don't need epilogues.
5208 There really shouldn't be a mixture -- either all should have
5209 been converted or none; in practice, however, both may occur.  */
5210
5211 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5212 if (e->flags & EDGE_FALLTHRU)
5213 break;
5214 if (e == NULL)
5215 goto epilogue_done;
5216
5217 #ifdef HAVE_epilogue
5218 if (HAVE_epilogue)
5219 {
5220 start_sequence ();
5221 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5222
5223 seq = gen_epilogue ();
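      /* gen_epilogue expands the target's "epilogue" pattern from its
	 machine description, so the resulting insns are entirely
	 target-specific.  For illustration only: on i386 with a frame
	 pointer this amounts to restoring the frame and returning,
	 roughly "leave; ret".  */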
5224
5225 #ifdef INCOMING_RETURN_ADDR_RTX
5226 /* If this function returns with the stack depressed and we can support
5227 it, massage the epilogue to actually do that. */
5228 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5229 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5230 seq = keep_stack_depressed (seq);
5231 #endif
5232
5233 emit_jump_insn (seq);
5234
5235 /* Retain a map of the epilogue insns. */
5236 record_insns (seq, &epilogue);
5237 set_insn_locators (seq, epilogue_locator);
5238
5239 seq = get_insns ();
5240 end_sequence ();
5241
5242 insert_insn_on_edge (seq, e);
5243 inserted = 1;
5244 }
5245 else
5246 #endif
5247 {
5248 basic_block cur_bb;
5249
5250 if (! next_active_insn (BB_END (e->src)))
5251 goto epilogue_done;
5252 /* We have a fall-through edge to the exit block, the source is not
5253 at the end of the function, and there will be an assembler epilogue
5254 at the end of the function.
5255 We can't use force_nonfallthru here, because that would try to
5256 use return. Inserting a jump 'by hand' is extremely messy, so
5257 we take advantage of cfg_layout_finalize using
5258 fixup_fallthru_exit_predecessor. */
5259 cfg_layout_initialize (0);
5260 FOR_EACH_BB (cur_bb)
5261 if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
5262 cur_bb->rbi->next = cur_bb->next_bb;
5263 cfg_layout_finalize ();
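      /* The FOR_EACH_BB loop above merely records the existing block
	 order in the rbi->next links; cfg_layout_finalize then
	 relinearizes the function, and its fixup_fallthru_exit_predecessor
	 step ensures that the block falling through to EXIT is placed
	 physically last, immediately before the assembler-emitted
	 epilogue.  */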
5264 }
5265 epilogue_done:
5266
5267 if (inserted)
5268 commit_edge_insertions ();
5269
5270 #ifdef HAVE_sibcall_epilogue
5271 /* Emit sibling epilogues before any sibling call sites. */
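  /* (A sibling call transfers control directly to the callee instead of
     returning here, so any frame teardown must happen before the call
     insn rather than after it; gen_sibcall_epilogue expands the
     target's "sibcall_epilogue" pattern for exactly that purpose.)  */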
5272 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5273 {
5274 basic_block bb = e->src;
5275 rtx insn = BB_END (bb);
5276 rtx i;
5277 rtx newinsn;
5278
5279 if (!CALL_P (insn)
5280 || ! SIBLING_CALL_P (insn))
5281 {
5282 ei_next (&ei);
5283 continue;
5284 }
5285
5286 start_sequence ();
5287 emit_insn (gen_sibcall_epilogue ());
5288 seq = get_insns ();
5289 end_sequence ();
5290
5291 /* Retain a map of the epilogue insns. Used in life analysis to
5292 avoid getting rid of sibcall epilogue insns. Do this before we
5293 actually emit the sequence. */
5294 record_insns (seq, &sibcall_epilogue);
5295 set_insn_locators (seq, epilogue_locator);
5296
5297 i = PREV_INSN (insn);
5298 newinsn = emit_insn_before (seq, insn);
5299 ei_next (&ei);
5300 }
5301 #endif
5302
5303 #ifdef HAVE_prologue
5304 /* This is probably all useless now that we use locators. */
5305 if (prologue_end)
5306 {
5307 rtx insn, prev;
5308
5309 /* GDB handles `break f' by setting a breakpoint on the first
5310 line note after the prologue.  This means (1) that if there
5311 are line number notes before the point where we inserted the
5312 prologue, we should move them, and (2) that we should generate
5313 a note before the end of the first basic block if there isn't
5314 one already there.
5315 
5316 ??? This behavior is completely broken when dealing with
5317 multiple entry functions.  We simply always place the note
5318 in the first basic block and let alternate entry points be
5319 missed.
5320 */
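      /* Roughly, the goal is to turn an insn stream such as

	    (line number note for line N)
	    (prologue insns)
	    NOTE_INSN_PROLOGUE_END	<-- prologue_end

	 into

	    (prologue insns)
	    NOTE_INSN_PROLOGUE_END
	    (line number note for line N)

	 so that the first line note GDB finds after the prologue refers
	 to the function body.  The loops below move such notes past
	 prologue_end and, if the first basic block would otherwise be
	 left without a line note, copy a nearby one to just after
	 prologue_end.  */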
5321
5322 for (insn = prologue_end; insn; insn = prev)
5323 {
5324 prev = PREV_INSN (insn);
5325 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5326 {
5327 /* Note that we cannot reorder the first insn in the
5328 chain, since rest_of_compilation relies on that
5329 remaining constant. */
5330 if (prev == NULL)
5331 break;
5332 reorder_insns (insn, insn, prologue_end);
5333 }
5334 }
5335
5336 /* Find the last line number note in the first block. */
5337 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5338 insn != prologue_end && insn;
5339 insn = PREV_INSN (insn))
5340 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5341 break;
5342
5343 /* If we didn't find one, make a copy of the first line number
5344 note we run across.  */
5345 if (! insn)
5346 {
5347 for (insn = next_active_insn (prologue_end);
5348 insn;
5349 insn = PREV_INSN (insn))
5350 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5351 {
5352 emit_note_copy_after (insn, prologue_end);
5353 break;
5354 }
5355 }
5356 }
5357 #endif
5358 #ifdef HAVE_epilogue
5359 if (epilogue_end)
5360 {
5361 rtx insn, next;
5362
5363 /* Similarly, move any line notes that appear after the epilogue.
5364 Unlike the prologue case, however, there is no need to insist
5365 that such a note exist.  Also move the NOTE_INSN_FUNCTION_END and
5366 (possibly) NOTE_INSN_FUNCTION_BEG notes, as those can be relevant
5367 for debug info generation.  */
5368 for (insn = epilogue_end; insn; insn = next)
5369 {
5370 next = NEXT_INSN (insn);
5371 if (NOTE_P (insn)
5372 && (NOTE_LINE_NUMBER (insn) > 0
5373 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5374 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5375 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5376 }
5377 }
5378 #endif
5379 }
5380
5381 /* Reposition the prologue-end and epilogue-begin notes after instruction
5382 scheduling and delayed branch scheduling. */
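/* Scheduling and reorg may move insns across these notes, so a note
   can end up in the middle of, or even before, the insns it is meant
   to delimit.  The desired layout, which the code below re-establishes
   using the `prologue' and `epilogue' insn maps recorded by
   record_insns, is roughly:

	(last prologue insn)
	NOTE_INSN_PROLOGUE_END
	...
	NOTE_INSN_EPILOGUE_BEG
	(first epilogue insn)
*/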
5383
5384 void
5385 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5386 {
5387 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5388 rtx insn, last, note;
5389 int len;
5390
5391 if ((len = VARRAY_SIZE (prologue)) > 0)
5392 {
5393 last = 0, note = 0;
5394
5395 /* Scan from the beginning until we reach the last prologue insn.
5396 We apparently can't depend on basic_block_{head,end} after
5397 reorg has run. */
5398 for (insn = f; insn; insn = NEXT_INSN (insn))
5399 {
5400 if (NOTE_P (insn))
5401 {
5402 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5403 note = insn;
5404 }
5405 else if (contains (insn, prologue))
5406 {
5407 last = insn;
5408 if (--len == 0)
5409 break;
5410 }
5411 }
5412
5413 if (last)
5414 {
5415 /* Find the prologue-end note if we haven't already, and
5416 move it to just after the last prologue insn. */
5417 if (note == 0)
5418 {
5419 for (note = last; (note = NEXT_INSN (note));)
5420 if (NOTE_P (note)
5421 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5422 break;
5423 }
5424
5425 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5426 if (LABEL_P (last))
5427 last = NEXT_INSN (last);
5428 reorder_insns (note, note, last);
5429 }
5430 }
5431
5432 if ((len = VARRAY_SIZE (epilogue)) > 0)
5433 {
5434 last = 0, note = 0;
5435
5436 /* Scan from the end until we reach the first epilogue insn.
5437 We apparently can't depend on basic_block_{head,end} after
5438 reorg has run. */
5439 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5440 {
5441 if (NOTE_P (insn))
5442 {
5443 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5444 note = insn;
5445 }
5446 else if (contains (insn, epilogue))
5447 {
5448 last = insn;
5449 if (--len == 0)
5450 break;
5451 }
5452 }
5453
5454 if (last)
5455 {
5456 /* Find the epilogue-begin note if we haven't already, and
5457 move it to just before the first epilogue insn. */
5458 if (note == 0)
5459 {
5460 for (note = insn; (note = PREV_INSN (note));)
5461 if (NOTE_P (note)
5462 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5463 break;
5464 }
5465
5466 if (PREV_INSN (last) != note)
5467 reorder_insns (note, note, PREV_INSN (last));
5468 }
5469 }
5470 #endif /* HAVE_prologue or HAVE_epilogue */
5471 }
5472
5473 /* Called once, at initialization, to initialize function.c. */
5474
5475 void
5476 init_function_once (void)
5477 {
5478 VARRAY_INT_INIT (prologue, 0, "prologue");
5479 VARRAY_INT_INIT (epilogue, 0, "epilogue");
5480 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
5481 }
5482
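/* The next few functions maintain cfun->ib_boundaries_block, a varray
   that maps an insn's INSN_UID to the lexical BLOCK the insn belongs
   to.  A rough outline of the intended usage (the actual callers live
   elsewhere; this is only a sketch):

	reset_block_changes ();
	... expand the function, calling record_block_change (block)
	    whenever expansion switches to a different lexical block ...
	finalize_block_changes ();

	... later, for an insn of interest:
	tree block = NULL_TREE;
	check_block_change (insn, &block);

	free_block_changes ();
   */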
5483 /* Resets the cfun->ib_boundaries_block array. */
5484
5485 void
5486 reset_block_changes (void)
5487 {
5488 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5489 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5490 }
5491
5492 /* Record the boundary for BLOCK. */
5493 void
5494 record_block_change (tree block)
5495 {
5496 int i, n;
5497 tree last_block;
5498
5499 if (!block)
5500 return;
5501
5502 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5503 VARRAY_POP (cfun->ib_boundaries_block);
5504 n = get_max_uid ();
5505 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5506 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5507
5508 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
5509 }
5510
5511 /* Finishes the record of block boundaries.  */
5512 void
finalize_block_changes (void)
5513 {
5514 record_block_change (DECL_INITIAL (current_function_decl));
5515 }
5516
5517 /* For INSN, store in *BLOCK the BLOCK it belongs to, if recorded. */
5518 void
5519 check_block_change (rtx insn, tree *block)
5520 {
5521 unsigned uid = INSN_UID (insn);
5522
5523 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5524 return;
5525
5526 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
5527 }
5528
5529 /* Releases the ib_boundaries_block records. */
5530 void
5531 free_block_changes (void)
5532 {
5533 cfun->ib_boundaries_block = NULL;
5534 }
5535
5536 /* Returns the name of the current function. */
5537 const char *
5538 current_function_name (void)
5539 {
5540 return lang_hooks.decl_printable_name (cfun->decl, 2);
5541 }
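/* (For illustration only -- a hypothetical diagnostic using it:

	warning ("large stack frame in %s", current_function_name ());

   The second argument to decl_printable_name is a verbosity level
   interpreted by the language front end.)  */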
5542
5543 #include "gt-function.h"