1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
22
23 /* This file handles the generation of rtl code from tree structure
24 at the level of the function as a whole.
25 It creates the rtl expressions for parameters and auto variables
26 and has full responsibility for allocating stack slots.
27
28 `expand_function_start' is called at the beginning of a function,
29 before the function body is parsed, and `expand_function_end' is
30 called after parsing the body.
31
32 Call `assign_stack_local' to allocate a stack slot for a local variable.
33 This is usually done during the RTL generation for the function body,
34 but it can also be done in the reload pass when a pseudo-register does
35 not get a hard register. */
36
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tree.h"
43 #include "flags.h"
44 #include "except.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "optabs.h"
48 #include "libfuncs.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "output.h"
54 #include "basic-block.h"
55 #include "toplev.h"
56 #include "hashtab.h"
57 #include "ggc.h"
58 #include "tm_p.h"
59 #include "integrate.h"
60 #include "langhooks.h"
61 #include "target.h"
62 #include "cfglayout.h"
63 #include "tree-gimple.h"
64
65 #ifndef LOCAL_ALIGNMENT
66 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
67 #endif
68
69 #ifndef STACK_ALIGNMENT_NEEDED
70 #define STACK_ALIGNMENT_NEEDED 1
71 #endif
72
73 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74
75 /* Some systems use __main in a way incompatible with its use in gcc; in these
76 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
77 give the same symbol without quotes for an alternative entry point. You
78 must define both, or neither. */
79 #ifndef NAME__MAIN
80 #define NAME__MAIN "__main"
81 #endif
82
83 /* Round a value down to the largest multiple of the required alignment
84 that does not exceed it. Avoid using division in case the value is
85 negative. Assume the alignment is a power of two. */
86 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
87
88 /* Similar, but round to the next highest integer that meets the
89 alignment. */
90 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
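
/* Worked examples of the two rounding macros above (a sketch; it assumes the
   usual two's-complement representation, which is what lets the bit-masking
   trick handle negative values correctly):

     FLOOR_ROUND (13, 8)  ==  8     CEIL_ROUND (13, 8)  == 16
     FLOOR_ROUND (-13, 8) == -16    CEIL_ROUND (-13, 8) == -8
     FLOOR_ROUND (16, 8)  == 16     CEIL_ROUND (16, 8)  == 16  */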
91
92 /* Nonzero if function being compiled doesn't contain any calls
93 (ignoring the prologue and epilogue). This is set prior to
94 local register allocation and is valid for the remaining
95 compiler passes. */
96 int current_function_is_leaf;
97
98 /* Nonzero if function being compiled doesn't modify the stack pointer
99 (ignoring the prologue and epilogue). This is only valid after
100 life_analysis has run. */
101 int current_function_sp_is_unchanging;
102
103 /* Nonzero if the function being compiled is a leaf function which only
104 uses leaf registers. This is valid after reload (specifically after
105 sched2) and is useful only if the port defines LEAF_REGISTERS. */
106 int current_function_uses_only_leaf_regs;
107
108 /* Nonzero once virtual register instantiation has been done.
109 assign_stack_local uses frame_pointer_rtx when this is nonzero.
110 calls.c:emit_library_call_value_1 uses it to set up
111 post-instantiation libcalls. */
112 int virtuals_instantiated;
113
114 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
115 static GTY(()) int funcdef_no;
116
117 /* These variables hold pointers to functions to create and destroy
118 target specific, per-function data structures. */
119 struct machine_function * (*init_machine_status) (void);
120
121 /* The currently compiled function. */
122 struct function *cfun = 0;
123
124 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
125 static GTY(()) varray_type prologue;
126 static GTY(()) varray_type epilogue;
127
128 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
129 in this function. */
130 static GTY(()) varray_type sibcall_epilogue;
131 \f
132 /* In order to evaluate some expressions, such as function calls returning
133 structures in memory, we need to temporarily allocate stack locations.
134 We record each allocated temporary in the following structure.
135
136 Associated with each temporary slot is a nesting level. When we pop up
137 one level, all temporaries associated with the previous level are freed.
138 Normally, all temporaries are freed after the execution of the statement
139 in which they were created. However, if we are inside a ({...}) grouping,
140 the result may be in a temporary and hence must be preserved. If the
141 result could be in a temporary, we preserve it if we can determine which
142 one it is in. If we cannot determine which temporary may contain the
143 result, all temporaries are preserved. A temporary is preserved by
144 pretending it was allocated at the previous nesting level.
145
146 Automatic variables are also assigned temporary slots, at the nesting
147 level where they are defined. They are marked as "kept" so that
148 free_temp_slots will not free them. */
149
150 struct temp_slot GTY(())
151 {
152 /* Points to next temporary slot. */
153 struct temp_slot *next;
154 /* Points to previous temporary slot. */
155 struct temp_slot *prev;
156
157 /* The rtx used to reference the slot. */
158 rtx slot;
159 /* The rtx used to represent the address if not the address of the
160 slot above. May be an EXPR_LIST if multiple addresses exist. */
161 rtx address;
162 /* The alignment (in bits) of the slot. */
163 unsigned int align;
164 /* The size, in units, of the slot. */
165 HOST_WIDE_INT size;
166 /* The type of the object in the slot, or zero if it doesn't correspond
167 to a type. We use this to determine whether a slot can be reused.
168 It can be reused if objects of the type of the new slot will always
169 conflict with objects of the type of the old slot. */
170 tree type;
171 /* Nonzero if this temporary is currently in use. */
172 char in_use;
173 /* Nonzero if this temporary has its address taken. */
174 char addr_taken;
175 /* Nesting level at which this slot is being used. */
176 int level;
177 /* Nonzero if this should survive a call to free_temp_slots. */
178 int keep;
179 /* The offset of the slot from the frame_pointer, including extra space
180 for alignment. This info is for combine_temp_slots. */
181 HOST_WIDE_INT base_offset;
182 /* The size of the slot, including extra space for alignment. This
183 info is for combine_temp_slots. */
184 HOST_WIDE_INT full_size;
185 };
186 \f
187 /* Forward declarations. */
188
189 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
190 struct function *);
191 static struct temp_slot *find_temp_slot_from_address (rtx);
192 static void instantiate_decls (tree, int);
193 static void instantiate_decls_1 (tree, int);
194 static void instantiate_decl (rtx, HOST_WIDE_INT, int);
195 static rtx instantiate_new_reg (rtx, HOST_WIDE_INT *);
196 static int instantiate_virtual_regs_1 (rtx *, rtx, int);
197 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
198 static void pad_below (struct args_size *, enum machine_mode, tree);
199 static void reorder_blocks_1 (rtx, tree, varray_type *);
200 static void reorder_fix_fragments (tree);
201 static int all_blocks (tree, tree *);
202 static tree *get_block_vector (tree, int *);
203 extern tree debug_find_var_in_block_tree (tree, tree);
204 /* We always define `record_insns' even if it's not used so that we
205 can always export `prologue_epilogue_contains'. */
206 static void record_insns (rtx, varray_type *) ATTRIBUTE_UNUSED;
207 static int contains (rtx, varray_type);
208 #ifdef HAVE_return
209 static void emit_return_into_block (basic_block, rtx);
210 #endif
211 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
212 static rtx keep_stack_depressed (rtx);
213 #endif
214 static void prepare_function_start (tree);
215 static void do_clobber_return_reg (rtx, void *);
216 static void do_use_return_reg (rtx, void *);
217 static void instantiate_virtual_regs_lossage (rtx);
218 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
219 \f
220 /* Pointer to chain of `struct function' for containing functions. */
221 struct function *outer_function_chain;
222
223 /* Given a function decl for a containing function,
224 return the `struct function' for it. */
225
226 struct function *
227 find_function_data (tree decl)
228 {
229 struct function *p;
230
231 for (p = outer_function_chain; p; p = p->outer)
232 if (p->decl == decl)
233 return p;
234
235 gcc_unreachable ();
236 }
237
238 /* Save the current context for compilation of a nested function.
239 This is called from language-specific code. The caller should use
240 the enter_nested langhook to save any language-specific state,
241 since this function knows only about language-independent
242 variables. */
243
244 void
245 push_function_context_to (tree context)
246 {
247 struct function *p;
248
249 if (context)
250 {
251 if (context == current_function_decl)
252 cfun->contains_functions = 1;
253 else
254 {
255 struct function *containing = find_function_data (context);
256 containing->contains_functions = 1;
257 }
258 }
259
260 if (cfun == 0)
261 init_dummy_function_start ();
262 p = cfun;
263
264 p->outer = outer_function_chain;
265 outer_function_chain = p;
266
267 lang_hooks.function.enter_nested (p);
268
269 cfun = 0;
270 }
271
272 void
273 push_function_context (void)
274 {
275 push_function_context_to (current_function_decl);
276 }
277
278 /* Restore the last saved context, at the end of a nested function.
279 This function is called from language-specific code. */
280
281 void
282 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
283 {
284 struct function *p = outer_function_chain;
285
286 cfun = p;
287 outer_function_chain = p->outer;
288
289 current_function_decl = p->decl;
290
291 lang_hooks.function.leave_nested (p);
292
293 /* Reset variables that have known state during rtx generation. */
294 virtuals_instantiated = 0;
295 generating_concat_p = 1;
296 }
297
298 void
299 pop_function_context (void)
300 {
301 pop_function_context_from (current_function_decl);
302 }
303
304 /* Clear out all parts of the state in F that can safely be discarded
305 after the function has been parsed, but not compiled, to let
306 garbage collection reclaim the memory. */
307
308 void
309 free_after_parsing (struct function *f)
310 {
311 /* f->expr->forced_labels is used by code generation. */
312 /* f->emit->regno_reg_rtx is used by code generation. */
313 /* f->varasm is used by code generation. */
314 /* f->eh->eh_return_stub_label is used by code generation. */
315
316 lang_hooks.function.final (f);
317 }
318
319 /* Clear out all parts of the state in F that can safely be discarded
320 after the function has been compiled, to let garbage collection
321 reclaim the memory. */
322
323 void
324 free_after_compilation (struct function *f)
325 {
326 f->eh = NULL;
327 f->expr = NULL;
328 f->emit = NULL;
329 f->varasm = NULL;
330 f->machine = NULL;
331
332 f->x_avail_temp_slots = NULL;
333 f->x_used_temp_slots = NULL;
334 f->arg_offset_rtx = NULL;
335 f->return_rtx = NULL;
336 f->internal_arg_pointer = NULL;
337 f->x_nonlocal_goto_handler_labels = NULL;
338 f->x_return_label = NULL;
339 f->x_naked_return_label = NULL;
340 f->x_stack_slot_list = NULL;
341 f->x_tail_recursion_reentry = NULL;
342 f->x_arg_pointer_save_area = NULL;
343 f->x_parm_birth_insn = NULL;
344 f->original_arg_vector = NULL;
345 f->original_decl_initial = NULL;
346 f->epilogue_delay_list = NULL;
347 }
348 \f
349 /* Allocate fixed slots in the stack frame of the current function. */
350
351 /* Return size needed for stack frame based on slots so far allocated in
352 function F.
353 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
354 the caller may have to do that. */
355
356 static HOST_WIDE_INT
357 get_func_frame_size (struct function *f)
358 {
359 #ifdef FRAME_GROWS_DOWNWARD
360 return -f->x_frame_offset;
361 #else
362 return f->x_frame_offset;
363 #endif
364 }
365
366 /* Return size needed for stack frame based on slots so far allocated.
367 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
368 the caller may have to do that. */
369 HOST_WIDE_INT
370 get_frame_size (void)
371 {
372 return get_func_frame_size (cfun);
373 }
374
375 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
376 with machine mode MODE.
377
378 ALIGN controls the amount of alignment for the address of the slot:
379 0 means according to MODE,
380 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
381 -2 means use BITS_PER_UNIT,
382 positive specifies alignment boundary in bits.
383
384 We do not round to stack_boundary here.
385
386 FUNCTION specifies the function to allocate in. */
387
388 static rtx
389 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
390 struct function *function)
391 {
392 rtx x, addr;
393 int bigend_correction = 0;
394 unsigned int alignment;
395 int frame_off, frame_alignment, frame_phase;
396
397 if (align == 0)
398 {
399 tree type;
400
401 if (mode == BLKmode)
402 alignment = BIGGEST_ALIGNMENT;
403 else
404 alignment = GET_MODE_ALIGNMENT (mode);
405
406 /* Allow the target to (possibly) increase the alignment of this
407 stack slot. */
408 type = lang_hooks.types.type_for_mode (mode, 0);
409 if (type)
410 alignment = LOCAL_ALIGNMENT (type, alignment);
411
412 alignment /= BITS_PER_UNIT;
413 }
414 else if (align == -1)
415 {
416 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
417 size = CEIL_ROUND (size, alignment);
418 }
419 else if (align == -2)
420 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
421 else
422 alignment = align / BITS_PER_UNIT;
423
424 #ifdef FRAME_GROWS_DOWNWARD
425 function->x_frame_offset -= size;
426 #endif
427
428 /* Ignore alignment requests we cannot honor given the preferred stack boundary. */
429 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
430 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
431
432 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
433 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
434
435 /* Calculate how many bytes the start of local variables is off from
436 stack alignment. */
437 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
438 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
439 frame_phase = frame_off ? frame_alignment - frame_off : 0;
440
441 /* Round the frame offset to the specified alignment. The default is
442 to always honor requests to align the stack but a port may choose to
443 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
444 if (STACK_ALIGNMENT_NEEDED
445 || mode != BLKmode
446 || size != 0)
447 {
448 /* We must be careful here, since FRAME_OFFSET might be negative and
449 division with a negative dividend isn't as well defined as we might
450 like. So we instead assume that ALIGNMENT is a power of two and
451 use logical operations which are unambiguous. */
452 #ifdef FRAME_GROWS_DOWNWARD
453 function->x_frame_offset
454 = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
455 (unsigned HOST_WIDE_INT) alignment)
456 + frame_phase);
457 #else
458 function->x_frame_offset
459 = (CEIL_ROUND (function->x_frame_offset - frame_phase,
460 (unsigned HOST_WIDE_INT) alignment)
461 + frame_phase);
462 #endif
463 }
464
465 /* On a big-endian machine, if we are allocating more space than we will use,
466 use the least significant bytes of those that are allocated. */
467 if (BYTES_BIG_ENDIAN && mode != BLKmode)
468 bigend_correction = size - GET_MODE_SIZE (mode);
469
470 /* If we have already instantiated virtual registers, return the actual
471 address relative to the frame pointer. */
472 if (function == cfun && virtuals_instantiated)
473 addr = plus_constant (frame_pointer_rtx,
474 trunc_int_for_mode
475 (frame_offset + bigend_correction
476 + STARTING_FRAME_OFFSET, Pmode));
477 else
478 addr = plus_constant (virtual_stack_vars_rtx,
479 trunc_int_for_mode
480 (function->x_frame_offset + bigend_correction,
481 Pmode));
482
483 #ifndef FRAME_GROWS_DOWNWARD
484 function->x_frame_offset += size;
485 #endif
486
487 x = gen_rtx_MEM (mode, addr);
488
489 function->x_stack_slot_list
490 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
491
492 return x;
493 }
494
495 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
496 current function. */
497
498 rtx
499 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
500 {
501 return assign_stack_local_1 (mode, size, align, cfun);
502 }
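
/* A minimal usage sketch (not taken from this file): a pass that needs a
   word-sized spill slot in the current function's frame might simply do

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   passing 0 for ALIGN so the slot is aligned according to its mode, as
   described above assign_stack_local_1.  */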
503
504 \f
505 /* Removes temporary slot TEMP from LIST. */
506
507 static void
508 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
509 {
510 if (temp->next)
511 temp->next->prev = temp->prev;
512 if (temp->prev)
513 temp->prev->next = temp->next;
514 else
515 *list = temp->next;
516
517 temp->prev = temp->next = NULL;
518 }
519
520 /* Inserts temporary slot TEMP to LIST. */
521
522 static void
523 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
524 {
525 temp->next = *list;
526 if (*list)
527 (*list)->prev = temp;
528 temp->prev = NULL;
529 *list = temp;
530 }
531
532 /* Returns the list of used temp slots at LEVEL. */
533
534 static struct temp_slot **
535 temp_slots_at_level (int level)
536 {
537
538 if (!used_temp_slots)
539 VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
540
541 while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
542 VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
543
544 return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
545 }
546
547 /* Returns the maximal temporary slot level. */
548
549 static int
550 max_slot_level (void)
551 {
552 if (!used_temp_slots)
553 return -1;
554
555 return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
556 }
557
558 /* Moves temporary slot TEMP to LEVEL. */
559
560 static void
561 move_slot_to_level (struct temp_slot *temp, int level)
562 {
563 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
564 insert_slot_to_list (temp, temp_slots_at_level (level));
565 temp->level = level;
566 }
567
568 /* Make temporary slot TEMP available. */
569
570 static void
571 make_slot_available (struct temp_slot *temp)
572 {
573 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
574 insert_slot_to_list (temp, &avail_temp_slots);
575 temp->in_use = 0;
576 temp->level = -1;
577 }
578 \f
579 /* Allocate a temporary stack slot and record it for possible later
580 reuse.
581
582 MODE is the machine mode to be given to the returned rtx.
583
584 SIZE is the size in units of the space required. We do no rounding here
585 since assign_stack_local will do any required rounding.
586
587 KEEP is 1 if this slot is to be retained after a call to
588 free_temp_slots. Automatic variables for a block are allocated
589 with this flag. KEEP values of 2 or 3 were needed respectively
590 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
591 or for SAVE_EXPRs, but they are now unused and will abort.
592
593 TYPE is the type that will be used for the stack slot. */
594
595 rtx
596 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, int keep,
597 tree type)
598 {
599 unsigned int align;
600 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
601 rtx slot;
602
603 /* If SIZE is -1 it means that somebody tried to allocate a temporary
604 of a variable size. */
605 gcc_assert (size != -1);
606
607 /* These are now unused. */
608 gcc_assert (keep <= 1);
609
610 if (mode == BLKmode)
611 align = BIGGEST_ALIGNMENT;
612 else
613 align = GET_MODE_ALIGNMENT (mode);
614
615 if (! type)
616 type = lang_hooks.types.type_for_mode (mode, 0);
617
618 if (type)
619 align = LOCAL_ALIGNMENT (type, align);
620
621 /* Try to find an available, already-allocated temporary of the proper
622 mode which meets the size and alignment requirements. Choose the
623 smallest one with the closest alignment. */
624 for (p = avail_temp_slots; p; p = p->next)
625 {
626 if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
627 && objects_must_conflict_p (p->type, type)
628 && (best_p == 0 || best_p->size > p->size
629 || (best_p->size == p->size && best_p->align > p->align)))
630 {
631 if (p->align == align && p->size == size)
632 {
633 selected = p;
634 cut_slot_from_list (selected, &avail_temp_slots);
635 best_p = 0;
636 break;
637 }
638 best_p = p;
639 }
640 }
641
642 /* Make our best, if any, the one to use. */
643 if (best_p)
644 {
645 selected = best_p;
646 cut_slot_from_list (selected, &avail_temp_slots);
647
648 /* If there are enough aligned bytes left over, make them into a new
649 temp_slot so that the extra bytes don't get wasted. Do this only
650 for BLKmode slots, so that we can be sure of the alignment. */
651 if (GET_MODE (best_p->slot) == BLKmode)
652 {
653 int alignment = best_p->align / BITS_PER_UNIT;
654 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
655
656 if (best_p->size - rounded_size >= alignment)
657 {
658 p = ggc_alloc (sizeof (struct temp_slot));
659 p->in_use = p->addr_taken = 0;
660 p->size = best_p->size - rounded_size;
661 p->base_offset = best_p->base_offset + rounded_size;
662 p->full_size = best_p->full_size - rounded_size;
663 p->slot = gen_rtx_MEM (BLKmode,
664 plus_constant (XEXP (best_p->slot, 0),
665 rounded_size));
666 p->align = best_p->align;
667 p->address = 0;
668 p->type = best_p->type;
669 insert_slot_to_list (p, &avail_temp_slots);
670
671 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
672 stack_slot_list);
673
674 best_p->size = rounded_size;
675 best_p->full_size = rounded_size;
676 }
677 }
678 }
679
680 /* If we still didn't find one, make a new temporary. */
681 if (selected == 0)
682 {
683 HOST_WIDE_INT frame_offset_old = frame_offset;
684
685 p = ggc_alloc (sizeof (struct temp_slot));
686
687 /* We are passing an explicit alignment request to assign_stack_local.
688 One side effect of that is assign_stack_local will not round SIZE
689 to ensure the frame offset remains suitably aligned.
690
691 So for requests which depended on the rounding of SIZE, we go ahead
692 and round it now. We also make sure ALIGNMENT is at least
693 BIGGEST_ALIGNMENT. */
694 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
695 p->slot = assign_stack_local (mode,
696 (mode == BLKmode
697 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
698 : size),
699 align);
700
701 p->align = align;
702
703 /* The following slot size computation is necessary because we don't
704 know the actual size of the temporary slot until assign_stack_local
705 has performed all the frame alignment and size rounding for the
706 requested temporary. Note that extra space added for alignment
707 can be either above or below this stack slot depending on which
708 way the frame grows. We include the extra space if and only if it
709 is above this slot. */
710 #ifdef FRAME_GROWS_DOWNWARD
711 p->size = frame_offset_old - frame_offset;
712 #else
713 p->size = size;
714 #endif
715
716 /* Now define the fields used by combine_temp_slots. */
717 #ifdef FRAME_GROWS_DOWNWARD
718 p->base_offset = frame_offset;
719 p->full_size = frame_offset_old - frame_offset;
720 #else
721 p->base_offset = frame_offset_old;
722 p->full_size = frame_offset - frame_offset_old;
723 #endif
724 p->address = 0;
725
726 selected = p;
727 }
728
729 p = selected;
730 p->in_use = 1;
731 p->addr_taken = 0;
732 p->type = type;
733 p->level = temp_slot_level;
734 p->keep = keep;
735
736 pp = temp_slots_at_level (p->level);
737 insert_slot_to_list (p, pp);
738
739 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
740 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
741 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
742
743 /* If we know the alias set for the memory that will be used, use
744 it. If there's no TYPE, then we don't know anything about the
745 alias set for the memory. */
746 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
747 set_mem_align (slot, align);
748
749 /* If a type is specified, set the relevant flags. */
750 if (type != 0)
751 {
752 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
753 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
754 }
755
756 return slot;
757 }
758
759 /* Allocate a temporary stack slot and record it for possible later
760 reuse. First three arguments are same as in preceding function. */
761
762 rtx
763 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
764 {
765 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
766 }
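
/* A hedged usage sketch: code expanding a call whose aggregate return value
   must live in memory could request a slot with

     rtx mem = assign_stack_temp_for_type (BLKmode,
                                           int_size_in_bytes (type), 0, type);

   KEEP == 0 means free_temp_slots may reclaim the slot once the enclosing
   statement has been expanded, and passing TYPE lets the slot carry the
   appropriate alias set and alignment, as the code above shows.  */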
767 \f
768 /* Assign a temporary.
769 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
770 and the decl should be used in error messages. In either case, we
771 allocate a temporary of the given type.
772 KEEP is as for assign_stack_temp.
773 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
774 it is 0 if a register is OK.
775 DONT_PROMOTE is 1 if we should not promote values in register
776 to wider modes. */
777
778 rtx
779 assign_temp (tree type_or_decl, int keep, int memory_required,
780 int dont_promote ATTRIBUTE_UNUSED)
781 {
782 tree type, decl;
783 enum machine_mode mode;
784 #ifdef PROMOTE_MODE
785 int unsignedp;
786 #endif
787
788 if (DECL_P (type_or_decl))
789 decl = type_or_decl, type = TREE_TYPE (decl);
790 else
791 decl = NULL, type = type_or_decl;
792
793 mode = TYPE_MODE (type);
794 #ifdef PROMOTE_MODE
795 unsignedp = TYPE_UNSIGNED (type);
796 #endif
797
798 if (mode == BLKmode || memory_required)
799 {
800 HOST_WIDE_INT size = int_size_in_bytes (type);
801 tree size_tree;
802 rtx tmp;
803
804 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
805 problems with allocating the stack space. */
806 if (size == 0)
807 size = 1;
808
809 /* Unfortunately, we don't yet know how to allocate variable-sized
810 temporaries. However, sometimes we have a fixed upper limit on
811 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
812 instead. This is the case for Chill variable-sized strings. */
813 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
814 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
815 && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
816 size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);
817
818 /* If we still haven't been able to get a size, see if the language
819 can compute a maximum size. */
820 if (size == -1
821 && (size_tree = lang_hooks.types.max_size (type)) != 0
822 && host_integerp (size_tree, 1))
823 size = tree_low_cst (size_tree, 1);
824
825 /* The size of the temporary may be too large to fit into an integer. */
826 /* ??? Not sure this should happen except for user silliness, so limit
827 this to things that aren't compiler-generated temporaries. The
828 rest of the time we'll abort in assign_stack_temp_for_type. */
829 if (decl && size == -1
830 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
831 {
832 error ("%Jsize of variable %qD is too large", decl, decl);
833 size = 1;
834 }
835
836 tmp = assign_stack_temp_for_type (mode, size, keep, type);
837 return tmp;
838 }
839
840 #ifdef PROMOTE_MODE
841 if (! dont_promote)
842 mode = promote_mode (type, mode, &unsignedp, 0);
843 #endif
844
845 return gen_reg_rtx (mode);
846 }
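
/* A hedged usage sketch: code expanding an expression EXP that must end up in
   addressable stack memory (for instance because its address is about to be
   taken) could request

     rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   i.e. KEEP == 0, MEMORY_REQUIRED == 1 and DONT_PROMOTE == 1.  With
   MEMORY_REQUIRED == 0 and a scalar type it would instead get a pseudo
   register, as the code above shows.  */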
847 \f
848 /* Combine temporary stack slots which are adjacent on the stack.
849
850 This allows for better use of already allocated stack space. This is only
851 done for BLKmode slots because we can be sure that we won't have alignment
852 problems in this case. */
853
854 static void
855 combine_temp_slots (void)
856 {
857 struct temp_slot *p, *q, *next, *next_q;
858 int num_slots;
859
860 /* We can't combine slots, because the information about which slot
861 is in which alias set will be lost. */
862 if (flag_strict_aliasing)
863 return;
864
865 /* If there are a lot of temp slots, don't do anything unless
866 expensive optimizations are enabled. */
867 if (! flag_expensive_optimizations)
868 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
869 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
870 return;
871
872 for (p = avail_temp_slots; p; p = next)
873 {
874 int delete_p = 0;
875
876 next = p->next;
877
878 if (GET_MODE (p->slot) != BLKmode)
879 continue;
880
881 for (q = p->next; q; q = next_q)
882 {
883 int delete_q = 0;
884
885 next_q = q->next;
886
887 if (GET_MODE (q->slot) != BLKmode)
888 continue;
889
890 if (p->base_offset + p->full_size == q->base_offset)
891 {
892 /* Q comes after P; combine Q into P. */
893 p->size += q->size;
894 p->full_size += q->full_size;
895 delete_q = 1;
896 }
897 else if (q->base_offset + q->full_size == p->base_offset)
898 {
899 /* P comes after Q; combine P into Q. */
900 q->size += p->size;
901 q->full_size += p->full_size;
902 delete_p = 1;
903 break;
904 }
905 if (delete_q)
906 cut_slot_from_list (q, &avail_temp_slots);
907 }
908
909 /* Either delete P or advance past it. */
910 if (delete_p)
911 cut_slot_from_list (p, &avail_temp_slots);
912 }
913 }
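
/* For example, two freed BLKmode slots whose (base_offset, full_size) pairs
   are (0, 16) and (16, 8) are adjacent, so the loops above merge them into a
   single slot with base_offset 0 and full_size 24, which a later, larger
   request can then reuse.  */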
914 \f
915 /* Find the temp slot corresponding to the object at address X. */
916
917 static struct temp_slot *
918 find_temp_slot_from_address (rtx x)
919 {
920 struct temp_slot *p;
921 rtx next;
922 int i;
923
924 for (i = max_slot_level (); i >= 0; i--)
925 for (p = *temp_slots_at_level (i); p; p = p->next)
926 {
927 if (XEXP (p->slot, 0) == x
928 || p->address == x
929 || (GET_CODE (x) == PLUS
930 && XEXP (x, 0) == virtual_stack_vars_rtx
931 && GET_CODE (XEXP (x, 1)) == CONST_INT
932 && INTVAL (XEXP (x, 1)) >= p->base_offset
933 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
934 return p;
935
936 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
937 for (next = p->address; next; next = XEXP (next, 1))
938 if (XEXP (next, 0) == x)
939 return p;
940 }
941
942 /* If we have a sum involving a register, see if it points to a temp
943 slot. */
944 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
945 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
946 return p;
947 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
948 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
949 return p;
950
951 return 0;
952 }
953
954 /* Indicate that NEW is an alternate way of referring to the temp slot
955 that previously was known by OLD. */
956
957 void
958 update_temp_slot_address (rtx old, rtx new)
959 {
960 struct temp_slot *p;
961
962 if (rtx_equal_p (old, new))
963 return;
964
965 p = find_temp_slot_from_address (old);
966
967 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
968 is a register, see if one operand of the PLUS is a temporary
969 location. If so, NEW points into it. Otherwise, if both OLD and
970 NEW are a PLUS, see if there is a register in common between them;
971 if so, try a recursive call on those values. */
972 if (p == 0)
973 {
974 if (GET_CODE (old) != PLUS)
975 return;
976
977 if (REG_P (new))
978 {
979 update_temp_slot_address (XEXP (old, 0), new);
980 update_temp_slot_address (XEXP (old, 1), new);
981 return;
982 }
983 else if (GET_CODE (new) != PLUS)
984 return;
985
986 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
987 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
988 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
989 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
990 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
991 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
992 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
993 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
994
995 return;
996 }
997
998 /* Otherwise add an alias for the temp's address. */
999 else if (p->address == 0)
1000 p->address = new;
1001 else
1002 {
1003 if (GET_CODE (p->address) != EXPR_LIST)
1004 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1005
1006 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1007 }
1008 }
1009
1010 /* If X could be a reference to a temporary slot, mark the fact that its
1011 address was taken. */
1012
1013 void
1014 mark_temp_addr_taken (rtx x)
1015 {
1016 struct temp_slot *p;
1017
1018 if (x == 0)
1019 return;
1020
1021 /* If X is not in memory or is at a constant address, it cannot be in
1022 a temporary slot. */
1023 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1024 return;
1025
1026 p = find_temp_slot_from_address (XEXP (x, 0));
1027 if (p != 0)
1028 p->addr_taken = 1;
1029 }
1030
1031 /* If X could be a reference to a temporary slot, mark that slot as
1032 belonging to the level one higher than the current level. If X
1033 matched one of our slots, just mark that one. Otherwise, we can't
1034 easily predict which it is, so upgrade all of them. Kept slots
1035 need not be touched.
1036
1037 This is called when an ({...}) construct occurs and a statement
1038 returns a value in memory. */
1039
1040 void
1041 preserve_temp_slots (rtx x)
1042 {
1043 struct temp_slot *p = 0, *next;
1044
1045 /* If there is no result, we still might have some objects whose addresses
1046 were taken, so we need to make sure they stay around. */
1047 if (x == 0)
1048 {
1049 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1050 {
1051 next = p->next;
1052
1053 if (p->addr_taken)
1054 move_slot_to_level (p, temp_slot_level - 1);
1055 }
1056
1057 return;
1058 }
1059
1060 /* If X is a register that is being used as a pointer, see if we have
1061 a temporary slot we know it points to. To be consistent with
1062 the code below, we really should preserve all non-kept slots
1063 if we can't find a match, but that seems to be much too costly. */
1064 if (REG_P (x) && REG_POINTER (x))
1065 p = find_temp_slot_from_address (x);
1066
1067 /* If X is not in memory or is at a constant address, it cannot be in
1068 a temporary slot, but it can contain something whose address was
1069 taken. */
1070 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1071 {
1072 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1073 {
1074 next = p->next;
1075
1076 if (p->addr_taken)
1077 move_slot_to_level (p, temp_slot_level - 1);
1078 }
1079
1080 return;
1081 }
1082
1083 /* First see if we can find a match. */
1084 if (p == 0)
1085 p = find_temp_slot_from_address (XEXP (x, 0));
1086
1087 if (p != 0)
1088 {
1089 /* Move everything at our level whose address was taken to our new
1090 level in case we used its address. */
1091 struct temp_slot *q;
1092
1093 if (p->level == temp_slot_level)
1094 {
1095 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1096 {
1097 next = q->next;
1098
1099 if (p != q && q->addr_taken)
1100 move_slot_to_level (q, temp_slot_level - 1);
1101 }
1102
1103 move_slot_to_level (p, temp_slot_level - 1);
1104 p->addr_taken = 0;
1105 }
1106 return;
1107 }
1108
1109 /* Otherwise, preserve all non-kept slots at this level. */
1110 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1111 {
1112 next = p->next;
1113
1114 if (!p->keep)
1115 move_slot_to_level (p, temp_slot_level - 1);
1116 }
1117 }
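
/* A concrete (hypothetical) example of the ({...}) case described above:
   given

     struct big b = ({ struct big tmp = make_big (); tmp; });

   (make_big being an arbitrary function returning a struct), the value of the
   statement expression may live in a temporary slot, so preserve_temp_slots is
   called with its rtx to keep that slot alive past the end of the inner
   statement.  */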
1118
1119 /* Free all temporaries used so far. This is normally called at the
1120 end of generating code for a statement. */
1121
1122 void
1123 free_temp_slots (void)
1124 {
1125 struct temp_slot *p, *next;
1126
1127 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1128 {
1129 next = p->next;
1130
1131 if (!p->keep)
1132 make_slot_available (p);
1133 }
1134
1135 combine_temp_slots ();
1136 }
1137
1138 /* Push deeper into the nesting level for stack temporaries. */
1139
1140 void
1141 push_temp_slots (void)
1142 {
1143 temp_slot_level++;
1144 }
1145
1146 /* Pop a temporary nesting level. All slots in use in the current level
1147 are freed. */
1148
1149 void
1150 pop_temp_slots (void)
1151 {
1152 struct temp_slot *p, *next;
1153
1154 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1155 {
1156 next = p->next;
1157 make_slot_available (p);
1158 }
1159
1160 combine_temp_slots ();
1161
1162 temp_slot_level--;
1163 }
1164
1165 /* Initialize temporary slots. */
1166
1167 void
1168 init_temp_slots (void)
1169 {
1170 /* We have not allocated any temporaries yet. */
1171 avail_temp_slots = 0;
1172 used_temp_slots = 0;
1173 temp_slot_level = 0;
1174 }
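
/* A hedged sketch of how the machinery above is typically driven while
   expanding a single statement:

     push_temp_slots ();
     ... expand the statement, calling assign_stack_temp_for_type () as
         needed; each slot is recorded at the current temp_slot_level ...
     preserve_temp_slots (result_rtx);  ... only if the result must survive ...
     free_temp_slots ();
     pop_temp_slots ();

   push_temp_slots / pop_temp_slots bracket the statement's nesting level, and
   free_temp_slots returns every non-kept slot to avail_temp_slots.  */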
1175 \f
1176 /* These routines are responsible for converting virtual register references
1177 to the actual hard register references once RTL generation is complete.
1178
1179 The following four variables are used for communication between the
1180 routines. They contain the offsets of the virtual registers from their
1181 respective hard registers. */
1182
1183 static int in_arg_offset;
1184 static int var_offset;
1185 static int dynamic_offset;
1186 static int out_arg_offset;
1187 static int cfa_offset;
1188
1189 /* In most machines, the stack pointer register is equivalent to the bottom
1190 of the stack. */
1191
1192 #ifndef STACK_POINTER_OFFSET
1193 #define STACK_POINTER_OFFSET 0
1194 #endif
1195
1196 /* If not defined, pick an appropriate default for the offset of dynamically
1197 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1198 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1199
1200 #ifndef STACK_DYNAMIC_OFFSET
1201
1202 /* The bottom of the stack points to the actual arguments. If
1203 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1204 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1205 stack space for register parameters is not pushed by the caller, but
1206 rather is part of the fixed stack areas and hence not included in
1207 `current_function_outgoing_args_size'. Nevertheless, we must allow
1208 for it when allocating stack dynamic objects. */
1209
1210 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1211 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1212 ((ACCUMULATE_OUTGOING_ARGS \
1213 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1214 + (STACK_POINTER_OFFSET)) \
1215
1216 #else
1217 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1218 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1219 + (STACK_POINTER_OFFSET))
1220 #endif
1221 #endif
1222
1223 /* On most machines, the CFA coincides with the first incoming parm. */
1224
1225 #ifndef ARG_POINTER_CFA_OFFSET
1226 #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
1227 #endif
1228
1229 \f
1230 /* Pass through the INSNS of function FNDECL and convert virtual register
1231 references to hard register references. */
1232
1233 void
1234 instantiate_virtual_regs (void)
1235 {
1236 rtx insn;
1237
1238 /* Compute the offsets to use for this function. */
1239 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1240 var_offset = STARTING_FRAME_OFFSET;
1241 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1242 out_arg_offset = STACK_POINTER_OFFSET;
1243 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1244
1245 /* Scan all variables and parameters of this function. For each that is
1246 in memory, instantiate all virtual registers if the result is a valid
1247 address. If not, we do it later. That will handle most uses of virtual
1248 regs on many machines. */
1249 instantiate_decls (current_function_decl, 1);
1250
1251 /* Initialize recognition, indicating that volatile is OK. */
1252 init_recog ();
1253
1254 /* Scan through all the insns, instantiating every virtual register still
1255 present. */
1256 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1257 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1258 || GET_CODE (insn) == CALL_INSN)
1259 {
1260 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
1261 if (INSN_DELETED_P (insn))
1262 continue;
1263 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
1264 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1265 if (GET_CODE (insn) == CALL_INSN)
1266 instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
1267 NULL_RTX, 0);
1268
1269 /* Past this point all ASM statements should match. Verify that
1270 to avoid failures later in the compilation process. */
1271 if (asm_noperands (PATTERN (insn)) >= 0
1272 && ! check_asm_operands (PATTERN (insn)))
1273 instantiate_virtual_regs_lossage (insn);
1274 }
1275
1276 /* Now instantiate the remaining register equivalences for debugging info.
1277 These will not be valid addresses. */
1278 instantiate_decls (current_function_decl, 0);
1279
1280 /* Indicate that, from now on, assign_stack_local should use
1281 frame_pointer_rtx. */
1282 virtuals_instantiated = 1;
1283 }
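
/* To illustrate the effect (a sketch, using the offsets computed above): a
   local variable reference generated during expansion as

     (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 8)))

   is rewritten by this pass into

     (mem:SI (plus:SI (reg:SI frame-pointer) (const_int (8 + var_offset))))

   where var_offset is STARTING_FRAME_OFFSET on the target in question.  */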
1284
1285 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1286 all virtual registers in their DECL_RTL's.
1287
1288 If VALID_ONLY, do this only if the resulting address is still valid.
1289 Otherwise, always do it. */
1290
1291 static void
1292 instantiate_decls (tree fndecl, int valid_only)
1293 {
1294 tree decl;
1295
1296 /* Process all parameters of the function. */
1297 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1298 {
1299 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
1300 HOST_WIDE_INT size_rtl;
1301
1302 instantiate_decl (DECL_RTL (decl), size, valid_only);
1303
1304 /* If the parameter was promoted, then the incoming RTL mode may be
1305 larger than the declared type size. We must use the larger of
1306 the two sizes. */
1307 size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
1308 size = MAX (size_rtl, size);
1309 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
1310 }
1311
1312 /* Now process all variables defined in the function or its subblocks. */
1313 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
1314 }
1315
1316 /* Subroutine of instantiate_decls: Process all decls in the given
1317 BLOCK node and all its subblocks. */
1318
1319 static void
1320 instantiate_decls_1 (tree let, int valid_only)
1321 {
1322 tree t;
1323
1324 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1325 if (DECL_RTL_SET_P (t))
1326 instantiate_decl (DECL_RTL (t),
1327 int_size_in_bytes (TREE_TYPE (t)),
1328 valid_only);
1329
1330 /* Process all subblocks. */
1331 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1332 instantiate_decls_1 (t, valid_only);
1333 }
1334
1335 /* Subroutine of the preceding procedures: Given RTL representing a
1336 decl and the size of the object, do any instantiation required.
1337
1338 If VALID_ONLY is nonzero, it means that the RTL should only be
1339 changed if the new address is valid. */
1340
1341 static void
1342 instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
1343 {
1344 enum machine_mode mode;
1345 rtx addr;
1346
1347 if (x == 0)
1348 return;
1349
1350 /* If this is a CONCAT, recurse for the pieces. */
1351 if (GET_CODE (x) == CONCAT)
1352 {
1353 instantiate_decl (XEXP (x, 0), size / 2, valid_only);
1354 instantiate_decl (XEXP (x, 1), size / 2, valid_only);
1355 return;
1356 }
1357
1358 /* If this is not a MEM, no need to do anything. Similarly if the
1359 address is a constant or a register that is not a virtual register. */
1360 if (!MEM_P (x))
1361 return;
1362
1363 addr = XEXP (x, 0);
1364 if (CONSTANT_P (addr)
1365 || (REG_P (addr)
1366 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1367 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1368 return;
1369
1370 /* If we should only do this if the address is valid, copy the address.
1371 We need to do this so we can undo any changes that might make the
1372 address invalid. This copy is unfortunate, but probably can't be
1373 avoided. */
1374
1375 if (valid_only)
1376 addr = copy_rtx (addr);
1377
1378 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
1379
1380 if (valid_only && size >= 0)
1381 {
1382 unsigned HOST_WIDE_INT decl_size = size;
1383
1384 /* Now verify that the resulting address is valid for every integer or
1385 floating-point mode up to and including SIZE bytes long. We do this
1386 since the object might be accessed in any mode and frame addresses
1387 are shared. */
1388
1389 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1390 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
1391 mode = GET_MODE_WIDER_MODE (mode))
1392 if (! memory_address_p (mode, addr))
1393 return;
1394
1395 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1396 mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
1397 mode = GET_MODE_WIDER_MODE (mode))
1398 if (! memory_address_p (mode, addr))
1399 return;
1400 }
1401
1402 /* Put back the address now that we have updated it and we either know
1403 it is valid or we don't care whether it is valid. */
1404
1405 XEXP (x, 0) = addr;
1406 }
1407 \f
1408 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1409 is a virtual register, return the equivalent hard register and set the
1410 offset indirectly through the pointer. Otherwise, return 0. */
1411
1412 static rtx
1413 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1414 {
1415 rtx new;
1416 HOST_WIDE_INT offset;
1417
1418 if (x == virtual_incoming_args_rtx)
1419 new = arg_pointer_rtx, offset = in_arg_offset;
1420 else if (x == virtual_stack_vars_rtx)
1421 new = frame_pointer_rtx, offset = var_offset;
1422 else if (x == virtual_stack_dynamic_rtx)
1423 new = stack_pointer_rtx, offset = dynamic_offset;
1424 else if (x == virtual_outgoing_args_rtx)
1425 new = stack_pointer_rtx, offset = out_arg_offset;
1426 else if (x == virtual_cfa_rtx)
1427 new = arg_pointer_rtx, offset = cfa_offset;
1428 else
1429 return 0;
1430
1431 *poffset = offset;
1432 return new;
1433 }
1434 \f
1435
1436 /* Called when instantiate_virtual_regs has failed to update the instruction.
1437 Usually this means that a non-matching instruction has been emitted; for
1438 asm statements the problem may instead lie in the constraints. */
1439 static void
1440 instantiate_virtual_regs_lossage (rtx insn)
1441 {
1442 gcc_assert (asm_noperands (PATTERN (insn)) >= 0);
1443 error_for_asm (insn, "impossible constraint in %<asm%>");
1444 delete_insn (insn);
1445 }
1446 /* Given a pointer to a piece of rtx and an optional pointer to the
1447 containing object, instantiate any virtual registers present in it.
1448
1449 If EXTRA_INSNS, we always do the replacement and generate
1450 any extra insns before OBJECT. If it is zero, we do nothing if the replacement
1451 is not valid.
1452
1453 Return 1 if we either had nothing to do or if we were able to do the
1454 needed replacement. Return 0 otherwise; we only return zero if
1455 EXTRA_INSNS is zero.
1456
1457 We first try some simple transformations to avoid the creation of extra
1458 pseudos. */
1459
1460 static int
1461 instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
1462 {
1463 rtx x;
1464 RTX_CODE code;
1465 rtx new = 0;
1466 HOST_WIDE_INT offset = 0;
1467 rtx temp;
1468 rtx seq;
1469 int i, j;
1470 const char *fmt;
1471
1472 /* Re-start here to avoid recursion in common cases. */
1473 restart:
1474
1475 x = *loc;
1476 if (x == 0)
1477 return 1;
1478
1479 /* We may have detected and deleted invalid asm statements. */
1480 if (object && INSN_P (object) && INSN_DELETED_P (object))
1481 return 1;
1482
1483 code = GET_CODE (x);
1484
1485 /* Check for some special cases. */
1486 switch (code)
1487 {
1488 case CONST_INT:
1489 case CONST_DOUBLE:
1490 case CONST_VECTOR:
1491 case CONST:
1492 case SYMBOL_REF:
1493 case CODE_LABEL:
1494 case PC:
1495 case CC0:
1496 case ASM_INPUT:
1497 case ADDR_VEC:
1498 case ADDR_DIFF_VEC:
1499 case RETURN:
1500 return 1;
1501
1502 case SET:
1503 /* We are allowed to set the virtual registers. This means that
1504 the actual register should receive the source minus the
1505 appropriate offset. This is used, for example, in the handling
1506 of non-local gotos. */
1507 if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
1508 {
1509 rtx src = SET_SRC (x);
1510
1511 /* We are setting the register, not using it, so the relevant
1512 offset is the negative of the offset to use were we using
1513 the register. */
1514 offset = - offset;
1515 instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
1516
1517 /* The only valid sources here are PLUS or REG. Just do
1518 the simplest possible thing to handle them. */
1519 if (!REG_P (src) && GET_CODE (src) != PLUS)
1520 {
1521 instantiate_virtual_regs_lossage (object);
1522 return 1;
1523 }
1524
1525 start_sequence ();
1526 if (!REG_P (src))
1527 temp = force_operand (src, NULL_RTX);
1528 else
1529 temp = src;
1530 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
1531 seq = get_insns ();
1532 end_sequence ();
1533
1534 emit_insn_before (seq, object);
1535 SET_DEST (x) = new;
1536
1537 if (! validate_change (object, &SET_SRC (x), temp, 0)
1538 || ! extra_insns)
1539 instantiate_virtual_regs_lossage (object);
1540
1541 return 1;
1542 }
1543
1544 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
1545 loc = &SET_SRC (x);
1546 goto restart;
1547
1548 case PLUS:
1549 /* Handle special case of virtual register plus constant. */
1550 if (CONSTANT_P (XEXP (x, 1)))
1551 {
1552 rtx old, new_offset;
1553
1554 /* Check for (plus (plus VIRT foo) (const_int)) first. */
1555 if (GET_CODE (XEXP (x, 0)) == PLUS)
1556 {
1557 if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
1558 {
1559 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
1560 extra_insns);
1561 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
1562 }
1563 else
1564 {
1565 loc = &XEXP (x, 0);
1566 goto restart;
1567 }
1568 }
1569
1570 #ifdef POINTERS_EXTEND_UNSIGNED
1571 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1572 we can commute the PLUS and SUBREG because pointers into the
1573 frame are well-behaved. */
1574 else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
1575 && GET_CODE (XEXP (x, 1)) == CONST_INT
1576 && 0 != (new
1577 = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
1578 &offset))
1579 && validate_change (object, loc,
1580 plus_constant (gen_lowpart (ptr_mode,
1581 new),
1582 offset
1583 + INTVAL (XEXP (x, 1))),
1584 0))
1585 return 1;
1586 #endif
1587 else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
1588 {
1589 /* We know the second operand is a constant. Unless the
1590 first operand is a REG (which has already been checked),
1591 it needs to be checked. */
1592 if (!REG_P (XEXP (x, 0)))
1593 {
1594 loc = &XEXP (x, 0);
1595 goto restart;
1596 }
1597 return 1;
1598 }
1599
1600 new_offset = plus_constant (XEXP (x, 1), offset);
1601
1602 /* If the new constant is zero, try to replace the sum with just
1603 the register. */
1604 if (new_offset == const0_rtx
1605 && validate_change (object, loc, new, 0))
1606 return 1;
1607
1608 /* Next try to replace the register and new offset.
1609 There are two changes to validate here, and we can't assume that,
1610 when the old offset equals the new one, just changing the register
1611 will yield a valid insn. In the interests of a little efficiency,
1612 however, we only call validate_change once (we don't queue up the
1613 changes and then call apply_change_group). */
1614
1615 old = XEXP (x, 0);
1616 if (offset == 0
1617 ? ! validate_change (object, &XEXP (x, 0), new, 0)
1618 : (XEXP (x, 0) = new,
1619 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
1620 {
1621 if (! extra_insns)
1622 {
1623 XEXP (x, 0) = old;
1624 return 0;
1625 }
1626
1627 /* Otherwise copy the new constant into a register and replace
1628 the constant with that register. */
1629 temp = gen_reg_rtx (Pmode);
1630 XEXP (x, 0) = new;
1631 if (validate_change (object, &XEXP (x, 1), temp, 0))
1632 emit_insn_before (gen_move_insn (temp, new_offset), object);
1633 else
1634 {
1635 /* If that didn't work, replace this expression with a
1636 register containing the sum. */
1637
1638 XEXP (x, 0) = old;
1639 new = gen_rtx_PLUS (Pmode, new, new_offset);
1640
1641 start_sequence ();
1642 temp = force_operand (new, NULL_RTX);
1643 seq = get_insns ();
1644 end_sequence ();
1645
1646 emit_insn_before (seq, object);
1647 if (! validate_change (object, loc, temp, 0)
1648 && ! validate_replace_rtx (x, temp, object))
1649 {
1650 instantiate_virtual_regs_lossage (object);
1651 return 1;
1652 }
1653 }
1654 }
1655
1656 return 1;
1657 }
1658
1659 /* Fall through to generic two-operand expression case. */
1660 case EXPR_LIST:
1661 case CALL:
1662 case COMPARE:
1663 case MINUS:
1664 case MULT:
1665 case DIV: case UDIV:
1666 case MOD: case UMOD:
1667 case AND: case IOR: case XOR:
1668 case ROTATERT: case ROTATE:
1669 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
1670 case NE: case EQ:
1671 case GE: case GT: case GEU: case GTU:
1672 case LE: case LT: case LEU: case LTU:
1673 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
1674 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
1675 loc = &XEXP (x, 0);
1676 goto restart;
1677
1678 case MEM:
1679 /* Most cases of MEM that convert to valid addresses have already been
1680 handled by our scan of decls. The only special handling we
1681 need here is to make a copy of the rtx to ensure it isn't being
1682 shared if we have to change it to a pseudo.
1683
1684 If the rtx is a simple reference to an address via a virtual register,
1685 it can potentially be shared. In such cases, first try to make it
1686 a valid address, which can also be shared. Otherwise, copy it and
1687 proceed normally.
1688
1689 First check for common cases that need no processing. These are
1690 usually due to instantiation already being done on a previous instance
1691 of a shared rtx. */
1692
1693 temp = XEXP (x, 0);
1694 if (CONSTANT_ADDRESS_P (temp)
1695 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1696 || temp == arg_pointer_rtx
1697 #endif
1698 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1699 || temp == hard_frame_pointer_rtx
1700 #endif
1701 || temp == frame_pointer_rtx)
1702 return 1;
1703
1704 if (GET_CODE (temp) == PLUS
1705 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1706 && (XEXP (temp, 0) == frame_pointer_rtx
1707 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1708 || XEXP (temp, 0) == hard_frame_pointer_rtx
1709 #endif
1710 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1711 || XEXP (temp, 0) == arg_pointer_rtx
1712 #endif
1713 ))
1714 return 1;
1715
1716 if (temp == virtual_stack_vars_rtx
1717 || temp == virtual_incoming_args_rtx
1718 || (GET_CODE (temp) == PLUS
1719 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
1720 && (XEXP (temp, 0) == virtual_stack_vars_rtx
1721 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
1722 {
1723 /* This MEM may be shared. If the substitution can be done without
1724 the need to generate new pseudos, we want to do it in place
1725 so all copies of the shared rtx benefit. The call below will
1726 only make substitutions if the resulting address is still
1727 valid.
1728
1729 Note that we cannot pass X as the object in the recursive call
1730 since the insn being processed may not allow all valid
1731 addresses. However, if we were not passed an object, we can
1732 only modify X without copying it if X will have a valid
1733 address.
1734
1735 ??? Also note that this can still lose if OBJECT is an insn that
1736 has fewer restrictions on an address than some other insn.
1737 In that case, we will modify the shared address. This case
1738 doesn't seem very likely, though. One case where this could
1739 happen is in the case of a USE or CLOBBER reference, but we
1740 take care of that below. */
1741
1742 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
1743 object ? object : x, 0))
1744 return 1;
1745
1746 /* Otherwise make a copy and process that copy. We copy the entire
1747 RTL expression since it might be a PLUS which could also be
1748 shared. */
1749 *loc = x = copy_rtx (x);
1750 }
1751
1752 /* Fall through to generic unary operation case. */
1753 case PREFETCH:
1754 case SUBREG:
1755 case STRICT_LOW_PART:
1756 case NEG: case NOT:
1757 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
1758 case SIGN_EXTEND: case ZERO_EXTEND:
1759 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
1760 case FLOAT: case FIX:
1761 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
1762 case ABS:
1763 case SQRT:
1764 case FFS:
1765 case CLZ: case CTZ:
1766 case POPCOUNT: case PARITY:
1767 /* These cases either have just one operand or we know that we need not
1768 check the rest of the operands. */
1769 loc = &XEXP (x, 0);
1770 goto restart;
1771
1772 case USE:
1773 case CLOBBER:
1774 /* If the operand is a MEM, see if the change is a valid MEM. If not,
1775 go ahead and make the invalid one, but do it to a copy. For a REG,
1776 just make the recursive call, since there's no chance of a problem. */
1777
1778 if ((MEM_P (XEXP (x, 0))
1779 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
1780 0))
1781 || (REG_P (XEXP (x, 0))
1782 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
1783 return 1;
1784
1785 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
1786 loc = &XEXP (x, 0);
1787 goto restart;
1788
1789 case REG:
1790 /* Try to replace with a PLUS. If that doesn't work, compute the sum
1791 in front of this insn and substitute the temporary. */
1792 if ((new = instantiate_new_reg (x, &offset)) != 0)
1793 {
1794 temp = plus_constant (new, offset);
1795 if (!validate_change (object, loc, temp, 0))
1796 {
1797 if (! extra_insns)
1798 return 0;
1799
1800 start_sequence ();
1801 temp = force_operand (temp, NULL_RTX);
1802 seq = get_insns ();
1803 end_sequence ();
1804
1805 emit_insn_before (seq, object);
1806 if (! validate_change (object, loc, temp, 0)
1807 && ! validate_replace_rtx (x, temp, object))
1808 instantiate_virtual_regs_lossage (object);
1809 }
1810 }
1811
1812 return 1;
1813
1814 default:
1815 break;
1816 }
1817
1818 /* Scan all subexpressions. */
1819 fmt = GET_RTX_FORMAT (code);
1820 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1821 if (*fmt == 'e')
1822 {
1823 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
1824 return 0;
1825 }
1826 else if (*fmt == 'E')
1827 for (j = 0; j < XVECLEN (x, i); j++)
1828 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
1829 extra_insns))
1830 return 0;
1831
1832 return 1;
1833 }
1834 \f
1835 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1836 This means a type for which function calls must pass an address to the
1837 function or get an address back from the function.
1838 EXP may be a type node or an expression (whose type is tested). */
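/* For illustration: a plain scalar such as int normally yields 0 here,
   while a TREE_ADDRESSABLE type (e.g. a C++ class with a nontrivial
   copy constructor), a type the target's return_in_memory hook sends
   to memory, or any aggregate under -fpcc-struct-return yields 1.  */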
1839
1840 int
1841 aggregate_value_p (tree exp, tree fntype)
1842 {
1843 int i, regno, nregs;
1844 rtx reg;
1845
1846 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1847
1848 if (fntype)
1849 switch (TREE_CODE (fntype))
1850 {
1851 case CALL_EXPR:
1852 fntype = get_callee_fndecl (fntype);
1853 fntype = fntype ? TREE_TYPE (fntype) : 0;
1854 break;
1855 case FUNCTION_DECL:
1856 fntype = TREE_TYPE (fntype);
1857 break;
1858 case FUNCTION_TYPE:
1859 case METHOD_TYPE:
1860 break;
1861 case IDENTIFIER_NODE:
1862 fntype = 0;
1863 break;
1864 default:
1865 /* We don't expect other tree codes here. */
1866 gcc_unreachable ();
1867 }
1868
1869 if (TREE_CODE (type) == VOID_TYPE)
1870 return 0;
1871 /* If the front end has decided that this needs to be passed by
1872 reference, do so. */
1873 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1874 && DECL_BY_REFERENCE (exp))
1875 return 1;
1876 if (targetm.calls.return_in_memory (type, fntype))
1877 return 1;
1878 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1879 and thus can't be returned in registers. */
1880 if (TREE_ADDRESSABLE (type))
1881 return 1;
1882 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1883 return 1;
1884 /* Make sure we have suitable call-clobbered regs to return
1885 the value in; if not, we must return it in memory. */
1886 reg = hard_function_value (type, 0, 0);
1887
1888 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1889 it is OK. */
1890 if (!REG_P (reg))
1891 return 0;
1892
1893 regno = REGNO (reg);
1894 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1895 for (i = 0; i < nregs; i++)
1896 if (! call_used_regs[regno + i])
1897 return 1;
1898 return 0;
1899 }
1900 \f
1901 /* Return true if we should assign DECL a pseudo register; false if it
1902 should live on the local stack. */
1903
1904 bool
1905 use_register_for_decl (tree decl)
1906 {
1907 /* Honor volatile. */
1908 if (TREE_SIDE_EFFECTS (decl))
1909 return false;
1910
1911 /* Honor addressability. */
1912 if (TREE_ADDRESSABLE (decl))
1913 return false;
1914
1915 /* Only register-like things go in registers. */
1916 if (DECL_MODE (decl) == BLKmode)
1917 return false;
1918
1919 /* If -ffloat-store specified, don't put explicit float variables
1920 into registers. */
1921 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1922 propagates values across these stores, and it probably shouldn't. */
1923 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1924 return false;
1925
1926 /* If we're not interested in tracking debugging information for
1927 this decl, then we can certainly put it in a register. */
1928 if (DECL_IGNORED_P (decl))
1929 return true;
1930
1931 return (optimize || DECL_REGISTER (decl));
1932 }
1933
1934 /* Return true if TYPE should be passed by invisible reference. */
1935
1936 bool
1937 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1938 tree type, bool named_arg)
1939 {
1940 if (type)
1941 {
1942 /* If this type contains non-trivial constructors, then it is
1943 forbidden for the middle-end to create any new copies. */
1944 if (TREE_ADDRESSABLE (type))
1945 return true;
1946
1947 /* GCC post 3.4 passes *all* variable sized types by reference. */
1948 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1949 return true;
1950 }
1951
1952 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1953 }
1954
1955 /* Return true if TYPE, which is passed by reference, should be callee
1956 copied instead of caller copied. */
1957
1958 bool
1959 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1960 tree type, bool named_arg)
1961 {
1962 if (type && TREE_ADDRESSABLE (type))
1963 return false;
1964 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1965 }
1966
1967 /* Structures to communicate between the subroutines of assign_parms.
1968 The first holds data persistent across all parameters, the second
1969 is cleared out for each parameter. */
1970
1971 struct assign_parm_data_all
1972 {
1973 CUMULATIVE_ARGS args_so_far;
1974 struct args_size stack_args_size;
1975 tree function_result_decl;
1976 tree orig_fnargs;
1977 rtx conversion_insns;
1978 HOST_WIDE_INT pretend_args_size;
1979 HOST_WIDE_INT extra_pretend_bytes;
1980 int reg_parm_stack_space;
1981 };
1982
1983 struct assign_parm_data_one
1984 {
1985 tree nominal_type;
1986 tree passed_type;
1987 rtx entry_parm;
1988 rtx stack_parm;
1989 enum machine_mode nominal_mode;
1990 enum machine_mode passed_mode;
1991 enum machine_mode promoted_mode;
1992 struct locate_and_pad_arg_data locate;
1993 int partial;
1994 BOOL_BITFIELD named_arg : 1;
1995 BOOL_BITFIELD passed_pointer : 1;
1996 BOOL_BITFIELD on_stack : 1;
1997 BOOL_BITFIELD loaded_in_reg : 1;
1998 };
1999
2000 /* A subroutine of assign_parms. Initialize ALL. */
2001
2002 static void
2003 assign_parms_initialize_all (struct assign_parm_data_all *all)
2004 {
2005 tree fntype;
2006
2007 memset (all, 0, sizeof (*all));
2008
2009 fntype = TREE_TYPE (current_function_decl);
2010
2011 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2012 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2013 #else
2014 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2015 current_function_decl, -1);
2016 #endif
2017
2018 #ifdef REG_PARM_STACK_SPACE
2019 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2020 #endif
2021 }
2022
2023 /* If ARGS contains entries with complex types, split the entry into two
2024 entries of the component type. Return a new list if substitutions are
2025 needed, else the old list. */
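/* For example, a PARM_DECL of type _Complex double (when the target's
   split_complex_arg hook accepts it) is rewritten in place to a double
   holding the real part, and a second artificial double PARM_DECL for
   the imaginary part is spliced in immediately after it.  */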
2026
2027 static tree
2028 split_complex_args (tree args)
2029 {
2030 tree p;
2031
2032 /* Before allocating memory, check for the common case of no complex. */
2033 for (p = args; p; p = TREE_CHAIN (p))
2034 {
2035 tree type = TREE_TYPE (p);
2036 if (TREE_CODE (type) == COMPLEX_TYPE
2037 && targetm.calls.split_complex_arg (type))
2038 goto found;
2039 }
2040 return args;
2041
2042 found:
2043 args = copy_list (args);
2044
2045 for (p = args; p; p = TREE_CHAIN (p))
2046 {
2047 tree type = TREE_TYPE (p);
2048 if (TREE_CODE (type) == COMPLEX_TYPE
2049 && targetm.calls.split_complex_arg (type))
2050 {
2051 tree decl;
2052 tree subtype = TREE_TYPE (type);
2053 bool addressable = TREE_ADDRESSABLE (p);
2054
2055 /* Rewrite the PARM_DECL's type with its component. */
2056 TREE_TYPE (p) = subtype;
2057 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2058 DECL_MODE (p) = VOIDmode;
2059 DECL_SIZE (p) = NULL;
2060 DECL_SIZE_UNIT (p) = NULL;
2061 /* If this arg must go in memory, put it in a pseudo here.
2062 We can't allow it to go in memory as per normal parms,
2063 because the usual place might not have the imag part
2064 adjacent to the real part. */
2065 DECL_ARTIFICIAL (p) = addressable;
2066 DECL_IGNORED_P (p) = addressable;
2067 TREE_ADDRESSABLE (p) = 0;
2068 layout_decl (p, 0);
2069
2070 /* Build a second synthetic decl. */
2071 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2072 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2073 DECL_ARTIFICIAL (decl) = addressable;
2074 DECL_IGNORED_P (decl) = addressable;
2075 layout_decl (decl, 0);
2076
2077 /* Splice it in; skip the new decl. */
2078 TREE_CHAIN (decl) = TREE_CHAIN (p);
2079 TREE_CHAIN (p) = decl;
2080 p = decl;
2081 }
2082 }
2083
2084 return args;
2085 }
2086
2087 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2088 the hidden struct return argument, and (ABI willing) complex args.
2089 Return the new parameter list. */
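/* Illustration: if `struct S f (int x)' must return S in memory and the
   target supplies no struct_value_rtx register, the list is extended so
   the function is laid out as if declared `void f (struct S *<result>,
   int x)', with the artificial pointer parm also recorded in
   all->function_result_decl.  */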
2090
2091 static tree
2092 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2093 {
2094 tree fndecl = current_function_decl;
2095 tree fntype = TREE_TYPE (fndecl);
2096 tree fnargs = DECL_ARGUMENTS (fndecl);
2097
2098 /* If struct value address is treated as the first argument, make it so. */
2099 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2100 && ! current_function_returns_pcc_struct
2101 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2102 {
2103 tree type = build_pointer_type (TREE_TYPE (fntype));
2104 tree decl;
2105
2106 decl = build_decl (PARM_DECL, NULL_TREE, type);
2107 DECL_ARG_TYPE (decl) = type;
2108 DECL_ARTIFICIAL (decl) = 1;
2109 DECL_IGNORED_P (decl) = 1;
2110
2111 TREE_CHAIN (decl) = fnargs;
2112 fnargs = decl;
2113 all->function_result_decl = decl;
2114 }
2115
2116 all->orig_fnargs = fnargs;
2117
2118 /* If the target wants to split complex arguments into scalars, do so. */
2119 if (targetm.calls.split_complex_arg)
2120 fnargs = split_complex_args (fnargs);
2121
2122 return fnargs;
2123 }
2124
2125 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2126 data for the parameter. Incorporate ABI specifics such as pass-by-
2127 reference and type promotion. */
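/* Two illustrative outcomes of the adjustments below: a `short' parm, on
   a target whose promote_function_args hook returns true and which
   promotes HImode to SImode, keeps nominal_mode HImode but gets
   promoted_mode SImode; a parm that pass_by_reference accepts has its
   types rewritten to a pointer type, its modes set to Pmode, and
   passed_pointer set.  */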
2128
2129 static void
2130 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2131 struct assign_parm_data_one *data)
2132 {
2133 tree nominal_type, passed_type;
2134 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2135
2136 memset (data, 0, sizeof (*data));
2137
2138 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2139 if (!current_function_stdarg)
2140 data->named_arg = 1; /* No variadic parms. */
2141 else if (TREE_CHAIN (parm))
2142 data->named_arg = 1; /* Not the last non-variadic parm. */
2143 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2144 data->named_arg = 1; /* Only variadic ones are unnamed. */
2145 else
2146 data->named_arg = 0; /* Treat as variadic. */
2147
2148 nominal_type = TREE_TYPE (parm);
2149 passed_type = DECL_ARG_TYPE (parm);
2150
2151 /* Look out for errors propagating this far. Also, if the parameter's
2152 type is void then its value doesn't matter. */
2153 if (TREE_TYPE (parm) == error_mark_node
2154 /* This can happen after weird syntax errors
2155 or if an enum type is defined among the parms. */
2156 || TREE_CODE (parm) != PARM_DECL
2157 || passed_type == NULL
2158 || VOID_TYPE_P (nominal_type))
2159 {
2160 nominal_type = passed_type = void_type_node;
2161 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2162 goto egress;
2163 }
2164
2165 /* Find mode of arg as it is passed, and mode of arg as it should be
2166 during execution of this function. */
2167 passed_mode = TYPE_MODE (passed_type);
2168 nominal_mode = TYPE_MODE (nominal_type);
2169
2170 /* If the parm is to be passed as a transparent union, use the type of
2171 the first field for the tests below. We have already verified that
2172 the modes are the same. */
2173 if (DECL_TRANSPARENT_UNION (parm)
2174 || (TREE_CODE (passed_type) == UNION_TYPE
2175 && TYPE_TRANSPARENT_UNION (passed_type)))
2176 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2177
2178 /* See if this arg was passed by invisible reference. */
2179 if (pass_by_reference (&all->args_so_far, passed_mode,
2180 passed_type, data->named_arg))
2181 {
2182 passed_type = nominal_type = build_pointer_type (passed_type);
2183 data->passed_pointer = true;
2184 passed_mode = nominal_mode = Pmode;
2185 }
2186
2187 /* Find mode as it is passed by the ABI. */
2188 promoted_mode = passed_mode;
2189 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2190 {
2191 int unsignedp = TYPE_UNSIGNED (passed_type);
2192 promoted_mode = promote_mode (passed_type, promoted_mode,
2193 &unsignedp, 1);
2194 }
2195
2196 egress:
2197 data->nominal_type = nominal_type;
2198 data->passed_type = passed_type;
2199 data->nominal_mode = nominal_mode;
2200 data->passed_mode = passed_mode;
2201 data->promoted_mode = promoted_mode;
2202 }
2203
2204 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2205
2206 static void
2207 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2208 struct assign_parm_data_one *data, bool no_rtl)
2209 {
2210 int varargs_pretend_bytes = 0;
2211
2212 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2213 data->promoted_mode,
2214 data->passed_type,
2215 &varargs_pretend_bytes, no_rtl);
2216
2217 /* If the back-end has requested extra stack space, record how much is
2218 needed. Do not change pretend_args_size otherwise since it may be
2219 nonzero from an earlier partial argument. */
2220 if (varargs_pretend_bytes > 0)
2221 all->pretend_args_size = varargs_pretend_bytes;
2222 }
2223
2224 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2225 the incoming location of the current parameter. */
2226
2227 static void
2228 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2229 struct assign_parm_data_one *data)
2230 {
2231 HOST_WIDE_INT pretend_bytes = 0;
2232 rtx entry_parm;
2233 bool in_regs;
2234
2235 if (data->promoted_mode == VOIDmode)
2236 {
2237 data->entry_parm = data->stack_parm = const0_rtx;
2238 return;
2239 }
2240
2241 #ifdef FUNCTION_INCOMING_ARG
2242 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2243 data->passed_type, data->named_arg);
2244 #else
2245 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2246 data->passed_type, data->named_arg);
2247 #endif
2248
2249 if (entry_parm == 0)
2250 data->promoted_mode = data->passed_mode;
2251
2252 /* Determine parm's home in the stack, in case it arrives in the stack
2253 or we should pretend it did. Compute the stack position and rtx where
2254 the argument arrives and its size.
2255
2256 There is one complexity here: If this was a parameter that would
2257 have been passed in registers, but wasn't only because it is
2258 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2259 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2260 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2261 as it was the previous time. */
2262 in_regs = entry_parm != 0;
2263 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2264 in_regs = true;
2265 #endif
2266 if (!in_regs && !data->named_arg)
2267 {
2268 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2269 {
2270 rtx tem;
2271 #ifdef FUNCTION_INCOMING_ARG
2272 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2273 data->passed_type, true);
2274 #else
2275 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2276 data->passed_type, true);
2277 #endif
2278 in_regs = tem != NULL;
2279 }
2280 }
2281
2282 /* If this parameter was passed both in registers and in the stack, use
2283 the copy on the stack. */
2284 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2285 data->passed_type))
2286 entry_parm = 0;
2287
2288 if (entry_parm)
2289 {
2290 int partial;
2291
2292 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2293 data->promoted_mode,
2294 data->passed_type,
2295 data->named_arg);
2296 data->partial = partial;
2297
2298 /* The caller might already have allocated stack space for the
2299 register parameters. */
2300 if (partial != 0 && all->reg_parm_stack_space == 0)
2301 {
2302 /* Part of this argument is passed in registers and part
2303 is passed on the stack. Ask the prologue code to extend
2304 the stack part so that we can recreate the full value.
2305
2306 PRETEND_BYTES is the size of the registers we need to store.
2307 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2308 stack space that the prologue should allocate.
2309
2310 Internally, gcc assumes that the argument pointer is aligned
2311 to STACK_BOUNDARY bits. This is used both for alignment
2312 optimizations (see init_emit) and to locate arguments that are
2313 aligned to more than PARM_BOUNDARY bits. We must preserve this
2314 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2315 a stack boundary. */
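	     /* A concrete example: with STACK_BYTES == 16 and a partial
		argument whose register-resident part is 12 bytes,
		PRETEND_BYTES is 12 and pretend_args_size is rounded up
		to 16, preserving that alignment.  */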
2316
2317 /* We assume at most one partial arg, and it must be the first
2318 argument on the stack. */
2319 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2320
2321 pretend_bytes = partial;
2322 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2323
2324 /* We want to align relative to the actual stack pointer, so
2325 don't include this in the stack size until later. */
2326 all->extra_pretend_bytes = all->pretend_args_size;
2327 }
2328 }
2329
2330 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2331 entry_parm ? data->partial : 0, current_function_decl,
2332 &all->stack_args_size, &data->locate);
2333
2334 /* Adjust offsets to include the pretend args. */
2335 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2336 data->locate.slot_offset.constant += pretend_bytes;
2337 data->locate.offset.constant += pretend_bytes;
2338
2339 data->entry_parm = entry_parm;
2340 }
2341
2342 /* A subroutine of assign_parms. If there is actually space on the stack
2343 for this parm, count it in stack_args_size and return true. */
2344
2345 static bool
2346 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2347 struct assign_parm_data_one *data)
2348 {
2349 /* Trivially true if we've no incoming register. */
2350 if (data->entry_parm == NULL)
2351 ;
2352 /* Also true if we're partially in registers and partially not,
2353 since we've arranged to drop the entire argument on the stack. */
2354 else if (data->partial != 0)
2355 ;
2356 /* Also true if the target says that it's passed in both registers
2357 and on the stack. */
2358 else if (GET_CODE (data->entry_parm) == PARALLEL
2359 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2360 ;
2361 /* Also true if the target says that there's stack allocated for
2362 all register parameters. */
2363 else if (all->reg_parm_stack_space > 0)
2364 ;
2365 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2366 else
2367 return false;
2368
2369 all->stack_args_size.constant += data->locate.size.constant;
2370 if (data->locate.size.var)
2371 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2372
2373 return true;
2374 }
2375
2376 /* A subroutine of assign_parms. Given that this parameter is allocated
2377 stack space by the ABI, find it. */
2378
2379 static void
2380 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2381 {
2382 rtx offset_rtx, stack_parm;
2383 unsigned int align, boundary;
2384
2385 /* If we're passing this arg using a reg, make its stack home the
2386 aligned stack slot. */
2387 if (data->entry_parm)
2388 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2389 else
2390 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2391
2392 stack_parm = current_function_internal_arg_pointer;
2393 if (offset_rtx != const0_rtx)
2394 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2395 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2396
2397 set_mem_attributes (stack_parm, parm, 1);
2398
2399 boundary = data->locate.boundary;
2400 align = BITS_PER_UNIT;
2401
2402 /* If we're padding upward, we know that the alignment of the slot
2403 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2404 intentionally forcing upward padding. Otherwise we have to come
2405 up with a guess at the alignment based on OFFSET_RTX. */
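  /* The OR/negate trick below isolates the largest power of two that
     divides both the bit offset and the boundary.  Illustration: a
     20-byte offset (160 bits) with a 64-bit boundary gives
     160 | 64 == 224, and 224 & -224 == 32, so the slot is known to be
     32-bit aligned.  */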
2406 if (data->locate.where_pad != downward || data->entry_parm)
2407 align = boundary;
2408 else if (GET_CODE (offset_rtx) == CONST_INT)
2409 {
2410 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2411 align = align & -align;
2412 }
2413 set_mem_align (stack_parm, align);
2414
2415 if (data->entry_parm)
2416 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2417
2418 data->stack_parm = stack_parm;
2419 }
2420
2421 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2422 always valid and contiguous. */
2423
2424 static void
2425 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2426 {
2427 rtx entry_parm = data->entry_parm;
2428 rtx stack_parm = data->stack_parm;
2429
2430 /* If this parm was passed part in regs and part in memory, pretend it
2431 arrived entirely in memory by pushing the register-part onto the stack.
2432 In the special case of a DImode or DFmode that is split, we could put
2433 it together in a pseudoreg directly, but for now that's not worth
2434 bothering with. */
2435 if (data->partial != 0)
2436 {
2437 /* Handle calls that pass values in multiple non-contiguous
2438 locations. The Irix 6 ABI has examples of this. */
2439 if (GET_CODE (entry_parm) == PARALLEL)
2440 emit_group_store (validize_mem (stack_parm), entry_parm,
2441 data->passed_type,
2442 int_size_in_bytes (data->passed_type));
2443 else
2444 {
2445 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2446 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2447 data->partial / UNITS_PER_WORD);
2448 }
2449
2450 entry_parm = stack_parm;
2451 }
2452
2453 /* If we didn't decide this parm came in a register, by default it came
2454 on the stack. */
2455 else if (entry_parm == NULL)
2456 entry_parm = stack_parm;
2457
2458 /* When an argument is passed in multiple locations, we can't make use
2459 of this information, but we can save some copying if the whole argument
2460 is passed in a single register. */
2461 else if (GET_CODE (entry_parm) == PARALLEL
2462 && data->nominal_mode != BLKmode
2463 && data->passed_mode != BLKmode)
2464 {
2465 size_t i, len = XVECLEN (entry_parm, 0);
2466
2467 for (i = 0; i < len; i++)
2468 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2469 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2470 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2471 == data->passed_mode)
2472 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2473 {
2474 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2475 break;
2476 }
2477 }
2478
2479 data->entry_parm = entry_parm;
2480 }
2481
2482 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2483 always valid and properly aligned. */
2484
2485 static void
2486 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2487 {
2488 rtx stack_parm = data->stack_parm;
2489
2490 /* If we can't trust the parm stack slot to be aligned enough for its
2491 ultimate type, don't use that slot after entry. We'll make another
2492 stack slot, if we need one. */
2493 if (stack_parm
2494 && ((STRICT_ALIGNMENT
2495 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2496 || (data->nominal_type
2497 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2498 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2499 stack_parm = NULL;
2500
2501 /* If parm was passed in memory, and we need to convert it on entry,
2502 don't store it back in that same slot. */
2503 else if (data->entry_parm == stack_parm
2504 && data->nominal_mode != BLKmode
2505 && data->nominal_mode != data->passed_mode)
2506 stack_parm = NULL;
2507
2508 data->stack_parm = stack_parm;
2509 }
2510
2511 /* A subroutine of assign_parms. Return true if the current parameter
2512 should be stored as a BLKmode in the current frame. */
2513
2514 static bool
2515 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2516 {
2517 if (data->nominal_mode == BLKmode)
2518 return true;
2519 if (GET_CODE (data->entry_parm) == PARALLEL)
2520 return true;
2521
2522 #ifdef BLOCK_REG_PADDING
2523 /* Only assign_parm_setup_block knows how to deal with register arguments
2524 that are padded at the least significant end. */
2525 if (REG_P (data->entry_parm)
2526 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2527 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2528 == (BYTES_BIG_ENDIAN ? upward : downward)))
2529 return true;
2530 #endif
2531
2532 return false;
2533 }
2534
2535 /* A subroutine of assign_parms. Arrange for the parameter to be
2536 present and valid in DATA->STACK_RTL. */
2537
2538 static void
2539 assign_parm_setup_block (struct assign_parm_data_all *all,
2540 tree parm, struct assign_parm_data_one *data)
2541 {
2542 rtx entry_parm = data->entry_parm;
2543 rtx stack_parm = data->stack_parm;
2544 HOST_WIDE_INT size;
2545 HOST_WIDE_INT size_stored;
2546 rtx orig_entry_parm = entry_parm;
2547
2548 if (GET_CODE (entry_parm) == PARALLEL)
2549 entry_parm = emit_group_move_into_temps (entry_parm);
2550
2551 /* If we've a non-block object that's nevertheless passed in parts,
2552 reconstitute it in register operations rather than on the stack. */
2553 if (GET_CODE (entry_parm) == PARALLEL
2554 && data->nominal_mode != BLKmode)
2555 {
2556 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2557
2558 if ((XVECLEN (entry_parm, 0) > 1
2559 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2560 && use_register_for_decl (parm))
2561 {
2562 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2563
2564 push_to_sequence (all->conversion_insns);
2565
2566 /* For values returned in multiple registers, handle possible
2567 incompatible calls to emit_group_store.
2568
2569 For example, the following would be invalid, and would have to
2570 be fixed by the conditional below:
2571
2572 emit_group_store ((reg:SF), (parallel:DF))
2573 emit_group_store ((reg:SI), (parallel:DI))
2574
2575 An example of this is doubles in e500 v2:
2576 (parallel:DF (expr_list (reg:SI) (const_int 0))
2577 (expr_list (reg:SI) (const_int 4))). */
2578 if (data->nominal_mode != data->passed_mode)
2579 {
2580 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2581 emit_group_store (t, entry_parm, NULL_TREE,
2582 GET_MODE_SIZE (GET_MODE (entry_parm)));
2583 convert_move (parmreg, t, 0);
2584 }
2585 else
2586 emit_group_store (parmreg, entry_parm, data->nominal_type,
2587 int_size_in_bytes (data->nominal_type));
2588
2589 all->conversion_insns = get_insns ();
2590 end_sequence ();
2591
2592 SET_DECL_RTL (parm, parmreg);
2593 return;
2594 }
2595 }
2596
2597 size = int_size_in_bytes (data->passed_type);
2598 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2599 if (stack_parm == 0)
2600 {
2601 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2602 stack_parm = assign_stack_local (BLKmode, size_stored,
2603 DECL_ALIGN (parm));
2604 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2605 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2606 set_mem_attributes (stack_parm, parm, 1);
2607 }
2608
2609 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2610 calls that pass values in multiple non-contiguous locations. */
2611 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2612 {
2613 rtx mem;
2614
2615 /* Note that we will be storing an integral number of words.
2616 So we have to be careful to ensure that we allocate an
2617 integral number of words. We do this above when we call
2618 assign_stack_local if space was not allocated in the argument
2619 list. If it was, this will not work if PARM_BOUNDARY is not
2620 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2621 if it becomes a problem. Exception is when BLKmode arrives
2622 with arguments not conforming to word_mode. */
2623
2624 if (data->stack_parm == 0)
2625 ;
2626 else if (GET_CODE (entry_parm) == PARALLEL)
2627 ;
2628 else
2629 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2630
2631 mem = validize_mem (stack_parm);
2632
2633 /* Handle values in multiple non-contiguous locations. */
2634 if (GET_CODE (entry_parm) == PARALLEL)
2635 {
2636 push_to_sequence (all->conversion_insns);
2637 emit_group_store (mem, entry_parm, data->passed_type, size);
2638 all->conversion_insns = get_insns ();
2639 end_sequence ();
2640 }
2641
2642 else if (size == 0)
2643 ;
2644
2645 /* If SIZE is that of a mode no bigger than a word, just use
2646 that mode's store operation. */
2647 else if (size <= UNITS_PER_WORD)
2648 {
2649 enum machine_mode mode
2650 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2651
2652 if (mode != BLKmode
2653 #ifdef BLOCK_REG_PADDING
2654 && (size == UNITS_PER_WORD
2655 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2656 != (BYTES_BIG_ENDIAN ? upward : downward)))
2657 #endif
2658 )
2659 {
2660 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2661 emit_move_insn (change_address (mem, mode, 0), reg);
2662 }
2663
2664 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2665 machine must be aligned to the left before storing
2666 to memory. Note that the previous test doesn't
2667 handle all cases (e.g. SIZE == 3). */
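	  /* Illustration, assuming UNITS_PER_WORD == 4 on a big-endian
	     target: for SIZE == 3 the payload is shifted left by 8 bits
	     into the most significant three bytes of the word, so the
	     full-word store below places it at the lowest three
	     addresses of the slot.  */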
2668 else if (size != UNITS_PER_WORD
2669 #ifdef BLOCK_REG_PADDING
2670 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2671 == downward)
2672 #else
2673 && BYTES_BIG_ENDIAN
2674 #endif
2675 )
2676 {
2677 rtx tem, x;
2678 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2679 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2680
2681 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2682 build_int_cst (NULL_TREE, by),
2683 NULL_RTX, 1);
2684 tem = change_address (mem, word_mode, 0);
2685 emit_move_insn (tem, x);
2686 }
2687 else
2688 move_block_from_reg (REGNO (entry_parm), mem,
2689 size_stored / UNITS_PER_WORD);
2690 }
2691 else
2692 move_block_from_reg (REGNO (entry_parm), mem,
2693 size_stored / UNITS_PER_WORD);
2694 }
2695 else if (data->stack_parm == 0)
2696 {
2697 push_to_sequence (all->conversion_insns);
2698 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2699 BLOCK_OP_NORMAL);
2700 all->conversion_insns = get_insns ();
2701 end_sequence ();
2702 }
2703
2704 data->stack_parm = stack_parm;
2705 SET_DECL_RTL (parm, stack_parm);
2706 }
2707
2708 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2709 parameter. Get it there. Perform all ABI specified conversions. */
2710
2711 static void
2712 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2713 struct assign_parm_data_one *data)
2714 {
2715 rtx parmreg;
2716 enum machine_mode promoted_nominal_mode;
2717 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2718 bool did_conversion = false;
2719
2720 /* Store the parm in a pseudoregister during the function, but we may
2721 need to do it in a wider mode. */
2722
2723 promoted_nominal_mode
2724 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 0);
2725
2726 parmreg = gen_reg_rtx (promoted_nominal_mode);
2727
2728 if (!DECL_ARTIFICIAL (parm))
2729 mark_user_reg (parmreg);
2730
2731 /* If this was an item that we received a pointer to,
2732 set DECL_RTL appropriately. */
2733 if (data->passed_pointer)
2734 {
2735 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2736 set_mem_attributes (x, parm, 1);
2737 SET_DECL_RTL (parm, x);
2738 }
2739 else
2740 SET_DECL_RTL (parm, parmreg);
2741
2742 /* Copy the value into the register. */
2743 if (data->nominal_mode != data->passed_mode
2744 || promoted_nominal_mode != data->promoted_mode)
2745 {
2746 int save_tree_used;
2747
2748 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2749 mode, by the caller. We now have to convert it to
2750 NOMINAL_MODE, if different. However, PARMREG may be in
2751 a different mode than NOMINAL_MODE if it is being stored
2752 promoted.
2753
2754 If ENTRY_PARM is a hard register, it might be in a register
2755 not valid for operating in its mode (e.g., an odd-numbered
2756 register for a DFmode). In that case, moves are the only
2757 thing valid, so we can't do a convert from there. This
2758 occurs when the calling sequence allows such misaligned
2759 usages.
2760
2761 In addition, the conversion may involve a call, which could
2762 clobber parameters which haven't been copied to pseudo
2763 registers yet. Therefore, we must first copy the parm to
2764 a pseudo reg here, and save the conversion until after all
2765 parameters have been moved. */
2766
2767 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2768
2769 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2770
2771 push_to_sequence (all->conversion_insns);
2772 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2773
2774 if (GET_CODE (tempreg) == SUBREG
2775 && GET_MODE (tempreg) == data->nominal_mode
2776 && REG_P (SUBREG_REG (tempreg))
2777 && data->nominal_mode == data->passed_mode
2778 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2779 && GET_MODE_SIZE (GET_MODE (tempreg))
2780 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2781 {
2782 /* The argument is already sign/zero extended, so note it
2783 into the subreg. */
2784 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2785 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2786 }
2787
2788 /* TREE_USED gets set erroneously during expand_assignment. */
2789 save_tree_used = TREE_USED (parm);
2790 expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2791 TREE_USED (parm) = save_tree_used;
2792 all->conversion_insns = get_insns ();
2793 end_sequence ();
2794
2795 did_conversion = true;
2796 }
2797 else
2798 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2799
2800 /* If we were passed a pointer but the actual value can safely live
2801 in a register, put it in one. */
2802 if (data->passed_pointer
2803 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2804 /* If by-reference argument was promoted, demote it. */
2805 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2806 || use_register_for_decl (parm)))
2807 {
2808 /* We can't use nominal_mode, because it will have been set to
2809 Pmode above. We must use the actual mode of the parm. */
2810 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2811 mark_user_reg (parmreg);
2812
2813 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2814 {
2815 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2816 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2817
2818 push_to_sequence (all->conversion_insns);
2819 emit_move_insn (tempreg, DECL_RTL (parm));
2820 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2821 emit_move_insn (parmreg, tempreg);
2822 all->conversion_insns = get_insns ();
2823 end_sequence ();
2824
2825 did_conversion = true;
2826 }
2827 else
2828 emit_move_insn (parmreg, DECL_RTL (parm));
2829
2830 SET_DECL_RTL (parm, parmreg);
2831
2832 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2833 now the parm. */
2834 data->stack_parm = NULL;
2835 }
2836
2837 /* Mark the register as eliminable if we did no conversion and it was
2838 copied from memory at a fixed offset, and the arg pointer was not
2839 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2840 offset formed an invalid address, such memory-equivalences as we
2841 make here would screw up life analysis for it. */
2842 if (data->nominal_mode == data->passed_mode
2843 && !did_conversion
2844 && data->stack_parm != 0
2845 && MEM_P (data->stack_parm)
2846 && data->locate.offset.var == 0
2847 && reg_mentioned_p (virtual_incoming_args_rtx,
2848 XEXP (data->stack_parm, 0)))
2849 {
2850 rtx linsn = get_last_insn ();
2851 rtx sinsn, set;
2852
2853 /* Mark complex types separately. */
2854 if (GET_CODE (parmreg) == CONCAT)
2855 {
2856 enum machine_mode submode
2857 = GET_MODE_INNER (GET_MODE (parmreg));
2858 int regnor = REGNO (XEXP (parmreg, 0));
2859 int regnoi = REGNO (XEXP (parmreg, 1));
2860 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2861 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2862 GET_MODE_SIZE (submode));
2863
2864 /* Scan backwards for the set of the real and
2865 imaginary parts. */
2866 for (sinsn = linsn; sinsn != 0;
2867 sinsn = prev_nonnote_insn (sinsn))
2868 {
2869 set = single_set (sinsn);
2870 if (set == 0)
2871 continue;
2872
2873 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2874 REG_NOTES (sinsn)
2875 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2876 REG_NOTES (sinsn));
2877 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2878 REG_NOTES (sinsn)
2879 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2880 REG_NOTES (sinsn));
2881 }
2882 }
2883 else if ((set = single_set (linsn)) != 0
2884 && SET_DEST (set) == parmreg)
2885 REG_NOTES (linsn)
2886 = gen_rtx_EXPR_LIST (REG_EQUIV,
2887 data->stack_parm, REG_NOTES (linsn));
2888 }
2889
2890 /* For pointer data type, suggest pointer register. */
2891 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2892 mark_reg_pointer (parmreg,
2893 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2894 }
2895
2896 /* A subroutine of assign_parms. Allocate stack space to hold the current
2897 parameter. Get it there. Perform all ABI specified conversions. */
2898
2899 static void
2900 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2901 struct assign_parm_data_one *data)
2902 {
2903 /* Value must be stored in the stack slot STACK_PARM during function
2904 execution. */
2905 bool to_conversion = false;
2906
2907 if (data->promoted_mode != data->nominal_mode)
2908 {
2909 /* Conversion is required. */
2910 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2911
2912 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2913
2914 push_to_sequence (all->conversion_insns);
2915 to_conversion = true;
2916
2917 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2918 TYPE_UNSIGNED (TREE_TYPE (parm)));
2919
2920 if (data->stack_parm)
2921 /* ??? This may need a big-endian conversion on sparc64. */
2922 data->stack_parm
2923 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2924 }
2925
2926 if (data->entry_parm != data->stack_parm)
2927 {
2928 rtx src, dest;
2929
2930 if (data->stack_parm == 0)
2931 {
2932 data->stack_parm
2933 = assign_stack_local (GET_MODE (data->entry_parm),
2934 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2935 TYPE_ALIGN (data->passed_type));
2936 set_mem_attributes (data->stack_parm, parm, 1);
2937 }
2938
2939 dest = validize_mem (data->stack_parm);
2940 src = validize_mem (data->entry_parm);
2941
2942 if (MEM_P (src))
2943 {
2944 /* Use a block move to handle potentially misaligned entry_parm. */
2945 if (!to_conversion)
2946 push_to_sequence (all->conversion_insns);
2947 to_conversion = true;
2948
2949 emit_block_move (dest, src,
2950 GEN_INT (int_size_in_bytes (data->passed_type)),
2951 BLOCK_OP_NORMAL);
2952 }
2953 else
2954 emit_move_insn (dest, src);
2955 }
2956
2957 if (to_conversion)
2958 {
2959 all->conversion_insns = get_insns ();
2960 end_sequence ();
2961 }
2962
2963 SET_DECL_RTL (parm, data->stack_parm);
2964 }
2965
2966 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2967 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2968
2969 static void
2970 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2971 {
2972 tree parm;
2973 tree orig_fnargs = all->orig_fnargs;
2974
2975 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2976 {
2977 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2978 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2979 {
2980 rtx tmp, real, imag;
2981 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2982
2983 real = DECL_RTL (fnargs);
2984 imag = DECL_RTL (TREE_CHAIN (fnargs));
2985 if (inner != GET_MODE (real))
2986 {
2987 real = gen_lowpart_SUBREG (inner, real);
2988 imag = gen_lowpart_SUBREG (inner, imag);
2989 }
2990
2991 if (TREE_ADDRESSABLE (parm))
2992 {
2993 rtx rmem, imem;
2994 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2995
2996 /* split_complex_arg put the real and imag parts in
2997 pseudos. Move them to memory. */
2998 tmp = assign_stack_local (DECL_MODE (parm), size,
2999 TYPE_ALIGN (TREE_TYPE (parm)));
3000 set_mem_attributes (tmp, parm, 1);
3001 rmem = adjust_address_nv (tmp, inner, 0);
3002 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3003 push_to_sequence (all->conversion_insns);
3004 emit_move_insn (rmem, real);
3005 emit_move_insn (imem, imag);
3006 all->conversion_insns = get_insns ();
3007 end_sequence ();
3008 }
3009 else
3010 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3011 SET_DECL_RTL (parm, tmp);
3012
3013 real = DECL_INCOMING_RTL (fnargs);
3014 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
3015 if (inner != GET_MODE (real))
3016 {
3017 real = gen_lowpart_SUBREG (inner, real);
3018 imag = gen_lowpart_SUBREG (inner, imag);
3019 }
3020 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3021 set_decl_incoming_rtl (parm, tmp);
3022 fnargs = TREE_CHAIN (fnargs);
3023 }
3024 else
3025 {
3026 SET_DECL_RTL (parm, DECL_RTL (fnargs));
3027 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
3028
3029 /* Set MEM_EXPR to the original decl, i.e. to PARM,
3030 instead of the copy of decl, i.e. FNARGS. */
3031 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
3032 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
3033 }
3034
3035 fnargs = TREE_CHAIN (fnargs);
3036 }
3037 }
3038
3039 /* Assign RTL expressions to the function's parameters. This may involve
3040 copying them into registers and using those registers as the DECL_RTL. */
3041
3042 static void
3043 assign_parms (tree fndecl)
3044 {
3045 struct assign_parm_data_all all;
3046 tree fnargs, parm;
3047 rtx internal_arg_pointer;
3048
3049 /* If the reg that the virtual arg pointer will be translated into is
3050 not a fixed reg or is the stack pointer, make a copy of the virtual
3051 arg pointer, and address parms via the copy. The frame pointer is
3052 considered fixed even though it is not marked as such.
3053
3054 The second time through, simply use ap to avoid generating rtx. */
3055
3056 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3057 || ! (fixed_regs[ARG_POINTER_REGNUM]
3058 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
3059 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3060 else
3061 internal_arg_pointer = virtual_incoming_args_rtx;
3062 current_function_internal_arg_pointer = internal_arg_pointer;
3063
3064 assign_parms_initialize_all (&all);
3065 fnargs = assign_parms_augmented_arg_list (&all);
3066
3067 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3068 {
3069 struct assign_parm_data_one data;
3070
3071 /* Extract the type of PARM; adjust it according to ABI. */
3072 assign_parm_find_data_types (&all, parm, &data);
3073
3074 /* Early out for errors and void parameters. */
3075 if (data.passed_mode == VOIDmode)
3076 {
3077 SET_DECL_RTL (parm, const0_rtx);
3078 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3079 continue;
3080 }
3081
3082 if (current_function_stdarg && !TREE_CHAIN (parm))
3083 assign_parms_setup_varargs (&all, &data, false);
3084
3085 /* Find out where the parameter arrives in this function. */
3086 assign_parm_find_entry_rtl (&all, &data);
3087
3088 /* Find out where stack space for this parameter might be. */
3089 if (assign_parm_is_stack_parm (&all, &data))
3090 {
3091 assign_parm_find_stack_rtl (parm, &data);
3092 assign_parm_adjust_entry_rtl (&data);
3093 }
3094
3095 /* Record permanently how this parm was passed. */
3096 set_decl_incoming_rtl (parm, data.entry_parm);
3097
3098 /* Update info on where next arg arrives in registers. */
3099 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3100 data.passed_type, data.named_arg);
3101
3102 assign_parm_adjust_stack_rtl (&data);
3103
3104 if (assign_parm_setup_block_p (&data))
3105 assign_parm_setup_block (&all, parm, &data);
3106 else if (data.passed_pointer || use_register_for_decl (parm))
3107 assign_parm_setup_reg (&all, parm, &data);
3108 else
3109 assign_parm_setup_stack (&all, parm, &data);
3110 }
3111
3112 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3113 assign_parms_unsplit_complex (&all, fnargs);
3114
3115 /* Output all parameter conversion instructions (possibly including calls)
3116 now that all parameters have been copied out of hard registers. */
3117 emit_insn (all.conversion_insns);
3118
3119 /* If we are receiving a struct value address as the first argument, set up
3120 the RTL for the function result. As this might require code to convert
3121 the transmitted address to Pmode, we do this here to ensure that possible
3122 preliminary conversions of the address have been emitted already. */
3123 if (all.function_result_decl)
3124 {
3125 tree result = DECL_RESULT (current_function_decl);
3126 rtx addr = DECL_RTL (all.function_result_decl);
3127 rtx x;
3128
3129 if (DECL_BY_REFERENCE (result))
3130 x = addr;
3131 else
3132 {
3133 addr = convert_memory_address (Pmode, addr);
3134 x = gen_rtx_MEM (DECL_MODE (result), addr);
3135 set_mem_attributes (x, result, 1);
3136 }
3137 SET_DECL_RTL (result, x);
3138 }
3139
3140 /* We have aligned all the args, so add space for the pretend args. */
3141 current_function_pretend_args_size = all.pretend_args_size;
3142 all.stack_args_size.constant += all.extra_pretend_bytes;
3143 current_function_args_size = all.stack_args_size.constant;
3144
3145 /* Adjust function incoming argument size for alignment and
3146 minimum length. */
3147
3148 #ifdef REG_PARM_STACK_SPACE
3149 current_function_args_size = MAX (current_function_args_size,
3150 REG_PARM_STACK_SPACE (fndecl));
3151 #endif
3152
3153 current_function_args_size
3154 = ((current_function_args_size + STACK_BYTES - 1)
3155 / STACK_BYTES) * STACK_BYTES;
3156
3157 #ifdef ARGS_GROW_DOWNWARD
3158 current_function_arg_offset_rtx
3159 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3160 : expand_expr (size_diffop (all.stack_args_size.var,
3161 size_int (-all.stack_args_size.constant)),
3162 NULL_RTX, VOIDmode, 0));
3163 #else
3164 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3165 #endif
3166
3167 /* See how many bytes, if any, of its args a function should try to pop
3168 on return. */
3169
3170 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3171 current_function_args_size);
3172
3173 /* For a stdarg.h function, save info about
3174 regs and stack space used by the named args. */
3175
3176 current_function_args_info = all.args_so_far;
3177
3178 /* Set the rtx used for the function return value. Put this in its
3179 own variable so any optimizers that need this information don't have
3180 to include tree.h. Do this here so it gets done when an inlined
3181 function gets output. */
3182
3183 current_function_return_rtx
3184 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3185 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3186
3187 /* If scalar return value was computed in a pseudo-reg, or was a named
3188 return value that got dumped to the stack, copy that to the hard
3189 return register. */
3190 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3191 {
3192 tree decl_result = DECL_RESULT (fndecl);
3193 rtx decl_rtl = DECL_RTL (decl_result);
3194
3195 if (REG_P (decl_rtl)
3196 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3197 : DECL_REGISTER (decl_result))
3198 {
3199 rtx real_decl_rtl;
3200
3201 #ifdef FUNCTION_OUTGOING_VALUE
3202 real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
3203 fndecl);
3204 #else
3205 real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
3206 fndecl);
3207 #endif
3208 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3209 /* The delay slot scheduler assumes that current_function_return_rtx
3210 holds the hard register containing the return value, not a
3211 temporary pseudo. */
3212 current_function_return_rtx = real_decl_rtl;
3213 }
3214 }
3215 }
3216
3217 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3218 For all seen types, gimplify their sizes. */
3219
3220 static tree
3221 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3222 {
3223 tree t = *tp;
3224
3225 *walk_subtrees = 0;
3226 if (TYPE_P (t))
3227 {
3228 if (POINTER_TYPE_P (t))
3229 *walk_subtrees = 1;
3230 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3231 && !TYPE_SIZES_GIMPLIFIED (t))
3232 {
3233 gimplify_type_sizes (t, (tree *) data);
3234 *walk_subtrees = 1;
3235 }
3236 }
3237
3238 return NULL;
3239 }
3240
3241 /* Gimplify the parameter list for current_function_decl. This involves
3242 evaluating SAVE_EXPRs of variable sized parameters and generating code
3243 to implement callee-copies reference parameters. Returns a list of
3244 statements to add to the beginning of the function, or NULL if nothing
3245 to do. */
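/* A rough sketch of the callee-copy case handled below, for a
   variable-size parm of type `char[n]' passed by reference on a
   callee-copies target: the statements built amount to
   `addr = __builtin_alloca (DECL_SIZE_UNIT (parm)); *addr = parm;',
   after which `*addr' is recorded as the parm's DECL_VALUE_EXPR so
   later uses see the local copy.  */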
3246
3247 tree
3248 gimplify_parameters (void)
3249 {
3250 struct assign_parm_data_all all;
3251 tree fnargs, parm, stmts = NULL;
3252
3253 assign_parms_initialize_all (&all);
3254 fnargs = assign_parms_augmented_arg_list (&all);
3255
3256 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3257 {
3258 struct assign_parm_data_one data;
3259
3260 /* Extract the type of PARM; adjust it according to ABI. */
3261 assign_parm_find_data_types (&all, parm, &data);
3262
3263 /* Early out for errors and void parameters. */
3264 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3265 continue;
3266
3267 /* Update info on where next arg arrives in registers. */
3268 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3269 data.passed_type, data.named_arg);
3270
3271 /* ??? Once upon a time variable_size stuffed parameter list
3272 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3273 turned out to be less than manageable in the gimple world.
3274 Now we have to hunt them down ourselves. */
3275 walk_tree_without_duplicates (&data.passed_type,
3276 gimplify_parm_type, &stmts);
3277
3278 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3279 {
3280 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3281 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3282 }
3283
3284 if (data.passed_pointer)
3285 {
3286 tree type = TREE_TYPE (data.passed_type);
3287 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3288 type, data.named_arg))
3289 {
3290 tree local, t;
3291
3292 /* For constant sized objects, this is trivial; for
3293 variable-sized objects, we have to play games. */
3294 if (TREE_CONSTANT (DECL_SIZE (parm)))
3295 {
3296 local = create_tmp_var (type, get_name (parm));
3297 DECL_IGNORED_P (local) = 0;
3298 }
3299 else
3300 {
3301 tree ptr_type, addr, args;
3302
3303 ptr_type = build_pointer_type (type);
3304 addr = create_tmp_var (ptr_type, get_name (parm));
3305 DECL_IGNORED_P (addr) = 0;
3306 local = build_fold_indirect_ref (addr);
3307
3308 args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3309 t = built_in_decls[BUILT_IN_ALLOCA];
3310 t = build_function_call_expr (t, args);
3311 t = fold_convert (ptr_type, t);
3312 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3313 gimplify_and_add (t, &stmts);
3314 }
3315
3316 t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3317 gimplify_and_add (t, &stmts);
3318
3319 DECL_VALUE_EXPR (parm) = local;
3320 }
3321 }
3322 }
3323
3324 return stmts;
3325 }
3326 \f
3327 /* Indicate whether REGNO is an incoming argument to the current function
3328 that was promoted to a wider mode. If so, return the RTX for the
3329 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3330 that REGNO is promoted from and whether the promotion was signed or
3331 unsigned. */
3332
3333 rtx
3334 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3335 {
3336 tree arg;
3337
3338 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3339 arg = TREE_CHAIN (arg))
3340 if (REG_P (DECL_INCOMING_RTL (arg))
3341 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3342 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3343 {
3344 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3345 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
3346
3347 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3348 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3349 && mode != DECL_MODE (arg))
3350 {
3351 *pmode = DECL_MODE (arg);
3352 *punsignedp = unsignedp;
3353 return DECL_INCOMING_RTL (arg);
3354 }
3355 }
3356
3357 return 0;
3358 }
3359
3360 \f
3361 /* Compute the size and offset from the start of the stacked arguments for a
3362 parm passed in mode PASSED_MODE and with type TYPE.
3363
3364 INITIAL_OFFSET_PTR points to the current offset into the stacked
3365 arguments.
3366
3367 The starting offset and size for this parm are returned in
3368 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3369 nonzero, the offset is that of stack slot, which is returned in
3370 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3371 padding required from the initial offset ptr to the stack slot.
3372
3373 IN_REGS is nonzero if the argument will be passed in registers. It will
3374 never be set if REG_PARM_STACK_SPACE is not defined.
3375
3376 FNDECL is the function in which the argument was defined.
3377
3378 There are two types of rounding that are done. The first, controlled by
3379 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3380 list to be aligned to the specific boundary (in bits). This rounding
3381 affects the initial and starting offsets, but not the argument size.
3382
3383 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3384 optionally rounds the size of the parm to PARM_BOUNDARY. The
3385 initial offset is not affected by this rounding, while the size always
3386 is and the starting offset may be. */
3387
3388 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3389 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3390 callers pass in the total size of args so far as
3391 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
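/* Worked example, purely to show the two roundings (args growing upward,
   no REG_PARM_STACK_SPACE): a 1-byte argument with a 32-bit
   FUNCTION_ARG_BOUNDARY arriving when *INITIAL_OFFSET_PTR is 5 gets
   slot_offset 8 from the boundary rounding, while the PARM_BOUNDARY
   rounding turns its size from 1 into 4, so the next argument starts at
   offset 12.  */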
3392
3393 void
3394 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3395 int partial, tree fndecl ATTRIBUTE_UNUSED,
3396 struct args_size *initial_offset_ptr,
3397 struct locate_and_pad_arg_data *locate)
3398 {
3399 tree sizetree;
3400 enum direction where_pad;
3401 int boundary;
3402 int reg_parm_stack_space = 0;
3403 int part_size_in_regs;
3404
3405 #ifdef REG_PARM_STACK_SPACE
3406 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3407
3408 /* If we have found a stack parm before we reach the end of the
3409 area reserved for registers, skip that area. */
3410 if (! in_regs)
3411 {
3412 if (reg_parm_stack_space > 0)
3413 {
3414 if (initial_offset_ptr->var)
3415 {
3416 initial_offset_ptr->var
3417 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3418 ssize_int (reg_parm_stack_space));
3419 initial_offset_ptr->constant = 0;
3420 }
3421 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3422 initial_offset_ptr->constant = reg_parm_stack_space;
3423 }
3424 }
3425 #endif /* REG_PARM_STACK_SPACE */
3426
3427 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3428
3429 sizetree
3430 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3431 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3432 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3433 locate->where_pad = where_pad;
3434 locate->boundary = boundary;
3435
3436 #ifdef ARGS_GROW_DOWNWARD
3437 locate->slot_offset.constant = -initial_offset_ptr->constant;
3438 if (initial_offset_ptr->var)
3439 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3440 initial_offset_ptr->var);
3441
3442 {
3443 tree s2 = sizetree;
3444 if (where_pad != none
3445 && (!host_integerp (sizetree, 1)
3446 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3447 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3448 SUB_PARM_SIZE (locate->slot_offset, s2);
3449 }
3450
3451 locate->slot_offset.constant += part_size_in_regs;
3452
3453 if (!in_regs
3454 #ifdef REG_PARM_STACK_SPACE
3455 || REG_PARM_STACK_SPACE (fndecl) > 0
3456 #endif
3457 )
3458 pad_to_arg_alignment (&locate->slot_offset, boundary,
3459 &locate->alignment_pad);
3460
3461 locate->size.constant = (-initial_offset_ptr->constant
3462 - locate->slot_offset.constant);
3463 if (initial_offset_ptr->var)
3464 locate->size.var = size_binop (MINUS_EXPR,
3465 size_binop (MINUS_EXPR,
3466 ssize_int (0),
3467 initial_offset_ptr->var),
3468 locate->slot_offset.var);
3469
3470 /* Pad_below needs the pre-rounded size to know how much to pad
3471 below. */
3472 locate->offset = locate->slot_offset;
3473 if (where_pad == downward)
3474 pad_below (&locate->offset, passed_mode, sizetree);
3475
3476 #else /* !ARGS_GROW_DOWNWARD */
3477 if (!in_regs
3478 #ifdef REG_PARM_STACK_SPACE
3479 || REG_PARM_STACK_SPACE (fndecl) > 0
3480 #endif
3481 )
3482 pad_to_arg_alignment (initial_offset_ptr, boundary,
3483 &locate->alignment_pad);
3484 locate->slot_offset = *initial_offset_ptr;
3485
3486 #ifdef PUSH_ROUNDING
3487 if (passed_mode != BLKmode)
3488 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3489 #endif
3490
3491 /* Pad_below needs the pre-rounded size to know how much to pad below,
3492 so this must be done before rounding up. */
3493 locate->offset = locate->slot_offset;
3494 if (where_pad == downward)
3495 pad_below (&locate->offset, passed_mode, sizetree);
3496
3497 if (where_pad != none
3498 && (!host_integerp (sizetree, 1)
3499 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3500 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3501
3502 ADD_PARM_SIZE (locate->size, sizetree);
3503
3504 locate->size.constant -= part_size_in_regs;
3505 #endif /* ARGS_GROW_DOWNWARD */
3506 }
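
 /* Illustrative sketch only -- not used by GCC.  The helper below shows,
    in plain C, the two roundings described in the comment before
    locate_and_pad_parm: the argument's starting offset is aligned to the
    FUNCTION_ARG_BOUNDARY-style boundary, and its size is rounded up to a
    multiple of PARM_BOUNDARY.  All names and parameters here (example_*)
    are hypothetical; the stack is assumed to grow upward, the boundaries
    are given in bits, and BITS_PER_UNIT is taken to be 8.  */

 static void
 example_locate_rounding (int offset, int size,
                          int arg_boundary_bits, int parm_boundary_bits,
                          int *slot_offset, int *rounded_size)
 {
   int arg_align = arg_boundary_bits / 8;
   int parm_align = parm_boundary_bits / 8;

   /* First rounding: align the starting offset, as CEIL_ROUND does.  */
   *slot_offset = (offset + arg_align - 1) & ~(arg_align - 1);

   /* Second rounding: pad the size to a multiple of PARM_BOUNDARY.
      The starting offset is unchanged by this step.  For example,
      offset 9 and size 3 with 64-bit boundaries give a slot offset of
      16 and a rounded size of 8.  */
   *rounded_size = (size + parm_align - 1) & ~(parm_align - 1);
 }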
3507
3508 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3509 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3510
3511 static void
3512 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3513 struct args_size *alignment_pad)
3514 {
3515 tree save_var = NULL_TREE;
3516 HOST_WIDE_INT save_constant = 0;
3517 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3518 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3519
3520 #ifdef SPARC_STACK_BOUNDARY_HACK
3521 /* The sparc port has a bug. It sometimes claims a STACK_BOUNDARY
3522 higher than the real alignment of %sp. However, when it does this,
3523 the alignment of %sp+STACK_POINTER_OFFSET will be STACK_BOUNDARY.
3524 This is a temporary hack while the sparc port is fixed. */
3525 if (SPARC_STACK_BOUNDARY_HACK)
3526 sp_offset = 0;
3527 #endif
3528
3529 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3530 {
3531 save_var = offset_ptr->var;
3532 save_constant = offset_ptr->constant;
3533 }
3534
3535 alignment_pad->var = NULL_TREE;
3536 alignment_pad->constant = 0;
3537
3538 if (boundary > BITS_PER_UNIT)
3539 {
3540 if (offset_ptr->var)
3541 {
3542 tree sp_offset_tree = ssize_int (sp_offset);
3543 tree offset = size_binop (PLUS_EXPR,
3544 ARGS_SIZE_TREE (*offset_ptr),
3545 sp_offset_tree);
3546 #ifdef ARGS_GROW_DOWNWARD
3547 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3548 #else
3549 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3550 #endif
3551
3552 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3553 /* ARGS_SIZE_TREE includes constant term. */
3554 offset_ptr->constant = 0;
3555 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3556 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3557 save_var);
3558 }
3559 else
3560 {
3561 offset_ptr->constant = -sp_offset +
3562 #ifdef ARGS_GROW_DOWNWARD
3563 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3564 #else
3565 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3566 #endif
3567 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3568 alignment_pad->constant = offset_ptr->constant - save_constant;
3569 }
3570 }
3571 }
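
 /* Illustrative sketch only -- not used by GCC.  pad_to_arg_alignment
    rounds an offset whose origin is displaced by STACK_POINTER_OFFSET:
    the displacement is added before rounding and subtracted afterwards,
    so it is OFFSET + DISPLACEMENT, not OFFSET itself, that ends up
    aligned.  The helper below repeats that arithmetic with hypothetical
    names; BOUNDARY_IN_BYTES must be a power of two, and only the
    round-up (non-ARGS_GROW_DOWNWARD) direction is shown.  */

 static long
 example_round_displaced (long offset, long displacement,
                          long boundary_in_bytes)
 {
   long sum = offset + displacement;

   /* CEIL_ROUND of the displaced value, then remove the displacement.
      E.g. offset 13, displacement 4, boundary 8 yields 20, so that
      20 + 4 is the aligned address.  */
   sum = (sum + boundary_in_bytes - 1) & ~(boundary_in_bytes - 1);
   return sum - displacement;
 }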
3572
3573 static void
3574 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3575 {
3576 if (passed_mode != BLKmode)
3577 {
3578 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3579 offset_ptr->constant
3580 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3581 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3582 - GET_MODE_SIZE (passed_mode));
3583 }
3584 else
3585 {
3586 if (TREE_CODE (sizetree) != INTEGER_CST
3587 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3588 {
3589 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3590 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3591 /* Add it in. */
3592 ADD_PARM_SIZE (*offset_ptr, s2);
3593 SUB_PARM_SIZE (*offset_ptr, sizetree);
3594 }
3595 }
3596 }
3597 \f
3598 /* Walk the tree of blocks describing the binding levels within a function
3599 and warn about variables that might be killed by setjmp or vfork.
3600 This is done after calling flow_analysis and before global_alloc
3601 clobbers the pseudo-regs to hard regs. */
3602
3603 void
3604 setjmp_vars_warning (tree block)
3605 {
3606 tree decl, sub;
3607
3608 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3609 {
3610 if (TREE_CODE (decl) == VAR_DECL
3611 && DECL_RTL_SET_P (decl)
3612 && REG_P (DECL_RTL (decl))
3613 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3614 warning ("%Jvariable %qD might be clobbered by %<longjmp%>"
3615 " or %<vfork%>",
3616 decl, decl);
3617 }
3618
3619 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3620 setjmp_vars_warning (sub);
3621 }
3622
3623 /* Do the appropriate part of setjmp_vars_warning
3624 but for arguments instead of local variables. */
3625
3626 void
3627 setjmp_args_warning (void)
3628 {
3629 tree decl;
3630 for (decl = DECL_ARGUMENTS (current_function_decl);
3631 decl; decl = TREE_CHAIN (decl))
3632 if (DECL_RTL (decl) != 0
3633 && REG_P (DECL_RTL (decl))
3634 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3635 warning ("%Jargument %qD might be clobbered by %<longjmp%> or %<vfork%>",
3636 decl, decl);
3637 }
3638
3639 \f
3640 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3641 and create duplicate blocks. */
3642 /* ??? Need an option to either create block fragments or to create
3643 abstract origin duplicates of a source block. It really depends
3644 on what optimization has been performed. */
3645
3646 void
3647 reorder_blocks (void)
3648 {
3649 tree block = DECL_INITIAL (current_function_decl);
3650 varray_type block_stack;
3651
3652 if (block == NULL_TREE)
3653 return;
3654
3655 VARRAY_TREE_INIT (block_stack, 10, "block_stack");
3656
3657 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3658 clear_block_marks (block);
3659
3660 /* Prune the old trees away, so that they don't get in the way. */
3661 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3662 BLOCK_CHAIN (block) = NULL_TREE;
3663
3664 /* Recreate the block tree from the note nesting. */
3665 reorder_blocks_1 (get_insns (), block, &block_stack);
3666 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3667
3668 /* Remove deleted blocks from the block fragment chains. */
3669 reorder_fix_fragments (block);
3670 }
3671
3672 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3673
3674 void
3675 clear_block_marks (tree block)
3676 {
3677 while (block)
3678 {
3679 TREE_ASM_WRITTEN (block) = 0;
3680 clear_block_marks (BLOCK_SUBBLOCKS (block));
3681 block = BLOCK_CHAIN (block);
3682 }
3683 }
3684
3685 static void
3686 reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
3687 {
3688 rtx insn;
3689
3690 for (insn = insns; insn; insn = NEXT_INSN (insn))
3691 {
3692 if (NOTE_P (insn))
3693 {
3694 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3695 {
3696 tree block = NOTE_BLOCK (insn);
3697
3698 /* If we have seen this block before, that means it now
3699 spans multiple address regions. Create a new fragment. */
3700 if (TREE_ASM_WRITTEN (block))
3701 {
3702 tree new_block = copy_node (block);
3703 tree origin;
3704
3705 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3706 ? BLOCK_FRAGMENT_ORIGIN (block)
3707 : block);
3708 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3709 BLOCK_FRAGMENT_CHAIN (new_block)
3710 = BLOCK_FRAGMENT_CHAIN (origin);
3711 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3712
3713 NOTE_BLOCK (insn) = new_block;
3714 block = new_block;
3715 }
3716
3717 BLOCK_SUBBLOCKS (block) = 0;
3718 TREE_ASM_WRITTEN (block) = 1;
3719 /* When there's only one block for the entire function,
3720 current_block == block and we mustn't do this, as it
3721 would cause infinite recursion. */
3722 if (block != current_block)
3723 {
3724 BLOCK_SUPERCONTEXT (block) = current_block;
3725 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3726 BLOCK_SUBBLOCKS (current_block) = block;
3727 current_block = block;
3728 }
3729 VARRAY_PUSH_TREE (*p_block_stack, block);
3730 }
3731 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
3732 {
3733 NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
3734 VARRAY_POP (*p_block_stack);
3735 BLOCK_SUBBLOCKS (current_block)
3736 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3737 current_block = BLOCK_SUPERCONTEXT (current_block);
3738 }
3739 }
3740 }
3741 }
3742
3743 /* Rationalize BLOCK_FRAGMENT_ORIGIN. If an origin block no longer
3744 appears in the block tree, select one of the fragments to become
3745 the new origin block. */
3746
3747 static void
3748 reorder_fix_fragments (tree block)
3749 {
3750 while (block)
3751 {
3752 tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
3753 tree new_origin = NULL_TREE;
3754
3755 if (dup_origin)
3756 {
3757 if (! TREE_ASM_WRITTEN (dup_origin))
3758 {
3759 new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
3760
3761 /* Find the first of the remaining fragments. There must
3762 be at least one -- the current block. */
3763 while (! TREE_ASM_WRITTEN (new_origin))
3764 new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
3765 BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
3766 }
3767 }
3768 else if (! dup_origin)
3769 new_origin = block;
3770
3771 /* Re-root the rest of the fragments to the new origin. In the
3772 case that DUP_ORIGIN was null, that means BLOCK was the origin
3773 of a chain of fragments and we want to remove those fragments
3774 that didn't make it to the output. */
3775 if (new_origin)
3776 {
3777 tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
3778 tree chain = *pp;
3779
3780 while (chain)
3781 {
3782 if (TREE_ASM_WRITTEN (chain))
3783 {
3784 BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
3785 *pp = chain;
3786 pp = &BLOCK_FRAGMENT_CHAIN (chain);
3787 }
3788 chain = BLOCK_FRAGMENT_CHAIN (chain);
3789 }
3790 *pp = NULL_TREE;
3791 }
3792
3793 reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
3794 block = BLOCK_CHAIN (block);
3795 }
3796 }
3797
3798 /* Reverse the order of elements in the chain T of blocks,
3799 and return the new head of the chain (old last element). */
3800
3801 tree
3802 blocks_nreverse (tree t)
3803 {
3804 tree prev = 0, decl, next;
3805 for (decl = t; decl; decl = next)
3806 {
3807 next = BLOCK_CHAIN (decl);
3808 BLOCK_CHAIN (decl) = prev;
3809 prev = decl;
3810 }
3811 return prev;
3812 }
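
 /* Illustrative sketch only -- not used by GCC.  This is the same
    in-place reversal that blocks_nreverse performs on BLOCK_CHAIN,
    written for a hypothetical singly linked list type so the pointer
    manipulation is easier to follow.  */

 struct example_link { struct example_link *next; };

 static struct example_link *
 example_nreverse (struct example_link *list)
 {
   struct example_link *prev = 0, *node, *next;

   for (node = list; node; node = next)
     {
       next = node->next;   /* Remember the rest of the chain.  */
       node->next = prev;   /* Point this node at the reversed part.  */
       prev = node;         /* This node is now the reversed head.  */
     }

   return prev;             /* The old tail is the new head.  */
 }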
3813
3814 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3815 non-NULL, list them all into VECTOR, in a depth-first preorder
3816 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3817 blocks. */
3818
3819 static int
3820 all_blocks (tree block, tree *vector)
3821 {
3822 int n_blocks = 0;
3823
3824 while (block)
3825 {
3826 TREE_ASM_WRITTEN (block) = 0;
3827
3828 /* Record this block. */
3829 if (vector)
3830 vector[n_blocks] = block;
3831
3832 ++n_blocks;
3833
3834 /* Record the subblocks, and their subblocks... */
3835 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3836 vector ? vector + n_blocks : 0);
3837 block = BLOCK_CHAIN (block);
3838 }
3839
3840 return n_blocks;
3841 }
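
 /* Illustrative sketch only -- not used by GCC.  all_blocks above walks
    a tree whose siblings are linked through BLOCK_CHAIN and whose
    children hang off BLOCK_SUBBLOCKS, counting every node and, when a
    vector is supplied, recording the nodes in depth-first preorder.
    The helper below performs the same walk on a hypothetical node
    type.  */

 struct example_tree
 {
   struct example_tree *child;    /* First subnode.  */
   struct example_tree *sibling;  /* Next node on the chain.  */
 };

 static int
 example_all_nodes (struct example_tree *node, struct example_tree **vec)
 {
   int n = 0;

   while (node)
     {
       if (vec)
         vec[n] = node;           /* Record this node...  */
       ++n;
       /* ...then all of its descendants, in preorder.  */
       n += example_all_nodes (node->child, vec ? vec + n : 0);
       node = node->sibling;
     }

   return n;
 }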
3842
3843 /* Return a vector containing all the blocks rooted at BLOCK. The
3844 number of elements in the vector is stored in N_BLOCKS_P. The
3845 vector is dynamically allocated; it is the caller's responsibility
3846 to call `free' on the pointer returned. */
3847
3848 static tree *
3849 get_block_vector (tree block, int *n_blocks_p)
3850 {
3851 tree *block_vector;
3852
3853 *n_blocks_p = all_blocks (block, NULL);
3854 block_vector = xmalloc (*n_blocks_p * sizeof (tree));
3855 all_blocks (block, block_vector);
3856
3857 return block_vector;
3858 }
3859
3860 static GTY(()) int next_block_index = 2;
3861
3862 /* Set BLOCK_NUMBER for all the blocks in FN. */
3863
3864 void
3865 number_blocks (tree fn)
3866 {
3867 int i;
3868 int n_blocks;
3869 tree *block_vector;
3870
3871 /* For SDB and XCOFF debugging output, we start numbering the blocks
3872 from 1 within each function, rather than keeping a running
3873 count. */
3874 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3875 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3876 next_block_index = 1;
3877 #endif
3878
3879 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3880
3881 /* The top-level BLOCK isn't numbered at all. */
3882 for (i = 1; i < n_blocks; ++i)
3883 /* We number the blocks from two. */
3884 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3885
3886 free (block_vector);
3887
3888 return;
3889 }
3890
3891 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3892
3893 tree
3894 debug_find_var_in_block_tree (tree var, tree block)
3895 {
3896 tree t;
3897
3898 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3899 if (t == var)
3900 return block;
3901
3902 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3903 {
3904 tree ret = debug_find_var_in_block_tree (var, t);
3905 if (ret)
3906 return ret;
3907 }
3908
3909 return NULL_TREE;
3910 }
3911 \f
3912 /* Allocate a function structure for FNDECL and set its contents
3913 to the defaults. */
3914
3915 void
3916 allocate_struct_function (tree fndecl)
3917 {
3918 tree result;
3919 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3920
3921 cfun = ggc_alloc_cleared (sizeof (struct function));
3922
3923 cfun->stack_alignment_needed = STACK_BOUNDARY;
3924 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3925
3926 current_function_funcdef_no = funcdef_no++;
3927
3928 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3929
3930 init_eh_for_function ();
3931
3932 lang_hooks.function.init (cfun);
3933 if (init_machine_status)
3934 cfun->machine = (*init_machine_status) ();
3935
3936 if (fndecl == NULL)
3937 return;
3938
3939 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3940 cfun->decl = fndecl;
3941
3942 result = DECL_RESULT (fndecl);
3943 if (aggregate_value_p (result, fndecl))
3944 {
3945 #ifdef PCC_STATIC_STRUCT_RETURN
3946 current_function_returns_pcc_struct = 1;
3947 #endif
3948 current_function_returns_struct = 1;
3949 }
3950
3951 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3952
3953 current_function_stdarg
3954 = (fntype
3955 && TYPE_ARG_TYPES (fntype) != 0
3956 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3957 != void_type_node));
3958 }
3959
3960 /* Reset cfun, and other non-struct-function variables to defaults as
3961 appropriate for emitting rtl at the start of a function. */
3962
3963 static void
3964 prepare_function_start (tree fndecl)
3965 {
3966 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3967 cfun = DECL_STRUCT_FUNCTION (fndecl);
3968 else
3969 allocate_struct_function (fndecl);
3970 init_emit ();
3971 init_varasm_status (cfun);
3972 init_expr ();
3973
3974 cse_not_expected = ! optimize;
3975
3976 /* Caller save not needed yet. */
3977 caller_save_needed = 0;
3978
3979 /* We haven't done register allocation yet. */
3980 reg_renumber = 0;
3981
3982 /* Indicate that we have not instantiated virtual registers yet. */
3983 virtuals_instantiated = 0;
3984
3985 /* Indicate that we want CONCATs now. */
3986 generating_concat_p = 1;
3987
3988 /* Indicate we have no need of a frame pointer yet. */
3989 frame_pointer_needed = 0;
3990 }
3991
3992 /* Initialize the rtl expansion mechanism so that we can do simple things
3993 like generate sequences. This is used to provide a context during global
3994 initialization of some passes. */
3995 void
3996 init_dummy_function_start (void)
3997 {
3998 prepare_function_start (NULL);
3999 }
4000
4001 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4002 and initialize static variables for generating RTL for the statements
4003 of the function. */
4004
4005 void
4006 init_function_start (tree subr)
4007 {
4008 prepare_function_start (subr);
4009
4010 /* Prevent ever trying to delete the first instruction of a
4011 function. Also tell final how to output a linenum before the
4012 function prologue. Note linenums could be missing, e.g. when
4013 compiling a Java .class file. */
4014 if (! DECL_IS_BUILTIN (subr))
4015 emit_line_note (DECL_SOURCE_LOCATION (subr));
4016
4017 /* Make sure first insn is a note even if we don't want linenums.
4018 This makes sure the first insn will never be deleted.
4019 Also, final expects a note to appear there. */
4020 emit_note (NOTE_INSN_DELETED);
4021
4022 /* Warn if this value is an aggregate type,
4023 regardless of which calling convention we are using for it. */
4024 if (warn_aggregate_return
4025 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4026 warning ("function returns an aggregate");
4027 }
4028
4029 /* Make sure all values used by the optimization passes have sane
4030 defaults. */
4031 void
4032 init_function_for_compilation (void)
4033 {
4034 reg_renumber = 0;
4035
4036 /* No prologue/epilogue insns yet. */
4037 VARRAY_GROW (prologue, 0);
4038 VARRAY_GROW (epilogue, 0);
4039 VARRAY_GROW (sibcall_epilogue, 0);
4040 }
4041
4042 /* Expand a call to __main at the beginning of a possible main function. */
4043
4044 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
4045 #undef HAS_INIT_SECTION
4046 #define HAS_INIT_SECTION
4047 #endif
4048
4049 void
4050 expand_main_function (void)
4051 {
4052 #ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
4053 if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
4054 {
4055 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
4056 rtx tmp, seq;
4057
4058 start_sequence ();
4059 /* Forcibly align the stack. */
4060 #ifdef STACK_GROWS_DOWNWARD
4061 tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
4062 stack_pointer_rtx, 1, OPTAB_WIDEN);
4063 #else
4064 tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
4065 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
4066 tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
4067 stack_pointer_rtx, 1, OPTAB_WIDEN);
4068 #endif
4069 if (tmp != stack_pointer_rtx)
4070 emit_move_insn (stack_pointer_rtx, tmp);
4071
4072 /* Enlist allocate_dynamic_stack_space to pick up the pieces. */
4073 tmp = force_reg (Pmode, const0_rtx);
4074 allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
4075 seq = get_insns ();
4076 end_sequence ();
4077
4078 for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
4079 if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
4080 break;
4081 if (tmp)
4082 emit_insn_before (seq, tmp);
4083 else
4084 emit_insn (seq);
4085 }
4086 #endif
4087
4088 #ifndef HAS_INIT_SECTION
4089 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4090 #endif
4091 }
4092 \f
4093 /* Start the RTL for a new function, and set variables used for
4094 emitting RTL.
4095 SUBR is the FUNCTION_DECL node. */
4098
4099 void
4100 expand_function_start (tree subr)
4101 {
4102 /* Make sure volatile mem refs aren't considered
4103 valid operands of arithmetic insns. */
4104 init_recog_no_volatile ();
4105
4106 current_function_profile
4107 = (profile_flag
4108 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4109
4110 current_function_limit_stack
4111 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4112
4113 /* Make the label for return statements to jump to. Do not special
4114 case machines with special return instructions -- they will be
4115 handled later during jump, ifcvt, or epilogue creation. */
4116 return_label = gen_label_rtx ();
4117
4118 /* Initialize rtx used to return the value. */
4119 /* Do this before assign_parms so that we copy the struct value address
4120 before any library calls that assign parms might generate. */
4121
4122 /* Decide whether to return the value in memory or in a register. */
4123 if (aggregate_value_p (DECL_RESULT (subr), subr))
4124 {
4125 /* Returning something that won't go in a register. */
4126 rtx value_address = 0;
4127
4128 #ifdef PCC_STATIC_STRUCT_RETURN
4129 if (current_function_returns_pcc_struct)
4130 {
4131 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4132 value_address = assemble_static_space (size);
4133 }
4134 else
4135 #endif
4136 {
4137 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 1);
4138 /* Expect to be passed the address of a place to store the value.
4139 If it is passed as an argument, assign_parms will take care of
4140 it. */
4141 if (sv)
4142 {
4143 value_address = gen_reg_rtx (Pmode);
4144 emit_move_insn (value_address, sv);
4145 }
4146 }
4147 if (value_address)
4148 {
4149 rtx x = value_address;
4150 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4151 {
4152 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4153 set_mem_attributes (x, DECL_RESULT (subr), 1);
4154 }
4155 SET_DECL_RTL (DECL_RESULT (subr), x);
4156 }
4157 }
4158 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4159 /* If return mode is void, this decl rtl should not be used. */
4160 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4161 else
4162 {
4163 /* Compute the return values into a pseudo reg, which we will copy
4164 into the true return register after the cleanups are done. */
4165 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4166 if (TYPE_MODE (return_type) != BLKmode
4167 && targetm.calls.return_in_msb (return_type))
4168 /* expand_function_end will insert the appropriate padding in
4169 this case. Use the return value's natural (unpadded) mode
4170 within the function proper. */
4171 SET_DECL_RTL (DECL_RESULT (subr),
4172 gen_reg_rtx (TYPE_MODE (return_type)));
4173 else
4174 {
4175 /* In order to figure out what mode to use for the pseudo, we
4176 figure out what the mode of the eventual return register will
4177 actually be, and use that. */
4178 rtx hard_reg = hard_function_value (return_type, subr, 1);
4179
4180 /* Structures that are returned in registers are not
4181 aggregate_value_p, so we may see a PARALLEL or a REG. */
4182 if (REG_P (hard_reg))
4183 SET_DECL_RTL (DECL_RESULT (subr),
4184 gen_reg_rtx (GET_MODE (hard_reg)));
4185 else
4186 {
4187 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4188 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4189 }
4190 }
4191
4192 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4193 result to the real return register(s). */
4194 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4195 }
4196
4197 /* Initialize rtx for parameters and local variables.
4198 In some cases this requires emitting insns. */
4199 assign_parms (subr);
4200
4201 /* If function gets a static chain arg, store it. */
4202 if (cfun->static_chain_decl)
4203 {
4204 tree parm = cfun->static_chain_decl;
4205 rtx local = gen_reg_rtx (Pmode);
4206
4207 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4208 SET_DECL_RTL (parm, local);
4209 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4210
4211 emit_move_insn (local, static_chain_incoming_rtx);
4212 }
4213
4214 /* If the function receives a non-local goto, then store the
4215 bits we need to restore the frame pointer. */
4216 if (cfun->nonlocal_goto_save_area)
4217 {
4218 tree t_save;
4219 rtx r_save;
4220
4221 /* ??? We need to do this save early. Unfortunately, this point is
4222 before the frame variable gets declared. Help out... */
4223 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4224
4225 t_save = build4 (ARRAY_REF, ptr_type_node,
4226 cfun->nonlocal_goto_save_area,
4227 integer_zero_node, NULL_TREE, NULL_TREE);
4228 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4229 r_save = convert_memory_address (Pmode, r_save);
4230
4231 emit_move_insn (r_save, virtual_stack_vars_rtx);
4232 update_nonlocal_goto_save_area ();
4233 }
4234
4235 /* The following was moved from init_function_start.
4236 The move is supposed to make sdb output more accurate. */
4237 /* Indicate the beginning of the function body,
4238 as opposed to parm setup. */
4239 emit_note (NOTE_INSN_FUNCTION_BEG);
4240
4241 if (!NOTE_P (get_last_insn ()))
4242 emit_note (NOTE_INSN_DELETED);
4243 parm_birth_insn = get_last_insn ();
4244
4245 if (current_function_profile)
4246 {
4247 #ifdef PROFILE_HOOK
4248 PROFILE_HOOK (current_function_funcdef_no);
4249 #endif
4250 }
4251
4252 /* After the display initializations is where the tail-recursion label
4253 should go, if we end up needing one. Ensure we have a NOTE here
4254 since some things (like trampolines) get placed before this. */
4255 tail_recursion_reentry = emit_note (NOTE_INSN_DELETED);
4256
4257 /* Make sure there is a line number after the function entry setup code. */
4258 force_next_line_note ();
4259 }
4260 \f
4261 /* Undo the effects of init_dummy_function_start. */
4262 void
4263 expand_dummy_function_end (void)
4264 {
4265 /* End any sequences that failed to be closed due to syntax errors. */
4266 while (in_sequence_p ())
4267 end_sequence ();
4268
4269 /* Outside function body, can't compute type's actual size
4270 until next function's body starts. */
4271
4272 free_after_parsing (cfun);
4273 free_after_compilation (cfun);
4274 cfun = 0;
4275 }
4276
4277 /* Call DOIT for each hard register used as a return value from
4278 the current function. */
4279
4280 void
4281 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4282 {
4283 rtx outgoing = current_function_return_rtx;
4284
4285 if (! outgoing)
4286 return;
4287
4288 if (REG_P (outgoing))
4289 (*doit) (outgoing, arg);
4290 else if (GET_CODE (outgoing) == PARALLEL)
4291 {
4292 int i;
4293
4294 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4295 {
4296 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4297
4298 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4299 (*doit) (x, arg);
4300 }
4301 }
4302 }
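
 /* Illustrative sketch only -- not used by GCC.  diddle_return_value
    applies a caller-supplied callback either to a single return
    register or to each register of a PARALLEL.  The helper below shows
    the same "apply DOIT to one item or to every item of a group"
    pattern with hypothetical types.  */

 struct example_group { int nitems; int *items; };

 static void
 example_diddle (int single_item, struct example_group *group,
                 void (*doit) (int, void *), void *arg)
 {
   if (group == 0)
     (*doit) (single_item, arg);          /* The single-REG case.  */
   else
     {
       int i;
       for (i = 0; i < group->nitems; i++)
         (*doit) (group->items[i], arg);  /* The PARALLEL case.  */
     }
 }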
4303
4304 static void
4305 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4306 {
4307 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4308 }
4309
4310 void
4311 clobber_return_register (void)
4312 {
4313 diddle_return_value (do_clobber_return_reg, NULL);
4314
4315 /* In case we do use pseudo to return value, clobber it too. */
4316 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4317 {
4318 tree decl_result = DECL_RESULT (current_function_decl);
4319 rtx decl_rtl = DECL_RTL (decl_result);
4320 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4321 {
4322 do_clobber_return_reg (decl_rtl, NULL);
4323 }
4324 }
4325 }
4326
4327 static void
4328 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4329 {
4330 emit_insn (gen_rtx_USE (VOIDmode, reg));
4331 }
4332
4333 void
4334 use_return_register (void)
4335 {
4336 diddle_return_value (do_use_return_reg, NULL);
4337 }
4338
4339 /* Possibly warn about unused parameters. */
4340 void
4341 do_warn_unused_parameter (tree fn)
4342 {
4343 tree decl;
4344
4345 for (decl = DECL_ARGUMENTS (fn);
4346 decl; decl = TREE_CHAIN (decl))
4347 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4348 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4349 warning ("%Junused parameter %qD", decl, decl);
4350 }
4351
4352 static GTY(()) rtx initial_trampoline;
4353
4354 /* Generate RTL for the end of the current function. */
4355
4356 void
4357 expand_function_end (void)
4358 {
4359 rtx clobber_after;
4360
4361 /* If arg_pointer_save_area was referenced only from a nested
4362 function, we will not have initialized it yet. Do that now. */
4363 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4364 get_arg_pointer_save_area (cfun);
4365
4366 /* If we are doing stack checking and this function makes calls,
4367 do a stack probe at the start of the function to ensure we have enough
4368 space for another stack frame. */
4369 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4370 {
4371 rtx insn, seq;
4372
4373 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4374 if (CALL_P (insn))
4375 {
4376 start_sequence ();
4377 probe_stack_range (STACK_CHECK_PROTECT,
4378 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4379 seq = get_insns ();
4380 end_sequence ();
4381 emit_insn_before (seq, tail_recursion_reentry);
4382 break;
4383 }
4384 }
4385
4386 /* Possibly warn about unused parameters.
4387 When frontend does unit-at-a-time, the warning is already
4388 issued at finalization time. */
4389 if (warn_unused_parameter
4390 && !lang_hooks.callgraph.expand_function)
4391 do_warn_unused_parameter (current_function_decl);
4392
4393 /* End any sequences that failed to be closed due to syntax errors. */
4394 while (in_sequence_p ())
4395 end_sequence ();
4396
4397 clear_pending_stack_adjust ();
4398 do_pending_stack_adjust ();
4399
4400 /* @@@ This is a kludge. We want to ensure that instructions that
4401 may trap are not moved into the epilogue by scheduling, because
4402 we don't always emit unwind information for the epilogue.
4403 However, not all machine descriptions define a blockage insn, so
4404 emit an ASM_INPUT to act as one. */
4405 if (flag_non_call_exceptions)
4406 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4407
4408 /* Mark the end of the function body.
4409 If control reaches this insn, the function can drop through
4410 without returning a value. */
4411 emit_note (NOTE_INSN_FUNCTION_END);
4412
4413 /* Must mark the last line number note in the function, so that the test
4414 coverage code can avoid counting the last line twice. This just tells
4415 the code to ignore the immediately following line note, since there
4416 already exists a copy of this note somewhere above. This line number
4417 note is still needed for debugging though, so we can't delete it. */
4418 if (flag_test_coverage)
4419 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4420
4421 /* Output a linenumber for the end of the function.
4422 SDB depends on this. */
4423 force_next_line_note ();
4424 emit_line_note (input_location);
4425
4426 /* Before the return label (if any), clobber the return
4427 registers so that they are not propagated live to the rest of
4428 the function. This can only happen with functions that drop
4429 through; if there had been a return statement, there would
4430 have either been a return rtx, or a jump to the return label.
4431
4432 We delay actual code generation until after current_function_return_rtx
4433 is computed. */
4434 clobber_after = get_last_insn ();
4435
4436 /* Output the label for the actual return from the function. */
4437 emit_label (return_label);
4438
4439 /* Let except.c know where it should emit the call to unregister
4440 the function context for sjlj exceptions. */
4441 if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
4442 sjlj_emit_function_exit_after (get_last_insn ());
4443
4444 /* If scalar return value was computed in a pseudo-reg, or was a named
4445 return value that got dumped to the stack, copy that to the hard
4446 return register. */
4447 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4448 {
4449 tree decl_result = DECL_RESULT (current_function_decl);
4450 rtx decl_rtl = DECL_RTL (decl_result);
4451
4452 if (REG_P (decl_rtl)
4453 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4454 : DECL_REGISTER (decl_result))
4455 {
4456 rtx real_decl_rtl = current_function_return_rtx;
4457
4458 /* This should be set in assign_parms. */
4459 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4460
4461 /* If this is a BLKmode structure being returned in registers,
4462 then use the mode computed in expand_return. Note that if
4463 decl_rtl is memory, then its mode may have been changed,
4464 but that current_function_return_rtx has not. */
4465 if (GET_MODE (real_decl_rtl) == BLKmode)
4466 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4467
4468 /* If a non-BLKmode return value should be padded at the least
4469 significant end of the register, shift it left by the appropriate
4470 amount. BLKmode results are handled using the group load/store
4471 machinery. */
4472 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4473 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4474 {
4475 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4476 REGNO (real_decl_rtl)),
4477 decl_rtl);
4478 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4479 }
4480 /* If a named return value dumped decl_return to memory, then
4481 we may need to re-do the PROMOTE_MODE signed/unsigned
4482 extension. */
4483 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4484 {
4485 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4486
4487 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4488 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4489 &unsignedp, 1);
4490
4491 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4492 }
4493 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4494 {
4495 /* If expand_function_start has created a PARALLEL for decl_rtl,
4496 move the result to the real return registers. Otherwise, do
4497 a group load from decl_rtl for a named return. */
4498 if (GET_CODE (decl_rtl) == PARALLEL)
4499 emit_group_move (real_decl_rtl, decl_rtl);
4500 else
4501 emit_group_load (real_decl_rtl, decl_rtl,
4502 TREE_TYPE (decl_result),
4503 int_size_in_bytes (TREE_TYPE (decl_result)));
4504 }
4505 else
4506 emit_move_insn (real_decl_rtl, decl_rtl);
4507 }
4508 }
4509
4510 /* If returning a structure, arrange to return the address of the value
4511 in a place where debuggers expect to find it.
4512
4513 If returning a structure PCC style,
4514 the caller also depends on this value.
4515 And current_function_returns_pcc_struct is not necessarily set. */
4516 if (current_function_returns_struct
4517 || current_function_returns_pcc_struct)
4518 {
4519 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4520 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4521 rtx outgoing;
4522
4523 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4524 type = TREE_TYPE (type);
4525 else
4526 value_address = XEXP (value_address, 0);
4527
4528 #ifdef FUNCTION_OUTGOING_VALUE
4529 outgoing = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
4530 current_function_decl);
4531 #else
4532 outgoing = FUNCTION_VALUE (build_pointer_type (type),
4533 current_function_decl);
4534 #endif
4535
4536 /* Mark this as a function return value so integrate will delete the
4537 assignment and USE below when inlining this function. */
4538 REG_FUNCTION_VALUE_P (outgoing) = 1;
4539
4540 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4541 value_address = convert_memory_address (GET_MODE (outgoing),
4542 value_address);
4543
4544 emit_move_insn (outgoing, value_address);
4545
4546 /* Show return register used to hold result (in this case the address
4547 of the result). */
4548 current_function_return_rtx = outgoing;
4549 }
4550
4551 /* If this is an implementation of throw, do what's necessary to
4552 communicate between __builtin_eh_return and the epilogue. */
4553 expand_eh_return ();
4554
4555 /* Emit the actual code to clobber return register. */
4556 {
4557 rtx seq;
4558
4559 start_sequence ();
4560 clobber_return_register ();
4561 expand_naked_return ();
4562 seq = get_insns ();
4563 end_sequence ();
4564
4565 emit_insn_after (seq, clobber_after);
4566 }
4567
4568 /* Output the label for the naked return from the function. */
4569 emit_label (naked_return_label);
4570
4571 /* If we had calls to alloca, and this machine needs
4572 an accurate stack pointer to exit the function,
4573 insert some code to save and restore the stack pointer. */
4574 if (! EXIT_IGNORE_STACK
4575 && current_function_calls_alloca)
4576 {
4577 rtx tem = 0;
4578
4579 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4580 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4581 }
4582
4583 /* ??? This should no longer be necessary since stupid is no longer with
4584 us, but there are some parts of the compiler (eg reload_combine, and
4585 sh mach_dep_reorg) that still try and compute their own lifetime info
4586 instead of using the general framework. */
4587 use_return_register ();
4588 }
4589
4590 rtx
4591 get_arg_pointer_save_area (struct function *f)
4592 {
4593 rtx ret = f->x_arg_pointer_save_area;
4594
4595 if (! ret)
4596 {
4597 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4598 f->x_arg_pointer_save_area = ret;
4599 }
4600
4601 if (f == cfun && ! f->arg_pointer_save_area_init)
4602 {
4603 rtx seq;
4604
4605 /* Save the arg pointer at the beginning of the function. The
4606 generated stack slot may not be a valid memory address, so we
4607 have to check it and fix it if necessary. */
4608 start_sequence ();
4609 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4610 seq = get_insns ();
4611 end_sequence ();
4612
4613 push_topmost_sequence ();
4614 emit_insn_after (seq, entry_of_function ());
4615 pop_topmost_sequence ();
4616 }
4617
4618 return ret;
4619 }
4620 \f
4621 /* Extend a vector that records the INSN_UIDs of INSNS
4622 (a list of one or more insns). */
4623
4624 static void
4625 record_insns (rtx insns, varray_type *vecp)
4626 {
4627 int i, len;
4628 rtx tmp;
4629
4630 tmp = insns;
4631 len = 0;
4632 while (tmp != NULL_RTX)
4633 {
4634 len++;
4635 tmp = NEXT_INSN (tmp);
4636 }
4637
4638 i = VARRAY_SIZE (*vecp);
4639 VARRAY_GROW (*vecp, i + len);
4640 tmp = insns;
4641 while (tmp != NULL_RTX)
4642 {
4643 VARRAY_INT (*vecp, i) = INSN_UID (tmp);
4644 i++;
4645 tmp = NEXT_INSN (tmp);
4646 }
4647 }
4648
4649 /* Set the locator of the insn chain starting at INSN to LOC. */
4650 static void
4651 set_insn_locators (rtx insn, int loc)
4652 {
4653 while (insn != NULL_RTX)
4654 {
4655 if (INSN_P (insn))
4656 INSN_LOCATOR (insn) = loc;
4657 insn = NEXT_INSN (insn);
4658 }
4659 }
4660
4661 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4662 be running after reorg, SEQUENCE rtl is possible. */
4663
4664 static int
4665 contains (rtx insn, varray_type vec)
4666 {
4667 int i, j;
4668
4669 if (NONJUMP_INSN_P (insn)
4670 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4671 {
4672 int count = 0;
4673 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4674 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4675 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
4676 count++;
4677 return count;
4678 }
4679 else
4680 {
4681 for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
4682 if (INSN_UID (insn) == VARRAY_INT (vec, j))
4683 return 1;
4684 }
4685 return 0;
4686 }
4687
4688 int
4689 prologue_epilogue_contains (rtx insn)
4690 {
4691 if (contains (insn, prologue))
4692 return 1;
4693 if (contains (insn, epilogue))
4694 return 1;
4695 return 0;
4696 }
4697
4698 int
4699 sibcall_epilogue_contains (rtx insn)
4700 {
4701 if (sibcall_epilogue)
4702 return contains (insn, sibcall_epilogue);
4703 return 0;
4704 }
4705
4706 #ifdef HAVE_return
4707 /* Insert gen_return at the end of block BB. This also means updating
4708 block_for_insn appropriately. */
4709
4710 static void
4711 emit_return_into_block (basic_block bb, rtx line_note)
4712 {
4713 emit_jump_insn_after (gen_return (), BB_END (bb));
4714 if (line_note)
4715 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4716 }
4717 #endif /* HAVE_return */
4718
4719 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4720
4721 /* These functions convert the epilogue into a variant that does not modify the
4722 stack pointer. This is used in cases where a function returns an object
4723 whose size is not known until it is computed. The called function leaves the
4724 object on the stack, leaves the stack depressed, and returns a pointer to
4725 the object.
4726
4727 What we need to do is track all modifications and references to the stack
4728 pointer, deleting the modifications and changing the references to point to
4729 the location the stack pointer would have pointed to had the modifications
4730 taken place.
4731
4732 These functions need to be portable so we need to make as few assumptions
4733 about the epilogue as we can. However, the epilogue basically contains
4734 three things: instructions to reset the stack pointer, instructions to
4735 reload registers, possibly including the frame pointer, and an
4736 instruction to return to the caller.
4737
4738 If we can't be sure of what a relevant epilogue insn is doing, we abort.
4739 We also make no attempt to validate the insns we make since if they are
4740 invalid, we probably can't do anything valid. The intent is that these
4741 routines get "smarter" as more and more machines start to use them and
4742 they try operating on different epilogues.
4743
4744 We use the following structure to track what the part of the epilogue that
4745 we've already processed has done. We keep two copies of the SP equivalence,
4746 one for use during the insn we are processing and one for use in the next
4747 insn. The difference is because one part of a PARALLEL may adjust SP
4748 and the other may use it. */
4749
4750 struct epi_info
4751 {
4752 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4753 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4754 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4755 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4756 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4757 should be set to once we no longer need
4758 its value. */
4759 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4760 for registers. */
4761 };
4762
4763 static void handle_epilogue_set (rtx, struct epi_info *);
4764 static void update_epilogue_consts (rtx, rtx, void *);
4765 static void emit_equiv_load (struct epi_info *);
4766
4767 /* Modify INSNS, a list of one or more insns that is part of the epilogue, so
4768 that it makes no modifications to the stack pointer. Return the new list of insns. */
4769
4770 static rtx
4771 keep_stack_depressed (rtx insns)
4772 {
4773 int j;
4774 struct epi_info info;
4775 rtx insn, next;
4776
4777 /* If the epilogue is just a single instruction, it must be OK as is. */
4778 if (NEXT_INSN (insns) == NULL_RTX)
4779 return insns;
4780
4781 /* Otherwise, start a sequence, initialize the information we have, and
4782 process all the insns we were given. */
4783 start_sequence ();
4784
4785 info.sp_equiv_reg = stack_pointer_rtx;
4786 info.sp_offset = 0;
4787 info.equiv_reg_src = 0;
4788
4789 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4790 info.const_equiv[j] = 0;
4791
4792 insn = insns;
4793 next = NULL_RTX;
4794 while (insn != NULL_RTX)
4795 {
4796 next = NEXT_INSN (insn);
4797
4798 if (!INSN_P (insn))
4799 {
4800 add_insn (insn);
4801 insn = next;
4802 continue;
4803 }
4804
4805 /* If this insn references the register that SP is equivalent to and
4806 we have a pending load to that register, we must force out the load
4807 first and then indicate we no longer know what SP's equivalent is. */
4808 if (info.equiv_reg_src != 0
4809 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4810 {
4811 emit_equiv_load (&info);
4812 info.sp_equiv_reg = 0;
4813 }
4814
4815 info.new_sp_equiv_reg = info.sp_equiv_reg;
4816 info.new_sp_offset = info.sp_offset;
4817
4818 /* If this is a (RETURN) and the return address is on the stack,
4819 update the address and change to an indirect jump. */
4820 if (GET_CODE (PATTERN (insn)) == RETURN
4821 || (GET_CODE (PATTERN (insn)) == PARALLEL
4822 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4823 {
4824 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4825 rtx base = 0;
4826 HOST_WIDE_INT offset = 0;
4827 rtx jump_insn, jump_set;
4828
4829 /* If the return address is in a register, we can emit the insn
4830 unchanged. Otherwise, it must be a MEM and we see what the
4831 base register and offset are. In any case, we have to emit any
4832 pending load to the equivalent reg of SP, if any. */
4833 if (REG_P (retaddr))
4834 {
4835 emit_equiv_load (&info);
4836 add_insn (insn);
4837 insn = next;
4838 continue;
4839 }
4840 else
4841 {
4842 rtx ret_ptr;
4843 gcc_assert (MEM_P (retaddr));
4844
4845 ret_ptr = XEXP (retaddr, 0);
4846
4847 if (REG_P (ret_ptr))
4848 {
4849 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4850 offset = 0;
4851 }
4852 else
4853 {
4854 gcc_assert (GET_CODE (ret_ptr) == PLUS
4855 && REG_P (XEXP (ret_ptr, 0))
4856 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4857 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4858 offset = INTVAL (XEXP (ret_ptr, 1));
4859 }
4860 }
4861
4862 /* If the base of the location containing the return pointer
4863 is SP, we must update it with the replacement address. Otherwise,
4864 just build the necessary MEM. */
4865 retaddr = plus_constant (base, offset);
4866 if (base == stack_pointer_rtx)
4867 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4868 plus_constant (info.sp_equiv_reg,
4869 info.sp_offset));
4870
4871 retaddr = gen_rtx_MEM (Pmode, retaddr);
4872
4873 /* If there is a pending load to the equivalent register for SP
4874 and we reference that register, we must load our address into
4875 a scratch register and then do that load. */
4876 if (info.equiv_reg_src
4877 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4878 {
4879 unsigned int regno;
4880 rtx reg;
4881
4882 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4883 if (HARD_REGNO_MODE_OK (regno, Pmode)
4884 && !fixed_regs[regno]
4885 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4886 && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
4887 regno)
4888 && !refers_to_regno_p (regno,
4889 regno + hard_regno_nregs[regno]
4890 [Pmode],
4891 info.equiv_reg_src, NULL)
4892 && info.const_equiv[regno] == 0)
4893 break;
4894
4895 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4896
4897 reg = gen_rtx_REG (Pmode, regno);
4898 emit_move_insn (reg, retaddr);
4899 retaddr = reg;
4900 }
4901
4902 emit_equiv_load (&info);
4903 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4904
4905 /* Show the SET in the above insn is a RETURN. */
4906 jump_set = single_set (jump_insn);
4907 gcc_assert (jump_set);
4908 SET_IS_RETURN_P (jump_set) = 1;
4909 }
4910
4911 /* If SP is not mentioned in the pattern and its equivalent register, if
4912 any, is not modified, just emit it. Otherwise, if neither is set,
4913 replace the reference to SP and emit the insn. If none of those are
4914 true, handle each SET individually. */
4915 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4916 && (info.sp_equiv_reg == stack_pointer_rtx
4917 || !reg_set_p (info.sp_equiv_reg, insn)))
4918 add_insn (insn);
4919 else if (! reg_set_p (stack_pointer_rtx, insn)
4920 && (info.sp_equiv_reg == stack_pointer_rtx
4921 || !reg_set_p (info.sp_equiv_reg, insn)))
4922 {
4923 int changed;
4924
4925 changed = validate_replace_rtx (stack_pointer_rtx,
4926 plus_constant (info.sp_equiv_reg,
4927 info.sp_offset),
4928 insn);
4929 gcc_assert (changed);
4930
4931 add_insn (insn);
4932 }
4933 else if (GET_CODE (PATTERN (insn)) == SET)
4934 handle_epilogue_set (PATTERN (insn), &info);
4935 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4936 {
4937 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4938 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4939 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4940 }
4941 else
4942 add_insn (insn);
4943
4944 info.sp_equiv_reg = info.new_sp_equiv_reg;
4945 info.sp_offset = info.new_sp_offset;
4946
4947 /* Now update any constants this insn sets. */
4948 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4949 insn = next;
4950 }
4951
4952 insns = get_insns ();
4953 end_sequence ();
4954 return insns;
4955 }
4956
4957 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4958 structure that contains information about what we've seen so far. We
4959 process this SET by either updating that data or by emitting one or
4960 more insns. */
4961
4962 static void
4963 handle_epilogue_set (rtx set, struct epi_info *p)
4964 {
4965 /* First handle the case where we are setting SP. Record what it is being
4966 set from. If unknown, abort. */
4967 if (reg_set_p (stack_pointer_rtx, set))
4968 {
4969 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4970
4971 if (GET_CODE (SET_SRC (set)) == PLUS)
4972 {
4973 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4974 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4975 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4976 else
4977 {
4978 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4979 && (REGNO (XEXP (SET_SRC (set), 1))
4980 < FIRST_PSEUDO_REGISTER)
4981 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4982 p->new_sp_offset
4983 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4984 }
4985 }
4986 else
4987 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4988
4989 /* If we are adjusting SP, we adjust from the old data. */
4990 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4991 {
4992 p->new_sp_equiv_reg = p->sp_equiv_reg;
4993 p->new_sp_offset += p->sp_offset;
4994 }
4995
4996 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4997
4998 return;
4999 }
5000
5001 /* Next handle the case where we are setting SP's equivalent register.
5002 If we already have a value to set it to, abort. We could update, but
5003 there seems little point in handling that case. Note that we have
5004 to allow for the case where we are setting the register set in
5005 the previous part of a PARALLEL inside a single insn. But use the
5006 old offset for any updates within this insn. We must allow for the case
5007 where the register is being set in a different (usually wider) mode than
5008 Pmode. */
5009 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
5010 {
5011 gcc_assert (!p->equiv_reg_src
5012 && REG_P (p->new_sp_equiv_reg)
5013 && REG_P (SET_DEST (set))
5014 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
5015 <= BITS_PER_WORD)
5016 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
5017 p->equiv_reg_src
5018 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5019 plus_constant (p->sp_equiv_reg,
5020 p->sp_offset));
5021 }
5022
5023 /* Otherwise, replace any references to SP in the insn to its new value
5024 and emit the insn. */
5025 else
5026 {
5027 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5028 plus_constant (p->sp_equiv_reg,
5029 p->sp_offset));
5030 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5031 plus_constant (p->sp_equiv_reg,
5032 p->sp_offset));
5033 emit_insn (set);
5034 }
5035 }
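
 /* Illustrative sketch only -- not used by GCC.  The core bookkeeping in
    handle_epilogue_set keeps the stack pointer's value as BASE + OFFSET.
    When the epilogue executes "sp = sp + N" the adjustment folds into
    the old offset; when it copies some other register into sp, that
    register becomes the new base.  The helper below shows that update
    rule with hypothetical names (a base of 0 stands for "sp itself").  */

 struct example_sp_equiv { int base_reg; long offset; };

 static struct example_sp_equiv
 example_update_sp (struct example_sp_equiv cur, int src_base_reg,
                    long src_offset)
 {
   struct example_sp_equiv next;

   if (src_base_reg == 0)
     {
       /* "sp = sp + N": keep the old base, accumulate the offset.  */
       next.base_reg = cur.base_reg;
       next.offset = cur.offset + src_offset;
     }
   else
     {
       /* "sp = other_reg + N": track the new base from here on.  */
       next.base_reg = src_base_reg;
       next.offset = src_offset;
     }

   return next;
 }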
5036
5037 /* Update the tracking information for registers set to constants. */
5038
5039 static void
5040 update_epilogue_consts (rtx dest, rtx x, void *data)
5041 {
5042 struct epi_info *p = (struct epi_info *) data;
5043 rtx new;
5044
5045 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5046 return;
5047
5048 /* If we are either clobbering a register or doing a partial set,
5049 show we don't know the value. */
5050 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5051 p->const_equiv[REGNO (dest)] = 0;
5052
5053 /* If we are setting it to a constant, record that constant. */
5054 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5055 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5056
5057 /* If this is a binary operation between a register we have been tracking
5058 and a constant, see if we can compute a new constant value. */
5059 else if (ARITHMETIC_P (SET_SRC (x))
5060 && REG_P (XEXP (SET_SRC (x), 0))
5061 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5062 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5063 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5064 && 0 != (new = simplify_binary_operation
5065 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5066 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5067 XEXP (SET_SRC (x), 1)))
5068 && GET_CODE (new) == CONST_INT)
5069 p->const_equiv[REGNO (dest)] = new;
5070
5071 /* Otherwise, we can't do anything with this value. */
5072 else
5073 p->const_equiv[REGNO (dest)] = 0;
5074 }
5075
5076 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5077
5078 static void
5079 emit_equiv_load (struct epi_info *p)
5080 {
5081 if (p->equiv_reg_src != 0)
5082 {
5083 rtx dest = p->sp_equiv_reg;
5084
5085 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5086 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5087 REGNO (p->sp_equiv_reg));
5088
5089 emit_move_insn (dest, p->equiv_reg_src);
5090 p->equiv_reg_src = 0;
5091 }
5092 }
5093 #endif
5094
5095 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5096 this into place with notes indicating where the prologue ends and where
5097 the epilogue begins. Update the basic block information when possible. */
5098
5099 void
5100 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5101 {
5102 int inserted = 0;
5103 edge e;
5104 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5105 rtx seq;
5106 #endif
5107 #ifdef HAVE_prologue
5108 rtx prologue_end = NULL_RTX;
5109 #endif
5110 #if defined (HAVE_epilogue) || defined(HAVE_return)
5111 rtx epilogue_end = NULL_RTX;
5112 #endif
5113 edge_iterator ei;
5114
5115 #ifdef HAVE_prologue
5116 if (HAVE_prologue)
5117 {
5118 start_sequence ();
5119 seq = gen_prologue ();
5120 emit_insn (seq);
5121
5122 /* Retain a map of the prologue insns. */
5123 record_insns (seq, &prologue);
5124 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5125
5126 seq = get_insns ();
5127 end_sequence ();
5128 set_insn_locators (seq, prologue_locator);
5129
5130 /* Can't deal with multiple successors of the entry block
5131 at the moment. Function should always have at least one
5132 entry point. */
5133 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5134
5135 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5136 inserted = 1;
5137 }
5138 #endif
5139
5140 /* If the exit block has no non-fake predecessors, we don't need
5141 an epilogue. */
5142 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5143 if ((e->flags & EDGE_FAKE) == 0)
5144 break;
5145 if (e == NULL)
5146 goto epilogue_done;
5147
5148 #ifdef HAVE_return
5149 if (optimize && HAVE_return)
5150 {
5151 /* If we're allowed to generate a simple return instruction,
5152 then by definition we don't need a full epilogue. Examine
5153 the block that falls through to EXIT. If it does not
5154 contain any code, examine its predecessors and try to
5155 emit (conditional) return instructions. */
5156
5157 basic_block last;
5158 rtx label;
5159
5160 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5161 if (e->flags & EDGE_FALLTHRU)
5162 break;
5163 if (e == NULL)
5164 goto epilogue_done;
5165 last = e->src;
5166
5167 /* Verify that there are no active instructions in the last block. */
5168 label = BB_END (last);
5169 while (label && !LABEL_P (label))
5170 {
5171 if (active_insn_p (label))
5172 break;
5173 label = PREV_INSN (label);
5174 }
5175
5176 if (BB_HEAD (last) == label && LABEL_P (label))
5177 {
5178 edge_iterator ei2;
5179 rtx epilogue_line_note = NULL_RTX;
5180
5181 /* Locate the line number associated with the closing brace,
5182 if we can find one. */
5183 for (seq = get_last_insn ();
5184 seq && ! active_insn_p (seq);
5185 seq = PREV_INSN (seq))
5186 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5187 {
5188 epilogue_line_note = seq;
5189 break;
5190 }
5191
5192 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5193 {
5194 basic_block bb = e->src;
5195 rtx jump;
5196
5197 if (bb == ENTRY_BLOCK_PTR)
5198 {
5199 ei_next (&ei2);
5200 continue;
5201 }
5202
5203 jump = BB_END (bb);
5204 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5205 {
5206 ei_next (&ei2);
5207 continue;
5208 }
5209
5210 /* If we have an unconditional jump, we can replace that
5211 with a simple return instruction. */
5212 if (simplejump_p (jump))
5213 {
5214 emit_return_into_block (bb, epilogue_line_note);
5215 delete_insn (jump);
5216 }
5217
5218 /* If we have a conditional jump, we can try to replace
5219 that with a conditional return instruction. */
5220 else if (condjump_p (jump))
5221 {
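/* Descriptive note (added): redirecting the jump to a NULL label turns it
   into a conditional return; if the target cannot express that,
   redirect_jump fails and this predecessor is left alone.  */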
5222 if (! redirect_jump (jump, 0, 0))
5223 {
5224 ei_next (&ei2);
5225 continue;
5226 }
5227
5228 /* If this block has only one successor, it both jumps
5229 and falls through to the fallthru block, so we can't
5230 delete the edge. */
5231 if (single_succ_p (bb))
5232 {
5233 ei_next (&ei2);
5234 continue;
5235 }
5236 }
5237 else
5238 {
5239 ei_next (&ei2);
5240 continue;
5241 }
5242
5243 /* Fix up the CFG for the successful change we just made. */
5244 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5245 }
5246
5247 /* Emit a return insn for the exit fallthru block. Whether
5248 this is still reachable will be determined later. */
5249
5250 emit_barrier_after (BB_END (last));
5251 emit_return_into_block (last, epilogue_line_note);
5252 epilogue_end = BB_END (last);
5253 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5254 goto epilogue_done;
5255 }
5256 }
5257 #endif
5258 /* Find the edge that falls through to EXIT. Other edges may exist
5259 due to RETURN instructions, but those don't need epilogues.
5260 There really shouldn't be a mixture -- either all should have
5261 been converted or none, however... */
5262
5263 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5264 if (e->flags & EDGE_FALLTHRU)
5265 break;
5266 if (e == NULL)
5267 goto epilogue_done;
5268
5269 #ifdef HAVE_epilogue
5270 if (HAVE_epilogue)
5271 {
5272 start_sequence ();
5273 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5274
5275 seq = gen_epilogue ();
5276
5277 #ifdef INCOMING_RETURN_ADDR_RTX
5278 /* If this function returns with the stack depressed and we can support
5279 it, massage the epilogue to actually do that. */
5280 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5281 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5282 seq = keep_stack_depressed (seq);
5283 #endif
5284
5285 emit_jump_insn (seq);
5286
5287 /* Retain a map of the epilogue insns. */
5288 record_insns (seq, &epilogue);
5289 set_insn_locators (seq, epilogue_locator);
5290
5291 seq = get_insns ();
5292 end_sequence ();
5293
5294 insert_insn_on_edge (seq, e);
5295 inserted = 1;
5296 }
5297 else
5298 #endif
5299 {
5300 basic_block cur_bb;
5301
5302 if (! next_active_insn (BB_END (e->src)))
5303 goto epilogue_done;
5304 /* We have a fall-through edge to the exit block, the source is not
5305 at the end of the function, and there will be an assembler epilogue
5306 at the end of the function.
5307 We can't use force_nonfallthru here, because that would try to
5308 use return. Inserting a jump 'by hand' is extremely messy, so
5309 we take advantage of cfg_layout_finalize using
5310 fixup_fallthru_exit_predecessor. */
5311 cfg_layout_initialize (0);
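/* Descriptive note (added): record the existing block order for
   cfg_layout_finalize.  The entry and exit blocks have negative indices
   here, so the index checks below skip linking into the exit block.  */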
5312 FOR_EACH_BB (cur_bb)
5313 if (cur_bb->index >= 0 && cur_bb->next_bb->index >= 0)
5314 cur_bb->rbi->next = cur_bb->next_bb;
5315 cfg_layout_finalize ();
5316 }
5317 epilogue_done:
5318
5319 if (inserted)
5320 commit_edge_insertions ();
5321
5322 #ifdef HAVE_sibcall_epilogue
5323 /* Emit sibling epilogues before any sibling call sites. */
5324 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5325 {
5326 basic_block bb = e->src;
5327 rtx insn = BB_END (bb);
5328
5329 if (!CALL_P (insn)
5330 || ! SIBLING_CALL_P (insn))
5331 {
5332 ei_next (&ei);
5333 continue;
5334 }
5335
5336 start_sequence ();
5337 emit_insn (gen_sibcall_epilogue ());
5338 seq = get_insns ();
5339 end_sequence ();
5340
5341 /* Retain a map of the epilogue insns. Used in life analysis to
5342 avoid getting rid of sibcall epilogue insns. Do this before we
5343 actually emit the sequence. */
5344 record_insns (seq, &sibcall_epilogue);
5345 set_insn_locators (seq, epilogue_locator);
5346
5347 emit_insn_before (seq, insn);
5348 ei_next (&ei);
5349 }
5350 #endif
5351
5352 #ifdef HAVE_prologue
5353 /* This is probably all useless now that we use locators. */
5354 if (prologue_end)
5355 {
5356 rtx insn, prev;
5357
5358 /* GDB handles `break f' by setting a breakpoint on the first
5359 line note after the prologue. Which means (1) that if
5360 there are line number notes before where we inserted the
5361 prologue we should move them, and (2) we should generate a
5362 note before the end of the first basic block, if there isn't
5363 one already there.
5364
5365 ??? This behavior is completely broken when dealing with
5366 multiple entry functions.  We simply always place the note
5367 into the first basic block and let alternate entry points
5368 be missed.
5369 */
5370
5371 for (insn = prologue_end; insn; insn = prev)
5372 {
5373 prev = PREV_INSN (insn);
5374 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5375 {
5376 /* Note that we cannot reorder the first insn in the
5377 chain, since rest_of_compilation relies on that
5378 remaining constant. */
5379 if (prev == NULL)
5380 break;
5381 reorder_insns (insn, insn, prologue_end);
5382 }
5383 }
5384
5385 /* Find the last line number note in the first block. */
5386 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5387 insn != prologue_end && insn;
5388 insn = PREV_INSN (insn))
5389 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5390 break;
5391
5392 /* If we didn't find one, make a copy of the first line number
5393 we run across. */
5394 if (! insn)
5395 {
5396 for (insn = next_active_insn (prologue_end);
5397 insn;
5398 insn = PREV_INSN (insn))
5399 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5400 {
5401 emit_note_copy_after (insn, prologue_end);
5402 break;
5403 }
5404 }
5405 }
5406 #endif
5407 #ifdef HAVE_epilogue
5408 if (epilogue_end)
5409 {
5410 rtx insn, next;
5411
5412 /* Similarly, move any line notes that appear after the epilogue.
5413 There is no need, however, to be quite so strict about the existence
5414 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5415 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5416 info generation. */
5417 for (insn = epilogue_end; insn; insn = next)
5418 {
5419 next = NEXT_INSN (insn);
5420 if (NOTE_P (insn)
5421 && (NOTE_LINE_NUMBER (insn) > 0
5422 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5423 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5424 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5425 }
5426 }
5427 #endif
5428 }
5429
5430 /* Reposition the prologue-end and epilogue-begin notes after instruction
5431 scheduling and delayed branch scheduling. */
5432
5433 void
5434 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5435 {
5436 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5437 rtx insn, last, note;
5438 int len;
5439
5440 if ((len = VARRAY_SIZE (prologue)) > 0)
5441 {
5442 last = 0, note = 0;
5443
5444 /* Scan from the beginning until we reach the last prologue insn.
5445 We apparently can't depend on basic_block_{head,end} after
5446 reorg has run. */
5447 for (insn = f; insn; insn = NEXT_INSN (insn))
5448 {
5449 if (NOTE_P (insn))
5450 {
5451 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5452 note = insn;
5453 }
5454 else if (contains (insn, prologue))
5455 {
5456 last = insn;
5457 if (--len == 0)
5458 break;
5459 }
5460 }
5461
5462 if (last)
5463 {
5464 /* Find the prologue-end note if we haven't already, and
5465 move it to just after the last prologue insn. */
5466 if (note == 0)
5467 {
5468 for (note = last; (note = NEXT_INSN (note));)
5469 if (NOTE_P (note)
5470 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5471 break;
5472 }
5473
5474 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5475 if (LABEL_P (last))
5476 last = NEXT_INSN (last);
5477 reorder_insns (note, note, last);
5478 }
5479 }
5480
5481 if ((len = VARRAY_SIZE (epilogue)) > 0)
5482 {
5483 last = 0, note = 0;
5484
5485 /* Scan from the end until we reach the first epilogue insn.
5486 We apparently can't depend on basic_block_{head,end} after
5487 reorg has run. */
5488 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5489 {
5490 if (NOTE_P (insn))
5491 {
5492 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5493 note = insn;
5494 }
5495 else if (contains (insn, epilogue))
5496 {
5497 last = insn;
5498 if (--len == 0)
5499 break;
5500 }
5501 }
5502
5503 if (last)
5504 {
5505 /* Find the epilogue-begin note if we haven't already, and
5506 move it to just before the first epilogue insn. */
5507 if (note == 0)
5508 {
5509 for (note = insn; (note = PREV_INSN (note));)
5510 if (NOTE_P (note)
5511 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5512 break;
5513 }
5514
5515 if (PREV_INSN (last) != note)
5516 reorder_insns (note, note, PREV_INSN (last));
5517 }
5518 }
5519 #endif /* HAVE_prologue or HAVE_epilogue */
5520 }
5521
5522 /* Called once, at initialization, to initialize function.c. */
5523
5524 void
5525 init_function_once (void)
5526 {
5527 VARRAY_INT_INIT (prologue, 0, "prologue");
5528 VARRAY_INT_INIT (epilogue, 0, "epilogue");
5529 VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
5530 }
5531
5532 /* Reset the ib_boundaries_block array that maps insn UIDs to blocks. */
5533
5534 void
5535 reset_block_changes (void)
5536 {
5537 VARRAY_TREE_INIT (cfun->ib_boundaries_block, 100, "ib_boundaries_block");
5538 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, NULL_TREE);
5539 }
5540
5541 /* Record the boundary for BLOCK. */
5542 void
5543 record_block_change (tree block)
5544 {
5545 int i, n;
5546 tree last_block;
5547
5548 if (!block)
5549 return;
5550
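/* Descriptive note (added): cfun->ib_boundaries_block maps insn UIDs to
   blocks.  Any UIDs created since the last recorded boundary still belong
   to the previous top-of-stack block, so the loop below pads the array
   with that block up to the current maximum UID before pushing BLOCK.  */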
5551 last_block = VARRAY_TOP_TREE (cfun->ib_boundaries_block);
5552 VARRAY_POP (cfun->ib_boundaries_block);
5553 n = get_max_uid ();
5554 for (i = VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block); i < n; i++)
5555 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, last_block);
5556
5557 VARRAY_PUSH_TREE (cfun->ib_boundaries_block, block);
5558 }
5559
5560 /* Finish the record of block boundaries. */
5561 void finalize_block_changes (void)
5562 {
5563 record_block_change (DECL_INITIAL (current_function_decl));
5564 }
5565
5566 /* For INSN, store in *BLOCK the BLOCK it belongs to. */
5567 void
5568 check_block_change (rtx insn, tree *block)
5569 {
5570 unsigned uid = INSN_UID (insn);
5571
5572 if (uid >= VARRAY_ACTIVE_SIZE (cfun->ib_boundaries_block))
5573 return;
5574
5575 *block = VARRAY_TREE (cfun->ib_boundaries_block, uid);
5576 }
5577
5578 /* Releases the ib_boundaries_block records. */
5579 void
5580 free_block_changes (void)
5581 {
5582 cfun->ib_boundaries_block = NULL;
5583 }
5584
5585 /* Returns the name of the current function. */
5586 const char *
5587 current_function_name (void)
5588 {
5589 return lang_hooks.decl_printable_name (cfun->decl, 2);
5590 }
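/* Illustrative use of the helper above (not part of this file, and the
   message text is hypothetical): a diagnostic may include the function
   name, e.g.

     error ("frame size too large in %s", current_function_name ());
*/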
5591
5592 #include "gt-function.h"