gcc/function.c
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "toplev.h"
55 #include "hashtab.h"
56 #include "ggc.h"
57 #include "tm_p.h"
58 #include "integrate.h"
59 #include "langhooks.h"
60 #include "target.h"
61 #include "cfglayout.h"
62 #include "gimple.h"
63 #include "tree-pass.h"
64 #include "predict.h"
65 #include "df.h"
66 #include "timevar.h"
67 #include "vecprim.h"
68
69 /* So we can assign to cfun in this file. */
70 #undef cfun
71
72 #ifndef STACK_ALIGNMENT_NEEDED
73 #define STACK_ALIGNMENT_NEEDED 1
74 #endif
75
76 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
77
78 /* Some systems use __main in a way incompatible with its use in gcc; in these
79 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
80 give the same symbol without quotes for an alternative entry point. You
81 must define both, or neither. */
82 #ifndef NAME__MAIN
83 #define NAME__MAIN "__main"
84 #endif
85
86 /* Round a value down to the largest multiple of the required alignment
87    that does not exceed it.  Avoid using division in case the value is
88 negative. Assume the alignment is a power of two. */
89 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
90
91 /* Similar, but round up to the next multiple of the
92    alignment.  */
93 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
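/* As a worked illustration of the two macros above (the values are examples
   only, assuming two's complement integers and a power-of-two ALIGN):

     FLOOR_ROUND (13, 8)  ==  8      FLOOR_ROUND (-12, 8) == -16
     CEIL_ROUND  (13, 8)  == 16      CEIL_ROUND  (-12, 8) ==  -8

   Masking with ~(ALIGN - 1) simply clears the low-order bits, which stays
   correct for negative values where division would round toward zero.  */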
94
95 /* Nonzero if function being compiled doesn't contain any calls
96 (ignoring the prologue and epilogue). This is set prior to
97 local register allocation and is valid for the remaining
98 compiler passes. */
99 int current_function_is_leaf;
100
101 /* Nonzero if function being compiled doesn't modify the stack pointer
102 (ignoring the prologue and epilogue). This is only valid after
103 pass_stack_ptr_mod has run. */
104 int current_function_sp_is_unchanging;
105
106 /* Nonzero if the function being compiled is a leaf function which only
107 uses leaf registers. This is valid after reload (specifically after
108 sched2) and is useful only if the port defines LEAF_REGISTERS. */
109 int current_function_uses_only_leaf_regs;
110
111 /* Nonzero once virtual register instantiation has been done.
112 assign_stack_local uses frame_pointer_rtx when this is nonzero.
113 calls.c:emit_library_call_value_1 uses it to set up
114 post-instantiation libcalls. */
115 int virtuals_instantiated;
116
117 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
118 static GTY(()) int funcdef_no;
119
120 /* These variables hold pointers to functions to create and destroy
121 target specific, per-function data structures. */
122 struct machine_function * (*init_machine_status) (void);
123
124 /* The currently compiled function. */
125 struct function *cfun = 0;
126
127 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
128 static VEC(int,heap) *prologue;
129 static VEC(int,heap) *epilogue;
130
131 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
132 in this function. */
133 static VEC(int,heap) *sibcall_epilogue;
134 \f
135 /* In order to evaluate some expressions, such as function calls returning
136 structures in memory, we need to temporarily allocate stack locations.
137 We record each allocated temporary in the following structure.
138
139 Associated with each temporary slot is a nesting level. When we pop up
140 one level, all temporaries associated with the previous level are freed.
141 Normally, all temporaries are freed after the execution of the statement
142 in which they were created. However, if we are inside a ({...}) grouping,
143 the result may be in a temporary and hence must be preserved. If the
144 result could be in a temporary, we preserve it if we can determine which
145 one it is in. If we cannot determine which temporary may contain the
146 result, all temporaries are preserved. A temporary is preserved by
147 pretending it was allocated at the previous nesting level.
148
149 Automatic variables are also assigned temporary slots, at the nesting
150    level where they are defined.  They are marked as "kept" so that
151 free_temp_slots will not free them. */
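/* A sketch of the usual life cycle, using entry points defined later in
   this file; the mode, size and variable name are illustrative only:

     push_temp_slots ();                        (enter a new nesting level)
     slot = assign_stack_temp (DImode, 8, 0);   (temporary at this level)
     ... expand the statement using SLOT ...
     free_temp_slots ();                        (frees slots with keep == 0)
     pop_temp_slots ();                         (return to the previous level)
*/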
152
153 struct temp_slot GTY(())
154 {
155 /* Points to next temporary slot. */
156 struct temp_slot *next;
157 /* Points to previous temporary slot. */
158 struct temp_slot *prev;
159
160   /* The rtx used to reference the slot.  */
161 rtx slot;
162 /* The rtx used to represent the address if not the address of the
163 slot above. May be an EXPR_LIST if multiple addresses exist. */
164 rtx address;
165 /* The alignment (in bits) of the slot. */
166 unsigned int align;
167 /* The size, in units, of the slot. */
168 HOST_WIDE_INT size;
169 /* The type of the object in the slot, or zero if it doesn't correspond
170 to a type. We use this to determine whether a slot can be reused.
171 It can be reused if objects of the type of the new slot will always
172 conflict with objects of the type of the old slot. */
173 tree type;
174 /* Nonzero if this temporary is currently in use. */
175 char in_use;
176 /* Nonzero if this temporary has its address taken. */
177 char addr_taken;
178 /* Nesting level at which this slot is being used. */
179 int level;
180 /* Nonzero if this should survive a call to free_temp_slots. */
181 int keep;
182 /* The offset of the slot from the frame_pointer, including extra space
183 for alignment. This info is for combine_temp_slots. */
184 HOST_WIDE_INT base_offset;
185 /* The size of the slot, including extra space for alignment. This
186 info is for combine_temp_slots. */
187 HOST_WIDE_INT full_size;
188 };
189 \f
190 /* Forward declarations. */
191
192 static struct temp_slot *find_temp_slot_from_address (rtx);
193 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
194 static void pad_below (struct args_size *, enum machine_mode, tree);
195 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
196 static int all_blocks (tree, tree *);
197 static tree *get_block_vector (tree, int *);
198 extern tree debug_find_var_in_block_tree (tree, tree);
199 /* We always define `record_insns' even if it's not used so that we
200 can always export `prologue_epilogue_contains'. */
201 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
202 static int contains (const_rtx, VEC(int,heap) **);
203 #ifdef HAVE_return
204 static void emit_return_into_block (basic_block);
205 #endif
206 static void prepare_function_start (void);
207 static void do_clobber_return_reg (rtx, void *);
208 static void do_use_return_reg (rtx, void *);
209 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
210 \f
211 /* Pointer to chain of `struct function' for containing functions. */
212 struct function *outer_function_chain;
213
214 /* Given a function decl for a containing function,
215 return the `struct function' for it. */
216
217 struct function *
218 find_function_data (tree decl)
219 {
220 struct function *p;
221
222 for (p = outer_function_chain; p; p = p->outer)
223 if (p->decl == decl)
224 return p;
225
226 gcc_unreachable ();
227 }
228
229 /* Save the current context for compilation of a nested function.
230 This is called from language-specific code. */
231
232 void
233 push_function_context (void)
234 {
235 if (cfun == 0)
236 allocate_struct_function (NULL, false);
237
238 cfun->outer = outer_function_chain;
239 outer_function_chain = cfun;
240 set_cfun (NULL);
241 }
242
243 /* Restore the last saved context, at the end of a nested function.
244 This function is called from language-specific code. */
245
246 void
247 pop_function_context (void)
248 {
249 struct function *p = outer_function_chain;
250
251 set_cfun (p);
252 outer_function_chain = p->outer;
253 current_function_decl = p->decl;
254
255 /* Reset variables that have known state during rtx generation. */
256 virtuals_instantiated = 0;
257 generating_concat_p = 1;
258 }
259
260 /* Clear out all parts of the state in F that can safely be discarded
261 after the function has been parsed, but not compiled, to let
262 garbage collection reclaim the memory. */
263
264 void
265 free_after_parsing (struct function *f)
266 {
267 f->language = 0;
268 }
269
270 /* Clear out all parts of the state in F that can safely be discarded
271 after the function has been compiled, to let garbage collection
272 reclaim the memory. */
273
274 void
275 free_after_compilation (struct function *f)
276 {
277 VEC_free (int, heap, prologue);
278 VEC_free (int, heap, epilogue);
279 VEC_free (int, heap, sibcall_epilogue);
280 if (crtl->emit.regno_pointer_align)
281 free (crtl->emit.regno_pointer_align);
282
283 memset (crtl, 0, sizeof (struct rtl_data));
284 f->eh = NULL;
285 f->machine = NULL;
286 f->cfg = NULL;
287
288 regno_reg_rtx = NULL;
289 }
290 \f
291 /* Return size needed for stack frame based on slots so far allocated.
292 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
293 the caller may have to do that. */
294
295 HOST_WIDE_INT
296 get_frame_size (void)
297 {
298 if (FRAME_GROWS_DOWNWARD)
299 return -frame_offset;
300 else
301 return frame_offset;
302 }
303
304 /* Issue an error message and return TRUE if frame OFFSET overflows in
305    the signed target pointer arithmetic for function FUNC.  Otherwise
306 return FALSE. */
307
308 bool
309 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
310 {
311 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
312
313 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
314 /* Leave room for the fixed part of the frame. */
315 - 64 * UNITS_PER_WORD)
316 {
317 error ("%Jtotal size of local objects too large", func);
318 return TRUE;
319 }
320
321 return FALSE;
322 }
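/* For instance, on a target with a 32-bit Pmode and UNITS_PER_WORD == 4
   (an illustrative configuration only), the limit computed above is
   2^31 - 256 bytes; a function whose local objects exceed that total gets
   the error and has its frame offset reset by the caller.  */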
323
324 /* Return stack slot alignment in bits for TYPE and MODE. */
325
326 static unsigned int
327 get_stack_local_alignment (tree type, enum machine_mode mode)
328 {
329 unsigned int alignment;
330
331 if (mode == BLKmode)
332 alignment = BIGGEST_ALIGNMENT;
333 else
334 alignment = GET_MODE_ALIGNMENT (mode);
335
336   /* Allow the front-end to (possibly) increase the alignment of this
337 stack slot. */
338 if (! type)
339 type = lang_hooks.types.type_for_mode (mode, 0);
340
341 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
342 }
343
344 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
345 with machine mode MODE.
346
347 ALIGN controls the amount of alignment for the address of the slot:
348 0 means according to MODE,
349 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
350 -2 means use BITS_PER_UNIT,
351 positive specifies alignment boundary in bits.
352
353 If REDUCE_ALIGNMENT_OK is true, it is OK to reduce alignment.
354
355 We do not round to stack_boundary here. */
356
357 rtx
358 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
359 int align,
360 bool reduce_alignment_ok ATTRIBUTE_UNUSED)
361 {
362 rtx x, addr;
363 int bigend_correction = 0;
364 unsigned int alignment, alignment_in_bits;
365 int frame_off, frame_alignment, frame_phase;
366
367 if (align == 0)
368 {
369 alignment = get_stack_local_alignment (NULL, mode);
370 alignment /= BITS_PER_UNIT;
371 }
372 else if (align == -1)
373 {
374 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
375 size = CEIL_ROUND (size, alignment);
376 }
377 else if (align == -2)
378 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
379 else
380 alignment = align / BITS_PER_UNIT;
381
382 alignment_in_bits = alignment * BITS_PER_UNIT;
383
384 if (FRAME_GROWS_DOWNWARD)
385 frame_offset -= size;
386
387 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
388 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
389 {
390 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
391 alignment = alignment_in_bits / BITS_PER_UNIT;
392 }
393
394 if (SUPPORTS_STACK_ALIGNMENT)
395 {
396 if (crtl->stack_alignment_estimated < alignment_in_bits)
397 {
398 if (!crtl->stack_realign_processed)
399 crtl->stack_alignment_estimated = alignment_in_bits;
400 else
401 {
402 /* If stack is realigned and stack alignment value
403 hasn't been finalized, it is OK not to increase
404 stack_alignment_estimated. The bigger alignment
405 requirement is recorded in stack_alignment_needed
406 below. */
407 gcc_assert (!crtl->stack_realign_finalized);
408 if (!crtl->stack_realign_needed)
409 {
410 /* It is OK to reduce the alignment as long as the
411 requested size is 0 or the estimated stack
412 alignment >= mode alignment. */
413 gcc_assert (reduce_alignment_ok
414 || size == 0
415 || (crtl->stack_alignment_estimated
416 >= GET_MODE_ALIGNMENT (mode)));
417 alignment_in_bits = crtl->stack_alignment_estimated;
418 alignment = alignment_in_bits / BITS_PER_UNIT;
419 }
420 }
421 }
422 }
423
424 if (crtl->stack_alignment_needed < alignment_in_bits)
425 crtl->stack_alignment_needed = alignment_in_bits;
426 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
427 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
428
429 /* Calculate how many bytes the start of local variables is off from
430 stack alignment. */
431 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
432 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
433 frame_phase = frame_off ? frame_alignment - frame_off : 0;
434
435 /* Round the frame offset to the specified alignment. The default is
436 to always honor requests to align the stack but a port may choose to
437 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
438 if (STACK_ALIGNMENT_NEEDED
439 || mode != BLKmode
440 || size != 0)
441 {
442 /* We must be careful here, since FRAME_OFFSET might be negative and
443 division with a negative dividend isn't as well defined as we might
444 like. So we instead assume that ALIGNMENT is a power of two and
445 use logical operations which are unambiguous. */
446 if (FRAME_GROWS_DOWNWARD)
447 frame_offset
448 = (FLOOR_ROUND (frame_offset - frame_phase,
449 (unsigned HOST_WIDE_INT) alignment)
450 + frame_phase);
451 else
452 frame_offset
453 = (CEIL_ROUND (frame_offset - frame_phase,
454 (unsigned HOST_WIDE_INT) alignment)
455 + frame_phase);
456 }
457
458 /* On a big-endian machine, if we are allocating more space than we will use,
459 use the least significant bytes of those that are allocated. */
460 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
461 bigend_correction = size - GET_MODE_SIZE (mode);
462
463 /* If we have already instantiated virtual registers, return the actual
464 address relative to the frame pointer. */
465 if (virtuals_instantiated)
466 addr = plus_constant (frame_pointer_rtx,
467 trunc_int_for_mode
468 (frame_offset + bigend_correction
469 + STARTING_FRAME_OFFSET, Pmode));
470 else
471 addr = plus_constant (virtual_stack_vars_rtx,
472 trunc_int_for_mode
473 (frame_offset + bigend_correction,
474 Pmode));
475
476 if (!FRAME_GROWS_DOWNWARD)
477 frame_offset += size;
478
479 x = gen_rtx_MEM (mode, addr);
480 set_mem_align (x, alignment_in_bits);
481 MEM_NOTRAP_P (x) = 1;
482
483 stack_slot_list
484 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
485
486 if (frame_offset_overflow (frame_offset, current_function_decl))
487 frame_offset = 0;
488
489 return x;
490 }
491
492 /* Wrap up assign_stack_local_1 with last parameter as false. */
493
494 rtx
495 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
496 {
497 return assign_stack_local_1 (mode, size, align, false);
498 }
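/* Hedged usage sketches for the wrapper above; the modes and sizes are
   illustrative and LEN stands for any BLKmode byte count:

     assign_stack_local (SImode, 4, 0);      (align according to SImode)
     assign_stack_local (BLKmode, len, -1);  (use BIGGEST_ALIGNMENT and round
                                              LEN up to that alignment)
     assign_stack_local (QImode, 1, -2);     (byte alignment only)

   Each call returns a MEM whose address is based on virtual_stack_vars_rtx
   until virtual registers are instantiated, and on frame_pointer_rtx
   afterwards.  */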
499 \f
500 /* Removes temporary slot TEMP from LIST. */
501
502 static void
503 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
504 {
505 if (temp->next)
506 temp->next->prev = temp->prev;
507 if (temp->prev)
508 temp->prev->next = temp->next;
509 else
510 *list = temp->next;
511
512 temp->prev = temp->next = NULL;
513 }
514
515 /* Inserts temporary slot TEMP to LIST. */
516
517 static void
518 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
519 {
520 temp->next = *list;
521 if (*list)
522 (*list)->prev = temp;
523 temp->prev = NULL;
524 *list = temp;
525 }
526
527 /* Returns the list of used temp slots at LEVEL. */
528
529 static struct temp_slot **
530 temp_slots_at_level (int level)
531 {
532 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
533 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
534
535 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
536 }
537
538 /* Returns the maximal temporary slot level. */
539
540 static int
541 max_slot_level (void)
542 {
543 if (!used_temp_slots)
544 return -1;
545
546 return VEC_length (temp_slot_p, used_temp_slots) - 1;
547 }
548
549 /* Moves temporary slot TEMP to LEVEL. */
550
551 static void
552 move_slot_to_level (struct temp_slot *temp, int level)
553 {
554 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
555 insert_slot_to_list (temp, temp_slots_at_level (level));
556 temp->level = level;
557 }
558
559 /* Make temporary slot TEMP available. */
560
561 static void
562 make_slot_available (struct temp_slot *temp)
563 {
564 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
565 insert_slot_to_list (temp, &avail_temp_slots);
566 temp->in_use = 0;
567 temp->level = -1;
568 }
569 \f
570 /* Allocate a temporary stack slot and record it for possible later
571 reuse.
572
573 MODE is the machine mode to be given to the returned rtx.
574
575 SIZE is the size in units of the space required. We do no rounding here
576 since assign_stack_local will do any required rounding.
577
578 KEEP is 1 if this slot is to be retained after a call to
579 free_temp_slots. Automatic variables for a block are allocated
580 with this flag. KEEP values of 2 or 3 were needed respectively
581 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
582 or for SAVE_EXPRs, but they are now unused.
583
584 TYPE is the type that will be used for the stack slot. */
585
586 rtx
587 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
588 int keep, tree type)
589 {
590 unsigned int align;
591 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
592 rtx slot;
593
594 /* If SIZE is -1 it means that somebody tried to allocate a temporary
595 of a variable size. */
596 gcc_assert (size != -1);
597
598 /* These are now unused. */
599 gcc_assert (keep <= 1);
600
601 align = get_stack_local_alignment (type, mode);
602
603 /* Try to find an available, already-allocated temporary of the proper
604 mode which meets the size and alignment requirements. Choose the
605 smallest one with the closest alignment.
606
607 If assign_stack_temp is called outside of the tree->rtl expansion,
608 we cannot reuse the stack slots (that may still refer to
609 VIRTUAL_STACK_VARS_REGNUM). */
610 if (!virtuals_instantiated)
611 {
612 for (p = avail_temp_slots; p; p = p->next)
613 {
614 if (p->align >= align && p->size >= size
615 && GET_MODE (p->slot) == mode
616 && objects_must_conflict_p (p->type, type)
617 && (best_p == 0 || best_p->size > p->size
618 || (best_p->size == p->size && best_p->align > p->align)))
619 {
620 if (p->align == align && p->size == size)
621 {
622 selected = p;
623 cut_slot_from_list (selected, &avail_temp_slots);
624 best_p = 0;
625 break;
626 }
627 best_p = p;
628 }
629 }
630 }
631
632 /* Make our best, if any, the one to use. */
633 if (best_p)
634 {
635 selected = best_p;
636 cut_slot_from_list (selected, &avail_temp_slots);
637
638 /* If there are enough aligned bytes left over, make them into a new
639 temp_slot so that the extra bytes don't get wasted. Do this only
640 for BLKmode slots, so that we can be sure of the alignment. */
641 if (GET_MODE (best_p->slot) == BLKmode)
642 {
643 int alignment = best_p->align / BITS_PER_UNIT;
644 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
645
646 if (best_p->size - rounded_size >= alignment)
647 {
648 p = GGC_NEW (struct temp_slot);
649 p->in_use = p->addr_taken = 0;
650 p->size = best_p->size - rounded_size;
651 p->base_offset = best_p->base_offset + rounded_size;
652 p->full_size = best_p->full_size - rounded_size;
653 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
654 p->align = best_p->align;
655 p->address = 0;
656 p->type = best_p->type;
657 insert_slot_to_list (p, &avail_temp_slots);
658
659 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
660 stack_slot_list);
661
662 best_p->size = rounded_size;
663 best_p->full_size = rounded_size;
664 }
665 }
666 }
667
668 /* If we still didn't find one, make a new temporary. */
669 if (selected == 0)
670 {
671 HOST_WIDE_INT frame_offset_old = frame_offset;
672
673 p = GGC_NEW (struct temp_slot);
674
675 /* We are passing an explicit alignment request to assign_stack_local.
676 One side effect of that is assign_stack_local will not round SIZE
677 to ensure the frame offset remains suitably aligned.
678
679 So for requests which depended on the rounding of SIZE, we go ahead
680 and round it now. We also make sure ALIGNMENT is at least
681 BIGGEST_ALIGNMENT. */
682 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
683 p->slot = assign_stack_local (mode,
684 (mode == BLKmode
685 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
686 : size),
687 align);
688
689 p->align = align;
690
691 /* The following slot size computation is necessary because we don't
692 know the actual size of the temporary slot until assign_stack_local
693 has performed all the frame alignment and size rounding for the
694 requested temporary. Note that extra space added for alignment
695 can be either above or below this stack slot depending on which
696 way the frame grows. We include the extra space if and only if it
697 is above this slot. */
698 if (FRAME_GROWS_DOWNWARD)
699 p->size = frame_offset_old - frame_offset;
700 else
701 p->size = size;
702
703 /* Now define the fields used by combine_temp_slots. */
704 if (FRAME_GROWS_DOWNWARD)
705 {
706 p->base_offset = frame_offset;
707 p->full_size = frame_offset_old - frame_offset;
708 }
709 else
710 {
711 p->base_offset = frame_offset_old;
712 p->full_size = frame_offset - frame_offset_old;
713 }
714 p->address = 0;
715
716 selected = p;
717 }
718
719 p = selected;
720 p->in_use = 1;
721 p->addr_taken = 0;
722 p->type = type;
723 p->level = temp_slot_level;
724 p->keep = keep;
725
726 pp = temp_slots_at_level (p->level);
727 insert_slot_to_list (p, pp);
728
729 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
730 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
731 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
732
733 /* If we know the alias set for the memory that will be used, use
734 it. If there's no TYPE, then we don't know anything about the
735 alias set for the memory. */
736 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
737 set_mem_align (slot, align);
738
739 /* If a type is specified, set the relevant flags. */
740 if (type != 0)
741 {
742 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
743 MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
744 || TREE_CODE (type) == COMPLEX_TYPE));
745 }
746 MEM_NOTRAP_P (slot) = 1;
747
748 return slot;
749 }
750
751 /* Allocate a temporary stack slot and record it for possible later
752    reuse.  The first three arguments are the same as in the preceding function.  */
753
754 rtx
755 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
756 {
757 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
758 }
759 \f
760 /* Assign a temporary.
761 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
762    and so the decl should be used in error messages.  In either case, we
763    allocate space of the given type.
764 KEEP is as for assign_stack_temp.
765 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
766 it is 0 if a register is OK.
767 DONT_PROMOTE is 1 if we should not promote values in register
768 to wider modes. */
769
770 rtx
771 assign_temp (tree type_or_decl, int keep, int memory_required,
772 int dont_promote ATTRIBUTE_UNUSED)
773 {
774 tree type, decl;
775 enum machine_mode mode;
776 #ifdef PROMOTE_MODE
777 int unsignedp;
778 #endif
779
780 if (DECL_P (type_or_decl))
781 decl = type_or_decl, type = TREE_TYPE (decl);
782 else
783 decl = NULL, type = type_or_decl;
784
785 mode = TYPE_MODE (type);
786 #ifdef PROMOTE_MODE
787 unsignedp = TYPE_UNSIGNED (type);
788 #endif
789
790 if (mode == BLKmode || memory_required)
791 {
792 HOST_WIDE_INT size = int_size_in_bytes (type);
793 rtx tmp;
794
795       /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
796 problems with allocating the stack space. */
797 if (size == 0)
798 size = 1;
799
800 /* Unfortunately, we don't yet know how to allocate variable-sized
801 temporaries. However, sometimes we can find a fixed upper limit on
802 the size, so try that instead. */
803 else if (size == -1)
804 size = max_int_size_in_bytes (type);
805
806 /* The size of the temporary may be too large to fit into an integer. */
807 /* ??? Not sure this should happen except for user silliness, so limit
808 this to things that aren't compiler-generated temporaries. The
809 rest of the time we'll die in assign_stack_temp_for_type. */
810 if (decl && size == -1
811 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
812 {
813 error ("size of variable %q+D is too large", decl);
814 size = 1;
815 }
816
817 tmp = assign_stack_temp_for_type (mode, size, keep, type);
818 return tmp;
819 }
820
821 #ifdef PROMOTE_MODE
822 if (! dont_promote)
823 mode = promote_mode (type, mode, &unsignedp, 0);
824 #endif
825
826 return gen_reg_rtx (mode);
827 }
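/* A hedged sketch of a typical call to assign_temp when expanding a value
   of tree type TYPE (the variable names are illustrative):

     rtx target = assign_temp (type, 0, 1, 0);

   With MEMORY_REQUIRED == 1 this always yields addressable stack memory.
   With MEMORY_REQUIRED == 0 and a non-BLKmode type it returns a fresh
   pseudo register instead, promoted to a wider mode when PROMOTE_MODE is
   defined and DONT_PROMOTE is 0.  */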
828 \f
829 /* Combine temporary stack slots which are adjacent on the stack.
830
831 This allows for better use of already allocated stack space. This is only
832 done for BLKmode slots because we can be sure that we won't have alignment
833 problems in this case. */
834
835 static void
836 combine_temp_slots (void)
837 {
838 struct temp_slot *p, *q, *next, *next_q;
839 int num_slots;
840
841 /* We can't combine slots, because the information about which slot
842 is in which alias set will be lost. */
843 if (flag_strict_aliasing)
844 return;
845
846 /* If there are a lot of temp slots, don't do anything unless
847      we are at high levels of optimization.  */
848 if (! flag_expensive_optimizations)
849 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
850 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
851 return;
852
853 for (p = avail_temp_slots; p; p = next)
854 {
855 int delete_p = 0;
856
857 next = p->next;
858
859 if (GET_MODE (p->slot) != BLKmode)
860 continue;
861
862 for (q = p->next; q; q = next_q)
863 {
864 int delete_q = 0;
865
866 next_q = q->next;
867
868 if (GET_MODE (q->slot) != BLKmode)
869 continue;
870
871 if (p->base_offset + p->full_size == q->base_offset)
872 {
873 /* Q comes after P; combine Q into P. */
874 p->size += q->size;
875 p->full_size += q->full_size;
876 delete_q = 1;
877 }
878 else if (q->base_offset + q->full_size == p->base_offset)
879 {
880 /* P comes after Q; combine P into Q. */
881 q->size += p->size;
882 q->full_size += p->full_size;
883 delete_p = 1;
884 break;
885 }
886 if (delete_q)
887 cut_slot_from_list (q, &avail_temp_slots);
888 }
889
890 /* Either delete P or advance past it. */
891 if (delete_p)
892 cut_slot_from_list (p, &avail_temp_slots);
893 }
894 }
895 \f
896 /* Find the temp slot corresponding to the object at address X. */
897
898 static struct temp_slot *
899 find_temp_slot_from_address (rtx x)
900 {
901 struct temp_slot *p;
902 rtx next;
903 int i;
904
905 for (i = max_slot_level (); i >= 0; i--)
906 for (p = *temp_slots_at_level (i); p; p = p->next)
907 {
908 if (XEXP (p->slot, 0) == x
909 || p->address == x
910 || (GET_CODE (x) == PLUS
911 && XEXP (x, 0) == virtual_stack_vars_rtx
912 && GET_CODE (XEXP (x, 1)) == CONST_INT
913 && INTVAL (XEXP (x, 1)) >= p->base_offset
914 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
915 return p;
916
917 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
918 for (next = p->address; next; next = XEXP (next, 1))
919 if (XEXP (next, 0) == x)
920 return p;
921 }
922
923 /* If we have a sum involving a register, see if it points to a temp
924 slot. */
925 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
926 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
927 return p;
928 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
929 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
930 return p;
931
932 return 0;
933 }
934
935 /* Indicate that NEW_RTX is an alternate way of referring to the temp
936 slot that previously was known by OLD_RTX. */
937
938 void
939 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
940 {
941 struct temp_slot *p;
942
943 if (rtx_equal_p (old_rtx, new_rtx))
944 return;
945
946 p = find_temp_slot_from_address (old_rtx);
947
948   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
949 NEW_RTX is a register, see if one operand of the PLUS is a
950 temporary location. If so, NEW_RTX points into it. Otherwise,
951 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
952 in common between them. If so, try a recursive call on those
953 values. */
954 if (p == 0)
955 {
956 if (GET_CODE (old_rtx) != PLUS)
957 return;
958
959 if (REG_P (new_rtx))
960 {
961 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
962 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
963 return;
964 }
965 else if (GET_CODE (new_rtx) != PLUS)
966 return;
967
968 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
969 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
970 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
971 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
972 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
973 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
974 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
975 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
976
977 return;
978 }
979
980 /* Otherwise add an alias for the temp's address. */
981 else if (p->address == 0)
982 p->address = new_rtx;
983 else
984 {
985 if (GET_CODE (p->address) != EXPR_LIST)
986 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
987
988 p->address = gen_rtx_EXPR_LIST (VOIDmode, new_rtx, p->address);
989 }
990 }
991
992 /* If X could be a reference to a temporary slot, mark the fact that its
993 address was taken. */
994
995 void
996 mark_temp_addr_taken (rtx x)
997 {
998 struct temp_slot *p;
999
1000 if (x == 0)
1001 return;
1002
1003 /* If X is not in memory or is at a constant address, it cannot be in
1004 a temporary slot. */
1005 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1006 return;
1007
1008 p = find_temp_slot_from_address (XEXP (x, 0));
1009 if (p != 0)
1010 p->addr_taken = 1;
1011 }
1012
1013 /* If X could be a reference to a temporary slot, mark that slot as
1014    belonging to the level one higher than the current level.  If X
1015 matched one of our slots, just mark that one. Otherwise, we can't
1016 easily predict which it is, so upgrade all of them. Kept slots
1017 need not be touched.
1018
1019 This is called when an ({...}) construct occurs and a statement
1020 returns a value in memory. */
1021
1022 void
1023 preserve_temp_slots (rtx x)
1024 {
1025 struct temp_slot *p = 0, *next;
1026
1027   /* If there is no result, we still might have some objects whose addresses
1028 were taken, so we need to make sure they stay around. */
1029 if (x == 0)
1030 {
1031 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1032 {
1033 next = p->next;
1034
1035 if (p->addr_taken)
1036 move_slot_to_level (p, temp_slot_level - 1);
1037 }
1038
1039 return;
1040 }
1041
1042 /* If X is a register that is being used as a pointer, see if we have
1043 a temporary slot we know it points to. To be consistent with
1044 the code below, we really should preserve all non-kept slots
1045 if we can't find a match, but that seems to be much too costly. */
1046 if (REG_P (x) && REG_POINTER (x))
1047 p = find_temp_slot_from_address (x);
1048
1049 /* If X is not in memory or is at a constant address, it cannot be in
1050 a temporary slot, but it can contain something whose address was
1051 taken. */
1052 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1053 {
1054 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1055 {
1056 next = p->next;
1057
1058 if (p->addr_taken)
1059 move_slot_to_level (p, temp_slot_level - 1);
1060 }
1061
1062 return;
1063 }
1064
1065 /* First see if we can find a match. */
1066 if (p == 0)
1067 p = find_temp_slot_from_address (XEXP (x, 0));
1068
1069 if (p != 0)
1070 {
1071 /* Move everything at our level whose address was taken to our new
1072 level in case we used its address. */
1073 struct temp_slot *q;
1074
1075 if (p->level == temp_slot_level)
1076 {
1077 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1078 {
1079 next = q->next;
1080
1081 if (p != q && q->addr_taken)
1082 move_slot_to_level (q, temp_slot_level - 1);
1083 }
1084
1085 move_slot_to_level (p, temp_slot_level - 1);
1086 p->addr_taken = 0;
1087 }
1088 return;
1089 }
1090
1091 /* Otherwise, preserve all non-kept slots at this level. */
1092 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1093 {
1094 next = p->next;
1095
1096 if (!p->keep)
1097 move_slot_to_level (p, temp_slot_level - 1);
1098 }
1099 }
1100
1101 /* Free all temporaries used so far. This is normally called at the
1102 end of generating code for a statement. */
1103
1104 void
1105 free_temp_slots (void)
1106 {
1107 struct temp_slot *p, *next;
1108
1109 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1110 {
1111 next = p->next;
1112
1113 if (!p->keep)
1114 make_slot_available (p);
1115 }
1116
1117 combine_temp_slots ();
1118 }
1119
1120 /* Push deeper into the nesting level for stack temporaries. */
1121
1122 void
1123 push_temp_slots (void)
1124 {
1125 temp_slot_level++;
1126 }
1127
1128 /* Pop a temporary nesting level. All slots in use in the current level
1129 are freed. */
1130
1131 void
1132 pop_temp_slots (void)
1133 {
1134 struct temp_slot *p, *next;
1135
1136 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1137 {
1138 next = p->next;
1139 make_slot_available (p);
1140 }
1141
1142 combine_temp_slots ();
1143
1144 temp_slot_level--;
1145 }
1146
1147 /* Initialize temporary slots. */
1148
1149 void
1150 init_temp_slots (void)
1151 {
1152 /* We have not allocated any temporaries yet. */
1153 avail_temp_slots = 0;
1154 used_temp_slots = 0;
1155 temp_slot_level = 0;
1156 }
1157 \f
1158 /* These routines are responsible for converting virtual register references
1159 to the actual hard register references once RTL generation is complete.
1160
1161    The following five variables are used for communication between the
1162 routines. They contain the offsets of the virtual registers from their
1163 respective hard registers. */
1164
1165 static int in_arg_offset;
1166 static int var_offset;
1167 static int dynamic_offset;
1168 static int out_arg_offset;
1169 static int cfa_offset;
1170
1171 /* In most machines, the stack pointer register is equivalent to the bottom
1172 of the stack. */
1173
1174 #ifndef STACK_POINTER_OFFSET
1175 #define STACK_POINTER_OFFSET 0
1176 #endif
1177
1178 /* If not defined, pick an appropriate default for the offset of dynamically
1179 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1180 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1181
1182 #ifndef STACK_DYNAMIC_OFFSET
1183
1184 /* The bottom of the stack points to the actual arguments. If
1185 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1186    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1187 stack space for register parameters is not pushed by the caller, but
1188 rather part of the fixed stack areas and hence not included in
1189 `crtl->outgoing_args_size'. Nevertheless, we must allow
1190 for it when allocating stack dynamic objects. */
1191
1192 #if defined(REG_PARM_STACK_SPACE)
1193 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1194 ((ACCUMULATE_OUTGOING_ARGS \
1195 ? (crtl->outgoing_args_size \
1196 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1197 : REG_PARM_STACK_SPACE (FNDECL))) \
1198 : 0) + (STACK_POINTER_OFFSET))
1199 #else
1200 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1201 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1202 + (STACK_POINTER_OFFSET))
1203 #endif
1204 #endif
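/* As a hedged example of the default definition above: on a target with
   ACCUMULATE_OUTGOING_ARGS, no REG_PARM_STACK_SPACE, STACK_POINTER_OFFSET
   of 0 and crtl->outgoing_args_size of 16 (all illustrative values),
   STACK_DYNAMIC_OFFSET (FNDECL) evaluates to 16, so dynamically allocated
   objects are placed beyond the outgoing argument area.  */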
1205
1206 \f
1207 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1208 is a virtual register, return the equivalent hard register and set the
1209 offset indirectly through the pointer. Otherwise, return 0. */
1210
1211 static rtx
1212 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1213 {
1214 rtx new_rtx;
1215 HOST_WIDE_INT offset;
1216
1217 if (x == virtual_incoming_args_rtx)
1218 {
1219 if (stack_realign_drap)
1220 {
1221 /* Replace virtual_incoming_args_rtx with internal arg
1222 pointer if DRAP is used to realign stack. */
1223 new_rtx = crtl->args.internal_arg_pointer;
1224 offset = 0;
1225 }
1226 else
1227 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1228 }
1229 else if (x == virtual_stack_vars_rtx)
1230 new_rtx = frame_pointer_rtx, offset = var_offset;
1231 else if (x == virtual_stack_dynamic_rtx)
1232 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1233 else if (x == virtual_outgoing_args_rtx)
1234 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1235 else if (x == virtual_cfa_rtx)
1236 {
1237 #ifdef FRAME_POINTER_CFA_OFFSET
1238 new_rtx = frame_pointer_rtx;
1239 #else
1240 new_rtx = arg_pointer_rtx;
1241 #endif
1242 offset = cfa_offset;
1243 }
1244 else
1245 return NULL_RTX;
1246
1247 *poffset = offset;
1248 return new_rtx;
1249 }
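/* A summary sketch of the mapping performed above; the offsets are the
   file-static values computed in instantiate_virtual_regs:

     virtual_incoming_args_rtx  -> arg_pointer_rtx   + in_arg_offset
     virtual_stack_vars_rtx     -> frame_pointer_rtx + var_offset
     virtual_stack_dynamic_rtx  -> stack_pointer_rtx + dynamic_offset
     virtual_outgoing_args_rtx  -> stack_pointer_rtx + out_arg_offset
     virtual_cfa_rtx            -> frame or arg pointer + cfa_offset

   With stack_realign_drap, virtual_incoming_args_rtx instead maps to
   crtl->args.internal_arg_pointer with an offset of 0.  */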
1250
1251 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1252 Instantiate any virtual registers present inside of *LOC. The expression
1253 is simplified, as much as possible, but is not to be considered "valid"
1254 in any sense implied by the target. If any change is made, set CHANGED
1255 to true. */
1256
1257 static int
1258 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1259 {
1260 HOST_WIDE_INT offset;
1261 bool *changed = (bool *) data;
1262 rtx x, new_rtx;
1263
1264 x = *loc;
1265 if (x == 0)
1266 return 0;
1267
1268 switch (GET_CODE (x))
1269 {
1270 case REG:
1271 new_rtx = instantiate_new_reg (x, &offset);
1272 if (new_rtx)
1273 {
1274 *loc = plus_constant (new_rtx, offset);
1275 if (changed)
1276 *changed = true;
1277 }
1278 return -1;
1279
1280 case PLUS:
1281 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1282 if (new_rtx)
1283 {
1284 new_rtx = plus_constant (new_rtx, offset);
1285 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
1286 if (changed)
1287 *changed = true;
1288 return -1;
1289 }
1290
1291 /* FIXME -- from old code */
1292 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1293 we can commute the PLUS and SUBREG because pointers into the
1294 frame are well-behaved. */
1295 break;
1296
1297 default:
1298 break;
1299 }
1300
1301 return 0;
1302 }
1303
1304 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1305 matches the predicate for insn CODE operand OPERAND. */
1306
1307 static int
1308 safe_insn_predicate (int code, int operand, rtx x)
1309 {
1310 const struct insn_operand_data *op_data;
1311
1312 if (code < 0)
1313 return true;
1314
1315 op_data = &insn_data[code].operand[operand];
1316 if (op_data->predicate == NULL)
1317 return true;
1318
1319 return op_data->predicate (x, op_data->mode);
1320 }
1321
1322 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1323 registers present inside of insn. The result will be a valid insn. */
1324
1325 static void
1326 instantiate_virtual_regs_in_insn (rtx insn)
1327 {
1328 HOST_WIDE_INT offset;
1329 int insn_code, i;
1330 bool any_change = false;
1331 rtx set, new_rtx, x, seq;
1332
1333 /* There are some special cases to be handled first. */
1334 set = single_set (insn);
1335 if (set)
1336 {
1337 /* We're allowed to assign to a virtual register. This is interpreted
1338 to mean that the underlying register gets assigned the inverse
1339 transformation. This is used, for example, in the handling of
1340 non-local gotos. */
1341 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1342 if (new_rtx)
1343 {
1344 start_sequence ();
1345
1346 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1347 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1348 GEN_INT (-offset));
1349 x = force_operand (x, new_rtx);
1350 if (x != new_rtx)
1351 emit_move_insn (new_rtx, x);
1352
1353 seq = get_insns ();
1354 end_sequence ();
1355
1356 emit_insn_before (seq, insn);
1357 delete_insn (insn);
1358 return;
1359 }
1360
1361 /* Handle a straight copy from a virtual register by generating a
1362 new add insn. The difference between this and falling through
1363 to the generic case is avoiding a new pseudo and eliminating a
1364 move insn in the initial rtl stream. */
1365 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1366 if (new_rtx && offset != 0
1367 && REG_P (SET_DEST (set))
1368 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1369 {
1370 start_sequence ();
1371
1372 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1373 new_rtx, GEN_INT (offset), SET_DEST (set),
1374 1, OPTAB_LIB_WIDEN);
1375 if (x != SET_DEST (set))
1376 emit_move_insn (SET_DEST (set), x);
1377
1378 seq = get_insns ();
1379 end_sequence ();
1380
1381 emit_insn_before (seq, insn);
1382 delete_insn (insn);
1383 return;
1384 }
1385
1386 extract_insn (insn);
1387 insn_code = INSN_CODE (insn);
1388
1389 /* Handle a plus involving a virtual register by determining if the
1390 operands remain valid if they're modified in place. */
1391 if (GET_CODE (SET_SRC (set)) == PLUS
1392 && recog_data.n_operands >= 3
1393 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1394 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1395 && GET_CODE (recog_data.operand[2]) == CONST_INT
1396 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1397 {
1398 offset += INTVAL (recog_data.operand[2]);
1399
1400 /* If the sum is zero, then replace with a plain move. */
1401 if (offset == 0
1402 && REG_P (SET_DEST (set))
1403 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1404 {
1405 start_sequence ();
1406 emit_move_insn (SET_DEST (set), new_rtx);
1407 seq = get_insns ();
1408 end_sequence ();
1409
1410 emit_insn_before (seq, insn);
1411 delete_insn (insn);
1412 return;
1413 }
1414
1415 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1416
1417 /* Using validate_change and apply_change_group here leaves
1418 recog_data in an invalid state. Since we know exactly what
1419 we want to check, do those two by hand. */
1420 if (safe_insn_predicate (insn_code, 1, new_rtx)
1421 && safe_insn_predicate (insn_code, 2, x))
1422 {
1423 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1424 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1425 any_change = true;
1426
1427 /* Fall through into the regular operand fixup loop in
1428 order to take care of operands other than 1 and 2. */
1429 }
1430 }
1431 }
1432 else
1433 {
1434 extract_insn (insn);
1435 insn_code = INSN_CODE (insn);
1436 }
1437
1438 /* In the general case, we expect virtual registers to appear only in
1439 operands, and then only as either bare registers or inside memories. */
1440 for (i = 0; i < recog_data.n_operands; ++i)
1441 {
1442 x = recog_data.operand[i];
1443 switch (GET_CODE (x))
1444 {
1445 case MEM:
1446 {
1447 rtx addr = XEXP (x, 0);
1448 bool changed = false;
1449
1450 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1451 if (!changed)
1452 continue;
1453
1454 start_sequence ();
1455 x = replace_equiv_address (x, addr);
1456 /* It may happen that the address with the virtual reg
1457 was valid (e.g. based on the virtual stack reg, which might
1458 be acceptable to the predicates with all offsets), whereas
1459 		 the address no longer is, for instance when the address
1460 		 still has an offset, but the base reg isn't the virtual stack reg
1461 anymore. Below we would do a force_reg on the whole operand,
1462 but this insn might actually only accept memory. Hence,
1463 before doing that last resort, try to reload the address into
1464 a register, so this operand stays a MEM. */
1465 if (!safe_insn_predicate (insn_code, i, x))
1466 {
1467 addr = force_reg (GET_MODE (addr), addr);
1468 x = replace_equiv_address (x, addr);
1469 }
1470 seq = get_insns ();
1471 end_sequence ();
1472 if (seq)
1473 emit_insn_before (seq, insn);
1474 }
1475 break;
1476
1477 case REG:
1478 new_rtx = instantiate_new_reg (x, &offset);
1479 if (new_rtx == NULL)
1480 continue;
1481 if (offset == 0)
1482 x = new_rtx;
1483 else
1484 {
1485 start_sequence ();
1486
1487 /* Careful, special mode predicates may have stuff in
1488 insn_data[insn_code].operand[i].mode that isn't useful
1489 to us for computing a new value. */
1490 /* ??? Recognize address_operand and/or "p" constraints
1491 	       to see if (plus new offset) is valid before we put
1492 this through expand_simple_binop. */
1493 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1494 GEN_INT (offset), NULL_RTX,
1495 1, OPTAB_LIB_WIDEN);
1496 seq = get_insns ();
1497 end_sequence ();
1498 emit_insn_before (seq, insn);
1499 }
1500 break;
1501
1502 case SUBREG:
1503 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1504 if (new_rtx == NULL)
1505 continue;
1506 if (offset != 0)
1507 {
1508 start_sequence ();
1509 new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
1510 GEN_INT (offset), NULL_RTX,
1511 1, OPTAB_LIB_WIDEN);
1512 seq = get_insns ();
1513 end_sequence ();
1514 emit_insn_before (seq, insn);
1515 }
1516 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1517 GET_MODE (new_rtx), SUBREG_BYTE (x));
1518 gcc_assert (x);
1519 break;
1520
1521 default:
1522 continue;
1523 }
1524
1525 /* At this point, X contains the new value for the operand.
1526 Validate the new value vs the insn predicate. Note that
1527 asm insns will have insn_code -1 here. */
1528 if (!safe_insn_predicate (insn_code, i, x))
1529 {
1530 start_sequence ();
1531 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1532 seq = get_insns ();
1533 end_sequence ();
1534 if (seq)
1535 emit_insn_before (seq, insn);
1536 }
1537
1538 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1539 any_change = true;
1540 }
1541
1542 if (any_change)
1543 {
1544 /* Propagate operand changes into the duplicates. */
1545 for (i = 0; i < recog_data.n_dups; ++i)
1546 *recog_data.dup_loc[i]
1547 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1548
1549 /* Force re-recognition of the instruction for validation. */
1550 INSN_CODE (insn) = -1;
1551 }
1552
1553 if (asm_noperands (PATTERN (insn)) >= 0)
1554 {
1555 if (!check_asm_operands (PATTERN (insn)))
1556 {
1557 error_for_asm (insn, "impossible constraint in %<asm%>");
1558 delete_insn (insn);
1559 }
1560 }
1561 else
1562 {
1563 if (recog_memoized (insn) < 0)
1564 fatal_insn_not_found (insn);
1565 }
1566 }
1567
1568 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1569 do any instantiation required. */
1570
1571 void
1572 instantiate_decl_rtl (rtx x)
1573 {
1574 rtx addr;
1575
1576 if (x == 0)
1577 return;
1578
1579 /* If this is a CONCAT, recurse for the pieces. */
1580 if (GET_CODE (x) == CONCAT)
1581 {
1582 instantiate_decl_rtl (XEXP (x, 0));
1583 instantiate_decl_rtl (XEXP (x, 1));
1584 return;
1585 }
1586
1587 /* If this is not a MEM, no need to do anything. Similarly if the
1588 address is a constant or a register that is not a virtual register. */
1589 if (!MEM_P (x))
1590 return;
1591
1592 addr = XEXP (x, 0);
1593 if (CONSTANT_P (addr)
1594 || (REG_P (addr)
1595 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1596 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1597 return;
1598
1599 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1600 }
1601
1602 /* Helper for instantiate_decls called via walk_tree: Process all decls
1603 in the given DECL_VALUE_EXPR. */
1604
1605 static tree
1606 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1607 {
1608 tree t = *tp;
1609 if (! EXPR_P (t))
1610 {
1611 *walk_subtrees = 0;
1612 if (DECL_P (t) && DECL_RTL_SET_P (t))
1613 instantiate_decl_rtl (DECL_RTL (t));
1614 }
1615 return NULL;
1616 }
1617
1618 /* Subroutine of instantiate_decls: Process all decls in the given
1619 BLOCK node and all its subblocks. */
1620
1621 static void
1622 instantiate_decls_1 (tree let)
1623 {
1624 tree t;
1625
1626 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1627 {
1628 if (DECL_RTL_SET_P (t))
1629 instantiate_decl_rtl (DECL_RTL (t));
1630 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1631 {
1632 tree v = DECL_VALUE_EXPR (t);
1633 walk_tree (&v, instantiate_expr, NULL, NULL);
1634 }
1635 }
1636
1637 /* Process all subblocks. */
1638 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1639 instantiate_decls_1 (t);
1640 }
1641
1642 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1643 all virtual registers in their DECL_RTL's. */
1644
1645 static void
1646 instantiate_decls (tree fndecl)
1647 {
1648 tree decl;
1649
1650 /* Process all parameters of the function. */
1651 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1652 {
1653 instantiate_decl_rtl (DECL_RTL (decl));
1654 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1655 if (DECL_HAS_VALUE_EXPR_P (decl))
1656 {
1657 tree v = DECL_VALUE_EXPR (decl);
1658 walk_tree (&v, instantiate_expr, NULL, NULL);
1659 }
1660 }
1661
1662 /* Now process all variables defined in the function or its subblocks. */
1663 instantiate_decls_1 (DECL_INITIAL (fndecl));
1664 }
1665
1666 /* Pass through the INSNS of function FNDECL and convert virtual register
1667 references to hard register references. */
1668
1669 static unsigned int
1670 instantiate_virtual_regs (void)
1671 {
1672 rtx insn;
1673
1674 /* Compute the offsets to use for this function. */
1675 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1676 var_offset = STARTING_FRAME_OFFSET;
1677 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1678 out_arg_offset = STACK_POINTER_OFFSET;
1679 #ifdef FRAME_POINTER_CFA_OFFSET
1680 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1681 #else
1682 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1683 #endif
1684
1685 /* Initialize recognition, indicating that volatile is OK. */
1686 init_recog ();
1687
1688 /* Scan through all the insns, instantiating every virtual register still
1689 present. */
1690 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1691 if (INSN_P (insn))
1692 {
1693 /* These patterns in the instruction stream can never be recognized.
1694 Fortunately, they shouldn't contain virtual registers either. */
1695 if (GET_CODE (PATTERN (insn)) == USE
1696 || GET_CODE (PATTERN (insn)) == CLOBBER
1697 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1698 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1699 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1700 continue;
1701
1702 instantiate_virtual_regs_in_insn (insn);
1703
1704 if (INSN_DELETED_P (insn))
1705 continue;
1706
1707 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1708
1709 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1710 if (GET_CODE (insn) == CALL_INSN)
1711 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1712 instantiate_virtual_regs_in_rtx, NULL);
1713 }
1714
1715 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1716 instantiate_decls (current_function_decl);
1717
1718 targetm.instantiate_decls ();
1719
1720 /* Indicate that, from now on, assign_stack_local should use
1721 frame_pointer_rtx. */
1722 virtuals_instantiated = 1;
1723 return 0;
1724 }
1725
1726 struct rtl_opt_pass pass_instantiate_virtual_regs =
1727 {
1728 {
1729 RTL_PASS,
1730 "vregs", /* name */
1731 NULL, /* gate */
1732 instantiate_virtual_regs, /* execute */
1733 NULL, /* sub */
1734 NULL, /* next */
1735 0, /* static_pass_number */
1736 0, /* tv_id */
1737 0, /* properties_required */
1738 0, /* properties_provided */
1739 0, /* properties_destroyed */
1740 0, /* todo_flags_start */
1741 TODO_dump_func /* todo_flags_finish */
1742 }
1743 };
1744
1745 \f
1746 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1747 This means a type for which function calls must pass an address to the
1748 function or get an address back from the function.
1749 EXP may be a type node or an expression (whose type is tested). */
1750
1751 int
1752 aggregate_value_p (const_tree exp, const_tree fntype)
1753 {
1754 int i, regno, nregs;
1755 rtx reg;
1756
1757 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1758
1759 /* DECL node associated with FNTYPE when relevant, which we might need to
1760 check for by-invisible-reference returns, typically for CALL_EXPR input
1761 EXPressions. */
1762 const_tree fndecl = NULL_TREE;
1763
1764 if (fntype)
1765 switch (TREE_CODE (fntype))
1766 {
1767 case CALL_EXPR:
1768 fndecl = get_callee_fndecl (fntype);
1769 fntype = fndecl ? TREE_TYPE (fndecl) : 0;
1770 break;
1771 case FUNCTION_DECL:
1772 fndecl = fntype;
1773 fntype = TREE_TYPE (fndecl);
1774 break;
1775 case FUNCTION_TYPE:
1776 case METHOD_TYPE:
1777 break;
1778 case IDENTIFIER_NODE:
1779 fntype = 0;
1780 break;
1781 default:
1782 	/* We don't expect other tree codes here.  */
1783 gcc_unreachable ();
1784 }
1785
1786 if (TREE_CODE (type) == VOID_TYPE)
1787 return 0;
1788
1789 /* If the front end has decided that this needs to be passed by
1790 reference, do so. */
1791 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1792 && DECL_BY_REFERENCE (exp))
1793 return 1;
1794
1795 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1796 called function RESULT_DECL, meaning the function returns in memory by
1797 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1798 on the function type, which used to be the way to request such a return
1799 mechanism but might now be causing troubles at gimplification time if
1800 temporaries with the function type need to be created. */
1801 if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1802 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1803 return 1;
1804
1805 if (targetm.calls.return_in_memory (type, fntype))
1806 return 1;
1807 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1808 and thus can't be returned in registers. */
1809 if (TREE_ADDRESSABLE (type))
1810 return 1;
1811 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1812 return 1;
1813 /* Make sure we have suitable call-clobbered regs to return
1814 the value in; if not, we must return it in memory. */
1815 reg = hard_function_value (type, 0, fntype, 0);
1816
1817 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1818 it is OK. */
1819 if (!REG_P (reg))
1820 return 0;
1821
1822 regno = REGNO (reg);
1823 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1824 for (i = 0; i < nregs; i++)
1825 if (! call_used_regs[regno + i])
1826 return 1;
1827 return 0;
1828 }
1829 \f
1830 /* Return true if we should assign DECL a pseudo register; false if it
1831 should live on the local stack. */
1832
1833 bool
1834 use_register_for_decl (const_tree decl)
1835 {
1836 if (!targetm.calls.allocate_stack_slots_for_args())
1837 return true;
1838
1839 /* Honor volatile. */
1840 if (TREE_SIDE_EFFECTS (decl))
1841 return false;
1842
1843 /* Honor addressability. */
1844 if (TREE_ADDRESSABLE (decl))
1845 return false;
1846
1847 /* Only register-like things go in registers. */
1848 if (DECL_MODE (decl) == BLKmode)
1849 return false;
1850
1851 /* If -ffloat-store specified, don't put explicit float variables
1852 into registers. */
1853 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1854 propagates values across these stores, and it probably shouldn't. */
1855 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1856 return false;
1857
1858 /* If we're not interested in tracking debugging information for
1859 this decl, then we can certainly put it in a register. */
1860 if (DECL_IGNORED_P (decl))
1861 return true;
1862
1863 return (optimize || DECL_REGISTER (decl));
1864 }
1865
1866 /* Return true if TYPE should be passed by invisible reference. */
1867
1868 bool
1869 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1870 tree type, bool named_arg)
1871 {
1872 if (type)
1873 {
1874 /* If this type contains non-trivial constructors, then it is
1875 forbidden for the middle-end to create any new copies. */
1876 if (TREE_ADDRESSABLE (type))
1877 return true;
1878
1879 /* GCC post 3.4 passes *all* variable sized types by reference. */
1880 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1881 return true;
1882 }
1883
1884 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1885 }
1886
1887 /* Return true if TYPE, which is passed by reference, should be callee
1888 copied instead of caller copied. */
1889
1890 bool
1891 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1892 tree type, bool named_arg)
1893 {
1894 if (type && TREE_ADDRESSABLE (type))
1895 return false;
1896 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1897 }
1898
1899 /* Structures to communicate between the subroutines of assign_parms.
1900 The first holds data persistent across all parameters, the second
1901 is cleared out for each parameter. */
1902
1903 struct assign_parm_data_all
1904 {
1905 CUMULATIVE_ARGS args_so_far;
1906 struct args_size stack_args_size;
1907 tree function_result_decl;
1908 tree orig_fnargs;
1909 rtx first_conversion_insn;
1910 rtx last_conversion_insn;
1911 HOST_WIDE_INT pretend_args_size;
1912 HOST_WIDE_INT extra_pretend_bytes;
1913 int reg_parm_stack_space;
1914 };
1915
1916 struct assign_parm_data_one
1917 {
1918 tree nominal_type;
1919 tree passed_type;
1920 rtx entry_parm;
1921 rtx stack_parm;
1922 enum machine_mode nominal_mode;
1923 enum machine_mode passed_mode;
1924 enum machine_mode promoted_mode;
1925 struct locate_and_pad_arg_data locate;
1926 int partial;
1927 BOOL_BITFIELD named_arg : 1;
1928 BOOL_BITFIELD passed_pointer : 1;
1929 BOOL_BITFIELD on_stack : 1;
1930 BOOL_BITFIELD loaded_in_reg : 1;
1931 };
1932
1933 /* A subroutine of assign_parms. Initialize ALL. */
1934
1935 static void
1936 assign_parms_initialize_all (struct assign_parm_data_all *all)
1937 {
1938 tree fntype;
1939
1940 memset (all, 0, sizeof (*all));
1941
1942 fntype = TREE_TYPE (current_function_decl);
1943
1944 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1945 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1946 #else
1947 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1948 current_function_decl, -1);
1949 #endif
1950
1951 #ifdef REG_PARM_STACK_SPACE
1952 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1953 #endif
1954 }
1955
1956 /* If ARGS contains entries with complex types, split the entry into two
1957 entries of the component type. Return a new list if substitutions were
1958 needed, else the old list. */
1959
1960 static tree
1961 split_complex_args (tree args)
1962 {
1963 tree p;
1964
1965 /* Before allocating memory, check for the common case of no complex. */
1966 for (p = args; p; p = TREE_CHAIN (p))
1967 {
1968 tree type = TREE_TYPE (p);
1969 if (TREE_CODE (type) == COMPLEX_TYPE
1970 && targetm.calls.split_complex_arg (type))
1971 goto found;
1972 }
1973 return args;
1974
1975 found:
1976 args = copy_list (args);
1977
1978 for (p = args; p; p = TREE_CHAIN (p))
1979 {
1980 tree type = TREE_TYPE (p);
1981 if (TREE_CODE (type) == COMPLEX_TYPE
1982 && targetm.calls.split_complex_arg (type))
1983 {
1984 tree decl;
1985 tree subtype = TREE_TYPE (type);
1986 bool addressable = TREE_ADDRESSABLE (p);
1987
1988 /* Rewrite the PARM_DECL's type with its component. */
1989 TREE_TYPE (p) = subtype;
1990 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1991 DECL_MODE (p) = VOIDmode;
1992 DECL_SIZE (p) = NULL;
1993 DECL_SIZE_UNIT (p) = NULL;
1994 /* If this arg must go in memory, put it in a pseudo here.
1995 We can't allow it to go in memory as per normal parms,
1996 because the usual place might not have the imag part
1997 adjacent to the real part. */
1998 DECL_ARTIFICIAL (p) = addressable;
1999 DECL_IGNORED_P (p) = addressable;
2000 TREE_ADDRESSABLE (p) = 0;
2001 layout_decl (p, 0);
2002
2003 /* Build a second synthetic decl. */
2004 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2005 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2006 DECL_ARTIFICIAL (decl) = addressable;
2007 DECL_IGNORED_P (decl) = addressable;
2008 layout_decl (decl, 0);
2009
2010 /* Splice it in; skip the new decl. */
2011 TREE_CHAIN (decl) = TREE_CHAIN (p);
2012 TREE_CHAIN (p) = decl;
2013 p = decl;
2014 }
2015 }
2016
2017 return args;
2018 }
2019
2020 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2021 the hidden struct return argument, and (abi willing) complex args.
2022 Return the new parameter list. */
2023
2024 static tree
2025 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2026 {
2027 tree fndecl = current_function_decl;
2028 tree fntype = TREE_TYPE (fndecl);
2029 tree fnargs = DECL_ARGUMENTS (fndecl);
2030
2031 /* If struct value address is treated as the first argument, make it so. */
2032 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2033 && ! cfun->returns_pcc_struct
2034 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2035 {
2036 tree type = build_pointer_type (TREE_TYPE (fntype));
2037 tree decl;
2038
2039 decl = build_decl (PARM_DECL, NULL_TREE, type);
2040 DECL_ARG_TYPE (decl) = type;
2041 DECL_ARTIFICIAL (decl) = 1;
2042 DECL_IGNORED_P (decl) = 1;
2043
2044 TREE_CHAIN (decl) = fnargs;
2045 fnargs = decl;
2046 all->function_result_decl = decl;
2047 }
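/* For instance, for a function `struct big f (int x)' whose result is
   returned in memory and whose target provides no struct_value_rtx, the
   synthetic PARM_DECL built above acts as a hidden first parameter, so the
   list walked by the rest of assign_parms is effectively
   (<return pointer>, x).  */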
2048
2049 all->orig_fnargs = fnargs;
2050
2051 /* If the target wants to split complex arguments into scalars, do so. */
2052 if (targetm.calls.split_complex_arg)
2053 fnargs = split_complex_args (fnargs);
2054
2055 return fnargs;
2056 }
2057
2058 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2059 data for the parameter. Incorporate ABI specifics such as pass-by-
2060 reference and type promotion. */
2061
2062 static void
2063 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2064 struct assign_parm_data_one *data)
2065 {
2066 tree nominal_type, passed_type;
2067 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2068
2069 memset (data, 0, sizeof (*data));
2070
2071 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2072 if (!cfun->stdarg)
2073 data->named_arg = 1; /* No variadic parms. */
2074 else if (TREE_CHAIN (parm))
2075 data->named_arg = 1; /* Not the last non-variadic parm. */
2076 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2077 data->named_arg = 1; /* Only variadic ones are unnamed. */
2078 else
2079 data->named_arg = 0; /* Treat as variadic. */
2080
2081 nominal_type = TREE_TYPE (parm);
2082 passed_type = DECL_ARG_TYPE (parm);
2083
2084 /* Look out for errors propagating this far. Also, if the parameter's
2085 type is void then its value doesn't matter. */
2086 if (TREE_TYPE (parm) == error_mark_node
2087 /* This can happen after weird syntax errors
2088 or if an enum type is defined among the parms. */
2089 || TREE_CODE (parm) != PARM_DECL
2090 || passed_type == NULL
2091 || VOID_TYPE_P (nominal_type))
2092 {
2093 nominal_type = passed_type = void_type_node;
2094 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2095 goto egress;
2096 }
2097
2098 /* Find mode of arg as it is passed, and mode of arg as it should be
2099 during execution of this function. */
2100 passed_mode = TYPE_MODE (passed_type);
2101 nominal_mode = TYPE_MODE (nominal_type);
2102
2103 /* If the parm is to be passed as a transparent union, use the type of
2104 the first field for the tests below. We have already verified that
2105 the modes are the same. */
2106 if (TREE_CODE (passed_type) == UNION_TYPE
2107 && TYPE_TRANSPARENT_UNION (passed_type))
2108 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2109
2110 /* See if this arg was passed by invisible reference. */
2111 if (pass_by_reference (&all->args_so_far, passed_mode,
2112 passed_type, data->named_arg))
2113 {
2114 passed_type = nominal_type = build_pointer_type (passed_type);
2115 data->passed_pointer = true;
2116 passed_mode = nominal_mode = Pmode;
2117 }
2118
2119 /* Find mode as it is passed by the ABI. */
2120 promoted_mode = passed_mode;
2121 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2122 {
2123 int unsignedp = TYPE_UNSIGNED (passed_type);
2124 promoted_mode = promote_mode (passed_type, promoted_mode,
2125 &unsignedp, 1);
2126 }
2127
2128 egress:
2129 data->nominal_type = nominal_type;
2130 data->passed_type = passed_type;
2131 data->nominal_mode = nominal_mode;
2132 data->passed_mode = passed_mode;
2133 data->promoted_mode = promoted_mode;
2134 }
2135
2136 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2137
2138 static void
2139 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2140 struct assign_parm_data_one *data, bool no_rtl)
2141 {
2142 int varargs_pretend_bytes = 0;
2143
2144 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2145 data->promoted_mode,
2146 data->passed_type,
2147 &varargs_pretend_bytes, no_rtl);
2148
2149 /* If the back-end has requested extra stack space, record how much is
2150 needed. Do not change pretend_args_size otherwise since it may be
2151 nonzero from an earlier partial argument. */
2152 if (varargs_pretend_bytes > 0)
2153 all->pretend_args_size = varargs_pretend_bytes;
2154 }
2155
2156 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2157 the incoming location of the current parameter. */
2158
2159 static void
2160 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2161 struct assign_parm_data_one *data)
2162 {
2163 HOST_WIDE_INT pretend_bytes = 0;
2164 rtx entry_parm;
2165 bool in_regs;
2166
2167 if (data->promoted_mode == VOIDmode)
2168 {
2169 data->entry_parm = data->stack_parm = const0_rtx;
2170 return;
2171 }
2172
2173 #ifdef FUNCTION_INCOMING_ARG
2174 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2175 data->passed_type, data->named_arg);
2176 #else
2177 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2178 data->passed_type, data->named_arg);
2179 #endif
2180
2181 if (entry_parm == 0)
2182 data->promoted_mode = data->passed_mode;
2183
2184 /* Determine parm's home in the stack, in case it arrives in the stack
2185 or we should pretend it did. Compute the stack position and rtx where
2186 the argument arrives and its size.
2187
2188 There is one complexity here: If this was a parameter that would
2189 have been passed in registers, but wasn't only because it is
2190 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2191 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2192 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2193 as it was the previous time. */
2194 in_regs = entry_parm != 0;
2195 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2196 in_regs = true;
2197 #endif
2198 if (!in_regs && !data->named_arg)
2199 {
2200 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2201 {
2202 rtx tem;
2203 #ifdef FUNCTION_INCOMING_ARG
2204 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2205 data->passed_type, true);
2206 #else
2207 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2208 data->passed_type, true);
2209 #endif
2210 in_regs = tem != NULL;
2211 }
2212 }
2213
2214 /* If this parameter was passed both in registers and in the stack, use
2215 the copy on the stack. */
2216 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2217 data->passed_type))
2218 entry_parm = 0;
2219
2220 if (entry_parm)
2221 {
2222 int partial;
2223
2224 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2225 data->promoted_mode,
2226 data->passed_type,
2227 data->named_arg);
2228 data->partial = partial;
2229
2230 /* The caller might already have allocated stack space for the
2231 register parameters. */
2232 if (partial != 0 && all->reg_parm_stack_space == 0)
2233 {
2234 /* Part of this argument is passed in registers and part
2235 is passed on the stack. Ask the prologue code to extend
2236 the stack part so that we can recreate the full value.
2237
2238 PRETEND_BYTES is the size of the registers we need to store.
2239 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2240 stack space that the prologue should allocate.
2241
2242 Internally, gcc assumes that the argument pointer is aligned
2243 to STACK_BOUNDARY bits. This is used both for alignment
2244 optimizations (see init_emit) and to locate arguments that are
2245 aligned to more than PARM_BOUNDARY bits. We must preserve this
2246 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2247 a stack boundary. */
2248
2249 /* We assume at most one partial arg, and it must be the first
2250 argument on the stack. */
2251 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2252
2253 pretend_bytes = partial;
2254 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
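/* E.g., assuming a 4-byte register part (PARTIAL == 4) and a 128-bit
   STACK_BOUNDARY (STACK_BYTES == 16), CEIL_ROUND gives a pretend_args_size
   of 16: the prologue allocates a whole stack boundary's worth of space
   even though only 4 bytes of registers need to be stored.  */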
2255
2256 /* We want to align relative to the actual stack pointer, so
2257 don't include this in the stack size until later. */
2258 all->extra_pretend_bytes = all->pretend_args_size;
2259 }
2260 }
2261
2262 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2263 entry_parm ? data->partial : 0, current_function_decl,
2264 &all->stack_args_size, &data->locate);
2265
2266 /* Update parm_stack_boundary if this parameter is passed in the
2267 stack. */
2268 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2269 crtl->parm_stack_boundary = data->locate.boundary;
2270
2271 /* Adjust offsets to include the pretend args. */
2272 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2273 data->locate.slot_offset.constant += pretend_bytes;
2274 data->locate.offset.constant += pretend_bytes;
2275
2276 data->entry_parm = entry_parm;
2277 }
2278
2279 /* A subroutine of assign_parms. If there is actually space on the stack
2280 for this parm, count it in stack_args_size and return true. */
2281
2282 static bool
2283 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2284 struct assign_parm_data_one *data)
2285 {
2286 /* Trivially true if we've no incoming register. */
2287 if (data->entry_parm == NULL)
2288 ;
2289 /* Also true if we're partially in registers and partially not,
2290 since we've arranged to drop the entire argument on the stack. */
2291 else if (data->partial != 0)
2292 ;
2293 /* Also true if the target says that it's passed in both registers
2294 and on the stack. */
2295 else if (GET_CODE (data->entry_parm) == PARALLEL
2296 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2297 ;
2298 /* Also true if the target says that there's stack allocated for
2299 all register parameters. */
2300 else if (all->reg_parm_stack_space > 0)
2301 ;
2302 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2303 else
2304 return false;
2305
2306 all->stack_args_size.constant += data->locate.size.constant;
2307 if (data->locate.size.var)
2308 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2309
2310 return true;
2311 }
2312
2313 /* A subroutine of assign_parms. Given that this parameter is allocated
2314 stack space by the ABI, find it. */
2315
2316 static void
2317 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2318 {
2319 rtx offset_rtx, stack_parm;
2320 unsigned int align, boundary;
2321
2322 /* If we're passing this arg using a reg, make its stack home the
2323 aligned stack slot. */
2324 if (data->entry_parm)
2325 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2326 else
2327 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2328
2329 stack_parm = crtl->args.internal_arg_pointer;
2330 if (offset_rtx != const0_rtx)
2331 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2332 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2333
2334 set_mem_attributes (stack_parm, parm, 1);
2335
2336 boundary = data->locate.boundary;
2337 align = BITS_PER_UNIT;
2338
2339 /* If we're padding upward, we know that the alignment of the slot
2340 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2341 intentionally forcing upward padding. Otherwise we have to come
2342 up with a guess at the alignment based on OFFSET_RTX. */
2343 if (data->locate.where_pad != downward || data->entry_parm)
2344 align = boundary;
2345 else if (GET_CODE (offset_rtx) == CONST_INT)
2346 {
2347 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2348 align = align & -align;
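/* The OR with BOUNDARY followed by "align & -align" keeps only the lowest
   set bit, i.e. the largest power-of-two alignment implied by both the
   slot boundary and the byte offset.  E.g., assuming a 4-byte offset and
   a 64-bit boundary: (4 * 8) | 64 == 96, and 96 & -96 == 32, so the slot
   is only assumed to be 32-bit aligned.  */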
2349 }
2350 set_mem_align (stack_parm, align);
2351
2352 if (data->entry_parm)
2353 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2354
2355 data->stack_parm = stack_parm;
2356 }
2357
2358 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2359 always valid and contiguous. */
2360
2361 static void
2362 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2363 {
2364 rtx entry_parm = data->entry_parm;
2365 rtx stack_parm = data->stack_parm;
2366
2367 /* If this parm was passed part in regs and part in memory, pretend it
2368 arrived entirely in memory by pushing the register-part onto the stack.
2369 In the special case of a DImode or DFmode that is split, we could put
2370 it together in a pseudoreg directly, but for now that's not worth
2371 bothering with. */
2372 if (data->partial != 0)
2373 {
2374 /* Handle calls that pass values in multiple non-contiguous
2375 locations. The Irix 6 ABI has examples of this. */
2376 if (GET_CODE (entry_parm) == PARALLEL)
2377 emit_group_store (validize_mem (stack_parm), entry_parm,
2378 data->passed_type,
2379 int_size_in_bytes (data->passed_type));
2380 else
2381 {
2382 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2383 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2384 data->partial / UNITS_PER_WORD);
2385 }
2386
2387 entry_parm = stack_parm;
2388 }
2389
2390 /* If we didn't decide this parm came in a register, by default it came
2391 on the stack. */
2392 else if (entry_parm == NULL)
2393 entry_parm = stack_parm;
2394
2395 /* When an argument is passed in multiple locations, we can't make use
2396 of this information, but we can save some copying if the whole argument
2397 is passed in a single register. */
2398 else if (GET_CODE (entry_parm) == PARALLEL
2399 && data->nominal_mode != BLKmode
2400 && data->passed_mode != BLKmode)
2401 {
2402 size_t i, len = XVECLEN (entry_parm, 0);
2403
2404 for (i = 0; i < len; i++)
2405 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2406 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2407 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2408 == data->passed_mode)
2409 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2410 {
2411 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2412 break;
2413 }
2414 }
2415
2416 data->entry_parm = entry_parm;
2417 }
2418
2419 /* A subroutine of assign_parms. Reconstitute any values which were
2420 passed in multiple registers and would fit in a single register. */
2421
2422 static void
2423 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2424 {
2425 rtx entry_parm = data->entry_parm;
2426
2427 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2428 This can be done with register operations rather than on the
2429 stack, even if we will store the reconstituted parameter on the
2430 stack later. */
2431 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2432 {
2433 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2434 emit_group_store (parmreg, entry_parm, NULL_TREE,
2435 GET_MODE_SIZE (GET_MODE (entry_parm)));
2436 entry_parm = parmreg;
2437 }
2438
2439 data->entry_parm = entry_parm;
2440 }
2441
2442 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2443 always valid and properly aligned. */
2444
2445 static void
2446 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2447 {
2448 rtx stack_parm = data->stack_parm;
2449
2450 /* If we can't trust the parm stack slot to be aligned enough for its
2451 ultimate type, don't use that slot after entry. We'll make another
2452 stack slot, if we need one. */
2453 if (stack_parm
2454 && ((STRICT_ALIGNMENT
2455 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2456 || (data->nominal_type
2457 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2458 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2459 stack_parm = NULL;
2460
2461 /* If parm was passed in memory, and we need to convert it on entry,
2462 don't store it back in that same slot. */
2463 else if (data->entry_parm == stack_parm
2464 && data->nominal_mode != BLKmode
2465 && data->nominal_mode != data->passed_mode)
2466 stack_parm = NULL;
2467
2468 /* If stack protection is in effect for this function, don't leave any
2469 pointers in their passed stack slots. */
2470 else if (crtl->stack_protect_guard
2471 && (flag_stack_protect == 2
2472 || data->passed_pointer
2473 || POINTER_TYPE_P (data->nominal_type)))
2474 stack_parm = NULL;
2475
2476 data->stack_parm = stack_parm;
2477 }
2478
2479 /* A subroutine of assign_parms. Return true if the current parameter
2480 should be stored as a BLKmode in the current frame. */
2481
2482 static bool
2483 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2484 {
2485 if (data->nominal_mode == BLKmode)
2486 return true;
2487 if (GET_MODE (data->entry_parm) == BLKmode)
2488 return true;
2489
2490 #ifdef BLOCK_REG_PADDING
2491 /* Only assign_parm_setup_block knows how to deal with register arguments
2492 that are padded at the least significant end. */
2493 if (REG_P (data->entry_parm)
2494 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2495 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2496 == (BYTES_BIG_ENDIAN ? upward : downward)))
2497 return true;
2498 #endif
2499
2500 return false;
2501 }
2502
2503 /* A subroutine of assign_parms. Arrange for the parameter to be
2504 present and valid in DATA->STACK_RTL. */
2505
2506 static void
2507 assign_parm_setup_block (struct assign_parm_data_all *all,
2508 tree parm, struct assign_parm_data_one *data)
2509 {
2510 rtx entry_parm = data->entry_parm;
2511 rtx stack_parm = data->stack_parm;
2512 HOST_WIDE_INT size;
2513 HOST_WIDE_INT size_stored;
2514
2515 if (GET_CODE (entry_parm) == PARALLEL)
2516 entry_parm = emit_group_move_into_temps (entry_parm);
2517
2518 size = int_size_in_bytes (data->passed_type);
2519 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
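/* E.g., assuming a 10-byte BLKmode argument on a target with 4-byte words,
   SIZE_STORED is CEIL_ROUND (10, 4) == 12, so the slot created below holds
   an integral number of words.  */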
2520 if (stack_parm == 0)
2521 {
2522 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2523 stack_parm = assign_stack_local (BLKmode, size_stored,
2524 DECL_ALIGN (parm));
2525 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2526 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2527 set_mem_attributes (stack_parm, parm, 1);
2528 }
2529
2530 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2531 calls that pass values in multiple non-contiguous locations. */
2532 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2533 {
2534 rtx mem;
2535
2536 /* Note that we will be storing an integral number of words.
2537 So we have to be careful to ensure that we allocate an
2538 integral number of words. We do this above when we call
2539 assign_stack_local if space was not allocated in the argument
2540 list. If it was, this will not work if PARM_BOUNDARY is not
2541 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2542 if it becomes a problem. Exception is when BLKmode arrives
2543 with arguments not conforming to word_mode. */
2544
2545 if (data->stack_parm == 0)
2546 ;
2547 else if (GET_CODE (entry_parm) == PARALLEL)
2548 ;
2549 else
2550 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2551
2552 mem = validize_mem (stack_parm);
2553
2554 /* Handle values in multiple non-contiguous locations. */
2555 if (GET_CODE (entry_parm) == PARALLEL)
2556 {
2557 push_to_sequence2 (all->first_conversion_insn,
2558 all->last_conversion_insn);
2559 emit_group_store (mem, entry_parm, data->passed_type, size);
2560 all->first_conversion_insn = get_insns ();
2561 all->last_conversion_insn = get_last_insn ();
2562 end_sequence ();
2563 }
2564
2565 else if (size == 0)
2566 ;
2567
2568 /* If SIZE is that of a mode no bigger than a word, just use
2569 that mode's store operation. */
2570 else if (size <= UNITS_PER_WORD)
2571 {
2572 enum machine_mode mode
2573 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2574
2575 if (mode != BLKmode
2576 #ifdef BLOCK_REG_PADDING
2577 && (size == UNITS_PER_WORD
2578 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2579 != (BYTES_BIG_ENDIAN ? upward : downward)))
2580 #endif
2581 )
2582 {
2583 rtx reg;
2584
2585 /* We are really truncating a word_mode value containing
2586 SIZE bytes into a value of mode MODE. If such an
2587 operation requires no actual instructions, we can refer
2588 to the value directly in mode MODE, otherwise we must
2589 start with the register in word_mode and explicitly
2590 convert it. */
2591 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2592 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2593 else
2594 {
2595 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2596 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2597 }
2598 emit_move_insn (change_address (mem, mode, 0), reg);
2599 }
2600
2601 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2602 machine must be aligned to the left before storing
2603 to memory. Note that the previous test doesn't
2604 handle all cases (e.g. SIZE == 3). */
2605 else if (size != UNITS_PER_WORD
2606 #ifdef BLOCK_REG_PADDING
2607 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2608 == downward)
2609 #else
2610 && BYTES_BIG_ENDIAN
2611 #endif
2612 )
2613 {
2614 rtx tem, x;
2615 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
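/* E.g., assuming SIZE == 3 and 4-byte words, BY is 8: shifting the
   register left by one byte moves the three significant bytes to the most
   significant end, so the big-endian store below places them in the first
   three bytes of the slot.  */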
2616 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2617
2618 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2619 build_int_cst (NULL_TREE, by),
2620 NULL_RTX, 1);
2621 tem = change_address (mem, word_mode, 0);
2622 emit_move_insn (tem, x);
2623 }
2624 else
2625 move_block_from_reg (REGNO (entry_parm), mem,
2626 size_stored / UNITS_PER_WORD);
2627 }
2628 else
2629 move_block_from_reg (REGNO (entry_parm), mem,
2630 size_stored / UNITS_PER_WORD);
2631 }
2632 else if (data->stack_parm == 0)
2633 {
2634 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2635 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2636 BLOCK_OP_NORMAL);
2637 all->first_conversion_insn = get_insns ();
2638 all->last_conversion_insn = get_last_insn ();
2639 end_sequence ();
2640 }
2641
2642 data->stack_parm = stack_parm;
2643 SET_DECL_RTL (parm, stack_parm);
2644 }
2645
2646 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2647 parameter. Get it there. Perform all ABI specified conversions. */
2648
2649 static void
2650 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2651 struct assign_parm_data_one *data)
2652 {
2653 rtx parmreg;
2654 enum machine_mode promoted_nominal_mode;
2655 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2656 bool did_conversion = false;
2657
2658 /* Store the parm in a pseudoregister during the function, but we may
2659 need to do it in a wider mode. */
2660
2661 /* This is not really promoting for a call. However we need to be
2662 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2663 promoted_nominal_mode
2664 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2665
2666 parmreg = gen_reg_rtx (promoted_nominal_mode);
2667
2668 if (!DECL_ARTIFICIAL (parm))
2669 mark_user_reg (parmreg);
2670
2671 /* If this was an item that we received a pointer to,
2672 set DECL_RTL appropriately. */
2673 if (data->passed_pointer)
2674 {
2675 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2676 set_mem_attributes (x, parm, 1);
2677 SET_DECL_RTL (parm, x);
2678 }
2679 else
2680 SET_DECL_RTL (parm, parmreg);
2681
2682 assign_parm_remove_parallels (data);
2683
2684 /* Copy the value into the register. */
2685 if (data->nominal_mode != data->passed_mode
2686 || promoted_nominal_mode != data->promoted_mode)
2687 {
2688 int save_tree_used;
2689
2690 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2691 mode, by the caller. We now have to convert it to
2692 NOMINAL_MODE, if different. However, PARMREG may be in
2693 a different mode than NOMINAL_MODE if it is being stored
2694 promoted.
2695
2696 If ENTRY_PARM is a hard register, it might be in a register
2697 not valid for operating in its mode (e.g., an odd-numbered
2698 register for a DFmode). In that case, moves are the only
2699 thing valid, so we can't do a convert from there. This
2700 occurs when the calling sequence allow such misaligned
2701 usages.
2702
2703 In addition, the conversion may involve a call, which could
2704 clobber parameters which haven't been copied to pseudo
2705 registers yet. Therefore, we must first copy the parm to
2706 a pseudo reg here, and save the conversion until after all
2707 parameters have been moved. */
2708
2709 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2710
2711 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2712
2713 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2714 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2715
2716 if (GET_CODE (tempreg) == SUBREG
2717 && GET_MODE (tempreg) == data->nominal_mode
2718 && REG_P (SUBREG_REG (tempreg))
2719 && data->nominal_mode == data->passed_mode
2720 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2721 && GET_MODE_SIZE (GET_MODE (tempreg))
2722 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2723 {
2724 /* The argument is already sign/zero extended, so note it
2725 into the subreg. */
2726 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2727 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2728 }
2729
2730 /* TREE_USED gets set erroneously during expand_assignment. */
2731 save_tree_used = TREE_USED (parm);
2732 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2733 TREE_USED (parm) = save_tree_used;
2734 all->first_conversion_insn = get_insns ();
2735 all->last_conversion_insn = get_last_insn ();
2736 end_sequence ();
2737
2738 did_conversion = true;
2739 }
2740 else
2741 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2742
2743 /* If we were passed a pointer but the actual value can safely live
2744 in a register, put it in one. */
2745 if (data->passed_pointer
2746 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2747 /* If by-reference argument was promoted, demote it. */
2748 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2749 || use_register_for_decl (parm)))
2750 {
2751 /* We can't use nominal_mode, because it will have been set to
2752 Pmode above. We must use the actual mode of the parm. */
2753 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2754 mark_user_reg (parmreg);
2755
2756 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2757 {
2758 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2759 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2760
2761 push_to_sequence2 (all->first_conversion_insn,
2762 all->last_conversion_insn);
2763 emit_move_insn (tempreg, DECL_RTL (parm));
2764 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2765 emit_move_insn (parmreg, tempreg);
2766 all->first_conversion_insn = get_insns ();
2767 all->last_conversion_insn = get_last_insn ();
2768 end_sequence ();
2769
2770 did_conversion = true;
2771 }
2772 else
2773 emit_move_insn (parmreg, DECL_RTL (parm));
2774
2775 SET_DECL_RTL (parm, parmreg);
2776
2777 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2778 now the parm. */
2779 data->stack_parm = NULL;
2780 }
2781
2782 /* Mark the register as eliminable if we did no conversion and it was
2783 copied from memory at a fixed offset, and the arg pointer was not
2784 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2785 offset formed an invalid address, such memory-equivalences as we
2786 make here would screw up life analysis for it. */
2787 if (data->nominal_mode == data->passed_mode
2788 && !did_conversion
2789 && data->stack_parm != 0
2790 && MEM_P (data->stack_parm)
2791 && data->locate.offset.var == 0
2792 && reg_mentioned_p (virtual_incoming_args_rtx,
2793 XEXP (data->stack_parm, 0)))
2794 {
2795 rtx linsn = get_last_insn ();
2796 rtx sinsn, set;
2797
2798 /* Mark complex types separately. */
2799 if (GET_CODE (parmreg) == CONCAT)
2800 {
2801 enum machine_mode submode
2802 = GET_MODE_INNER (GET_MODE (parmreg));
2803 int regnor = REGNO (XEXP (parmreg, 0));
2804 int regnoi = REGNO (XEXP (parmreg, 1));
2805 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2806 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2807 GET_MODE_SIZE (submode));
2808
2809 /* Scan backwards for the set of the real and
2810 imaginary parts. */
2811 for (sinsn = linsn; sinsn != 0;
2812 sinsn = prev_nonnote_insn (sinsn))
2813 {
2814 set = single_set (sinsn);
2815 if (set == 0)
2816 continue;
2817
2818 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2819 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2820 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2821 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2822 }
2823 }
2824 else if ((set = single_set (linsn)) != 0
2825 && SET_DEST (set) == parmreg)
2826 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2827 }
2828
2829 /* For a pointer data type, suggest a pointer register. */
2830 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2831 mark_reg_pointer (parmreg,
2832 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2833 }
2834
2835 /* A subroutine of assign_parms. Allocate stack space to hold the current
2836 parameter. Get it there. Perform all ABI specified conversions. */
2837
2838 static void
2839 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2840 struct assign_parm_data_one *data)
2841 {
2842 /* Value must be stored in the stack slot STACK_PARM during function
2843 execution. */
2844 bool to_conversion = false;
2845
2846 assign_parm_remove_parallels (data);
2847
2848 if (data->promoted_mode != data->nominal_mode)
2849 {
2850 /* Conversion is required. */
2851 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2852
2853 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2854
2855 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2856 to_conversion = true;
2857
2858 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2859 TYPE_UNSIGNED (TREE_TYPE (parm)));
2860
2861 if (data->stack_parm)
2862 /* ??? This may need a big-endian conversion on sparc64. */
2863 data->stack_parm
2864 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2865 }
2866
2867 if (data->entry_parm != data->stack_parm)
2868 {
2869 rtx src, dest;
2870
2871 if (data->stack_parm == 0)
2872 {
2873 data->stack_parm
2874 = assign_stack_local (GET_MODE (data->entry_parm),
2875 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2876 TYPE_ALIGN (data->passed_type));
2877 set_mem_attributes (data->stack_parm, parm, 1);
2878 }
2879
2880 dest = validize_mem (data->stack_parm);
2881 src = validize_mem (data->entry_parm);
2882
2883 if (MEM_P (src))
2884 {
2885 /* Use a block move to handle potentially misaligned entry_parm. */
2886 if (!to_conversion)
2887 push_to_sequence2 (all->first_conversion_insn,
2888 all->last_conversion_insn);
2889 to_conversion = true;
2890
2891 emit_block_move (dest, src,
2892 GEN_INT (int_size_in_bytes (data->passed_type)),
2893 BLOCK_OP_NORMAL);
2894 }
2895 else
2896 emit_move_insn (dest, src);
2897 }
2898
2899 if (to_conversion)
2900 {
2901 all->first_conversion_insn = get_insns ();
2902 all->last_conversion_insn = get_last_insn ();
2903 end_sequence ();
2904 }
2905
2906 SET_DECL_RTL (parm, data->stack_parm);
2907 }
2908
2909 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2910 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2911
2912 static void
2913 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2914 {
2915 tree parm;
2916 tree orig_fnargs = all->orig_fnargs;
2917
2918 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2919 {
2920 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2921 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2922 {
2923 rtx tmp, real, imag;
2924 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2925
2926 real = DECL_RTL (fnargs);
2927 imag = DECL_RTL (TREE_CHAIN (fnargs));
2928 if (inner != GET_MODE (real))
2929 {
2930 real = gen_lowpart_SUBREG (inner, real);
2931 imag = gen_lowpart_SUBREG (inner, imag);
2932 }
2933
2934 if (TREE_ADDRESSABLE (parm))
2935 {
2936 rtx rmem, imem;
2937 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2938
2939 /* split_complex_arg put the real and imag parts in
2940 pseudos. Move them to memory. */
2941 tmp = assign_stack_local (DECL_MODE (parm), size,
2942 TYPE_ALIGN (TREE_TYPE (parm)));
2943 set_mem_attributes (tmp, parm, 1);
2944 rmem = adjust_address_nv (tmp, inner, 0);
2945 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2946 push_to_sequence2 (all->first_conversion_insn,
2947 all->last_conversion_insn);
2948 emit_move_insn (rmem, real);
2949 emit_move_insn (imem, imag);
2950 all->first_conversion_insn = get_insns ();
2951 all->last_conversion_insn = get_last_insn ();
2952 end_sequence ();
2953 }
2954 else
2955 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2956 SET_DECL_RTL (parm, tmp);
2957
2958 real = DECL_INCOMING_RTL (fnargs);
2959 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2960 if (inner != GET_MODE (real))
2961 {
2962 real = gen_lowpart_SUBREG (inner, real);
2963 imag = gen_lowpart_SUBREG (inner, imag);
2964 }
2965 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2966 set_decl_incoming_rtl (parm, tmp, false);
2967 fnargs = TREE_CHAIN (fnargs);
2968 }
2969 else
2970 {
2971 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2972 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
2973
2974 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2975 instead of the copy of decl, i.e. FNARGS. */
2976 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2977 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2978 }
2979
2980 fnargs = TREE_CHAIN (fnargs);
2981 }
2982 }
2983
2984 /* Assign RTL expressions to the function's parameters. This may involve
2985 copying them into registers and using those registers as the DECL_RTL. */
2986
2987 static void
2988 assign_parms (tree fndecl)
2989 {
2990 struct assign_parm_data_all all;
2991 tree fnargs, parm;
2992
2993 crtl->args.internal_arg_pointer
2994 = targetm.calls.internal_arg_pointer ();
2995
2996 assign_parms_initialize_all (&all);
2997 fnargs = assign_parms_augmented_arg_list (&all);
2998
2999 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3000 {
3001 struct assign_parm_data_one data;
3002
3003 /* Extract the type of PARM; adjust it according to ABI. */
3004 assign_parm_find_data_types (&all, parm, &data);
3005
3006 /* Early out for errors and void parameters. */
3007 if (data.passed_mode == VOIDmode)
3008 {
3009 SET_DECL_RTL (parm, const0_rtx);
3010 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3011 continue;
3012 }
3013
3014 /* Estimate stack alignment from parameter alignment. */
3015 if (SUPPORTS_STACK_ALIGNMENT)
3016 {
3017 unsigned int align = FUNCTION_ARG_BOUNDARY (data.promoted_mode,
3018 data.passed_type);
3019 if (TYPE_ALIGN (data.nominal_type) > align)
3020 align = TYPE_ALIGN (data.passed_type);
3021 if (crtl->stack_alignment_estimated < align)
3022 {
3023 gcc_assert (!crtl->stack_realign_processed);
3024 crtl->stack_alignment_estimated = align;
3025 }
3026 }
3027
3028 if (cfun->stdarg && !TREE_CHAIN (parm))
3029 assign_parms_setup_varargs (&all, &data, false);
3030
3031 /* Find out where the parameter arrives in this function. */
3032 assign_parm_find_entry_rtl (&all, &data);
3033
3034 /* Find out where stack space for this parameter might be. */
3035 if (assign_parm_is_stack_parm (&all, &data))
3036 {
3037 assign_parm_find_stack_rtl (parm, &data);
3038 assign_parm_adjust_entry_rtl (&data);
3039 }
3040
3041 /* Record permanently how this parm was passed. */
3042 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
3043
3044 /* Update info on where next arg arrives in registers. */
3045 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3046 data.passed_type, data.named_arg);
3047
3048 assign_parm_adjust_stack_rtl (&data);
3049
3050 if (assign_parm_setup_block_p (&data))
3051 assign_parm_setup_block (&all, parm, &data);
3052 else if (data.passed_pointer || use_register_for_decl (parm))
3053 assign_parm_setup_reg (&all, parm, &data);
3054 else
3055 assign_parm_setup_stack (&all, parm, &data);
3056 }
3057
3058 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3059 assign_parms_unsplit_complex (&all, fnargs);
3060
3061 /* Output all parameter conversion instructions (possibly including calls)
3062 now that all parameters have been copied out of hard registers. */
3063 emit_insn (all.first_conversion_insn);
3064
3065 /* Estimate reload stack alignment from scalar return mode. */
3066 if (SUPPORTS_STACK_ALIGNMENT)
3067 {
3068 if (DECL_RESULT (fndecl))
3069 {
3070 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3071 enum machine_mode mode = TYPE_MODE (type);
3072
3073 if (mode != BLKmode
3074 && mode != VOIDmode
3075 && !AGGREGATE_TYPE_P (type))
3076 {
3077 unsigned int align = GET_MODE_ALIGNMENT (mode);
3078 if (crtl->stack_alignment_estimated < align)
3079 {
3080 gcc_assert (!crtl->stack_realign_processed);
3081 crtl->stack_alignment_estimated = align;
3082 }
3083 }
3084 }
3085 }
3086
3087 /* If we are receiving a struct value address as the first argument, set up
3088 the RTL for the function result. As this might require code to convert
3089 the transmitted address to Pmode, we do this here to ensure that possible
3090 preliminary conversions of the address have been emitted already. */
3091 if (all.function_result_decl)
3092 {
3093 tree result = DECL_RESULT (current_function_decl);
3094 rtx addr = DECL_RTL (all.function_result_decl);
3095 rtx x;
3096
3097 if (DECL_BY_REFERENCE (result))
3098 x = addr;
3099 else
3100 {
3101 addr = convert_memory_address (Pmode, addr);
3102 x = gen_rtx_MEM (DECL_MODE (result), addr);
3103 set_mem_attributes (x, result, 1);
3104 }
3105 SET_DECL_RTL (result, x);
3106 }
3107
3108 /* We have aligned all the args, so add space for the pretend args. */
3109 crtl->args.pretend_args_size = all.pretend_args_size;
3110 all.stack_args_size.constant += all.extra_pretend_bytes;
3111 crtl->args.size = all.stack_args_size.constant;
3112
3113 /* Adjust function incoming argument size for alignment and
3114 minimum length. */
3115
3116 #ifdef REG_PARM_STACK_SPACE
3117 crtl->args.size = MAX (crtl->args.size,
3118 REG_PARM_STACK_SPACE (fndecl));
3119 #endif
3120
3121 crtl->args.size = CEIL_ROUND (crtl->args.size,
3122 PARM_BOUNDARY / BITS_PER_UNIT);
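/* E.g., assuming 14 bytes of incoming arguments and a 32-bit
   PARM_BOUNDARY, the size is rounded up to CEIL_ROUND (14, 4) == 16.  */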
3123
3124 #ifdef ARGS_GROW_DOWNWARD
3125 crtl->args.arg_offset_rtx
3126 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3127 : expand_expr (size_diffop (all.stack_args_size.var,
3128 size_int (-all.stack_args_size.constant)),
3129 NULL_RTX, VOIDmode, 0));
3130 #else
3131 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3132 #endif
3133
3134 /* See how many bytes, if any, of its args a function should try to pop
3135 on return. */
3136
3137 crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3138 crtl->args.size);
3139
3140 /* For a stdarg.h function, save info about the
3141 regs and stack space used by the named args. */
3142
3143 crtl->args.info = all.args_so_far;
3144
3145 /* Set the rtx used for the function return value. Put this in its
3146 own variable so any optimizers that need this information don't have
3147 to include tree.h. Do this here so it gets done when an inlined
3148 function gets output. */
3149
3150 crtl->return_rtx
3151 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3152 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3153
3154 /* If scalar return value was computed in a pseudo-reg, or was a named
3155 return value that got dumped to the stack, copy that to the hard
3156 return register. */
3157 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3158 {
3159 tree decl_result = DECL_RESULT (fndecl);
3160 rtx decl_rtl = DECL_RTL (decl_result);
3161
3162 if (REG_P (decl_rtl)
3163 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3164 : DECL_REGISTER (decl_result))
3165 {
3166 rtx real_decl_rtl;
3167
3168 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3169 fndecl, true);
3170 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3171 /* The delay slot scheduler assumes that crtl->return_rtx
3172 holds the hard register containing the return value, not a
3173 temporary pseudo. */
3174 crtl->return_rtx = real_decl_rtl;
3175 }
3176 }
3177 }
3178
3179 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3180 For all seen types, gimplify their sizes. */
3181
3182 static tree
3183 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3184 {
3185 tree t = *tp;
3186
3187 *walk_subtrees = 0;
3188 if (TYPE_P (t))
3189 {
3190 if (POINTER_TYPE_P (t))
3191 *walk_subtrees = 1;
3192 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3193 && !TYPE_SIZES_GIMPLIFIED (t))
3194 {
3195 gimplify_type_sizes (t, (gimple_seq *) data);
3196 *walk_subtrees = 1;
3197 }
3198 }
3199
3200 return NULL;
3201 }
3202
3203 /* Gimplify the parameter list for current_function_decl. This involves
3204 evaluating SAVE_EXPRs of variable sized parameters and generating code
3205 to implement callee-copies reference parameters. Returns a sequence of
3206 statements to add to the beginning of the function. */
3207
3208 gimple_seq
3209 gimplify_parameters (void)
3210 {
3211 struct assign_parm_data_all all;
3212 tree fnargs, parm;
3213 gimple_seq stmts = NULL;
3214
3215 assign_parms_initialize_all (&all);
3216 fnargs = assign_parms_augmented_arg_list (&all);
3217
3218 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3219 {
3220 struct assign_parm_data_one data;
3221
3222 /* Extract the type of PARM; adjust it according to ABI. */
3223 assign_parm_find_data_types (&all, parm, &data);
3224
3225 /* Early out for errors and void parameters. */
3226 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3227 continue;
3228
3229 /* Update info on where next arg arrives in registers. */
3230 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3231 data.passed_type, data.named_arg);
3232
3233 /* ??? Once upon a time variable_size stuffed parameter list
3234 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3235 turned out to be less than manageable in the gimple world.
3236 Now we have to hunt them down ourselves. */
3237 walk_tree_without_duplicates (&data.passed_type,
3238 gimplify_parm_type, &stmts);
3239
3240 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3241 {
3242 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3243 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3244 }
3245
3246 if (data.passed_pointer)
3247 {
3248 tree type = TREE_TYPE (data.passed_type);
3249 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3250 type, data.named_arg))
3251 {
3252 tree local, t;
3253
3254 /* For constant-sized objects, this is trivial; for
3255 variable-sized objects, we have to play games. */
3256 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3257 && !(flag_stack_check == GENERIC_STACK_CHECK
3258 && compare_tree_int (DECL_SIZE_UNIT (parm),
3259 STACK_CHECK_MAX_VAR_SIZE) > 0))
3260 {
3261 local = create_tmp_var (type, get_name (parm));
3262 DECL_IGNORED_P (local) = 0;
3263 }
3264 else
3265 {
3266 tree ptr_type, addr;
3267
3268 ptr_type = build_pointer_type (type);
3269 addr = create_tmp_var (ptr_type, get_name (parm));
3270 DECL_IGNORED_P (addr) = 0;
3271 local = build_fold_indirect_ref (addr);
3272
3273 t = built_in_decls[BUILT_IN_ALLOCA];
3274 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3275 t = fold_convert (ptr_type, t);
3276 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3277 gimplify_and_add (t, &stmts);
3278 }
3279
3280 gimplify_assign (local, parm, &stmts);
3281
3282 SET_DECL_VALUE_EXPR (parm, local);
3283 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3284 }
3285 }
3286 }
3287
3288 return stmts;
3289 }
3290 \f
3291 /* Compute the size and offset from the start of the stacked arguments for a
3292 parm passed in mode PASSED_MODE and with type TYPE.
3293
3294 INITIAL_OFFSET_PTR points to the current offset into the stacked
3295 arguments.
3296
3297 The starting offset and size for this parm are returned in
3298 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3299 nonzero, the offset is that of the stack slot, which is returned in
3300 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3301 padding required from the initial offset ptr to the stack slot.
3302
3303 IN_REGS is nonzero if the argument will be passed in registers. It will
3304 never be set if REG_PARM_STACK_SPACE is not defined.
3305
3306 FNDECL is the function in which the argument was defined.
3307
3308 There are two types of rounding that are done. The first, controlled by
3309 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3310 list to be aligned to the specific boundary (in bits). This rounding
3311 affects the initial and starting offsets, but not the argument size.
3312
3313 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3314 optionally rounds the size of the parm to PARM_BOUNDARY. The
3315 initial offset is not affected by this rounding, while the size always
3316 is and the starting offset may be. */
3317
3318 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3319 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3320 callers pass in the total size of args so far as
3321 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3322
3323 void
3324 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3325 int partial, tree fndecl ATTRIBUTE_UNUSED,
3326 struct args_size *initial_offset_ptr,
3327 struct locate_and_pad_arg_data *locate)
3328 {
3329 tree sizetree;
3330 enum direction where_pad;
3331 unsigned int boundary;
3332 int reg_parm_stack_space = 0;
3333 int part_size_in_regs;
3334
3335 #ifdef REG_PARM_STACK_SPACE
3336 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3337
3338 /* If we have found a stack parm before we reach the end of the
3339 area reserved for registers, skip that area. */
3340 if (! in_regs)
3341 {
3342 if (reg_parm_stack_space > 0)
3343 {
3344 if (initial_offset_ptr->var)
3345 {
3346 initial_offset_ptr->var
3347 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3348 ssize_int (reg_parm_stack_space));
3349 initial_offset_ptr->constant = 0;
3350 }
3351 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3352 initial_offset_ptr->constant = reg_parm_stack_space;
3353 }
3354 }
3355 #endif /* REG_PARM_STACK_SPACE */
3356
3357 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3358
3359 sizetree
3360 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3361 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3362 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3363 locate->where_pad = where_pad;
3364
3365 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3366 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3367 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3368
3369 locate->boundary = boundary;
3370
3371 if (SUPPORTS_STACK_ALIGNMENT)
3372 {
3373 /* stack_alignment_estimated can't change after stack has been
3374 realigned. */
3375 if (crtl->stack_alignment_estimated < boundary)
3376 {
3377 if (!crtl->stack_realign_processed)
3378 crtl->stack_alignment_estimated = boundary;
3379 else
3380 {
3381 /* If stack is realigned and stack alignment value
3382 hasn't been finalized, it is OK not to increase
3383 stack_alignment_estimated. The bigger alignment
3384 requirement is recorded in stack_alignment_needed
3385 below. */
3386 gcc_assert (!crtl->stack_realign_finalized
3387 && crtl->stack_realign_needed);
3388 }
3389 }
3390 }
3391
3392 /* Remember if the outgoing parameter requires extra alignment on the
3393 calling function side. */
3394 if (crtl->stack_alignment_needed < boundary)
3395 crtl->stack_alignment_needed = boundary;
3396 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
3397 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
3398 if (crtl->preferred_stack_boundary < boundary)
3399 crtl->preferred_stack_boundary = boundary;
3400
3401 #ifdef ARGS_GROW_DOWNWARD
3402 locate->slot_offset.constant = -initial_offset_ptr->constant;
3403 if (initial_offset_ptr->var)
3404 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3405 initial_offset_ptr->var);
3406
3407 {
3408 tree s2 = sizetree;
3409 if (where_pad != none
3410 && (!host_integerp (sizetree, 1)
3411 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3412 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3413 SUB_PARM_SIZE (locate->slot_offset, s2);
3414 }
3415
3416 locate->slot_offset.constant += part_size_in_regs;
3417
3418 if (!in_regs
3419 #ifdef REG_PARM_STACK_SPACE
3420 || REG_PARM_STACK_SPACE (fndecl) > 0
3421 #endif
3422 )
3423 pad_to_arg_alignment (&locate->slot_offset, boundary,
3424 &locate->alignment_pad);
3425
3426 locate->size.constant = (-initial_offset_ptr->constant
3427 - locate->slot_offset.constant);
3428 if (initial_offset_ptr->var)
3429 locate->size.var = size_binop (MINUS_EXPR,
3430 size_binop (MINUS_EXPR,
3431 ssize_int (0),
3432 initial_offset_ptr->var),
3433 locate->slot_offset.var);
3434
3435 /* Pad_below needs the pre-rounded size to know how much to pad
3436 below. */
3437 locate->offset = locate->slot_offset;
3438 if (where_pad == downward)
3439 pad_below (&locate->offset, passed_mode, sizetree);
3440
3441 #else /* !ARGS_GROW_DOWNWARD */
3442 if (!in_regs
3443 #ifdef REG_PARM_STACK_SPACE
3444 || REG_PARM_STACK_SPACE (fndecl) > 0
3445 #endif
3446 )
3447 pad_to_arg_alignment (initial_offset_ptr, boundary,
3448 &locate->alignment_pad);
3449 locate->slot_offset = *initial_offset_ptr;
3450
3451 #ifdef PUSH_ROUNDING
3452 if (passed_mode != BLKmode)
3453 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3454 #endif
3455
3456 /* Pad_below needs the pre-rounded size to know how much to pad below
3457 so this must be done before rounding up. */
3458 locate->offset = locate->slot_offset;
3459 if (where_pad == downward)
3460 pad_below (&locate->offset, passed_mode, sizetree);
3461
3462 if (where_pad != none
3463 && (!host_integerp (sizetree, 1)
3464 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3465 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3466
3467 ADD_PARM_SIZE (locate->size, sizetree);
3468
3469 locate->size.constant -= part_size_in_regs;
3470 #endif /* ARGS_GROW_DOWNWARD */
3471 }
3472
3473 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3474 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
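/* For example, assuming BITS_PER_UNIT == 8, PARM_BOUNDARY == 32,
   STACK_POINTER_OFFSET == 0 and upward-growing arguments: rounding a
   constant offset of 12 bytes to a 64-bit BOUNDARY uses
   CEIL_ROUND (12, 8) == 16, so the offset becomes 16 and ALIGNMENT_PAD
   records the 4 bytes of padding that were inserted.  */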
3475
3476 static void
3477 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3478 struct args_size *alignment_pad)
3479 {
3480 tree save_var = NULL_TREE;
3481 HOST_WIDE_INT save_constant = 0;
3482 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3483 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3484
3485 #ifdef SPARC_STACK_BOUNDARY_HACK
3486 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3487 the real alignment of %sp. However, when it does this, the
3488 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3489 if (SPARC_STACK_BOUNDARY_HACK)
3490 sp_offset = 0;
3491 #endif
3492
3493 if (boundary > PARM_BOUNDARY)
3494 {
3495 save_var = offset_ptr->var;
3496 save_constant = offset_ptr->constant;
3497 }
3498
3499 alignment_pad->var = NULL_TREE;
3500 alignment_pad->constant = 0;
3501
3502 if (boundary > BITS_PER_UNIT)
3503 {
3504 if (offset_ptr->var)
3505 {
3506 tree sp_offset_tree = ssize_int (sp_offset);
3507 tree offset = size_binop (PLUS_EXPR,
3508 ARGS_SIZE_TREE (*offset_ptr),
3509 sp_offset_tree);
3510 #ifdef ARGS_GROW_DOWNWARD
3511 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3512 #else
3513 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3514 #endif
3515
3516 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3517 /* ARGS_SIZE_TREE includes constant term. */
3518 offset_ptr->constant = 0;
3519 if (boundary > PARM_BOUNDARY)
3520 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3521 save_var);
3522 }
3523 else
3524 {
3525 offset_ptr->constant = -sp_offset +
3526 #ifdef ARGS_GROW_DOWNWARD
3527 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3528 #else
3529 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3530 #endif
3531 if (boundary > PARM_BOUNDARY)
3532 alignment_pad->constant = offset_ptr->constant - save_constant;
3533 }
3534 }
3535 }
3536
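/* Adjust *OFFSET_PTR for the padding that goes below an argument of
   mode PASSED_MODE (or of size SIZETREE when PASSED_MODE is BLKmode)
   when the argument is padded downward to a multiple of PARM_BOUNDARY.
   For example, assuming PARM_BOUNDARY == 64 and BITS_PER_UNIT == 8, a
   4-byte SImode argument has its offset advanced by 8 - 4 == 4 bytes.  */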
3537 static void
3538 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3539 {
3540 if (passed_mode != BLKmode)
3541 {
3542 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3543 offset_ptr->constant
3544 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3545 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3546 - GET_MODE_SIZE (passed_mode));
3547 }
3548 else
3549 {
3550 if (TREE_CODE (sizetree) != INTEGER_CST
3551 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3552 {
3553 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3554 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3555 /* Add it in. */
3556 ADD_PARM_SIZE (*offset_ptr, s2);
3557 SUB_PARM_SIZE (*offset_ptr, sizetree);
3558 }
3559 }
3560 }
3561 \f
3562
3563 /* True if register REGNO was alive at a place where `setjmp' was
3564 called and was set more than once or is an argument. Such regs may
3565 be clobbered by `longjmp'. */
3566
3567 static bool
3568 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3569 {
3570 /* There appear to be cases where some local vars never reach the
3571 backend but have bogus regnos. */
3572 if (regno >= max_reg_num ())
3573 return false;
3574
3575 return ((REG_N_SETS (regno) > 1
3576 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3577 && REGNO_REG_SET_P (setjmp_crosses, regno));
3578 }
3579
3580 /* Walk the tree of blocks describing the binding levels within a
3581    function and warn about variables that might be killed by setjmp or
3582    vfork.  This is done after flow analysis, but before register
3583    allocation, since register allocation replaces the pseudo-regs
3584    with hard regs.  */
3585
3586 static void
3587 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3588 {
3589 tree decl, sub;
3590
3591 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3592 {
3593 if (TREE_CODE (decl) == VAR_DECL
3594 && DECL_RTL_SET_P (decl)
3595 && REG_P (DECL_RTL (decl))
3596 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3597 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3598 " %<longjmp%> or %<vfork%>", decl);
3599 }
3600
3601 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3602 setjmp_vars_warning (setjmp_crosses, sub);
3603 }
3604
3605 /* Do the appropriate part of setjmp_vars_warning
3606 but for arguments instead of local variables. */
3607
3608 static void
3609 setjmp_args_warning (bitmap setjmp_crosses)
3610 {
3611 tree decl;
3612 for (decl = DECL_ARGUMENTS (current_function_decl);
3613 decl; decl = TREE_CHAIN (decl))
3614 if (DECL_RTL (decl) != 0
3615 && REG_P (DECL_RTL (decl))
3616 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3617 warning (OPT_Wclobbered,
3618 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3619 decl);
3620 }
3621
3622 /* Generate warning messages for variables live across setjmp. */
3623
3624 void
3625 generate_setjmp_warnings (void)
3626 {
3627 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3628
3629 if (n_basic_blocks == NUM_FIXED_BLOCKS
3630 || bitmap_empty_p (setjmp_crosses))
3631 return;
3632
3633 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3634 setjmp_args_warning (setjmp_crosses);
3635 }
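/* An illustrative (hypothetical) example of the kind of code the
   warnings above are about, assuming buf is a jmp_buf and g may call
   longjmp (buf, 1):

     int f (void)
     {
       int i = 0;
       if (setjmp (buf) == 0)
         {
           i = 1;
           g ();
         }
       return i;
     }

   Since i crosses the setjmp call and is set more than once, it may be
   kept in a register whose pre-setjmp value longjmp restores, so it is
   reported by -Wclobbered.  */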
3636
3637 \f
3638 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3639 and create duplicate blocks. */
3640 /* ??? Need an option to either create block fragments or to create
3641 abstract origin duplicates of a source block. It really depends
3642 on what optimization has been performed. */
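/* An illustrative sketch of when this happens: if optimization
   duplicates or splits the insns of a scope so that its BLOCK is
   reached by a second NOTE_INSN_BLOCK_BEG, reorder_blocks_1 below
   copies the BLOCK and links the copy to the original through
   BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN, so that each
   contiguous range of insns gets its own block fragment.  */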
3643
3644 void
3645 reorder_blocks (void)
3646 {
3647 tree block = DECL_INITIAL (current_function_decl);
3648 VEC(tree,heap) *block_stack;
3649
3650 if (block == NULL_TREE)
3651 return;
3652
3653 block_stack = VEC_alloc (tree, heap, 10);
3654
3655 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3656 clear_block_marks (block);
3657
3658 /* Prune the old trees away, so that they don't get in the way. */
3659 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3660 BLOCK_CHAIN (block) = NULL_TREE;
3661
3662 /* Recreate the block tree from the note nesting. */
3663 reorder_blocks_1 (get_insns (), block, &block_stack);
3664 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3665
3666 VEC_free (tree, heap, block_stack);
3667 }
3668
3669 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3670
3671 void
3672 clear_block_marks (tree block)
3673 {
3674 while (block)
3675 {
3676 TREE_ASM_WRITTEN (block) = 0;
3677 clear_block_marks (BLOCK_SUBBLOCKS (block));
3678 block = BLOCK_CHAIN (block);
3679 }
3680 }
3681
3682 static void
3683 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3684 {
3685 rtx insn;
3686
3687 for (insn = insns; insn; insn = NEXT_INSN (insn))
3688 {
3689 if (NOTE_P (insn))
3690 {
3691 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3692 {
3693 tree block = NOTE_BLOCK (insn);
3694 tree origin;
3695
3696 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3697 ? BLOCK_FRAGMENT_ORIGIN (block)
3698 : block);
3699
3700 /* If we have seen this block before, that means it now
3701 spans multiple address regions. Create a new fragment. */
3702 if (TREE_ASM_WRITTEN (block))
3703 {
3704 tree new_block = copy_node (block);
3705
3706 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3707 BLOCK_FRAGMENT_CHAIN (new_block)
3708 = BLOCK_FRAGMENT_CHAIN (origin);
3709 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3710
3711 NOTE_BLOCK (insn) = new_block;
3712 block = new_block;
3713 }
3714
3715 BLOCK_SUBBLOCKS (block) = 0;
3716 TREE_ASM_WRITTEN (block) = 1;
3717 /* When there's only one block for the entire function,
3718 current_block == block and we mustn't do this, it
3719 will cause infinite recursion. */
3720 if (block != current_block)
3721 {
3722 if (block != origin)
3723 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3724
3725 BLOCK_SUPERCONTEXT (block) = current_block;
3726 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3727 BLOCK_SUBBLOCKS (current_block) = block;
3728 current_block = origin;
3729 }
3730 VEC_safe_push (tree, heap, *p_block_stack, block);
3731 }
3732 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3733 {
3734 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3735 BLOCK_SUBBLOCKS (current_block)
3736 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3737 current_block = BLOCK_SUPERCONTEXT (current_block);
3738 }
3739 }
3740 }
3741 }
3742
3743 /* Reverse the order of elements in the chain T of blocks,
3744 and return the new head of the chain (old last element). */
3745
3746 tree
3747 blocks_nreverse (tree t)
3748 {
3749 tree prev = 0, decl, next;
3750 for (decl = t; decl; decl = next)
3751 {
3752 next = BLOCK_CHAIN (decl);
3753 BLOCK_CHAIN (decl) = prev;
3754 prev = decl;
3755 }
3756 return prev;
3757 }
3758
3759 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3760 non-NULL, list them all into VECTOR, in a depth-first preorder
3761 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3762 blocks. */
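/* For example, for a block A whose subblocks are B and C, where B has
   a single subblock D, the preorder listing is A, B, D, C and the
   value returned is 4.  */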
3763
3764 static int
3765 all_blocks (tree block, tree *vector)
3766 {
3767 int n_blocks = 0;
3768
3769 while (block)
3770 {
3771 TREE_ASM_WRITTEN (block) = 0;
3772
3773 /* Record this block. */
3774 if (vector)
3775 vector[n_blocks] = block;
3776
3777 ++n_blocks;
3778
3779 /* Record the subblocks, and their subblocks... */
3780 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3781 vector ? vector + n_blocks : 0);
3782 block = BLOCK_CHAIN (block);
3783 }
3784
3785 return n_blocks;
3786 }
3787
3788 /* Return a vector containing all the blocks rooted at BLOCK. The
3789 number of elements in the vector is stored in N_BLOCKS_P. The
3790 vector is dynamically allocated; it is the caller's responsibility
3791 to call `free' on the pointer returned. */
3792
3793 static tree *
3794 get_block_vector (tree block, int *n_blocks_p)
3795 {
3796 tree *block_vector;
3797
3798 *n_blocks_p = all_blocks (block, NULL);
3799 block_vector = XNEWVEC (tree, *n_blocks_p);
3800 all_blocks (block, block_vector);
3801
3802 return block_vector;
3803 }
3804
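/* The next value to hand out via BLOCK_NUMBER; see number_blocks.  */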
3805 static GTY(()) int next_block_index = 2;
3806
3807 /* Set BLOCK_NUMBER for all the blocks in FN. */
3808
3809 void
3810 number_blocks (tree fn)
3811 {
3812 int i;
3813 int n_blocks;
3814 tree *block_vector;
3815
3816 /* For SDB and XCOFF debugging output, we start numbering the blocks
3817 from 1 within each function, rather than keeping a running
3818 count. */
3819 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3820 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3821 next_block_index = 1;
3822 #endif
3823
3824 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3825
3826 /* The top-level BLOCK isn't numbered at all. */
3827 for (i = 1; i < n_blocks; ++i)
3828 /* We number the blocks from two. */
3829 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3830
3831 free (block_vector);
3832
3833 return;
3834 }
3835
3836 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3837
3838 tree
3839 debug_find_var_in_block_tree (tree var, tree block)
3840 {
3841 tree t;
3842
3843 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3844 if (t == var)
3845 return block;
3846
3847 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3848 {
3849 tree ret = debug_find_var_in_block_tree (var, t);
3850 if (ret)
3851 return ret;
3852 }
3853
3854 return NULL_TREE;
3855 }
3856 \f
3857 /* Keep track of whether we're in a dummy function context. If we are,
3858 we don't want to invoke the set_current_function hook, because we'll
3859 get into trouble if the hook calls target_reinit () recursively or
3860 when the initial initialization is not yet complete. */
3861
3862 static bool in_dummy_function;
3863
3864 /* Invoke the target hook when setting cfun. Update the optimization options
3865 if the function uses different options than the default. */
3866
3867 static void
3868 invoke_set_current_function_hook (tree fndecl)
3869 {
3870 if (!in_dummy_function)
3871 {
3872 tree opts = ((fndecl)
3873 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
3874 : optimization_default_node);
3875
3876 if (!opts)
3877 opts = optimization_default_node;
3878
3879 /* Change optimization options if needed. */
3880 if (optimization_current_node != opts)
3881 {
3882 optimization_current_node = opts;
3883 cl_optimization_restore (TREE_OPTIMIZATION (opts));
3884 }
3885
3886 targetm.set_current_function (fndecl);
3887 }
3888 }
3889
3890 /* cfun should never be set directly; use this function. */
3891
3892 void
3893 set_cfun (struct function *new_cfun)
3894 {
3895 if (cfun != new_cfun)
3896 {
3897 cfun = new_cfun;
3898 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
3899 }
3900 }
3901
3902 /* Keep track of the cfun stack. */
3903
3904 typedef struct function *function_p;
3905
3906 DEF_VEC_P(function_p);
3907 DEF_VEC_ALLOC_P(function_p,heap);
3908
3909 /* Initialized with NOGC, making this poisonous to the garbage collector. */
3910
3911 static VEC(function_p,heap) *cfun_stack;
3912
3913 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
3914
3915 void
3916 push_cfun (struct function *new_cfun)
3917 {
3918 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3919 set_cfun (new_cfun);
3920 }
3921
3922 /* Pop cfun from the stack. */
3923
3924 void
3925 pop_cfun (void)
3926 {
3927 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
3928 set_cfun (new_cfun);
3929 }
3930
3931 /* Return the current value of funcdef_no and increment it.  */
3932 int
3933 get_next_funcdef_no (void)
3934 {
3935 return funcdef_no++;
3936 }
3937
3938 /* Allocate a function structure for FNDECL and set its contents
3939 to the defaults. Set cfun to the newly-allocated object.
3940 Some of the helper functions invoked during initialization assume
3941 that cfun has already been set. Therefore, assign the new object
3942 directly into cfun and invoke the back end hook explicitly at the
3943 very end, rather than initializing a temporary and calling set_cfun
3944 on it.
3945
3946 ABSTRACT_P is true if this is a function that will never be seen by
3947 the middle-end. Such functions are front-end concepts (like C++
3948 function templates) that do not correspond directly to functions
3949 placed in object files. */
3950
3951 void
3952 allocate_struct_function (tree fndecl, bool abstract_p)
3953 {
3954 tree result;
3955 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3956
3957 cfun = GGC_CNEW (struct function);
3958
3959 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3960
3961 init_eh_for_function ();
3962
3963 if (init_machine_status)
3964 cfun->machine = (*init_machine_status) ();
3965
3966 #ifdef OVERRIDE_ABI_FORMAT
3967 OVERRIDE_ABI_FORMAT (fndecl);
3968 #endif
3969
3970 if (fndecl != NULL_TREE)
3971 {
3972 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3973 cfun->decl = fndecl;
3974 current_function_funcdef_no = get_next_funcdef_no ();
3975
3976 result = DECL_RESULT (fndecl);
3977 if (!abstract_p && aggregate_value_p (result, fndecl))
3978 {
3979 #ifdef PCC_STATIC_STRUCT_RETURN
3980 cfun->returns_pcc_struct = 1;
3981 #endif
3982 cfun->returns_struct = 1;
3983 }
3984
3985 cfun->stdarg
3986 = (fntype
3987 && TYPE_ARG_TYPES (fntype) != 0
3988 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3989 != void_type_node));
3990
3991 /* Assume all registers in stdarg functions need to be saved. */
3992 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3993 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3994 }
3995
3996 invoke_set_current_function_hook (fndecl);
3997 }
3998
3999 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4000 instead of just setting it. */
4001
4002 void
4003 push_struct_function (tree fndecl)
4004 {
4005 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4006 allocate_struct_function (fndecl, false);
4007 }
4008
4009 /* Reset cfun and other non-struct-function variables to defaults as
4010 appropriate for emitting rtl at the start of a function. */
4011
4012 static void
4013 prepare_function_start (void)
4014 {
4015 gcc_assert (!crtl->emit.x_last_insn);
4016 init_emit ();
4017 init_varasm_status ();
4018 init_expr ();
4019 default_rtl_profile ();
4020
4021 cse_not_expected = ! optimize;
4022
4023 /* Caller save not needed yet. */
4024 caller_save_needed = 0;
4025
4026 /* We haven't done register allocation yet. */
4027 reg_renumber = 0;
4028
4029 /* Indicate that we have not instantiated virtual registers yet. */
4030 virtuals_instantiated = 0;
4031
4032 /* Indicate that we want CONCATs now. */
4033 generating_concat_p = 1;
4034
4035 /* Indicate we have no need of a frame pointer yet. */
4036 frame_pointer_needed = 0;
4037 }
4038
4039 /* Initialize the rtl expansion mechanism so that we can do simple things
4040 like generate sequences. This is used to provide a context during global
4041 initialization of some passes. You must call expand_dummy_function_end
4042 to exit this context. */
4043
4044 void
4045 init_dummy_function_start (void)
4046 {
4047 gcc_assert (!in_dummy_function);
4048 in_dummy_function = true;
4049 push_struct_function (NULL_TREE);
4050 prepare_function_start ();
4051 }
4052
4053 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4054 and initialize static variables for generating RTL for the statements
4055 of the function. */
4056
4057 void
4058 init_function_start (tree subr)
4059 {
4060 if (subr && DECL_STRUCT_FUNCTION (subr))
4061 set_cfun (DECL_STRUCT_FUNCTION (subr));
4062 else
4063 allocate_struct_function (subr, false);
4064 prepare_function_start ();
4065
4066 /* Warn if this value is an aggregate type,
4067 regardless of which calling convention we are using for it. */
4068 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4069 warning (OPT_Waggregate_return, "function returns an aggregate");
4070 }
4071
4072 /* Make sure all values used by the optimization passes have sane
4073 defaults. */
4074 unsigned int
4075 init_function_for_compilation (void)
4076 {
4077 reg_renumber = 0;
4078
4079 /* No prologue/epilogue insns yet. Make sure that these vectors are
4080 empty. */
4081 gcc_assert (VEC_length (int, prologue) == 0);
4082 gcc_assert (VEC_length (int, epilogue) == 0);
4083 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
4084 return 0;
4085 }
4086
4087 struct rtl_opt_pass pass_init_function =
4088 {
4089 {
4090 RTL_PASS,
4091 NULL, /* name */
4092 NULL, /* gate */
4093 init_function_for_compilation, /* execute */
4094 NULL, /* sub */
4095 NULL, /* next */
4096 0, /* static_pass_number */
4097 0, /* tv_id */
4098 0, /* properties_required */
4099 0, /* properties_provided */
4100 0, /* properties_destroyed */
4101 0, /* todo_flags_start */
4102 0 /* todo_flags_finish */
4103 }
4104 };
4105
4106
4107 void
4108 expand_main_function (void)
4109 {
4110 #if (defined(INVOKE__main) \
4111 || (!defined(HAS_INIT_SECTION) \
4112 && !defined(INIT_SECTION_ASM_OP) \
4113 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4114 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4115 #endif
4116 }
4117 \f
4118 /* Expand code to initialize the stack_protect_guard. This is invoked at
4119 the beginning of a function to be protected. */
4120
4121 #ifndef HAVE_stack_protect_set
4122 # define HAVE_stack_protect_set 0
4123 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4124 #endif
4125
4126 void
4127 stack_protect_prologue (void)
4128 {
4129 tree guard_decl = targetm.stack_protect_guard ();
4130 rtx x, y;
4131
4132 /* Avoid expand_expr here, because we don't want guard_decl pulled
4133 into registers unless absolutely necessary. And we know that
4134 crtl->stack_protect_guard is a local stack slot, so this skips
4135 all the fluff. */
4136 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4137 y = validize_mem (DECL_RTL (guard_decl));
4138
4139 /* Allow the target to copy from Y to X without leaking Y into a
4140 register. */
4141 if (HAVE_stack_protect_set)
4142 {
4143 rtx insn = gen_stack_protect_set (x, y);
4144 if (insn)
4145 {
4146 emit_insn (insn);
4147 return;
4148 }
4149 }
4150
4151 /* Otherwise do a straight move. */
4152 emit_move_insn (x, y);
4153 }
4154
4155 /* Expand code to verify the stack_protect_guard. This is invoked at
4156 the end of a function to be protected. */
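/* Conceptually, the code emitted below amounts to (an illustrative
   sketch, not the exact RTL; the failure routine is whatever
   targetm.stack_protect_fail expands to, typically __stack_chk_fail):

     if (guard_copy_on_stack == guard_value)
       goto ok;
     __stack_chk_fail ();
   ok:
     ...normal function exit...  */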
4157
4158 #ifndef HAVE_stack_protect_test
4159 # define HAVE_stack_protect_test 0
4160 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4161 #endif
4162
4163 void
4164 stack_protect_epilogue (void)
4165 {
4166 tree guard_decl = targetm.stack_protect_guard ();
4167 rtx label = gen_label_rtx ();
4168 rtx x, y, tmp;
4169
4170 /* Avoid expand_expr here, because we don't want guard_decl pulled
4171 into registers unless absolutely necessary. And we know that
4172 crtl->stack_protect_guard is a local stack slot, so this skips
4173 all the fluff. */
4174 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4175 y = validize_mem (DECL_RTL (guard_decl));
4176
4177 /* Allow the target to compare Y with X without leaking either into
4178 a register. */
4179 switch (HAVE_stack_protect_test != 0)
4180 {
4181 case 1:
4182 tmp = gen_stack_protect_test (x, y, label);
4183 if (tmp)
4184 {
4185 emit_insn (tmp);
4186 break;
4187 }
4188 /* FALLTHRU */
4189
4190 default:
4191 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4192 break;
4193 }
4194
4195 /* The noreturn predictor has been moved to the tree level. The rtl-level
4196 predictors estimate this branch about 20%, which isn't enough to get
4197 things moved out of line. Since this is the only extant case of adding
4198      a noreturn function at the rtl level, it doesn't seem worth doing
4199      anything except adding the prediction by hand.  */
4200 tmp = get_last_insn ();
4201 if (JUMP_P (tmp))
4202 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4203
4204 expand_expr_stmt (targetm.stack_protect_fail ());
4205 emit_label (label);
4206 }
4207 \f
4208 /* Start the RTL for a new function, and set variables used for
4209 emitting RTL.
4210    SUBR is the FUNCTION_DECL node.  */
4213
4214 void
4215 expand_function_start (tree subr)
4216 {
4217 /* Make sure volatile mem refs aren't considered
4218 valid operands of arithmetic insns. */
4219 init_recog_no_volatile ();
4220
4221 crtl->profile
4222 = (profile_flag
4223 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4224
4225 crtl->limit_stack
4226 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4227
4228 /* Make the label for return statements to jump to. Do not special
4229 case machines with special return instructions -- they will be
4230 handled later during jump, ifcvt, or epilogue creation. */
4231 return_label = gen_label_rtx ();
4232
4233 /* Initialize rtx used to return the value. */
4234 /* Do this before assign_parms so that we copy the struct value address
4235 before any library calls that assign parms might generate. */
4236
4237 /* Decide whether to return the value in memory or in a register. */
4238 if (aggregate_value_p (DECL_RESULT (subr), subr))
4239 {
4240 /* Returning something that won't go in a register. */
4241 rtx value_address = 0;
4242
4243 #ifdef PCC_STATIC_STRUCT_RETURN
4244 if (cfun->returns_pcc_struct)
4245 {
4246 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4247 value_address = assemble_static_space (size);
4248 }
4249 else
4250 #endif
4251 {
4252 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4253 /* Expect to be passed the address of a place to store the value.
4254 If it is passed as an argument, assign_parms will take care of
4255 it. */
4256 if (sv)
4257 {
4258 value_address = gen_reg_rtx (Pmode);
4259 emit_move_insn (value_address, sv);
4260 }
4261 }
4262 if (value_address)
4263 {
4264 rtx x = value_address;
4265 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4266 {
4267 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4268 set_mem_attributes (x, DECL_RESULT (subr), 1);
4269 }
4270 SET_DECL_RTL (DECL_RESULT (subr), x);
4271 }
4272 }
4273 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4274 /* If return mode is void, this decl rtl should not be used. */
4275 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4276 else
4277 {
4278 /* Compute the return values into a pseudo reg, which we will copy
4279 into the true return register after the cleanups are done. */
4280 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4281 if (TYPE_MODE (return_type) != BLKmode
4282 && targetm.calls.return_in_msb (return_type))
4283 /* expand_function_end will insert the appropriate padding in
4284 this case. Use the return value's natural (unpadded) mode
4285 within the function proper. */
4286 SET_DECL_RTL (DECL_RESULT (subr),
4287 gen_reg_rtx (TYPE_MODE (return_type)));
4288 else
4289 {
4290 /* In order to figure out what mode to use for the pseudo, we
4291 figure out what the mode of the eventual return register will
4292 actually be, and use that. */
4293 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4294
4295 /* Structures that are returned in registers are not
4296 aggregate_value_p, so we may see a PARALLEL or a REG. */
4297 if (REG_P (hard_reg))
4298 SET_DECL_RTL (DECL_RESULT (subr),
4299 gen_reg_rtx (GET_MODE (hard_reg)));
4300 else
4301 {
4302 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4303 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4304 }
4305 }
4306
4307 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4308 result to the real return register(s). */
4309 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4310 }
4311
4312 /* Initialize rtx for parameters and local variables.
4313 In some cases this requires emitting insns. */
4314 assign_parms (subr);
4315
4316 /* If function gets a static chain arg, store it. */
4317 if (cfun->static_chain_decl)
4318 {
4319 tree parm = cfun->static_chain_decl;
4320 rtx local = gen_reg_rtx (Pmode);
4321
4322 set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
4323 SET_DECL_RTL (parm, local);
4324 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4325
4326 emit_move_insn (local, static_chain_incoming_rtx);
4327 }
4328
4329 /* If the function receives a non-local goto, then store the
4330 bits we need to restore the frame pointer. */
4331 if (cfun->nonlocal_goto_save_area)
4332 {
4333 tree t_save;
4334 rtx r_save;
4335
4336 /* ??? We need to do this save early. Unfortunately here is
4337 before the frame variable gets declared. Help out... */
4338 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4339 if (!DECL_RTL_SET_P (var))
4340 expand_decl (var);
4341
4342 t_save = build4 (ARRAY_REF, ptr_type_node,
4343 cfun->nonlocal_goto_save_area,
4344 integer_zero_node, NULL_TREE, NULL_TREE);
4345 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4346 r_save = convert_memory_address (Pmode, r_save);
4347
4348 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4349 update_nonlocal_goto_save_area ();
4350 }
4351
4352 /* The following was moved from init_function_start.
4353 The move is supposed to make sdb output more accurate. */
4354 /* Indicate the beginning of the function body,
4355 as opposed to parm setup. */
4356 emit_note (NOTE_INSN_FUNCTION_BEG);
4357
4358 gcc_assert (NOTE_P (get_last_insn ()));
4359
4360 parm_birth_insn = get_last_insn ();
4361
4362 if (crtl->profile)
4363 {
4364 #ifdef PROFILE_HOOK
4365 PROFILE_HOOK (current_function_funcdef_no);
4366 #endif
4367 }
4368
4369 /* After the display initializations is where the stack checking
4370 probe should go. */
4371   if (flag_stack_check)
4372 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4373
4374 /* Make sure there is a line number after the function entry setup code. */
4375 force_next_line_note ();
4376 }
4377 \f
4378 /* Undo the effects of init_dummy_function_start. */
4379 void
4380 expand_dummy_function_end (void)
4381 {
4382 gcc_assert (in_dummy_function);
4383
4384 /* End any sequences that failed to be closed due to syntax errors. */
4385 while (in_sequence_p ())
4386 end_sequence ();
4387
4388 /* Outside function body, can't compute type's actual size
4389 until next function's body starts. */
4390
4391 free_after_parsing (cfun);
4392 free_after_compilation (cfun);
4393 pop_cfun ();
4394 in_dummy_function = false;
4395 }
4396
4397 /* Call DOIT for each hard register used as a return value from
4398 the current function. */
4399
4400 void
4401 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4402 {
4403 rtx outgoing = crtl->return_rtx;
4404
4405 if (! outgoing)
4406 return;
4407
4408 if (REG_P (outgoing))
4409 (*doit) (outgoing, arg);
4410 else if (GET_CODE (outgoing) == PARALLEL)
4411 {
4412 int i;
4413
4414 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4415 {
4416 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4417
4418 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4419 (*doit) (x, arg);
4420 }
4421 }
4422 }
4423
4424 static void
4425 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4426 {
4427 emit_clobber (reg);
4428 }
4429
4430 void
4431 clobber_return_register (void)
4432 {
4433 diddle_return_value (do_clobber_return_reg, NULL);
4434
4435   /* In case we use a pseudo to return the value, clobber it too.  */
4436 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4437 {
4438 tree decl_result = DECL_RESULT (current_function_decl);
4439 rtx decl_rtl = DECL_RTL (decl_result);
4440 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4441 {
4442 do_clobber_return_reg (decl_rtl, NULL);
4443 }
4444 }
4445 }
4446
4447 static void
4448 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4449 {
4450 emit_use (reg);
4451 }
4452
4453 static void
4454 use_return_register (void)
4455 {
4456 diddle_return_value (do_use_return_reg, NULL);
4457 }
4458
4459 /* Possibly warn about unused parameters. */
4460 void
4461 do_warn_unused_parameter (tree fn)
4462 {
4463 tree decl;
4464
4465 for (decl = DECL_ARGUMENTS (fn);
4466 decl; decl = TREE_CHAIN (decl))
4467 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4468 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4469 && !TREE_NO_WARNING (decl))
4470 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4471 }
4472
4473 static GTY(()) rtx initial_trampoline;
4474
4475 /* Generate RTL for the end of the current function. */
4476
4477 void
4478 expand_function_end (void)
4479 {
4480 rtx clobber_after;
4481
4482 /* If arg_pointer_save_area was referenced only from a nested
4483 function, we will not have initialized it yet. Do that now. */
4484 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4485 get_arg_pointer_save_area ();
4486
4487 /* If we are doing generic stack checking and this function makes calls,
4488 do a stack probe at the start of the function to ensure we have enough
4489 space for another stack frame. */
4490 if (flag_stack_check == GENERIC_STACK_CHECK)
4491 {
4492 rtx insn, seq;
4493
4494 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4495 if (CALL_P (insn))
4496 {
4497 start_sequence ();
4498 probe_stack_range (STACK_OLD_CHECK_PROTECT,
4499 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4500 seq = get_insns ();
4501 end_sequence ();
4502 emit_insn_before (seq, stack_check_probe_note);
4503 break;
4504 }
4505 }
4506
4507 /* End any sequences that failed to be closed due to syntax errors. */
4508 while (in_sequence_p ())
4509 end_sequence ();
4510
4511 clear_pending_stack_adjust ();
4512 do_pending_stack_adjust ();
4513
4514 /* Output a linenumber for the end of the function.
4515 SDB depends on this. */
4516 force_next_line_note ();
4517 set_curr_insn_source_location (input_location);
4518
4519 /* Before the return label (if any), clobber the return
4520 registers so that they are not propagated live to the rest of
4521 the function. This can only happen with functions that drop
4522 through; if there had been a return statement, there would
4523 have either been a return rtx, or a jump to the return label.
4524
4525 We delay actual code generation after the current_function_value_rtx
4526 is computed. */
4527 clobber_after = get_last_insn ();
4528
4529 /* Output the label for the actual return from the function. */
4530 emit_label (return_label);
4531
4532 if (USING_SJLJ_EXCEPTIONS)
4533 {
4534 /* Let except.c know where it should emit the call to unregister
4535 the function context for sjlj exceptions. */
4536 if (flag_exceptions)
4537 sjlj_emit_function_exit_after (get_last_insn ());
4538 }
4539 else
4540 {
4541 /* We want to ensure that instructions that may trap are not
4542 moved into the epilogue by scheduling, because we don't
4543 always emit unwind information for the epilogue. */
4544 if (flag_non_call_exceptions)
4545 emit_insn (gen_blockage ());
4546 }
4547
4548 /* If this is an implementation of throw, do what's necessary to
4549 communicate between __builtin_eh_return and the epilogue. */
4550 expand_eh_return ();
4551
4552 /* If scalar return value was computed in a pseudo-reg, or was a named
4553 return value that got dumped to the stack, copy that to the hard
4554 return register. */
4555 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4556 {
4557 tree decl_result = DECL_RESULT (current_function_decl);
4558 rtx decl_rtl = DECL_RTL (decl_result);
4559
4560 if (REG_P (decl_rtl)
4561 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4562 : DECL_REGISTER (decl_result))
4563 {
4564 rtx real_decl_rtl = crtl->return_rtx;
4565
4566 /* This should be set in assign_parms. */
4567 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4568
4569 /* If this is a BLKmode structure being returned in registers,
4570 then use the mode computed in expand_return. Note that if
4571 decl_rtl is memory, then its mode may have been changed,
4572 but that crtl->return_rtx has not. */
4573 if (GET_MODE (real_decl_rtl) == BLKmode)
4574 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4575
4576 /* If a non-BLKmode return value should be padded at the least
4577 significant end of the register, shift it left by the appropriate
4578 amount. BLKmode results are handled using the group load/store
4579 machinery. */
4580 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4581 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4582 {
4583 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4584 REGNO (real_decl_rtl)),
4585 decl_rtl);
4586 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4587 }
4588           /* If a named return value dumped decl_rtl to memory, then
4589 we may need to re-do the PROMOTE_MODE signed/unsigned
4590 extension. */
4591 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4592 {
4593 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4594
4595 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4596 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4597 &unsignedp, 1);
4598
4599 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4600 }
4601 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4602 {
4603 /* If expand_function_start has created a PARALLEL for decl_rtl,
4604 move the result to the real return registers. Otherwise, do
4605 a group load from decl_rtl for a named return. */
4606 if (GET_CODE (decl_rtl) == PARALLEL)
4607 emit_group_move (real_decl_rtl, decl_rtl);
4608 else
4609 emit_group_load (real_decl_rtl, decl_rtl,
4610 TREE_TYPE (decl_result),
4611 int_size_in_bytes (TREE_TYPE (decl_result)));
4612 }
4613 /* In the case of complex integer modes smaller than a word, we'll
4614 need to generate some non-trivial bitfield insertions. Do that
4615 on a pseudo and not the hard register. */
4616 else if (GET_CODE (decl_rtl) == CONCAT
4617 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4618 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4619 {
4620 int old_generating_concat_p;
4621 rtx tmp;
4622
4623 old_generating_concat_p = generating_concat_p;
4624 generating_concat_p = 0;
4625 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4626 generating_concat_p = old_generating_concat_p;
4627
4628 emit_move_insn (tmp, decl_rtl);
4629 emit_move_insn (real_decl_rtl, tmp);
4630 }
4631 else
4632 emit_move_insn (real_decl_rtl, decl_rtl);
4633 }
4634 }
4635
4636 /* If returning a structure, arrange to return the address of the value
4637 in a place where debuggers expect to find it.
4638
4639 If returning a structure PCC style,
4640 the caller also depends on this value.
4641 And cfun->returns_pcc_struct is not necessarily set. */
4642 if (cfun->returns_struct
4643 || cfun->returns_pcc_struct)
4644 {
4645 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4646 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4647 rtx outgoing;
4648
4649 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4650 type = TREE_TYPE (type);
4651 else
4652 value_address = XEXP (value_address, 0);
4653
4654 outgoing = targetm.calls.function_value (build_pointer_type (type),
4655 current_function_decl, true);
4656
4657 /* Mark this as a function return value so integrate will delete the
4658 assignment and USE below when inlining this function. */
4659 REG_FUNCTION_VALUE_P (outgoing) = 1;
4660
4661 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4662 value_address = convert_memory_address (GET_MODE (outgoing),
4663 value_address);
4664
4665 emit_move_insn (outgoing, value_address);
4666
4667 /* Show return register used to hold result (in this case the address
4668      of the result).  */
4669 crtl->return_rtx = outgoing;
4670 }
4671
4672 /* Emit the actual code to clobber return register. */
4673 {
4674 rtx seq;
4675
4676 start_sequence ();
4677 clobber_return_register ();
4678 expand_naked_return ();
4679 seq = get_insns ();
4680 end_sequence ();
4681
4682 emit_insn_after (seq, clobber_after);
4683 }
4684
4685 /* Output the label for the naked return from the function. */
4686 emit_label (naked_return_label);
4687
4688 /* @@@ This is a kludge. We want to ensure that instructions that
4689 may trap are not moved into the epilogue by scheduling, because
4690 we don't always emit unwind information for the epilogue. */
4691 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4692 emit_insn (gen_blockage ());
4693
4694 /* If stack protection is enabled for this function, check the guard. */
4695 if (crtl->stack_protect_guard)
4696 stack_protect_epilogue ();
4697
4698 /* If we had calls to alloca, and this machine needs
4699 an accurate stack pointer to exit the function,
4700 insert some code to save and restore the stack pointer. */
4701 if (! EXIT_IGNORE_STACK
4702 && cfun->calls_alloca)
4703 {
4704 rtx tem = 0;
4705
4706 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4707 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4708 }
4709
4710 /* ??? This should no longer be necessary since stupid is no longer with
4711 us, but there are some parts of the compiler (eg reload_combine, and
4712 sh mach_dep_reorg) that still try and compute their own lifetime info
4713 instead of using the general framework. */
4714 use_return_register ();
4715 }
4716
4717 rtx
4718 get_arg_pointer_save_area (void)
4719 {
4720 rtx ret = arg_pointer_save_area;
4721
4722 if (! ret)
4723 {
4724 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4725 arg_pointer_save_area = ret;
4726 }
4727
4728 if (! crtl->arg_pointer_save_area_init)
4729 {
4730 rtx seq;
4731
4732 /* Save the arg pointer at the beginning of the function. The
4733 generated stack slot may not be a valid memory address, so we
4734 have to check it and fix it if necessary. */
4735 start_sequence ();
4736 emit_move_insn (validize_mem (ret),
4737 crtl->args.internal_arg_pointer);
4738 seq = get_insns ();
4739 end_sequence ();
4740
4741 push_topmost_sequence ();
4742 emit_insn_after (seq, entry_of_function ());
4743 pop_topmost_sequence ();
4744 }
4745
4746 return ret;
4747 }
4748 \f
4749 /* Extend a vector that records the INSN_UIDs of INSNS
4750 (a list of one or more insns). */
4751
4752 static void
4753 record_insns (rtx insns, VEC(int,heap) **vecp)
4754 {
4755 rtx tmp;
4756
4757 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4758 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4759 }
4760
4761 /* Set the locator of the insn chain starting at INSN to LOC. */
4762 static void
4763 set_insn_locators (rtx insn, int loc)
4764 {
4765 while (insn != NULL_RTX)
4766 {
4767 if (INSN_P (insn))
4768 INSN_LOCATOR (insn) = loc;
4769 insn = NEXT_INSN (insn);
4770 }
4771 }
4772
4773 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4774 be running after reorg, SEQUENCE rtl is possible. */
4775
4776 static int
4777 contains (const_rtx insn, VEC(int,heap) **vec)
4778 {
4779 int i, j;
4780
4781 if (NONJUMP_INSN_P (insn)
4782 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4783 {
4784 int count = 0;
4785 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4786 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4787 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4788 == VEC_index (int, *vec, j))
4789 count++;
4790 return count;
4791 }
4792 else
4793 {
4794 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4795 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4796 return 1;
4797 }
4798 return 0;
4799 }
4800
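/* Return nonzero if INSN was recorded as part of the prologue or the
   epilogue.  */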
4801 int
4802 prologue_epilogue_contains (const_rtx insn)
4803 {
4804 if (contains (insn, &prologue))
4805 return 1;
4806 if (contains (insn, &epilogue))
4807 return 1;
4808 return 0;
4809 }
4810
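/* Return nonzero if INSN was recorded as part of a sibcall epilogue.  */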
4811 int
4812 sibcall_epilogue_contains (const_rtx insn)
4813 {
4814 if (sibcall_epilogue)
4815 return contains (insn, &sibcall_epilogue);
4816 return 0;
4817 }
4818
4819 #ifdef HAVE_return
4820 /* Insert gen_return at the end of block BB. This also means updating
4821 block_for_insn appropriately. */
4822
4823 static void
4824 emit_return_into_block (basic_block bb)
4825 {
4826 emit_jump_insn_after (gen_return (), BB_END (bb));
4827 }
4828 #endif /* HAVE_return */
4829
4830 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4831 this into place with notes indicating where the prologue ends and where
4832 the epilogue begins. Update the basic block information when possible. */
4833
4834 static void
4835 thread_prologue_and_epilogue_insns (void)
4836 {
4837 int inserted = 0;
4838 edge e;
4839 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4840 rtx seq;
4841 #endif
4842 #if defined (HAVE_epilogue) || defined (HAVE_return)
4843 rtx epilogue_end = NULL_RTX;
4844 #endif
4845 edge_iterator ei;
4846
4847 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4848 #ifdef HAVE_prologue
4849 if (HAVE_prologue)
4850 {
4851 start_sequence ();
4852 seq = gen_prologue ();
4853 emit_insn (seq);
4854
4855 /* Insert an explicit USE for the frame pointer
4856 if the profiling is on and the frame pointer is required. */
4857 if (crtl->profile && frame_pointer_needed)
4858 emit_use (hard_frame_pointer_rtx);
4859
4860 /* Retain a map of the prologue insns. */
4861 record_insns (seq, &prologue);
4862 emit_note (NOTE_INSN_PROLOGUE_END);
4863
4864 #ifndef PROFILE_BEFORE_PROLOGUE
4865 /* Ensure that instructions are not moved into the prologue when
4866 profiling is on. The call to the profiling routine can be
4867 emitted within the live range of a call-clobbered register. */
4868 if (crtl->profile)
4869 emit_insn (gen_blockage ());
4870 #endif
4871
4872 seq = get_insns ();
4873 end_sequence ();
4874 set_insn_locators (seq, prologue_locator);
4875
4876 /* Can't deal with multiple successors of the entry block
4877 at the moment. Function should always have at least one
4878 entry point. */
4879 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
4880
4881 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
4882 inserted = 1;
4883 }
4884 #endif
4885
4886 /* If the exit block has no non-fake predecessors, we don't need
4887 an epilogue. */
4888 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4889 if ((e->flags & EDGE_FAKE) == 0)
4890 break;
4891 if (e == NULL)
4892 goto epilogue_done;
4893
4894 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4895 #ifdef HAVE_return
4896 if (optimize && HAVE_return)
4897 {
4898 /* If we're allowed to generate a simple return instruction,
4899 then by definition we don't need a full epilogue. Examine
4900 the block that falls through to EXIT. If it does not
4901 contain any code, examine its predecessors and try to
4902 emit (conditional) return instructions. */
4903
4904 basic_block last;
4905 rtx label;
4906
4907 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4908 if (e->flags & EDGE_FALLTHRU)
4909 break;
4910 if (e == NULL)
4911 goto epilogue_done;
4912 last = e->src;
4913
4914 /* Verify that there are no active instructions in the last block. */
4915 label = BB_END (last);
4916 while (label && !LABEL_P (label))
4917 {
4918 if (active_insn_p (label))
4919 break;
4920 label = PREV_INSN (label);
4921 }
4922
4923 if (BB_HEAD (last) == label && LABEL_P (label))
4924 {
4925 edge_iterator ei2;
4926
4927 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
4928 {
4929 basic_block bb = e->src;
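/* An illustrative usage pattern (not taken from the surrounding code):
   work that must be done in the context of some other function
   OTHER_FNDECL can temporarily switch to it with

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... emit insns, allocate stack slots, etc. ...
     pop_cfun ();

   which saves and restores cfun and notifies the target hook of each
   change of current function.  */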
4930 rtx jump;
4931
4932 if (bb == ENTRY_BLOCK_PTR)
4933 {
4934 ei_next (&ei2);
4935 continue;
4936 }
4937
4938 jump = BB_END (bb);
4939 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
4940 {
4941 ei_next (&ei2);
4942 continue;
4943 }
4944
4945 /* If we have an unconditional jump, we can replace that
4946 with a simple return instruction. */
4947 if (simplejump_p (jump))
4948 {
4949 emit_return_into_block (bb);
4950 delete_insn (jump);
4951 }
4952
4953 /* If we have a conditional jump, we can try to replace
4954 that with a conditional return instruction. */
4955 else if (condjump_p (jump))
4956 {
4957 if (! redirect_jump (jump, 0, 0))
4958 {
4959 ei_next (&ei2);
4960 continue;
4961 }
4962
4963 /* If this block has only one successor, it both jumps
4964 and falls through to the fallthru block, so we can't
4965 delete the edge. */
4966 if (single_succ_p (bb))
4967 {
4968 ei_next (&ei2);
4969 continue;
4970 }
4971 }
4972 else
4973 {
4974 ei_next (&ei2);
4975 continue;
4976 }
4977
4978 /* Fix up the CFG for the successful change we just made. */
4979 redirect_edge_succ (e, EXIT_BLOCK_PTR);
4980 }
4981
4982 /* Emit a return insn for the exit fallthru block. Whether
4983 this is still reachable will be determined later. */
4984
4985 emit_barrier_after (BB_END (last));
4986 emit_return_into_block (last);
4987 epilogue_end = BB_END (last);
4988 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
4989 goto epilogue_done;
4990 }
4991 }
4992 #endif
4993 /* Find the edge that falls through to EXIT. Other edges may exist
4994 due to RETURN instructions, but those don't need epilogues.
4995 There really shouldn't be a mixture -- either all should have
4996 been converted or none, however... */
4997
4998 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4999 if (e->flags & EDGE_FALLTHRU)
5000 break;
5001 if (e == NULL)
5002 goto epilogue_done;
5003
5004 #ifdef HAVE_epilogue
5005 if (HAVE_epilogue)
5006 {
5007 start_sequence ();
5008 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5009 seq = gen_epilogue ();
5010 emit_jump_insn (seq);
5011
5012 /* Retain a map of the epilogue insns. */
5013 record_insns (seq, &epilogue);
5014 set_insn_locators (seq, epilogue_locator);
5015
5016 seq = get_insns ();
5017 end_sequence ();
5018
5019 insert_insn_on_edge (seq, e);
5020 inserted = 1;
5021 }
5022 else
5023 #endif
5024 {
5025 basic_block cur_bb;
5026
5027 if (! next_active_insn (BB_END (e->src)))
5028 goto epilogue_done;
5029 /* We have a fall-through edge to the exit block, the source is not
5030 at the end of the function, and there will be an assembler epilogue
5031 at the end of the function.
5032 We can't use force_nonfallthru here, because that would try to
5033 use return. Inserting a jump 'by hand' is extremely messy, so
5034 we take advantage of cfg_layout_finalize using
5035 fixup_fallthru_exit_predecessor. */
5036 cfg_layout_initialize (0);
5037 FOR_EACH_BB (cur_bb)
5038 if (cur_bb->index >= NUM_FIXED_BLOCKS
5039 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5040 cur_bb->aux = cur_bb->next_bb;
5041 cfg_layout_finalize ();
5042 }
5043 epilogue_done:
5044 default_rtl_profile ();
5045
5046 if (inserted)
5047 {
5048 commit_edge_insertions ();
5049
5050 /* The epilogue insns we inserted may cause the exit edge to no longer
5051 be fallthru. */
5052 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5053 {
5054 if (((e->flags & EDGE_FALLTHRU) != 0)
5055 && returnjump_p (BB_END (e->src)))
5056 e->flags &= ~EDGE_FALLTHRU;
5057 }
5058 }
5059
5060 #ifdef HAVE_sibcall_epilogue
5061 /* Emit sibling epilogues before any sibling call sites. */
5062 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5063 {
5064 basic_block bb = e->src;
5065 rtx insn = BB_END (bb);
5066
5067 if (!CALL_P (insn)
5068 || ! SIBLING_CALL_P (insn))
5069 {
5070 ei_next (&ei);
5071 continue;
5072 }
5073
5074 start_sequence ();
5075 emit_insn (gen_sibcall_epilogue ());
5076 seq = get_insns ();
5077 end_sequence ();
5078
5079 /* Retain a map of the epilogue insns. Used in life analysis to
5080 avoid getting rid of sibcall epilogue insns. Do this before we
5081 actually emit the sequence. */
5082 record_insns (seq, &sibcall_epilogue);
5083 set_insn_locators (seq, epilogue_locator);
5084
5085 emit_insn_before (seq, insn);
5086 ei_next (&ei);
5087 }
5088 #endif
5089
5090 #ifdef HAVE_epilogue
5091 if (epilogue_end)
5092 {
5093 rtx insn, next;
5094
5095       /* Move any NOTE_INSN_FUNCTION_BEG notes that appear after the
5096          epilogue back before it, as those can be relevant for debug
5097          info generation.  There is no need, however, to insist that
5098          such a note exists; if none is found there is nothing to
5099          move.  */
5100 for (insn = epilogue_end; insn; insn = next)
5101 {
5102 next = NEXT_INSN (insn);
5103 if (NOTE_P (insn)
5104 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5105 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5106 }
5107 }
5108 #endif
5109
5110 /* Threading the prologue and epilogue changes the artificial refs
5111 in the entry and exit blocks. */
5112 epilogue_completed = 1;
5113 df_update_entry_exit_and_calls ();
5114 }
5115
5116 /* Reposition the prologue-end and epilogue-begin notes after instruction
5117 scheduling and delayed branch scheduling. */
5118
5119 void
5120 reposition_prologue_and_epilogue_notes (void)
5121 {
5122 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5123 rtx insn, last, note;
5124 int len;
5125
5126 if ((len = VEC_length (int, prologue)) > 0)
5127 {
5128 last = 0, note = 0;
5129
5130 /* Scan from the beginning until we reach the last prologue insn.
5131 We apparently can't depend on basic_block_{head,end} after
5132 reorg has run. */
5133 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5134 {
5135 if (NOTE_P (insn))
5136 {
5137 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5138 note = insn;
5139 }
5140 else if (contains (insn, &prologue))
5141 {
5142 last = insn;
5143 if (--len == 0)
5144 break;
5145 }
5146 }
5147
5148 if (last)
5149 {
5150 /* Find the prologue-end note if we haven't already, and
5151 move it to just after the last prologue insn. */
5152 if (note == 0)
5153 {
5154 for (note = last; (note = NEXT_INSN (note));)
5155 if (NOTE_P (note)
5156 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5157 break;
5158 }
5159
5160 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5161 if (LABEL_P (last))
5162 last = NEXT_INSN (last);
5163 reorder_insns (note, note, last);
5164 }
5165 }
5166
5167 if ((len = VEC_length (int, epilogue)) > 0)
5168 {
5169 last = 0, note = 0;
5170
5171 /* Scan from the end until we reach the first epilogue insn.
5172 We apparently can't depend on basic_block_{head,end} after
5173 reorg has run. */
5174 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5175 {
5176 if (NOTE_P (insn))
5177 {
5178 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5179 note = insn;
5180 }
5181 else if (contains (insn, &epilogue))
5182 {
5183 last = insn;
5184 if (--len == 0)
5185 break;
5186 }
5187 }
5188
5189 if (last)
5190 {
5191 /* Find the epilogue-begin note if we haven't already, and
5192 move it to just before the first epilogue insn. */
5193 if (note == 0)
5194 {
5195 for (note = insn; (note = PREV_INSN (note));)
5196 if (NOTE_P (note)
5197 && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
5198 break;
5199 }
5200
5201 if (PREV_INSN (last) != note)
5202 reorder_insns (note, note, PREV_INSN (last));
5203 }
5204 }
5205 #endif /* HAVE_prologue or HAVE_epilogue */
5206 }
5207
5208 /* Returns the name of the current function. */
5209 const char *
5210 current_function_name (void)
5211 {
5212 return lang_hooks.decl_printable_name (cfun->decl, 2);
5213 }
5214
5215 /* Returns the raw (mangled) name of the current function. */
5216 const char *
5217 current_function_assembler_name (void)
5218 {
5219 return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
5220 }
5221 \f
5222
5223 static unsigned int
5224 rest_of_handle_check_leaf_regs (void)
5225 {
5226 #ifdef LEAF_REGISTERS
5227 current_function_uses_only_leaf_regs
5228 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5229 #endif
5230 return 0;
5231 }
5232
5233 /* Insert a TYPE into the used types hash table of CFUN. */
5234 static void
5235 used_types_insert_helper (tree type, struct function *func)
5236 {
5237 if (type != NULL && func != NULL)
5238 {
5239 void **slot;
5240
5241 if (func->used_types_hash == NULL)
5242 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5243 htab_eq_pointer, NULL);
5244 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5245 if (*slot == NULL)
5246 *slot = type;
5247 }
5248 }
5249
5250 /* Given a type, insert it into the used types hash table of cfun.  */
5251 void
5252 used_types_insert (tree t)
5253 {
5254 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5255 t = TREE_TYPE (t);
5256 t = TYPE_MAIN_VARIANT (t);
5257 if (debug_info_level > DINFO_LEVEL_NONE)
5258 used_types_insert_helper (t, cfun);
5259 }
5260
5261 struct rtl_opt_pass pass_leaf_regs =
5262 {
5263 {
5264 RTL_PASS,
5265 NULL, /* name */
5266 NULL, /* gate */
5267 rest_of_handle_check_leaf_regs, /* execute */
5268 NULL, /* sub */
5269 NULL, /* next */
5270 0, /* static_pass_number */
5271 0, /* tv_id */
5272 0, /* properties_required */
5273 0, /* properties_provided */
5274 0, /* properties_destroyed */
5275 0, /* todo_flags_start */
5276 0 /* todo_flags_finish */
5277 }
5278 };
5279
5280 static unsigned int
5281 rest_of_handle_thread_prologue_and_epilogue (void)
5282 {
5283 if (optimize)
5284 cleanup_cfg (CLEANUP_EXPENSIVE);
5285 /* On some machines, the prologue and epilogue code, or parts thereof,
5286 can be represented as RTL. Doing so lets us schedule insns between
5287 it and the rest of the code and also allows delayed branch
5288 scheduling to operate in the epilogue. */
5289
5290 thread_prologue_and_epilogue_insns ();
5291 return 0;
5292 }
5293
5294 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5295 {
5296 {
5297 RTL_PASS,
5298 "pro_and_epilogue", /* name */
5299 NULL, /* gate */
5300 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5301 NULL, /* sub */
5302 NULL, /* next */
5303 0, /* static_pass_number */
5304 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5305 0, /* properties_required */
5306 0, /* properties_provided */
5307 0, /* properties_destroyed */
5308 TODO_verify_flow, /* todo_flags_start */
5309 TODO_dump_func |
5310 TODO_df_verify |
5311 TODO_df_finish | TODO_verify_rtl_sharing |
5312 TODO_ggc_collect /* todo_flags_finish */
5313 }
5314 };
5315 \f
5316
5317 /* This mini-pass fixes fall-out from SSA in asm statements that have
5318 in-out constraints. Say you start with
5319
5320 orig = inout;
5321 asm ("": "+mr" (inout));
5322 use (orig);
5323
5324 which is transformed very early to use explicit output and match operands:
5325
5326 orig = inout;
5327 asm ("": "=mr" (inout) : "0" (inout));
5328 use (orig);
5329
5330 Or, after SSA and copyprop,
5331
5332 asm ("": "=mr" (inout_2) : "0" (inout_1));
5333 use (inout_1);
5334
5335 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5336 they represent two separate values, so they will get different pseudo
5337 registers during expansion. Then, since the two operands need to match
5338 per the constraints, but use different pseudo registers, reload can
5339 only register a reload for these operands. But reloads can only be
5340 satisfied by hardregs, not by memory, so we need a register for this
5341 reload, just because we are presented with non-matching operands.
5342 So, even though we allow memory for this operand, no memory can be
5343 used for it, just because the two operands don't match. This can
5344 cause reload failures on register-starved targets.
5345
5346 So it's a symptom of reload not being able to use memory for reloads,
5347 or, alternatively, a symptom of the two operands not coming into
5348 reload as matching (in which case the pseudo could go to memory just
5349 fine, as the alternative allows it, and no reload would be necessary).
5350 We fix the latter problem here, by transforming
5351
5352 asm ("": "=mr" (inout_2) : "0" (inout_1));
5353
5354 back to
5355
5356 inout_2 = inout_1;
5357 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
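
 A concrete source-level illustration (hypothetical user code, not
 part of GCC) of the situation described above:

     int f (int x)
     {
       int orig = x;
       __asm__ ("" : "+mr" (x));
       return orig + x;
     }

 The "+mr" operand is split early into an "=mr" output plus a "0"
 matching input, and after SSA and copy propagation the two matching
 operands no longer share a pseudo; that is the case the code below
 repairs.  */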
5358
5359 static void
5360 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5361 {
5362 int i;
5363 bool changed = false;
5364 rtx op = SET_SRC (p_sets[0]);
5365 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5366 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
5367 bool *output_matched = XALLOCAVEC (bool, noutputs);
5368
5369 memset (output_matched, 0, noutputs * sizeof (bool));
5370 for (i = 0; i < ninputs; i++)
5371 {
5372 rtx input, output, insns;
5373 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5374 char *end;
5375 int match, j;
5376
5377 match = strtoul (constraint, &end, 10);
5378 if (end == constraint)
5379 continue;
5380
5381 gcc_assert (match < noutputs);
5382 output = SET_DEST (p_sets[match]);
5383 input = RTVEC_ELT (inputs, i);
5384 /* Only do the transformation for pseudos. */
5385 if (! REG_P (output)
5386 || rtx_equal_p (output, input)
5387 || (GET_MODE (input) != VOIDmode
5388 && GET_MODE (input) != GET_MODE (output)))
5389 continue;
5390
5391 /* We can't do anything if the output is also used as input,
5392 as we're going to overwrite it. */
5393 for (j = 0; j < ninputs; j++)
5394 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5395 break;
5396 if (j != ninputs)
5397 continue;
5398
5399 /* Avoid changing the same input several times. For
5400 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5401 only change `in' once (to out1), rather than changing it
5402 first to out1 and afterwards to out2. */
5403 if (i > 0)
5404 {
5405 for (j = 0; j < noutputs; j++)
5406 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5407 break;
5408 if (j != noutputs)
5409 continue;
5410 }
5411 output_matched[match] = true;
5412
5413 start_sequence ();
5414 emit_move_insn (output, input);
5415 insns = get_insns ();
5416 end_sequence ();
5417 emit_insn_before (insns, insn);
5418
5419 /* Now replace all mentions of the input with output. We can't
5420 just replace the occurrence in inputs[i], as the register might
5421 also be used in some other input (or even in an address of an
5422 output), which would mean possibly increasing the number of
5423 inputs by one (namely 'output' in addition), which might pose
5424 too complicated a problem for reload to solve. E.g. this situation:
5425
5426 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5427
5428 Here 'input' occurs twice as an input (once as the explicit input
5429 operand, once in the address of the second output operand).
5430 If we replaced only the occurrence of the input operand (to
5431 make the operands match) we would be left with this:
5432
5433 output = input
5434 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5435
5436 Now we suddenly have two different input values (containing the same
5437 value, but different pseudos) where we formerly had only one.
5438 With more complicated asms this might lead to reload failures
5439 which wouldn't have happened without this pass. So, iterate over
5440 all operands and replace all occurrences of the register used. */
5441 for (j = 0; j < noutputs; j++)
5442 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
5443 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
5444 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
5445 input, output);
5446 for (j = 0; j < ninputs; j++)
5447 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
5448 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
5449 input, output);
5450
5451 changed = true;
5452 }
5453
5454 if (changed)
5455 df_insn_rescan (insn);
5456 }
5457
5458 static unsigned
5459 rest_of_match_asm_constraints (void)
5460 {
5461 basic_block bb;
5462 rtx insn, pat, *p_sets;
5463 int noutputs;
5464
5465 if (!crtl->has_asm_statement)
5466 return 0;
5467
5468 df_set_flags (DF_DEFER_INSN_RESCAN);
5469 FOR_EACH_BB (bb)
5470 {
5471 FOR_BB_INSNS (bb, insn)
5472 {
5473 if (!INSN_P (insn))
5474 continue;
5475
5476 pat = PATTERN (insn);
5477 if (GET_CODE (pat) == PARALLEL)
5478 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
5479 else if (GET_CODE (pat) == SET)
5480 p_sets = &PATTERN (insn), noutputs = 1;
5481 else
5482 continue;
5483
5484 if (GET_CODE (*p_sets) == SET
5485 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
5486 match_asm_constraints_1 (insn, p_sets, noutputs);
5487 }
5488 }
5489
5490 return TODO_df_finish;
5491 }
5492
5493 struct rtl_opt_pass pass_match_asm_constraints =
5494 {
5495 {
5496 RTL_PASS,
5497 "asmcons", /* name */
5498 NULL, /* gate */
5499 rest_of_match_asm_constraints, /* execute */
5500 NULL, /* sub */
5501 NULL, /* next */
5502 0, /* static_pass_number */
5503 0, /* tv_id */
5504 0, /* properties_required */
5505 0, /* properties_provided */
5506 0, /* properties_destroyed */
5507 0, /* todo_flags_start */
5508 TODO_dump_func /* todo_flags_finish */
5509 }
5510 };
5511
5512
5513 #include "gt-function.h"