/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
#include "tree-gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
#include "timevar.h"
#include "vecprim.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases, use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value down to the nearest multiple of the required alignment.
   Avoid using division in case the value is negative.  Assume the
   alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
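
/* A worked illustration, not part of the original source: with
   ALIGN == 8, FLOOR_ROUND (13, 8) == 8 and CEIL_ROUND (13, 8) == 16,
   while FLOOR_ROUND (-5, 8) == -8 and CEIL_ROUND (-5, 8) == 0.  The
   masking stays correct for negative VALUEs on two's complement
   targets, where division would instead round toward zero.  */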

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
\f
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (const_rtx, VEC(int,heap) **);
#ifdef HAVE_return
static void emit_return_into_block (basic_block);
#endif
#if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
static rtx keep_stack_depressed (rtx);
#endif
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  The caller should use
   the enter_nested langhook to save any language-specific state,
   since this function knows only about language-independent
   variables.  */

void
push_function_context_to (tree context ATTRIBUTE_UNUSED)
{
  struct function *p;

  if (cfun == 0)
    allocate_struct_function (NULL, false);
  p = cfun;

  p->outer = outer_function_chain;
  outer_function_chain = p;

  lang_hooks.function.enter_nested (p);

  set_cfun (NULL);
}

void
push_function_context (void)
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (tree context ATTRIBUTE_UNUSED)
{
  struct function *p = outer_function_chain;

  set_cfun (p);
  outer_function_chain = p->outer;

  current_function_decl = p->decl;

  lang_hooks.function.leave_nested (p);

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

void
pop_function_context (void)
{
  pop_function_context_from (current_function_decl);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  /* f->expr->forced_labels is used by code generation.  */
  /* f->emit->regno_reg_rtx is used by code generation.  */
  /* f->varasm is used by code generation.  */
  /* f->eh->eh_return_stub_label is used by code generation.  */

  lang_hooks.function.final (f);
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);
  if (rtl.emit.regno_pointer_align)
    free (rtl.emit.regno_pointer_align);

  memset (&rtl, 0, sizeof (rtl));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  f->arg_offset_rtx = NULL;
  f->return_rtx = NULL;
  f->internal_arg_pointer = NULL;
  f->epilogue_delay_list = NULL;
}
\f
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	      /* Leave room for the fixed part of the frame.  */
	      - 64 * UNITS_PER_WORD)
    {
      error ("%Jtotal size of local objects too large", func);
      return TRUE;
    }

  return FALSE;
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      tree type;

      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT;
      else
	alignment = GET_MODE_ALIGNMENT (mode);

      /* Allow the target to (possibly) increase the alignment of this
	 stack slot.  */
      type = lang_hooks.types.type_for_mode (mode, 0);
      if (type)
	alignment = LOCAL_ALIGNMENT (type, alignment);

      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    frame_offset -= size;

  /* Ignore alignment requests we cannot satisfy given the expected
     alignment of the stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  if (cfun->stack_alignment_needed < alignment * BITS_PER_UNIT)
    cfun->stack_alignment_needed = alignment * BITS_PER_UNIT;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
	 division with a negative dividend isn't as well defined as we might
	 like.  So we instead assume that ALIGNMENT is a power of two and
	 use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
	frame_offset
	  = (FLOOR_ROUND (frame_offset - frame_phase,
			  (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
      else
	frame_offset
	  = (CEIL_ROUND (frame_offset - frame_phase,
			 (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction,
			   Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
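
/* A minimal usage sketch, illustrative and not from the original
   source: a pass that needs a word-sized scratch slot with
   mode-derived alignment could write

     rtx slot = assign_stack_local (word_mode, UNITS_PER_WORD, 0);

   The returned MEM is addressed off virtual_stack_vars_rtx until
   instantiate_virtual_regs rewrites it relative to the frame
   pointer.  */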
\f
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->address = 0;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
				  || TREE_CODE (type) == COMPLEX_TYPE));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
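
/* A minimal usage sketch, illustrative and not from the original
   source: a caller needing a reusable double-word scratch slot during
   expansion might write

     rtx mem = assign_stack_temp_for_type (DImode, GET_MODE_SIZE (DImode),
                                           0, NULL_TREE);

   and rely on free_temp_slots at the end of the statement to return
   the slot to avail_temp_slots for reuse.  */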

/* Allocate a temporary stack slot and record it for possible later
   reuse.  First three arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
\f
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so it should be used in error messages.  In either case, we
   allocate space of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
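
/* A minimal usage sketch, illustrative and not from the original
   source: given a tree TYPE for an aggregate, a caller that must have
   an addressable temporary would write

     rtx target = assign_temp (type, 0, 1, 1);

   whereas for a scalar type with MEMORY_REQUIRED == 0 the same call
   hands back a fresh pseudo register, possibly promoted to a wider
   mode.  */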
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
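
/* A worked illustration, not part of the original source: if two free
   BLKmode slots P and Q have

     P->base_offset == 16, P->full_size == 16
     Q->base_offset == 32, Q->full_size == 8

   then P->base_offset + P->full_size == Q->base_offset, so Q is merged
   into P, leaving one slot with base_offset 16 and full_size 24 that a
   later, larger temporary can reuse.  */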
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
	if (XEXP (p->slot, 0) == x
	    || p->address == x
	    || (GET_CODE (x) == PLUS
		&& XEXP (x, 0) == virtual_stack_vars_rtx
		&& GET_CODE (XEXP (x, 1)) == CONST_INT
		&& INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	  return p;

	else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	  for (next = p->address; next; next = XEXP (next, 1))
	    if (XEXP (next, 0) == x)
	      return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are a
     PLUS and there is a register in common between them, try a
     recursive call on the other operands.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (REG_P (new))
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
1020 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1021 {
1022 next = p->next;
1023
1024 if (p->addr_taken)
1025 move_slot_to_level (p, temp_slot_level - 1);
1026 }
1027
1028 return;
1029 }
1030
1031 /* If X is a register that is being used as a pointer, see if we have
1032 a temporary slot we know it points to. To be consistent with
1033 the code below, we really should preserve all non-kept slots
1034 if we can't find a match, but that seems to be much too costly. */
1035 if (REG_P (x) && REG_POINTER (x))
1036 p = find_temp_slot_from_address (x);
1037
1038 /* If X is not in memory or is at a constant address, it cannot be in
1039 a temporary slot, but it can contain something whose address was
1040 taken. */
1041 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1042 {
1043 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1044 {
1045 next = p->next;
1046
1047 if (p->addr_taken)
1048 move_slot_to_level (p, temp_slot_level - 1);
1049 }
1050
1051 return;
1052 }
1053
1054 /* First see if we can find a match. */
1055 if (p == 0)
1056 p = find_temp_slot_from_address (XEXP (x, 0));
1057
1058 if (p != 0)
1059 {
1060 /* Move everything at our level whose address was taken to our new
1061 level in case we used its address. */
1062 struct temp_slot *q;
1063
1064 if (p->level == temp_slot_level)
1065 {
1066 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1067 {
1068 next = q->next;
1069
1070 if (p != q && q->addr_taken)
1071 move_slot_to_level (q, temp_slot_level - 1);
1072 }
1073
1074 move_slot_to_level (p, temp_slot_level - 1);
1075 p->addr_taken = 0;
1076 }
1077 return;
1078 }
1079
1080 /* Otherwise, preserve all non-kept slots at this level. */
1081 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1082 {
1083 next = p->next;
1084
1085 if (!p->keep)
1086 move_slot_to_level (p, temp_slot_level - 1);
1087 }
1088 }

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}
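
/* A minimal usage sketch of the nesting discipline, illustrative and
   not from the original source:

     push_temp_slots ();
     ... expand a subexpression that may create temporaries ...
     preserve_temp_slots (result);
     pop_temp_slots ();

   pop_temp_slots releases everything allocated at the inner level,
   while preserve_temp_slots keeps the slot holding RESULT alive by
   pretending it belongs to the outer level.  */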

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET	0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (current_function_outgoing_args_size				      \
     + (OUTGOING_REG_PARM_STACK_SPACE ? 0 : REG_PARM_STACK_SPACE (FNDECL)))   \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
 + (STACK_POINTER_OFFSET))
#endif
#endif

\f
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new = frame_pointer_rtx;
#else
      new = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new;
}
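
/* A worked illustration, not part of the original source: on a target
   where STARTING_FRAME_OFFSET is -16 and the frame grows downward,
   instantiate_new_reg maps virtual_stack_vars_rtx to frame_pointer_rtx
   with *poffset set to var_offset == -16, so a reference such as
   (plus (reg virtual-stack-vars) (const_int 8)) is later folded to
   (plus (reg frame-pointer) (const_int -8)).  */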

/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new = instantiate_new_reg (x, &offset);
      if (new)
	{
	  *loc = plus_constant (new, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new)
	{
	  new = plus_constant (new, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new = instantiate_new_reg (SET_DEST (set), &offset);
      if (new)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
				   GEN_INT (-offset));
	  x = force_operand (x, new);
	  if (x != new)
	    emit_move_insn (new, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new = instantiate_new_reg (SET_SRC (set), &offset);
      if (new && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
				   new, GEN_INT (offset), SET_DEST (set),
				   1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && GET_CODE (recog_data.operand[2]) == CONST_INT
	  && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't valid anymore, for instance when the
	       address is still offsetted, but the base reg isn't
	       virtual-stack-reg anymore.  Below we would do a force_reg
	       on the whole operand, but this insn might actually only
	       accept memory.  Hence, before doing that last resort, try
	       to reload the address into a register, so this operand
	       stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new = instantiate_new_reg (x, &offset);
	  if (new == NULL)
	    continue;
	  if (offset == 0)
	    x = new;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new,
				       GEN_INT (offset), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new = expand_simple_binop (GET_MODE (new), PLUS, new,
					 GEN_INT (offset), NULL_RTX,
					 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new,
				   GET_MODE (new), SUBREG_BYTE (x));
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}
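
/* A worked illustration, not part of the original source: an insn

     (set (reg:SI 100) (plus:SI (reg:SI virtual-stack-vars) (const_int 4)))

   is rewritten in place when var_offset is, say, -16: operand 1 becomes
   the frame pointer and operand 2 the folded constant -12, provided both
   still satisfy the insn's operand predicates; otherwise the offset is
   materialized with expand_simple_binop in a sequence emitted before
   the insn.  */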

/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}

/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t) && DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
    }
  return NULL;
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));
}

/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
	if (GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ADDR_VEC
	    || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
	  continue;

	instantiate_virtual_regs_in_insn (insn);

	if (INSN_DELETED_P (insn))
	  continue;

	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (GET_CODE (insn) == CALL_INSN)
	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
			instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
  return 0;
}

struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",                              /* name */
  NULL,                                 /* gate */
  instantiate_virtual_regs,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

\f
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  /* DECL node associated with FNTYPE when relevant, which we might need to
     check for by-invisible-reference returns, typically for CALL_EXPR input
     EXPressions.  */
  const_tree fndecl = NULL_TREE;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	fndecl = get_callee_fndecl (fntype);
	fntype = fndecl ? TREE_TYPE (fndecl) : 0;
	break;
      case FUNCTION_DECL:
	fndecl = fntype;
	fntype = TREE_TYPE (fndecl);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = 0;
	break;
      default:
	/* We don't expect other tree codes here.  */
	gcc_unreachable ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
     called function RESULT_DECL, meaning the function returns in memory by
     invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
     on the function type, which used to be the way to request such a return
     mechanism but might now be causing troubles at gimplification time if
     temporaries with the function type need to be created.  */
  if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
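
/* A minimal usage sketch, illustrative and not from the original
   source: a caller deciding how to expand "return retval;" might ask

     if (aggregate_value_p (TREE_TYPE (retval), current_function_decl))
       ... return through a hidden pointer in memory ...
     else
       ... return in registers ...

   The answer folds together front-end flags, the target's
   return_in_memory hook, and the availability of call-clobbered
   return registers.  */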
1806 \f
1807 /* Return true if we should assign DECL a pseudo register; false if it
1808 should live on the local stack. */
1809
1810 bool
1811 use_register_for_decl (const_tree decl)
1812 {
1813 /* Honor volatile. */
1814 if (TREE_SIDE_EFFECTS (decl))
1815 return false;
1816
1817 /* Honor addressability. */
1818 if (TREE_ADDRESSABLE (decl))
1819 return false;
1820
1821 /* Only register-like things go in registers. */
1822 if (DECL_MODE (decl) == BLKmode)
1823 return false;
1824
1825 /* If -ffloat-store specified, don't put explicit float variables
1826 into registers. */
1827 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1828 propagates values across these stores, and it probably shouldn't. */
1829 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1830 return false;
1831
1832 /* If we're not interested in tracking debugging information for
1833 this decl, then we can certainly put it in a register. */
1834 if (DECL_IGNORED_P (decl))
1835 return true;
1836
1837 return (optimize || DECL_REGISTER (decl));
1838 }
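
/* Editorial illustration (not GCC source): in

     int h (int x)
     {
       int a = x;           -- address taken below: TREE_ADDRESSABLE --
       int *p = &a;         -- plain scalar pointer --
       volatile int v = x;  -- TREE_SIDE_EFFECTS --
       ...
     }

   "a" and "v" fail the tests above and stay on the stack, while an
   unaddressed, non-volatile scalar such as "p" is eligible for a
   pseudo register.  */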
1839
1840 /* Return true if TYPE should be passed by invisible reference. */
1841
1842 bool
1843 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1844 tree type, bool named_arg)
1845 {
1846 if (type)
1847 {
1848 /* If this type contains non-trivial constructors, then it is
1849 forbidden for the middle-end to create any new copies. */
1850 if (TREE_ADDRESSABLE (type))
1851 return true;
1852
1853 /* GCC post 3.4 passes *all* variable-sized types by reference.  */
1854 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1855 return true;
1856 }
1857
1858 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1859 }
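
/* Example (editorial; the C++ front end): a type with a non-trivial
   copy constructor is marked TREE_ADDRESSABLE, so for

     struct S { S (const S &); int i; };
     void f (S s);

   "s" is passed by invisible reference, and the middle-end may not
   introduce copies of it on its own.  Variable-sized types, whose
   TYPE_SIZE is not an INTEGER_CST, take the same path.  */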
1860
1861 /* Return true if TYPE, which is passed by reference, should be callee
1862 copied instead of caller copied. */
1863
1864 bool
1865 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1866 tree type, bool named_arg)
1867 {
1868 if (type && TREE_ADDRESSABLE (type))
1869 return false;
1870 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1871 }
1872
1873 /* Structures to communicate between the subroutines of assign_parms.
1874 The first holds data persistent across all parameters, the second
1875 is cleared out for each parameter. */
1876
1877 struct assign_parm_data_all
1878 {
1879 CUMULATIVE_ARGS args_so_far;
1880 struct args_size stack_args_size;
1881 tree function_result_decl;
1882 tree orig_fnargs;
1883 rtx first_conversion_insn;
1884 rtx last_conversion_insn;
1885 HOST_WIDE_INT pretend_args_size;
1886 HOST_WIDE_INT extra_pretend_bytes;
1887 int reg_parm_stack_space;
1888 };
1889
1890 struct assign_parm_data_one
1891 {
1892 tree nominal_type;
1893 tree passed_type;
1894 rtx entry_parm;
1895 rtx stack_parm;
1896 enum machine_mode nominal_mode;
1897 enum machine_mode passed_mode;
1898 enum machine_mode promoted_mode;
1899 struct locate_and_pad_arg_data locate;
1900 int partial;
1901 BOOL_BITFIELD named_arg : 1;
1902 BOOL_BITFIELD passed_pointer : 1;
1903 BOOL_BITFIELD on_stack : 1;
1904 BOOL_BITFIELD loaded_in_reg : 1;
1905 };
1906
1907 /* A subroutine of assign_parms. Initialize ALL. */
1908
1909 static void
1910 assign_parms_initialize_all (struct assign_parm_data_all *all)
1911 {
1912 tree fntype;
1913
1914 memset (all, 0, sizeof (*all));
1915
1916 fntype = TREE_TYPE (current_function_decl);
1917
1918 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1919 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1920 #else
1921 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1922 current_function_decl, -1);
1923 #endif
1924
1925 #ifdef REG_PARM_STACK_SPACE
1926 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1927 #endif
1928 }
1929
1930 /* If ARGS contains entries with complex types, split the entry into two
1931 entries of the component type.  Return a new list if substitutions are
1932 needed, else the old list.  */
1933
1934 static tree
1935 split_complex_args (tree args)
1936 {
1937 tree p;
1938
1939 /* Before allocating memory, check for the common case of no complex args.  */
1940 for (p = args; p; p = TREE_CHAIN (p))
1941 {
1942 tree type = TREE_TYPE (p);
1943 if (TREE_CODE (type) == COMPLEX_TYPE
1944 && targetm.calls.split_complex_arg (type))
1945 goto found;
1946 }
1947 return args;
1948
1949 found:
1950 args = copy_list (args);
1951
1952 for (p = args; p; p = TREE_CHAIN (p))
1953 {
1954 tree type = TREE_TYPE (p);
1955 if (TREE_CODE (type) == COMPLEX_TYPE
1956 && targetm.calls.split_complex_arg (type))
1957 {
1958 tree decl;
1959 tree subtype = TREE_TYPE (type);
1960 bool addressable = TREE_ADDRESSABLE (p);
1961
1962 /* Rewrite the PARM_DECL's type with its component. */
1963 TREE_TYPE (p) = subtype;
1964 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1965 DECL_MODE (p) = VOIDmode;
1966 DECL_SIZE (p) = NULL;
1967 DECL_SIZE_UNIT (p) = NULL;
1968 /* If this arg must go in memory, put it in a pseudo here.
1969 We can't allow it to go in memory as per normal parms,
1970 because the usual place might not have the imag part
1971 adjacent to the real part. */
1972 DECL_ARTIFICIAL (p) = addressable;
1973 DECL_IGNORED_P (p) = addressable;
1974 TREE_ADDRESSABLE (p) = 0;
1975 layout_decl (p, 0);
1976
1977 /* Build a second synthetic decl. */
1978 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1979 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1980 DECL_ARTIFICIAL (decl) = addressable;
1981 DECL_IGNORED_P (decl) = addressable;
1982 layout_decl (decl, 0);
1983
1984 /* Splice it in; skip the new decl. */
1985 TREE_CHAIN (decl) = TREE_CHAIN (p);
1986 TREE_CHAIN (p) = decl;
1987 p = decl;
1988 }
1989 }
1990
1991 return args;
1992 }
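
/* Editorial illustration (not GCC source): when the target's
   split_complex_arg hook accepts the type, a parameter list such as

     double cabs2 (_Complex double z);

   is rewritten here as if it had been

     double cabs2 (double z_re, double z_im);

   with the second PARM_DECL being the synthetic decl built above.
   The synthetic decl is actually anonymous; "z_re" and "z_im" are
   names for exposition only.  */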
1993
1994 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
1995 the hidden struct return argument, and (ABI willing) complex args.
1996 Return the new parameter list. */
1997
1998 static tree
1999 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2000 {
2001 tree fndecl = current_function_decl;
2002 tree fntype = TREE_TYPE (fndecl);
2003 tree fnargs = DECL_ARGUMENTS (fndecl);
2004
2005 /* If the struct value address is treated as the first argument, make it so.  */
2006 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2007 && ! current_function_returns_pcc_struct
2008 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2009 {
2010 tree type = build_pointer_type (TREE_TYPE (fntype));
2011 tree decl;
2012
2013 decl = build_decl (PARM_DECL, NULL_TREE, type);
2014 DECL_ARG_TYPE (decl) = type;
2015 DECL_ARTIFICIAL (decl) = 1;
2016 DECL_IGNORED_P (decl) = 1;
2017
2018 TREE_CHAIN (decl) = fnargs;
2019 fnargs = decl;
2020 all->function_result_decl = decl;
2021 }
2022
2023 all->orig_fnargs = fnargs;
2024
2025 /* If the target wants to split complex arguments into scalars, do so. */
2026 if (targetm.calls.split_complex_arg)
2027 fnargs = split_complex_args (fnargs);
2028
2029 return fnargs;
2030 }
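
/* Sketch of the augmented list (editorial illustration): for

     struct big f (int x);

   on a target whose struct_value_rtx hook returns 0, the list built
   here is conceptually

     struct big *<retptr>, int x

   i.e. an artificial PARM_DECL carrying the return slot address is
   prepended, and complex splitting is then applied to the result.
   "<retptr>" is a name for exposition; the decl is anonymous.  */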
2031
2032 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2033 data for the parameter. Incorporate ABI specifics such as pass-by-
2034 reference and type promotion. */
2035
2036 static void
2037 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2038 struct assign_parm_data_one *data)
2039 {
2040 tree nominal_type, passed_type;
2041 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2042
2043 memset (data, 0, sizeof (*data));
2044
2045 /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
2046 if (!current_function_stdarg)
2047 data->named_arg = 1; /* No variadic parms.  */
2048 else if (TREE_CHAIN (parm))
2049 data->named_arg = 1; /* Not the last non-variadic parm.  */
2050 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2051 data->named_arg = 1; /* Only variadic ones are unnamed.  */
2052 else
2053 data->named_arg = 0; /* Treat as variadic.  */
2054
2055 nominal_type = TREE_TYPE (parm);
2056 passed_type = DECL_ARG_TYPE (parm);
2057
2058 /* Look out for errors propagating this far. Also, if the parameter's
2059 type is void then its value doesn't matter. */
2060 if (TREE_TYPE (parm) == error_mark_node
2061 /* This can happen after weird syntax errors
2062 or if an enum type is defined among the parms. */
2063 || TREE_CODE (parm) != PARM_DECL
2064 || passed_type == NULL
2065 || VOID_TYPE_P (nominal_type))
2066 {
2067 nominal_type = passed_type = void_type_node;
2068 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2069 goto egress;
2070 }
2071
2072 /* Find mode of arg as it is passed, and mode of arg as it should be
2073 during execution of this function. */
2074 passed_mode = TYPE_MODE (passed_type);
2075 nominal_mode = TYPE_MODE (nominal_type);
2076
2077 /* If the parm is to be passed as a transparent union, use the type of
2078 the first field for the tests below. We have already verified that
2079 the modes are the same. */
2080 if (TREE_CODE (passed_type) == UNION_TYPE
2081 && TYPE_TRANSPARENT_UNION (passed_type))
2082 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2083
2084 /* See if this arg was passed by invisible reference. */
2085 if (pass_by_reference (&all->args_so_far, passed_mode,
2086 passed_type, data->named_arg))
2087 {
2088 passed_type = nominal_type = build_pointer_type (passed_type);
2089 data->passed_pointer = true;
2090 passed_mode = nominal_mode = Pmode;
2091 }
2092
2093 /* Find mode as it is passed by the ABI. */
2094 promoted_mode = passed_mode;
2095 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2096 {
2097 int unsignedp = TYPE_UNSIGNED (passed_type);
2098 promoted_mode = promote_mode (passed_type, promoted_mode,
2099 &unsignedp, 1);
2100 }
2101
2102 egress:
2103 data->nominal_type = nominal_type;
2104 data->passed_type = passed_type;
2105 data->nominal_mode = nominal_mode;
2106 data->passed_mode = passed_mode;
2107 data->promoted_mode = promoted_mode;
2108 }
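
/* Worked example (editorial; details are ABI-specific): for
   "void f (short s)" on a target whose promote_function_args hook
   returns true, nominal_mode is HImode while promoted_mode becomes
   SImode via promote_mode, which also records the signedness of the
   extension in UNSIGNEDP.  Had "s" been passed by invisible
   reference instead, both modes would be replaced by Pmode and
   passed_pointer set.  */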
2109
2110 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2111
2112 static void
2113 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2114 struct assign_parm_data_one *data, bool no_rtl)
2115 {
2116 int varargs_pretend_bytes = 0;
2117
2118 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2119 data->promoted_mode,
2120 data->passed_type,
2121 &varargs_pretend_bytes, no_rtl);
2122
2123 /* If the back-end has requested extra stack space, record how much is
2124 needed. Do not change pretend_args_size otherwise since it may be
2125 nonzero from an earlier partial argument. */
2126 if (varargs_pretend_bytes > 0)
2127 all->pretend_args_size = varargs_pretend_bytes;
2128 }
2129
2130 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2131 the incoming location of the current parameter. */
2132
2133 static void
2134 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2135 struct assign_parm_data_one *data)
2136 {
2137 HOST_WIDE_INT pretend_bytes = 0;
2138 rtx entry_parm;
2139 bool in_regs;
2140
2141 if (data->promoted_mode == VOIDmode)
2142 {
2143 data->entry_parm = data->stack_parm = const0_rtx;
2144 return;
2145 }
2146
2147 #ifdef FUNCTION_INCOMING_ARG
2148 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2149 data->passed_type, data->named_arg);
2150 #else
2151 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2152 data->passed_type, data->named_arg);
2153 #endif
2154
2155 if (entry_parm == 0)
2156 data->promoted_mode = data->passed_mode;
2157
2158 /* Determine parm's home in the stack, in case it arrives in the stack
2159 or we should pretend it did. Compute the stack position and rtx where
2160 the argument arrives and its size.
2161
2162 There is one complexity here: If this was a parameter that would
2163 have been passed in registers, but wasn't only because it is
2164 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2165 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2166 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2167 as it was the previous time. */
2168 in_regs = entry_parm != 0;
2169 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2170 in_regs = true;
2171 #endif
2172 if (!in_regs && !data->named_arg)
2173 {
2174 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2175 {
2176 rtx tem;
2177 #ifdef FUNCTION_INCOMING_ARG
2178 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2179 data->passed_type, true);
2180 #else
2181 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2182 data->passed_type, true);
2183 #endif
2184 in_regs = tem != NULL;
2185 }
2186 }
2187
2188 /* If this parameter was passed both in registers and in the stack, use
2189 the copy on the stack. */
2190 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2191 data->passed_type))
2192 entry_parm = 0;
2193
2194 if (entry_parm)
2195 {
2196 int partial;
2197
2198 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2199 data->promoted_mode,
2200 data->passed_type,
2201 data->named_arg);
2202 data->partial = partial;
2203
2204 /* The caller might already have allocated stack space for the
2205 register parameters. */
2206 if (partial != 0 && all->reg_parm_stack_space == 0)
2207 {
2208 /* Part of this argument is passed in registers and part
2209 is passed on the stack. Ask the prologue code to extend
2210 the stack part so that we can recreate the full value.
2211
2212 PRETEND_BYTES is the size of the registers we need to store.
2213 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2214 stack space that the prologue should allocate.
2215
2216 Internally, gcc assumes that the argument pointer is aligned
2217 to STACK_BOUNDARY bits. This is used both for alignment
2218 optimizations (see init_emit) and to locate arguments that are
2219 aligned to more than PARM_BOUNDARY bits. We must preserve this
2220 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2221 a stack boundary. */
2222
2223 /* We assume at most one partial arg, and it must be the first
2224 argument on the stack. */
2225 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2226
2227 pretend_bytes = partial;
2228 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2229
2230 /* We want to align relative to the actual stack pointer, so
2231 don't include this in the stack size until later. */
2232 all->extra_pretend_bytes = all->pretend_args_size;
2233 }
2234 }
2235
2236 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2237 entry_parm ? data->partial : 0, current_function_decl,
2238 &all->stack_args_size, &data->locate);
2239
2240 /* Adjust offsets to include the pretend args. */
2241 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2242 data->locate.slot_offset.constant += pretend_bytes;
2243 data->locate.offset.constant += pretend_bytes;
2244
2245 data->entry_parm = entry_parm;
2246 }
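
/* Arithmetic sketch for the partial case (editorial illustration):
   suppose the first stack argument has its first 4 bytes in
   registers and STACK_BYTES is 8.  Then

     pretend_bytes          = 4
     all->pretend_args_size = CEIL_ROUND (4, 8) = 8

   so the prologue allocates 8 extra bytes, keeping the argument
   pointer aligned to STACK_BOUNDARY, and the slot and arrival
   offsets computed above are shifted by the remaining 4 bytes.  */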
2247
2248 /* A subroutine of assign_parms. If there is actually space on the stack
2249 for this parm, count it in stack_args_size and return true. */
2250
2251 static bool
2252 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2253 struct assign_parm_data_one *data)
2254 {
2255 /* Trivially true if we've no incoming register. */
2256 if (data->entry_parm == NULL)
2257 ;
2258 /* Also true if we're partially in registers and partially not,
2259 since we've arranged to drop the entire argument on the stack. */
2260 else if (data->partial != 0)
2261 ;
2262 /* Also true if the target says that it's passed in both registers
2263 and on the stack. */
2264 else if (GET_CODE (data->entry_parm) == PARALLEL
2265 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2266 ;
2267 /* Also true if the target says that there's stack allocated for
2268 all register parameters. */
2269 else if (all->reg_parm_stack_space > 0)
2270 ;
2271 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2272 else
2273 return false;
2274
2275 all->stack_args_size.constant += data->locate.size.constant;
2276 if (data->locate.size.var)
2277 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2278
2279 return true;
2280 }
2281
2282 /* A subroutine of assign_parms. Given that this parameter is allocated
2283 stack space by the ABI, find it. */
2284
2285 static void
2286 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2287 {
2288 rtx offset_rtx, stack_parm;
2289 unsigned int align, boundary;
2290
2291 /* If we're passing this arg using a reg, make its stack home the
2292 aligned stack slot. */
2293 if (data->entry_parm)
2294 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2295 else
2296 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2297
2298 stack_parm = current_function_internal_arg_pointer;
2299 if (offset_rtx != const0_rtx)
2300 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2301 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2302
2303 set_mem_attributes (stack_parm, parm, 1);
2304
2305 boundary = data->locate.boundary;
2306 align = BITS_PER_UNIT;
2307
2308 /* If we're padding upward, we know that the alignment of the slot
2309 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2310 intentionally forcing upward padding. Otherwise we have to come
2311 up with a guess at the alignment based on OFFSET_RTX. */
2312 if (data->locate.where_pad != downward || data->entry_parm)
2313 align = boundary;
2314 else if (GET_CODE (offset_rtx) == CONST_INT)
2315 {
2316 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2317 align = align & -align;
2318 }
2319 set_mem_align (stack_parm, align);
2320
2321 if (data->entry_parm)
2322 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2323
2324 data->stack_parm = stack_parm;
2325 }
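
/* Worked example of the alignment guess (editorial illustration,
   BITS_PER_UNIT == 8): with no incoming register, downward padding,
   OFFSET_RTX == 12 and a 64-bit BOUNDARY:

     align = 12 * 8 | 64 = 96 | 64 = 96
     align & -align       = 32

   so the slot is assumed 32-bit aligned: the largest power of two
   dividing both the bit offset and the boundary.  */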
2326
2327 /* A subroutine of assign_parms.  Adjust DATA->ENTRY_PARM such that it's
2328 always valid and contiguous. */
2329
2330 static void
2331 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2332 {
2333 rtx entry_parm = data->entry_parm;
2334 rtx stack_parm = data->stack_parm;
2335
2336 /* If this parm was passed part in regs and part in memory, pretend it
2337 arrived entirely in memory by pushing the register-part onto the stack.
2338 In the special case of a DImode or DFmode that is split, we could put
2339 it together in a pseudoreg directly, but for now that's not worth
2340 bothering with. */
2341 if (data->partial != 0)
2342 {
2343 /* Handle calls that pass values in multiple non-contiguous
2344 locations. The Irix 6 ABI has examples of this. */
2345 if (GET_CODE (entry_parm) == PARALLEL)
2346 emit_group_store (validize_mem (stack_parm), entry_parm,
2347 data->passed_type,
2348 int_size_in_bytes (data->passed_type));
2349 else
2350 {
2351 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2352 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2353 data->partial / UNITS_PER_WORD);
2354 }
2355
2356 entry_parm = stack_parm;
2357 }
2358
2359 /* If we didn't decide this parm came in a register, by default it came
2360 on the stack. */
2361 else if (entry_parm == NULL)
2362 entry_parm = stack_parm;
2363
2364 /* When an argument is passed in multiple locations, we can't make use
2365 of this information, but we can save some copying if the whole argument
2366 is passed in a single register. */
2367 else if (GET_CODE (entry_parm) == PARALLEL
2368 && data->nominal_mode != BLKmode
2369 && data->passed_mode != BLKmode)
2370 {
2371 size_t i, len = XVECLEN (entry_parm, 0);
2372
2373 for (i = 0; i < len; i++)
2374 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2375 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2376 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2377 == data->passed_mode)
2378 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2379 {
2380 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2381 break;
2382 }
2383 }
2384
2385 data->entry_parm = entry_parm;
2386 }
2387
2388 /* A subroutine of assign_parms.  Adjust DATA->STACK_PARM such that it's
2389 always valid and properly aligned. */
2390
2391 static void
2392 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2393 {
2394 rtx stack_parm = data->stack_parm;
2395
2396 /* If we can't trust the parm stack slot to be aligned enough for its
2397 ultimate type, don't use that slot after entry. We'll make another
2398 stack slot, if we need one. */
2399 if (stack_parm
2400 && ((STRICT_ALIGNMENT
2401 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2402 || (data->nominal_type
2403 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2404 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2405 stack_parm = NULL;
2406
2407 /* If parm was passed in memory, and we need to convert it on entry,
2408 don't store it back in that same slot. */
2409 else if (data->entry_parm == stack_parm
2410 && data->nominal_mode != BLKmode
2411 && data->nominal_mode != data->passed_mode)
2412 stack_parm = NULL;
2413
2414 /* If stack protection is in effect for this function, don't leave any
2415 pointers in their passed stack slots. */
2416 else if (cfun->stack_protect_guard
2417 && (flag_stack_protect == 2
2418 || data->passed_pointer
2419 || POINTER_TYPE_P (data->nominal_type)))
2420 stack_parm = NULL;
2421
2422 data->stack_parm = stack_parm;
2423 }
2424
2425 /* A subroutine of assign_parms. Return true if the current parameter
2426 should be stored as a BLKmode in the current frame. */
2427
2428 static bool
2429 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2430 {
2431 if (data->nominal_mode == BLKmode)
2432 return true;
2433 if (GET_CODE (data->entry_parm) == PARALLEL)
2434 return true;
2435
2436 #ifdef BLOCK_REG_PADDING
2437 /* Only assign_parm_setup_block knows how to deal with register arguments
2438 that are padded at the least significant end. */
2439 if (REG_P (data->entry_parm)
2440 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2441 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2442 == (BYTES_BIG_ENDIAN ? upward : downward)))
2443 return true;
2444 #endif
2445
2446 return false;
2447 }
2448
2449 /* A subroutine of assign_parms. Arrange for the parameter to be
2450 present and valid in DATA->STACK_PARM.  */
2451
2452 static void
2453 assign_parm_setup_block (struct assign_parm_data_all *all,
2454 tree parm, struct assign_parm_data_one *data)
2455 {
2456 rtx entry_parm = data->entry_parm;
2457 rtx stack_parm = data->stack_parm;
2458 HOST_WIDE_INT size;
2459 HOST_WIDE_INT size_stored;
2460 rtx orig_entry_parm = entry_parm;
2461
2462 if (GET_CODE (entry_parm) == PARALLEL)
2463 entry_parm = emit_group_move_into_temps (entry_parm);
2464
2465 /* If we've a non-block object that's nevertheless passed in parts,
2466 reconstitute it in register operations rather than on the stack. */
2467 if (GET_CODE (entry_parm) == PARALLEL
2468 && data->nominal_mode != BLKmode)
2469 {
2470 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2471
2472 if ((XVECLEN (entry_parm, 0) > 1
2473 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2474 && use_register_for_decl (parm))
2475 {
2476 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2477
2478 push_to_sequence2 (all->first_conversion_insn,
2479 all->last_conversion_insn);
2480
2481 /* For values returned in multiple registers, handle possible
2482 incompatible calls to emit_group_store.
2483
2484 For example, the following would be invalid, and would have to
2485 be fixed by the conditional below:
2486
2487 emit_group_store ((reg:SF), (parallel:DF))
2488 emit_group_store ((reg:SI), (parallel:DI))
2489
2490 An example of this is doubles in e500 v2:
2491 (parallel:DF (expr_list (reg:SI) (const_int 0))
2492 (expr_list (reg:SI) (const_int 4))). */
2493 if (data->nominal_mode != data->passed_mode)
2494 {
2495 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2496 emit_group_store (t, entry_parm, NULL_TREE,
2497 GET_MODE_SIZE (GET_MODE (entry_parm)));
2498 convert_move (parmreg, t, 0);
2499 }
2500 else
2501 emit_group_store (parmreg, entry_parm, data->nominal_type,
2502 int_size_in_bytes (data->nominal_type));
2503
2504 all->first_conversion_insn = get_insns ();
2505 all->last_conversion_insn = get_last_insn ();
2506 end_sequence ();
2507
2508 SET_DECL_RTL (parm, parmreg);
2509 return;
2510 }
2511 }
2512
2513 size = int_size_in_bytes (data->passed_type);
2514 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2515 if (stack_parm == 0)
2516 {
2517 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2518 stack_parm = assign_stack_local (BLKmode, size_stored,
2519 DECL_ALIGN (parm));
2520 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2521 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2522 set_mem_attributes (stack_parm, parm, 1);
2523 }
2524
2525 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2526 calls that pass values in multiple non-contiguous locations. */
2527 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2528 {
2529 rtx mem;
2530
2531 /* Note that we will be storing an integral number of words.
2532 So we have to be careful to ensure that we allocate an
2533 integral number of words. We do this above when we call
2534 assign_stack_local if space was not allocated in the argument
2535 list. If it was, this will not work if PARM_BOUNDARY is not
2536 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2537 if it becomes a problem.  The exception is when BLKmode arrives
2538 with arguments not conforming to word_mode.  */
2539
2540 if (data->stack_parm == 0)
2541 ;
2542 else if (GET_CODE (entry_parm) == PARALLEL)
2543 ;
2544 else
2545 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2546
2547 mem = validize_mem (stack_parm);
2548
2549 /* Handle values in multiple non-contiguous locations. */
2550 if (GET_CODE (entry_parm) == PARALLEL)
2551 {
2552 push_to_sequence2 (all->first_conversion_insn,
2553 all->last_conversion_insn);
2554 emit_group_store (mem, entry_parm, data->passed_type, size);
2555 all->first_conversion_insn = get_insns ();
2556 all->last_conversion_insn = get_last_insn ();
2557 end_sequence ();
2558 }
2559
2560 else if (size == 0)
2561 ;
2562
2563 /* If SIZE is that of a mode no bigger than a word, just use
2564 that mode's store operation. */
2565 else if (size <= UNITS_PER_WORD)
2566 {
2567 enum machine_mode mode
2568 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2569
2570 if (mode != BLKmode
2571 #ifdef BLOCK_REG_PADDING
2572 && (size == UNITS_PER_WORD
2573 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2574 != (BYTES_BIG_ENDIAN ? upward : downward)))
2575 #endif
2576 )
2577 {
2578 rtx reg;
2579
2580 /* We are really truncating a word_mode value containing
2581 SIZE bytes into a value of mode MODE. If such an
2582 operation requires no actual instructions, we can refer
2583 to the value directly in mode MODE, otherwise we must
2584 start with the register in word_mode and explicitly
2585 convert it. */
2586 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2587 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2588 else
2589 {
2590 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2591 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2592 }
2593 emit_move_insn (change_address (mem, mode, 0), reg);
2594 }
2595
2596 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2597 machine must be aligned to the left before storing
2598 to memory. Note that the previous test doesn't
2599 handle all cases (e.g. SIZE == 3). */
2600 else if (size != UNITS_PER_WORD
2601 #ifdef BLOCK_REG_PADDING
2602 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2603 == downward)
2604 #else
2605 && BYTES_BIG_ENDIAN
2606 #endif
2607 )
2608 {
2609 rtx tem, x;
2610 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2611 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2612
2613 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2614 build_int_cst (NULL_TREE, by),
2615 NULL_RTX, 1);
2616 tem = change_address (mem, word_mode, 0);
2617 emit_move_insn (tem, x);
2618 }
2619 else
2620 move_block_from_reg (REGNO (entry_parm), mem,
2621 size_stored / UNITS_PER_WORD);
2622 }
2623 else
2624 move_block_from_reg (REGNO (entry_parm), mem,
2625 size_stored / UNITS_PER_WORD);
2626 }
2627 else if (data->stack_parm == 0)
2628 {
2629 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2630 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2631 BLOCK_OP_NORMAL);
2632 all->first_conversion_insn = get_insns ();
2633 all->last_conversion_insn = get_last_insn ();
2634 end_sequence ();
2635 }
2636
2637 data->stack_parm = stack_parm;
2638 SET_DECL_RTL (parm, stack_parm);
2639 }
2640
2641 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2642 parameter.  Get it there.  Perform all ABI-specified conversions.  */
2643
2644 static void
2645 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2646 struct assign_parm_data_one *data)
2647 {
2648 rtx parmreg;
2649 enum machine_mode promoted_nominal_mode;
2650 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2651 bool did_conversion = false;
2652
2653 /* Store the parm in a pseudoregister during the function, but we may
2654 need to do it in a wider mode. */
2655
2656 /* This is not really promoting for a call. However we need to be
2657 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2658 promoted_nominal_mode
2659 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2660
2661 parmreg = gen_reg_rtx (promoted_nominal_mode);
2662
2663 if (!DECL_ARTIFICIAL (parm))
2664 mark_user_reg (parmreg);
2665
2666 /* If this was an item that we received a pointer to,
2667 set DECL_RTL appropriately. */
2668 if (data->passed_pointer)
2669 {
2670 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2671 set_mem_attributes (x, parm, 1);
2672 SET_DECL_RTL (parm, x);
2673 }
2674 else
2675 SET_DECL_RTL (parm, parmreg);
2676
2677 /* Copy the value into the register. */
2678 if (data->nominal_mode != data->passed_mode
2679 || promoted_nominal_mode != data->promoted_mode)
2680 {
2681 int save_tree_used;
2682
2683 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2684 mode, by the caller. We now have to convert it to
2685 NOMINAL_MODE, if different. However, PARMREG may be in
2686 a different mode than NOMINAL_MODE if it is being stored
2687 promoted.
2688
2689 If ENTRY_PARM is a hard register, it might be in a register
2690 not valid for operating in its mode (e.g., an odd-numbered
2691 register for a DFmode). In that case, moves are the only
2692 thing valid, so we can't do a convert from there. This
2693 occurs when the calling sequence allows such misaligned
2694 usages.
2695
2696 In addition, the conversion may involve a call, which could
2697 clobber parameters which haven't been copied to pseudo
2698 registers yet. Therefore, we must first copy the parm to
2699 a pseudo reg here, and save the conversion until after all
2700 parameters have been moved. */
2701
2702 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2703
2704 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2705
2706 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2707 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2708
2709 if (GET_CODE (tempreg) == SUBREG
2710 && GET_MODE (tempreg) == data->nominal_mode
2711 && REG_P (SUBREG_REG (tempreg))
2712 && data->nominal_mode == data->passed_mode
2713 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2714 && GET_MODE_SIZE (GET_MODE (tempreg))
2715 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2716 {
2717 /* The argument is already sign/zero extended, so note it
2718 into the subreg. */
2719 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2720 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2721 }
2722
2723 /* TREE_USED gets set erroneously during expand_assignment. */
2724 save_tree_used = TREE_USED (parm);
2725 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2726 TREE_USED (parm) = save_tree_used;
2727 all->first_conversion_insn = get_insns ();
2728 all->last_conversion_insn = get_last_insn ();
2729 end_sequence ();
2730
2731 did_conversion = true;
2732 }
2733 else
2734 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2735
2736 /* If we were passed a pointer but the actual value can safely live
2737 in a register, put it in one. */
2738 if (data->passed_pointer
2739 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2740 /* If by-reference argument was promoted, demote it. */
2741 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2742 || use_register_for_decl (parm)))
2743 {
2744 /* We can't use nominal_mode, because it will have been set to
2745 Pmode above. We must use the actual mode of the parm. */
2746 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2747 mark_user_reg (parmreg);
2748
2749 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2750 {
2751 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2752 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2753
2754 push_to_sequence2 (all->first_conversion_insn,
2755 all->last_conversion_insn);
2756 emit_move_insn (tempreg, DECL_RTL (parm));
2757 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2758 emit_move_insn (parmreg, tempreg);
2759 all->first_conversion_insn = get_insns ();
2760 all->last_conversion_insn = get_last_insn ();
2761 end_sequence ();
2762
2763 did_conversion = true;
2764 }
2765 else
2766 emit_move_insn (parmreg, DECL_RTL (parm));
2767
2768 SET_DECL_RTL (parm, parmreg);
2769
2770 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2771 now the parm. */
2772 data->stack_parm = NULL;
2773 }
2774
2775 /* Mark the register as eliminable if we did no conversion and it was
2776 copied from memory at a fixed offset, and the arg pointer was not
2777 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2778 offset formed an invalid address, such memory-equivalences as we
2779 make here would screw up life analysis for it. */
2780 if (data->nominal_mode == data->passed_mode
2781 && !did_conversion
2782 && data->stack_parm != 0
2783 && MEM_P (data->stack_parm)
2784 && data->locate.offset.var == 0
2785 && reg_mentioned_p (virtual_incoming_args_rtx,
2786 XEXP (data->stack_parm, 0)))
2787 {
2788 rtx linsn = get_last_insn ();
2789 rtx sinsn, set;
2790
2791 /* Mark complex types separately. */
2792 if (GET_CODE (parmreg) == CONCAT)
2793 {
2794 enum machine_mode submode
2795 = GET_MODE_INNER (GET_MODE (parmreg));
2796 int regnor = REGNO (XEXP (parmreg, 0));
2797 int regnoi = REGNO (XEXP (parmreg, 1));
2798 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2799 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2800 GET_MODE_SIZE (submode));
2801
2802 /* Scan backwards for the set of the real and
2803 imaginary parts. */
2804 for (sinsn = linsn; sinsn != 0;
2805 sinsn = prev_nonnote_insn (sinsn))
2806 {
2807 set = single_set (sinsn);
2808 if (set == 0)
2809 continue;
2810
2811 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2812 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2813 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2814 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2815 }
2816 }
2817 else if ((set = single_set (linsn)) != 0
2818 && SET_DEST (set) == parmreg)
2819 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2820 }
2821
2822 /* For pointer data type, suggest pointer register. */
2823 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2824 mark_reg_pointer (parmreg,
2825 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2826 }
2827
2828 /* A subroutine of assign_parms. Allocate stack space to hold the current
2829 parameter.  Get it there.  Perform all ABI-specified conversions.  */
2830
2831 static void
2832 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2833 struct assign_parm_data_one *data)
2834 {
2835 /* Value must be stored in the stack slot STACK_PARM during function
2836 execution. */
2837 bool to_conversion = false;
2838
2839 if (data->promoted_mode != data->nominal_mode)
2840 {
2841 /* Conversion is required. */
2842 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2843
2844 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2845
2846 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2847 to_conversion = true;
2848
2849 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2850 TYPE_UNSIGNED (TREE_TYPE (parm)));
2851
2852 if (data->stack_parm)
2853 /* ??? This may need a big-endian conversion on sparc64. */
2854 data->stack_parm
2855 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2856 }
2857
2858 if (data->entry_parm != data->stack_parm)
2859 {
2860 rtx src, dest;
2861
2862 if (data->stack_parm == 0)
2863 {
2864 data->stack_parm
2865 = assign_stack_local (GET_MODE (data->entry_parm),
2866 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2867 TYPE_ALIGN (data->passed_type));
2868 set_mem_attributes (data->stack_parm, parm, 1);
2869 }
2870
2871 dest = validize_mem (data->stack_parm);
2872 src = validize_mem (data->entry_parm);
2873
2874 if (MEM_P (src))
2875 {
2876 /* Use a block move to handle potentially misaligned entry_parm. */
2877 if (!to_conversion)
2878 push_to_sequence2 (all->first_conversion_insn,
2879 all->last_conversion_insn);
2880 to_conversion = true;
2881
2882 emit_block_move (dest, src,
2883 GEN_INT (int_size_in_bytes (data->passed_type)),
2884 BLOCK_OP_NORMAL);
2885 }
2886 else
2887 emit_move_insn (dest, src);
2888 }
2889
2890 if (to_conversion)
2891 {
2892 all->first_conversion_insn = get_insns ();
2893 all->last_conversion_insn = get_last_insn ();
2894 end_sequence ();
2895 }
2896
2897 SET_DECL_RTL (parm, data->stack_parm);
2898 }
2899
2900 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2901 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2902
2903 static void
2904 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2905 {
2906 tree parm;
2907 tree orig_fnargs = all->orig_fnargs;
2908
2909 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2910 {
2911 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2912 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2913 {
2914 rtx tmp, real, imag;
2915 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2916
2917 real = DECL_RTL (fnargs);
2918 imag = DECL_RTL (TREE_CHAIN (fnargs));
2919 if (inner != GET_MODE (real))
2920 {
2921 real = gen_lowpart_SUBREG (inner, real);
2922 imag = gen_lowpart_SUBREG (inner, imag);
2923 }
2924
2925 if (TREE_ADDRESSABLE (parm))
2926 {
2927 rtx rmem, imem;
2928 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2929
2930 /* split_complex_arg put the real and imag parts in
2931 pseudos. Move them to memory. */
2932 tmp = assign_stack_local (DECL_MODE (parm), size,
2933 TYPE_ALIGN (TREE_TYPE (parm)));
2934 set_mem_attributes (tmp, parm, 1);
2935 rmem = adjust_address_nv (tmp, inner, 0);
2936 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2937 push_to_sequence2 (all->first_conversion_insn,
2938 all->last_conversion_insn);
2939 emit_move_insn (rmem, real);
2940 emit_move_insn (imem, imag);
2941 all->first_conversion_insn = get_insns ();
2942 all->last_conversion_insn = get_last_insn ();
2943 end_sequence ();
2944 }
2945 else
2946 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2947 SET_DECL_RTL (parm, tmp);
2948
2949 real = DECL_INCOMING_RTL (fnargs);
2950 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2951 if (inner != GET_MODE (real))
2952 {
2953 real = gen_lowpart_SUBREG (inner, real);
2954 imag = gen_lowpart_SUBREG (inner, imag);
2955 }
2956 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2957 set_decl_incoming_rtl (parm, tmp, false);
2958 fnargs = TREE_CHAIN (fnargs);
2959 }
2960 else
2961 {
2962 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2963 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
2964
2965 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2966 instead of the copy of decl, i.e. FNARGS. */
2967 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2968 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2969 }
2970
2971 fnargs = TREE_CHAIN (fnargs);
2972 }
2973 }
2974
2975 /* Assign RTL expressions to the function's parameters. This may involve
2976 copying them into registers and using those registers as the DECL_RTL. */
2977
2978 static void
2979 assign_parms (tree fndecl)
2980 {
2981 struct assign_parm_data_all all;
2982 tree fnargs, parm;
2983
2984 current_function_internal_arg_pointer
2985 = targetm.calls.internal_arg_pointer ();
2986
2987 assign_parms_initialize_all (&all);
2988 fnargs = assign_parms_augmented_arg_list (&all);
2989
2990 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2991 {
2992 struct assign_parm_data_one data;
2993
2994 /* Extract the type of PARM; adjust it according to ABI. */
2995 assign_parm_find_data_types (&all, parm, &data);
2996
2997 /* Early out for errors and void parameters. */
2998 if (data.passed_mode == VOIDmode)
2999 {
3000 SET_DECL_RTL (parm, const0_rtx);
3001 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3002 continue;
3003 }
3004
3005 if (current_function_stdarg && !TREE_CHAIN (parm))
3006 assign_parms_setup_varargs (&all, &data, false);
3007
3008 /* Find out where the parameter arrives in this function. */
3009 assign_parm_find_entry_rtl (&all, &data);
3010
3011 /* Find out where stack space for this parameter might be. */
3012 if (assign_parm_is_stack_parm (&all, &data))
3013 {
3014 assign_parm_find_stack_rtl (parm, &data);
3015 assign_parm_adjust_entry_rtl (&data);
3016 }
3017
3018 /* Record permanently how this parm was passed. */
3019 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
3020
3021 /* Update info on where next arg arrives in registers. */
3022 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3023 data.passed_type, data.named_arg);
3024
3025 assign_parm_adjust_stack_rtl (&data);
3026
3027 if (assign_parm_setup_block_p (&data))
3028 assign_parm_setup_block (&all, parm, &data);
3029 else if (data.passed_pointer || use_register_for_decl (parm))
3030 assign_parm_setup_reg (&all, parm, &data);
3031 else
3032 assign_parm_setup_stack (&all, parm, &data);
3033 }
3034
3035 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3036 assign_parms_unsplit_complex (&all, fnargs);
3037
3038 /* Output all parameter conversion instructions (possibly including calls)
3039 now that all parameters have been copied out of hard registers. */
3040 emit_insn (all.first_conversion_insn);
3041
3042 /* If we are receiving a struct value address as the first argument, set up
3043 the RTL for the function result. As this might require code to convert
3044 the transmitted address to Pmode, we do this here to ensure that possible
3045 preliminary conversions of the address have been emitted already. */
3046 if (all.function_result_decl)
3047 {
3048 tree result = DECL_RESULT (current_function_decl);
3049 rtx addr = DECL_RTL (all.function_result_decl);
3050 rtx x;
3051
3052 if (DECL_BY_REFERENCE (result))
3053 x = addr;
3054 else
3055 {
3056 addr = convert_memory_address (Pmode, addr);
3057 x = gen_rtx_MEM (DECL_MODE (result), addr);
3058 set_mem_attributes (x, result, 1);
3059 }
3060 SET_DECL_RTL (result, x);
3061 }
3062
3063 /* We have aligned all the args, so add space for the pretend args. */
3064 current_function_pretend_args_size = all.pretend_args_size;
3065 all.stack_args_size.constant += all.extra_pretend_bytes;
3066 current_function_args_size = all.stack_args_size.constant;
3067
3068 /* Adjust function incoming argument size for alignment and
3069 minimum length. */
3070
3071 #ifdef REG_PARM_STACK_SPACE
3072 current_function_args_size = MAX (current_function_args_size,
3073 REG_PARM_STACK_SPACE (fndecl));
3074 #endif
3075
3076 current_function_args_size = CEIL_ROUND (current_function_args_size,
3077 PARM_BOUNDARY / BITS_PER_UNIT);
3078
3079 #ifdef ARGS_GROW_DOWNWARD
3080 current_function_arg_offset_rtx
3081 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3082 : expand_expr (size_diffop (all.stack_args_size.var,
3083 size_int (-all.stack_args_size.constant)),
3084 NULL_RTX, VOIDmode, 0));
3085 #else
3086 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3087 #endif
3088
3089 /* See how many bytes, if any, of its args a function should try to pop
3090 on return. */
3091
3092 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3093 current_function_args_size);
3094
3095 /* For a stdarg.h function, save info about
3096 regs and stack space used by the named args. */
3097
3098 current_function_args_info = all.args_so_far;
3099
3100 /* Set the rtx used for the function return value. Put this in its
3101 own variable so any optimizers that need this information don't have
3102 to include tree.h. Do this here so it gets done when an inlined
3103 function gets output. */
3104
3105 current_function_return_rtx
3106 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3107 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3108
3109 /* If scalar return value was computed in a pseudo-reg, or was a named
3110 return value that got dumped to the stack, copy that to the hard
3111 return register. */
3112 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3113 {
3114 tree decl_result = DECL_RESULT (fndecl);
3115 rtx decl_rtl = DECL_RTL (decl_result);
3116
3117 if (REG_P (decl_rtl)
3118 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3119 : DECL_REGISTER (decl_result))
3120 {
3121 rtx real_decl_rtl;
3122
3123 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3124 fndecl, true);
3125 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3126 /* The delay slot scheduler assumes that current_function_return_rtx
3127 holds the hard register containing the return value, not a
3128 temporary pseudo. */
3129 current_function_return_rtx = real_decl_rtl;
3130 }
3131 }
3132 }
3133
3134 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3135 For all seen types, gimplify their sizes. */
3136
3137 static tree
3138 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3139 {
3140 tree t = *tp;
3141
3142 *walk_subtrees = 0;
3143 if (TYPE_P (t))
3144 {
3145 if (POINTER_TYPE_P (t))
3146 *walk_subtrees = 1;
3147 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3148 && !TYPE_SIZES_GIMPLIFIED (t))
3149 {
3150 gimplify_type_sizes (t, (tree *) data);
3151 *walk_subtrees = 1;
3152 }
3153 }
3154
3155 return NULL;
3156 }
3157
3158 /* Gimplify the parameter list for current_function_decl. This involves
3159 evaluating SAVE_EXPRs of variable sized parameters and generating code
3160 to implement callee-copies reference parameters. Returns a list of
3161 statements to add to the beginning of the function, or NULL if nothing
3162 to do. */
3163
3164 tree
3165 gimplify_parameters (void)
3166 {
3167 struct assign_parm_data_all all;
3168 tree fnargs, parm, stmts = NULL;
3169
3170 assign_parms_initialize_all (&all);
3171 fnargs = assign_parms_augmented_arg_list (&all);
3172
3173 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3174 {
3175 struct assign_parm_data_one data;
3176
3177 /* Extract the type of PARM; adjust it according to ABI. */
3178 assign_parm_find_data_types (&all, parm, &data);
3179
3180 /* Early out for errors and void parameters. */
3181 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3182 continue;
3183
3184 /* Update info on where next arg arrives in registers. */
3185 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3186 data.passed_type, data.named_arg);
3187
3188 /* ??? Once upon a time variable_size stuffed parameter list
3189 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3190 turned out to be less than manageable in the gimple world.
3191 Now we have to hunt them down ourselves. */
3192 walk_tree_without_duplicates (&data.passed_type,
3193 gimplify_parm_type, &stmts);
3194
3195 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3196 {
3197 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3198 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3199 }
3200
3201 if (data.passed_pointer)
3202 {
3203 tree type = TREE_TYPE (data.passed_type);
3204 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3205 type, data.named_arg))
3206 {
3207 tree local, t;
3208
3209 /* For constant sized objects, this is trivial; for
3210 variable-sized objects, we have to play games. */
3211 if (TREE_CONSTANT (DECL_SIZE (parm)))
3212 {
3213 local = create_tmp_var (type, get_name (parm));
3214 DECL_IGNORED_P (local) = 0;
3215 }
3216 else
3217 {
3218 tree ptr_type, addr;
3219
3220 ptr_type = build_pointer_type (type);
3221 addr = create_tmp_var (ptr_type, get_name (parm));
3222 DECL_IGNORED_P (addr) = 0;
3223 local = build_fold_indirect_ref (addr);
3224
3225 t = built_in_decls[BUILT_IN_ALLOCA];
3226 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3227 t = fold_convert (ptr_type, t);
3228 t = build_gimple_modify_stmt (addr, t);
3229 gimplify_and_add (t, &stmts);
3230 }
3231
3232 t = build_gimple_modify_stmt (local, parm);
3233 gimplify_and_add (t, &stmts);
3234
3235 SET_DECL_VALUE_EXPR (parm, local);
3236 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3237 }
3238 }
3239 }
3240
3241 return stmts;
3242 }
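
/* Lowering sketch (editorial illustration): for a callee-copied
   reference parameter P of constant-sized type T, the statements
   built above amount to

     T local = P;

   and for variable-sized T to

     T *addr = __builtin_alloca (size);    -- size = DECL_SIZE_UNIT --
     *addr = P;

   after which DECL_VALUE_EXPR redirects uses of P in the function
   body to the local copy.  */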
3243 \f
3244 /* Compute the size and offset from the start of the stacked arguments for a
3245 parm passed in mode PASSED_MODE and with type TYPE.
3246
3247 INITIAL_OFFSET_PTR points to the current offset into the stacked
3248 arguments.
3249
3250 The starting offset and size for this parm are returned in
3251 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3252 nonzero, the offset is that of the stack slot, which is returned in
3253 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3254 padding required from the initial offset ptr to the stack slot.
3255
3256 IN_REGS is nonzero if the argument will be passed in registers. It will
3257 never be set if REG_PARM_STACK_SPACE is not defined.
3258
3259 FNDECL is the function in which the argument was defined.
3260
3261 There are two types of rounding that are done. The first, controlled by
3262 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3263 list to be aligned to the specific boundary (in bits). This rounding
3264 affects the initial and starting offsets, but not the argument size.
3265
3266 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3267 optionally rounds the size of the parm to PARM_BOUNDARY. The
3268 initial offset is not affected by this rounding, while the size always
3269 is and the starting offset may be. */
3270
3271 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3272 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3273 callers pass in the total size of args so far as
3274 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3275
3276 void
3277 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3278 int partial, tree fndecl ATTRIBUTE_UNUSED,
3279 struct args_size *initial_offset_ptr,
3280 struct locate_and_pad_arg_data *locate)
3281 {
3282 tree sizetree;
3283 enum direction where_pad;
3284 unsigned int boundary;
3285 int reg_parm_stack_space = 0;
3286 int part_size_in_regs;
3287
3288 #ifdef REG_PARM_STACK_SPACE
3289 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3290
3291 /* If we have found a stack parm before we reach the end of the
3292 area reserved for registers, skip that area. */
3293 if (! in_regs)
3294 {
3295 if (reg_parm_stack_space > 0)
3296 {
3297 if (initial_offset_ptr->var)
3298 {
3299 initial_offset_ptr->var
3300 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3301 ssize_int (reg_parm_stack_space));
3302 initial_offset_ptr->constant = 0;
3303 }
3304 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3305 initial_offset_ptr->constant = reg_parm_stack_space;
3306 }
3307 }
3308 #endif /* REG_PARM_STACK_SPACE */
3309
3310 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3311
3312 sizetree
3313 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3314 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3315 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3316 locate->where_pad = where_pad;
3317 locate->boundary = boundary;
3318
3319 /* Remember if the outgoing parameter requires extra alignment on the
3320 calling function side. */
3321 if (boundary > PREFERRED_STACK_BOUNDARY)
3322 boundary = PREFERRED_STACK_BOUNDARY;
3323 if (cfun->stack_alignment_needed < boundary)
3324 cfun->stack_alignment_needed = boundary;
3325
3326 #ifdef ARGS_GROW_DOWNWARD
3327 locate->slot_offset.constant = -initial_offset_ptr->constant;
3328 if (initial_offset_ptr->var)
3329 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3330 initial_offset_ptr->var);
3331
3332 {
3333 tree s2 = sizetree;
3334 if (where_pad != none
3335 && (!host_integerp (sizetree, 1)
3336 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3337 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3338 SUB_PARM_SIZE (locate->slot_offset, s2);
3339 }
3340
3341 locate->slot_offset.constant += part_size_in_regs;
3342
3343 if (!in_regs
3344 #ifdef REG_PARM_STACK_SPACE
3345 || REG_PARM_STACK_SPACE (fndecl) > 0
3346 #endif
3347 )
3348 pad_to_arg_alignment (&locate->slot_offset, boundary,
3349 &locate->alignment_pad);
3350
3351 locate->size.constant = (-initial_offset_ptr->constant
3352 - locate->slot_offset.constant);
3353 if (initial_offset_ptr->var)
3354 locate->size.var = size_binop (MINUS_EXPR,
3355 size_binop (MINUS_EXPR,
3356 ssize_int (0),
3357 initial_offset_ptr->var),
3358 locate->slot_offset.var);
3359
3360 /* Pad_below needs the pre-rounded size to know how much to pad
3361 below. */
3362 locate->offset = locate->slot_offset;
3363 if (where_pad == downward)
3364 pad_below (&locate->offset, passed_mode, sizetree);
3365
3366 #else /* !ARGS_GROW_DOWNWARD */
3367 if (!in_regs
3368 #ifdef REG_PARM_STACK_SPACE
3369 || REG_PARM_STACK_SPACE (fndecl) > 0
3370 #endif
3371 )
3372 pad_to_arg_alignment (initial_offset_ptr, boundary,
3373 &locate->alignment_pad);
3374 locate->slot_offset = *initial_offset_ptr;
3375
3376 #ifdef PUSH_ROUNDING
3377 if (passed_mode != BLKmode)
3378 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3379 #endif
3380
3381 /* Pad_below needs the pre-rounded size to know how much to pad below
3382 so this must be done before rounding up. */
3383 locate->offset = locate->slot_offset;
3384 if (where_pad == downward)
3385 pad_below (&locate->offset, passed_mode, sizetree);
3386
3387 if (where_pad != none
3388 && (!host_integerp (sizetree, 1)
3389 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3390 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3391
3392 ADD_PARM_SIZE (locate->size, sizetree);
3393
3394 locate->size.constant -= part_size_in_regs;
3395 #endif /* ARGS_GROW_DOWNWARD */
3396 }
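
/* Worked example (editorial illustration; args grow upward, no
   reserved register save area): a 6-byte BLKmode argument padded
   downward, with PARM_BOUNDARY == FUNCTION_ARG_BOUNDARY == 32,
   arriving at an initial offset of 4 bytes:

     slot_offset = 4                   -- already 4-byte aligned --
     offset      = 4 + (8 - 6) = 6     -- pad_below skips the pad --
     size        = round_up (6, 4) = 8

   With ARGS_GROW_DOWNWARD the same quantities come out negative,
   measured back from the incoming offset.  */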
3397
3398 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3399 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3400
3401 static void
3402 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3403 struct args_size *alignment_pad)
3404 {
3405 tree save_var = NULL_TREE;
3406 HOST_WIDE_INT save_constant = 0;
3407 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3408 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3409
3410 #ifdef SPARC_STACK_BOUNDARY_HACK
3411 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3412 the real alignment of %sp. However, when it does this, the
3413 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3414 if (SPARC_STACK_BOUNDARY_HACK)
3415 sp_offset = 0;
3416 #endif
3417
3418 if (boundary > PARM_BOUNDARY)
3419 {
3420 save_var = offset_ptr->var;
3421 save_constant = offset_ptr->constant;
3422 }
3423
3424 alignment_pad->var = NULL_TREE;
3425 alignment_pad->constant = 0;
3426
3427 if (boundary > BITS_PER_UNIT)
3428 {
3429 if (offset_ptr->var)
3430 {
3431 tree sp_offset_tree = ssize_int (sp_offset);
3432 tree offset = size_binop (PLUS_EXPR,
3433 ARGS_SIZE_TREE (*offset_ptr),
3434 sp_offset_tree);
3435 #ifdef ARGS_GROW_DOWNWARD
3436 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3437 #else
3438 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3439 #endif
3440
3441 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3442 /* ARGS_SIZE_TREE includes constant term. */
3443 offset_ptr->constant = 0;
3444 if (boundary > PARM_BOUNDARY)
3445 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3446 save_var);
3447 }
3448 else
3449 {
3450 offset_ptr->constant = -sp_offset +
3451 #ifdef ARGS_GROW_DOWNWARD
3452 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3453 #else
3454 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3455 #endif
3456 if (boundary > PARM_BOUNDARY)
3457 alignment_pad->constant = offset_ptr->constant - save_constant;
3458 }
3459 }
3460 }
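
/* Arithmetic sketch (editorial illustration): with a constant offset
   of 20, BOUNDARY == 64 and STACK_POINTER_OFFSET == 0:

     boundary_in_bytes    = 8
     offset_ptr->constant = CEIL_ROUND (20, 8) = 24
     alignment_pad        = 24 - 20 = 4

   ALIGNMENT_PAD is only recorded when BOUNDARY exceeds
   PARM_BOUNDARY.  */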
3461
3462 static void
3463 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3464 {
3465 if (passed_mode != BLKmode)
3466 {
3467 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3468 offset_ptr->constant
3469 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3470 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3471 - GET_MODE_SIZE (passed_mode));
3472 }
3473 else
3474 {
3475 if (TREE_CODE (sizetree) != INTEGER_CST
3476 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3477 {
3478 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3479 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3480 /* Add it in. */
3481 ADD_PARM_SIZE (*offset_ptr, s2);
3482 SUB_PARM_SIZE (*offset_ptr, sizetree);
3483 }
3484 }
3485 }
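/* Illustrative example (assumed target parameters, not from the sources):
   with PARM_BOUNDARY == 64 and BITS_PER_UNIT == 8, a 4-byte SImode value
   occupies an 8-byte slot, so pad_below advances offset_ptr->constant by
   8 - 4 == 4 bytes, skipping the padding that sits below the value in
   its slot.  */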
3486 \f
3487
3488 /* True if register REGNO was alive at a place where `setjmp' was
3489 called and was set more than once or is an argument. Such regs may
3490 be clobbered by `longjmp'. */
3491
3492 static bool
3493 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3494 {
3495 /* There appear to be cases where some local vars never reach the
3496 backend but have bogus regnos. */
3497 if (regno >= max_reg_num ())
3498 return false;
3499
3500 return ((REG_N_SETS (regno) > 1
3501 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3502 && REGNO_REG_SET_P (setjmp_crosses, regno));
3503 }
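/* For reference, a user-level sketch of what this test detects
   (hypothetical code, not part of GCC):

     #include <setjmp.h>
     jmp_buf buf;
     int
     f (void)
     {
       int x = 1;           -- may be kept in a call-clobbered register
       if (setjmp (buf))
         return x;          -- unreliable: the store below may be lost
       x = 2;
       longjmp (buf, 1);
     }

   X is set more than once and its register crosses the setjmp call, so
   the -Wclobbered machinery below warns about it.  */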
3504
3505 /* Walk the tree of blocks describing the binding levels within a
3506 function and warn about variables that might be killed by setjmp or
3507 vfork. This is done after flow analysis and before register
3508 allocation, since register allocation replaces the pseudo-regs with
3509 hard regs. */
3510
3511 static void
3512 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3513 {
3514 tree decl, sub;
3515
3516 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3517 {
3518 if (TREE_CODE (decl) == VAR_DECL
3519 && DECL_RTL_SET_P (decl)
3520 && REG_P (DECL_RTL (decl))
3521 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3522 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3523 " %<longjmp%> or %<vfork%>", decl);
3524 }
3525
3526 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3527 setjmp_vars_warning (setjmp_crosses, sub);
3528 }
3529
3530 /* Do the appropriate part of setjmp_vars_warning
3531 but for arguments instead of local variables. */
3532
3533 static void
3534 setjmp_args_warning (bitmap setjmp_crosses)
3535 {
3536 tree decl;
3537 for (decl = DECL_ARGUMENTS (current_function_decl);
3538 decl; decl = TREE_CHAIN (decl))
3539 if (DECL_RTL (decl) != 0
3540 && REG_P (DECL_RTL (decl))
3541 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3542 warning (OPT_Wclobbered,
3543 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3544 decl);
3545 }
3546
3547 /* Generate warning messages for variables live across setjmp. */
3548
3549 void
3550 generate_setjmp_warnings (void)
3551 {
3552 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3553
3554 if (n_basic_blocks == NUM_FIXED_BLOCKS
3555 || bitmap_empty_p (setjmp_crosses))
3556 return;
3557
3558 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3559 setjmp_args_warning (setjmp_crosses);
3560 }
3561
3562 \f
3563 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3564 and create duplicate blocks. */
3565 /* ??? Need an option to either create block fragments or to create
3566 abstract origin duplicates of a source block. It really depends
3567 on what optimization has been performed. */
3568
3569 void
3570 reorder_blocks (void)
3571 {
3572 tree block = DECL_INITIAL (current_function_decl);
3573 VEC(tree,heap) *block_stack;
3574
3575 if (block == NULL_TREE)
3576 return;
3577
3578 block_stack = VEC_alloc (tree, heap, 10);
3579
3580 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3581 clear_block_marks (block);
3582
3583 /* Prune the old trees away, so that they don't get in the way. */
3584 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3585 BLOCK_CHAIN (block) = NULL_TREE;
3586
3587 /* Recreate the block tree from the note nesting. */
3588 reorder_blocks_1 (get_insns (), block, &block_stack);
3589 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3590
3591 VEC_free (tree, heap, block_stack);
3592 }
3593
3594 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3595
3596 void
3597 clear_block_marks (tree block)
3598 {
3599 while (block)
3600 {
3601 TREE_ASM_WRITTEN (block) = 0;
3602 clear_block_marks (BLOCK_SUBBLOCKS (block));
3603 block = BLOCK_CHAIN (block);
3604 }
3605 }
3606
3607 static void
3608 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3609 {
3610 rtx insn;
3611
3612 for (insn = insns; insn; insn = NEXT_INSN (insn))
3613 {
3614 if (NOTE_P (insn))
3615 {
3616 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3617 {
3618 tree block = NOTE_BLOCK (insn);
3619 tree origin;
3620
3621 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3622 ? BLOCK_FRAGMENT_ORIGIN (block)
3623 : block);
3624
3625 /* If we have seen this block before, that means it now
3626 spans multiple address regions. Create a new fragment. */
3627 if (TREE_ASM_WRITTEN (block))
3628 {
3629 tree new_block = copy_node (block);
3630
3631 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3632 BLOCK_FRAGMENT_CHAIN (new_block)
3633 = BLOCK_FRAGMENT_CHAIN (origin);
3634 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3635
3636 NOTE_BLOCK (insn) = new_block;
3637 block = new_block;
3638 }
3639
3640 BLOCK_SUBBLOCKS (block) = 0;
3641 TREE_ASM_WRITTEN (block) = 1;
3642 /* When there's only one block for the entire function,
3643 current_block == block and we mustn't do this; it
3644 would cause infinite recursion.
3645 if (block != current_block)
3646 {
3647 if (block != origin)
3648 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3649
3650 BLOCK_SUPERCONTEXT (block) = current_block;
3651 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3652 BLOCK_SUBBLOCKS (current_block) = block;
3653 current_block = origin;
3654 }
3655 VEC_safe_push (tree, heap, *p_block_stack, block);
3656 }
3657 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3658 {
3659 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3660 BLOCK_SUBBLOCKS (current_block)
3661 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3662 current_block = BLOCK_SUPERCONTEXT (current_block);
3663 }
3664 }
3665 }
3666 }
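/* Illustrative example (not from the sources): if optimization splits
   the insns of a scope block B into two non-contiguous address regions,
   the second NOTE_INSN_BLOCK_BEG naming B triggers creation of a
   fragment B' with BLOCK_FRAGMENT_ORIGIN (B') == B, linked into B's
   BLOCK_FRAGMENT_CHAIN, so that the debug output machinery can describe
   the scope as a set of disjoint address ranges.  */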
3667
3668 /* Reverse the order of elements in the chain T of blocks,
3669 and return the new head of the chain (old last element). */
3670
3671 tree
3672 blocks_nreverse (tree t)
3673 {
3674 tree prev = 0, decl, next;
3675 for (decl = t; decl; decl = next)
3676 {
3677 next = BLOCK_CHAIN (decl);
3678 BLOCK_CHAIN (decl) = prev;
3679 prev = decl;
3680 }
3681 return prev;
3682 }
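/* Usage sketch (B1..B3 are hypothetical BLOCK nodes): given the chain
   B1 -> B2 -> B3,

     head = blocks_nreverse (head);   -- head was B1, is now B3

   rewrites the BLOCK_CHAIN links in place, yielding B3 -> B2 -> B1.  */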
3683
3684 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3685 non-NULL, list them all into VECTOR, in a depth-first preorder
3686 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3687 blocks. */
3688
3689 static int
3690 all_blocks (tree block, tree *vector)
3691 {
3692 int n_blocks = 0;
3693
3694 while (block)
3695 {
3696 TREE_ASM_WRITTEN (block) = 0;
3697
3698 /* Record this block. */
3699 if (vector)
3700 vector[n_blocks] = block;
3701
3702 ++n_blocks;
3703
3704 /* Record the subblocks, and their subblocks... */
3705 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3706 vector ? vector + n_blocks : 0);
3707 block = BLOCK_CHAIN (block);
3708 }
3709
3710 return n_blocks;
3711 }
3712
3713 /* Return a vector containing all the blocks rooted at BLOCK. The
3714 number of elements in the vector is stored in N_BLOCKS_P. The
3715 vector is dynamically allocated; it is the caller's responsibility
3716 to call `free' on the pointer returned. */
3717
3718 static tree *
3719 get_block_vector (tree block, int *n_blocks_p)
3720 {
3721 tree *block_vector;
3722
3723 *n_blocks_p = all_blocks (block, NULL);
3724 block_vector = XNEWVEC (tree, *n_blocks_p);
3725 all_blocks (block, block_vector);
3726
3727 return block_vector;
3728 }
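/* Typical use (sketch; FNDECL stands for some FUNCTION_DECL):

     int n_blocks;
     tree *vec = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
     -- walk vec[0] .. vec[n_blocks - 1] in depth-first preorder --
     free (vec);

   The caller owns the vector and must free it, as number_blocks below
   does.  */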
3729
3730 static GTY(()) int next_block_index = 2;
3731
3732 /* Set BLOCK_NUMBER for all the blocks in FN. */
3733
3734 void
3735 number_blocks (tree fn)
3736 {
3737 int i;
3738 int n_blocks;
3739 tree *block_vector;
3740
3741 /* For SDB and XCOFF debugging output, we start numbering the blocks
3742 from 1 within each function, rather than keeping a running
3743 count. */
3744 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3745 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3746 next_block_index = 1;
3747 #endif
3748
3749 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3750
3751 /* The top-level BLOCK isn't numbered at all. */
3752 for (i = 1; i < n_blocks; ++i)
3753 /* We number the blocks from two. */
3754 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3755
3756 free (block_vector);
3757
3758 return;
3759 }
3760
3761 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3762
3763 tree
3764 debug_find_var_in_block_tree (tree var, tree block)
3765 {
3766 tree t;
3767
3768 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3769 if (t == var)
3770 return block;
3771
3772 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3773 {
3774 tree ret = debug_find_var_in_block_tree (var, t);
3775 if (ret)
3776 return ret;
3777 }
3778
3779 return NULL_TREE;
3780 }
3781 \f
3782 /* Keep track of whether we're in a dummy function context. If we are,
3783 we don't want to invoke the set_current_function hook, because we'll
3784 get into trouble if the hook calls target_reinit () recursively or
3785 when the initial initialization is not yet complete. */
3786
3787 static bool in_dummy_function;
3788
3789 /* Invoke the target hook when setting cfun. */
3790
3791 static void
3792 invoke_set_current_function_hook (tree fndecl)
3793 {
3794 if (!in_dummy_function)
3795 targetm.set_current_function (fndecl);
3796 }
3797
3798 /* cfun should never be set directly; use this function. */
3799
3800 void
3801 set_cfun (struct function *new_cfun)
3802 {
3803 if (cfun != new_cfun)
3804 {
3805 cfun = new_cfun;
3806 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
3807 }
3808 }
3809
3810 /* Keep track of the cfun stack. */
3811
3812 typedef struct function *function_p;
3813
3814 DEF_VEC_P(function_p);
3815 DEF_VEC_ALLOC_P(function_p,heap);
3816
3817 /* Initialized with NOGC, making this poisonous to the garbage collector. */
3818
3819 static VEC(function_p,heap) *cfun_stack;
3820
3821 /* We save the value of in_system_header here when pushing the first
3822 function on the cfun stack, and we restore it from here when
3823 popping the last function. */
3824
3825 static bool saved_in_system_header;
3826
3827 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
3828
3829 void
3830 push_cfun (struct function *new_cfun)
3831 {
3832 if (cfun == NULL)
3833 saved_in_system_header = in_system_header;
3834 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3835 if (new_cfun)
3836 in_system_header = DECL_IN_SYSTEM_HEADER (new_cfun->decl);
3837 set_cfun (new_cfun);
3838 }
3839
3840 /* Pop cfun from the stack. */
3841
3842 void
3843 pop_cfun (void)
3844 {
3845 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
3846 in_system_header = ((new_cfun == NULL) ? saved_in_system_header
3847 : DECL_IN_SYSTEM_HEADER (new_cfun->decl));
3848 set_cfun (new_cfun);
3849 }
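/* Usage sketch: code that must emit on behalf of another function
   brackets the work with a matched push/pop pair, e.g.

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));   -- fndecl hypothetical
     -- generate RTL for fndecl here --
     pop_cfun ();

   which keeps cfun, in_system_header and the target's per-function
   state consistent.  */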
3850
3851 /* Return the current value of funcdef_no and increment it. */
3852 int
3853 get_next_funcdef_no (void)
3854 {
3855 return funcdef_no++;
3856 }
3857
3858 /* Allocate a function structure for FNDECL and set its contents
3859 to the defaults. Set cfun to the newly-allocated object.
3860 Some of the helper functions invoked during initialization assume
3861 that cfun has already been set. Therefore, assign the new object
3862 directly into cfun and invoke the back end hook explicitly at the
3863 very end, rather than initializing a temporary and calling set_cfun
3864 on it.
3865
3866 ABSTRACT_P is true if this is a function that will never be seen by
3867 the middle-end. Such functions are front-end concepts (like C++
3868 function templates) that do not correspond directly to functions
3869 placed in object files. */
3870
3871 void
3872 allocate_struct_function (tree fndecl, bool abstract_p)
3873 {
3874 tree result;
3875 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3876
3877 cfun = ggc_alloc_cleared (sizeof (struct function));
3878
3879 cfun->stack_alignment_needed = STACK_BOUNDARY;
3880 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3881
3882 current_function_funcdef_no = get_next_funcdef_no ();
3883
3884 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3885
3886 init_eh_for_function ();
3887
3888 lang_hooks.function.init (cfun);
3889 if (init_machine_status)
3890 cfun->machine = (*init_machine_status) ();
3891
3892 if (fndecl != NULL)
3893 {
3894 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3895 cfun->decl = fndecl;
3896
3897 result = DECL_RESULT (fndecl);
3898 if (!abstract_p && aggregate_value_p (result, fndecl))
3899 {
3900 #ifdef PCC_STATIC_STRUCT_RETURN
3901 current_function_returns_pcc_struct = 1;
3902 #endif
3903 current_function_returns_struct = 1;
3904 }
3905
3906 current_function_stdarg
3907 = (fntype
3908 && TYPE_ARG_TYPES (fntype) != 0
3909 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3910 != void_type_node));
3911
3912 /* Assume all registers in stdarg functions need to be saved. */
3913 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3914 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3915 }
3916
3917 invoke_set_current_function_hook (fndecl);
3918 }
3919
3920 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
3921 instead of just setting it. */
3922
3923 void
3924 push_struct_function (tree fndecl)
3925 {
3926 if (cfun == NULL)
3927 saved_in_system_header = in_system_header;
3928 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3929 if (fndecl)
3930 in_system_header = DECL_IN_SYSTEM_HEADER (fndecl);
3931 allocate_struct_function (fndecl, false);
3932 }
3933
3934 /* Reset cfun and other non-struct-function variables to defaults as
3935 appropriate for emitting rtl at the start of a function. */
3936
3937 static void
3938 prepare_function_start (void)
3939 {
3940 gcc_assert (!rtl.emit.x_last_insn);
3941 init_emit ();
3942 init_varasm_status ();
3943 init_expr ();
3944
3945 cse_not_expected = ! optimize;
3946
3947 /* Caller save not needed yet. */
3948 caller_save_needed = 0;
3949
3950 /* We haven't done register allocation yet. */
3951 reg_renumber = 0;
3952
3953 /* Indicate that we have not instantiated virtual registers yet. */
3954 virtuals_instantiated = 0;
3955
3956 /* Indicate that we want CONCATs now. */
3957 generating_concat_p = 1;
3958
3959 /* Indicate we have no need of a frame pointer yet. */
3960 frame_pointer_needed = 0;
3961 }
3962
3963 /* Initialize the rtl expansion mechanism so that we can do simple things
3964 like generate sequences. This is used to provide a context during global
3965 initialization of some passes. You must call expand_dummy_function_end
3966 to exit this context. */
3967
3968 void
3969 init_dummy_function_start (void)
3970 {
3971 gcc_assert (!in_dummy_function);
3972 in_dummy_function = true;
3973 push_struct_function (NULL_TREE);
3974 prepare_function_start ();
3975 }
3976
3977 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3978 and initialize static variables for generating RTL for the statements
3979 of the function. */
3980
3981 void
3982 init_function_start (tree subr)
3983 {
3984 if (subr && DECL_STRUCT_FUNCTION (subr))
3985 set_cfun (DECL_STRUCT_FUNCTION (subr));
3986 else
3987 allocate_struct_function (subr, false);
3988 prepare_function_start ();
3989
3990 /* Warn if this value is an aggregate type,
3991 regardless of which calling convention we are using for it. */
3992 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3993 warning (OPT_Waggregate_return, "function returns an aggregate");
3994 }
3995
3996 /* Make sure all values used by the optimization passes have sane
3997 defaults. */
3998 unsigned int
3999 init_function_for_compilation (void)
4000 {
4001 reg_renumber = 0;
4002
4003 /* No prologue/epilogue insns yet. Make sure that these vectors are
4004 empty. */
4005 gcc_assert (VEC_length (int, prologue) == 0);
4006 gcc_assert (VEC_length (int, epilogue) == 0);
4007 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
4008 return 0;
4009 }
4010
4011 struct rtl_opt_pass pass_init_function =
4012 {
4013 {
4014 RTL_PASS,
4015 NULL, /* name */
4016 NULL, /* gate */
4017 init_function_for_compilation, /* execute */
4018 NULL, /* sub */
4019 NULL, /* next */
4020 0, /* static_pass_number */
4021 0, /* tv_id */
4022 0, /* properties_required */
4023 0, /* properties_provided */
4024 0, /* properties_destroyed */
4025 0, /* todo_flags_start */
4026 0 /* todo_flags_finish */
4027 }
4028 };
4029
4030
4031 void
4032 expand_main_function (void)
4033 {
4034 #if (defined(INVOKE__main) \
4035 || (!defined(HAS_INIT_SECTION) \
4036 && !defined(INIT_SECTION_ASM_OP) \
4037 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4038 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4039 #endif
4040 }
4041 \f
4042 /* Expand code to initialize the stack_protect_guard. This is invoked at
4043 the beginning of a function to be protected. */
4044
4045 #ifndef HAVE_stack_protect_set
4046 # define HAVE_stack_protect_set 0
4047 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4048 #endif
4049
4050 void
4051 stack_protect_prologue (void)
4052 {
4053 tree guard_decl = targetm.stack_protect_guard ();
4054 rtx x, y;
4055
4056 /* Avoid expand_expr here, because we don't want guard_decl pulled
4057 into registers unless absolutely necessary. And we know that
4058 cfun->stack_protect_guard is a local stack slot, so this skips
4059 all the fluff. */
4060 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4061 y = validize_mem (DECL_RTL (guard_decl));
4062
4063 /* Allow the target to copy from Y to X without leaking Y into a
4064 register. */
4065 if (HAVE_stack_protect_set)
4066 {
4067 rtx insn = gen_stack_protect_set (x, y);
4068 if (insn)
4069 {
4070 emit_insn (insn);
4071 return;
4072 }
4073 }
4074
4075 /* Otherwise do a straight move. */
4076 emit_move_insn (x, y);
4077 }
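/* In C-like pseudo-code the emitted prologue check amounts to
   (illustrative only):

     local_guard = global_guard;

   where global_guard is whatever targetm.stack_protect_guard returned
   (typically the __stack_chk_guard variable) and local_guard is the
   stack slot behind cfun->stack_protect_guard.  */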
4078
4079 /* Expand code to verify the stack_protect_guard. This is invoked at
4080 the end of a function to be protected. */
4081
4082 #ifndef HAVE_stack_protect_test
4083 # define HAVE_stack_protect_test 0
4084 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4085 #endif
4086
4087 void
4088 stack_protect_epilogue (void)
4089 {
4090 tree guard_decl = targetm.stack_protect_guard ();
4091 rtx label = gen_label_rtx ();
4092 rtx x, y, tmp;
4093
4094 /* Avoid expand_expr here, because we don't want guard_decl pulled
4095 into registers unless absolutely necessary. And we know that
4096 cfun->stack_protect_guard is a local stack slot, so this skips
4097 all the fluff. */
4098 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4099 y = validize_mem (DECL_RTL (guard_decl));
4100
4101 /* Allow the target to compare Y with X without leaking either into
4102 a register. */
4103 switch (HAVE_stack_protect_test != 0)
4104 {
4105 case 1:
4106 tmp = gen_stack_protect_test (x, y, label);
4107 if (tmp)
4108 {
4109 emit_insn (tmp);
4110 break;
4111 }
4112 /* FALLTHRU */
4113
4114 default:
4115 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4116 break;
4117 }
4118
4119 /* The noreturn predictor has been moved to the tree level. The rtl-level
4120 predictors estimate this branch at about 20%, which isn't enough to get
4121 things moved out of line. Since this is the only extant case of adding
4122 a noreturn function at the rtl level, it doesn't seem worth doing anything
4123 except adding the prediction by hand. */
4124 tmp = get_last_insn ();
4125 if (JUMP_P (tmp))
4126 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4127
4128 expand_expr_stmt (targetm.stack_protect_fail ());
4129 emit_label (label);
4130 }
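/* In C-like pseudo-code the emitted epilogue check amounts to
   (illustrative only):

     if (local_guard != global_guard)
       __stack_chk_fail ();   -- or whatever stack_protect_fail expands to

   with the comparison done without copying either guard into a register
   whenever the target provides a stack_protect_test pattern.  */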
4131 \f
4132 /* Start the RTL for a new function, and set variables used for
4133 emitting RTL.
4134 SUBR is the FUNCTION_DECL node. */
4137
4138 void
4139 expand_function_start (tree subr)
4140 {
4141 /* Make sure volatile mem refs aren't considered
4142 valid operands of arithmetic insns. */
4143 init_recog_no_volatile ();
4144
4145 current_function_profile
4146 = (profile_flag
4147 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4148
4149 current_function_limit_stack
4150 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4151
4152 /* Make the label for return statements to jump to. Do not special
4153 case machines with special return instructions -- they will be
4154 handled later during jump, ifcvt, or epilogue creation. */
4155 return_label = gen_label_rtx ();
4156
4157 /* Initialize rtx used to return the value. */
4158 /* Do this before assign_parms so that we copy the struct value address
4159 before any library calls that assign parms might generate. */
4160
4161 /* Decide whether to return the value in memory or in a register. */
4162 if (aggregate_value_p (DECL_RESULT (subr), subr))
4163 {
4164 /* Returning something that won't go in a register. */
4165 rtx value_address = 0;
4166
4167 #ifdef PCC_STATIC_STRUCT_RETURN
4168 if (current_function_returns_pcc_struct)
4169 {
4170 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4171 value_address = assemble_static_space (size);
4172 }
4173 else
4174 #endif
4175 {
4176 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4177 /* Expect to be passed the address of a place to store the value.
4178 If it is passed as an argument, assign_parms will take care of
4179 it. */
4180 if (sv)
4181 {
4182 value_address = gen_reg_rtx (Pmode);
4183 emit_move_insn (value_address, sv);
4184 }
4185 }
4186 if (value_address)
4187 {
4188 rtx x = value_address;
4189 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4190 {
4191 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4192 set_mem_attributes (x, DECL_RESULT (subr), 1);
4193 }
4194 SET_DECL_RTL (DECL_RESULT (subr), x);
4195 }
4196 }
4197 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4198 /* If return mode is void, this decl rtl should not be used. */
4199 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4200 else
4201 {
4202 /* Compute the return values into a pseudo reg, which we will copy
4203 into the true return register after the cleanups are done. */
4204 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4205 if (TYPE_MODE (return_type) != BLKmode
4206 && targetm.calls.return_in_msb (return_type))
4207 /* expand_function_end will insert the appropriate padding in
4208 this case. Use the return value's natural (unpadded) mode
4209 within the function proper. */
4210 SET_DECL_RTL (DECL_RESULT (subr),
4211 gen_reg_rtx (TYPE_MODE (return_type)));
4212 else
4213 {
4214 /* In order to figure out what mode to use for the pseudo, we
4215 figure out what the mode of the eventual return register will
4216 actually be, and use that. */
4217 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4218
4219 /* Structures that are returned in registers are not
4220 aggregate_value_p, so we may see a PARALLEL or a REG. */
4221 if (REG_P (hard_reg))
4222 SET_DECL_RTL (DECL_RESULT (subr),
4223 gen_reg_rtx (GET_MODE (hard_reg)));
4224 else
4225 {
4226 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4227 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4228 }
4229 }
4230
4231 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4232 result to the real return register(s). */
4233 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4234 }
4235
4236 /* Initialize rtx for parameters and local variables.
4237 In some cases this requires emitting insns. */
4238 assign_parms (subr);
4239
4240 /* If function gets a static chain arg, store it. */
4241 if (cfun->static_chain_decl)
4242 {
4243 tree parm = cfun->static_chain_decl;
4244 rtx local = gen_reg_rtx (Pmode);
4245
4246 set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
4247 SET_DECL_RTL (parm, local);
4248 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4249
4250 emit_move_insn (local, static_chain_incoming_rtx);
4251 }
4252
4253 /* If the function receives a non-local goto, then store the
4254 bits we need to restore the frame pointer. */
4255 if (cfun->nonlocal_goto_save_area)
4256 {
4257 tree t_save;
4258 rtx r_save;
4259
4260 /* ??? We need to do this save early. Unfortunately, this point is
4261 before the frame variable gets declared. Help out... */
4262 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4263
4264 t_save = build4 (ARRAY_REF, ptr_type_node,
4265 cfun->nonlocal_goto_save_area,
4266 integer_zero_node, NULL_TREE, NULL_TREE);
4267 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4268 r_save = convert_memory_address (Pmode, r_save);
4269
4270 emit_move_insn (r_save, virtual_stack_vars_rtx);
4271 update_nonlocal_goto_save_area ();
4272 }
4273
4274 /* The following was moved from init_function_start.
4275 The move is supposed to make sdb output more accurate. */
4276 /* Indicate the beginning of the function body,
4277 as opposed to parm setup. */
4278 emit_note (NOTE_INSN_FUNCTION_BEG);
4279
4280 gcc_assert (NOTE_P (get_last_insn ()));
4281
4282 parm_birth_insn = get_last_insn ();
4283
4284 if (current_function_profile)
4285 {
4286 #ifdef PROFILE_HOOK
4287 PROFILE_HOOK (current_function_funcdef_no);
4288 #endif
4289 }
4290
4291 /* The stack checking probe should go after the display
4292 initializations. */
4293 if (flag_stack_check)
4294 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4295
4296 /* Make sure there is a line number after the function entry setup code. */
4297 force_next_line_note ();
4298 }
4299 \f
4300 /* Undo the effects of init_dummy_function_start. */
4301 void
4302 expand_dummy_function_end (void)
4303 {
4304 gcc_assert (in_dummy_function);
4305
4306 /* End any sequences that failed to be closed due to syntax errors. */
4307 while (in_sequence_p ())
4308 end_sequence ();
4309
4310 /* Outside function body, can't compute type's actual size
4311 until next function's body starts. */
4312
4313 free_after_parsing (cfun);
4314 free_after_compilation (cfun);
4315 pop_cfun ();
4316 in_dummy_function = false;
4317 }
4318
4319 /* Call DOIT for each hard register used as a return value from
4320 the current function. */
4321
4322 void
4323 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4324 {
4325 rtx outgoing = current_function_return_rtx;
4326
4327 if (! outgoing)
4328 return;
4329
4330 if (REG_P (outgoing))
4331 (*doit) (outgoing, arg);
4332 else if (GET_CODE (outgoing) == PARALLEL)
4333 {
4334 int i;
4335
4336 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4337 {
4338 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4339
4340 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4341 (*doit) (x, arg);
4342 }
4343 }
4344 }
4345
4346 static void
4347 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4348 {
4349 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4350 }
4351
4352 void
4353 clobber_return_register (void)
4354 {
4355 diddle_return_value (do_clobber_return_reg, NULL);
4356
4357 /* In case we do use pseudo to return value, clobber it too. */
4358 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4359 {
4360 tree decl_result = DECL_RESULT (current_function_decl);
4361 rtx decl_rtl = DECL_RTL (decl_result);
4362 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4363 {
4364 do_clobber_return_reg (decl_rtl, NULL);
4365 }
4366 }
4367 }
4368
4369 static void
4370 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4371 {
4372 emit_insn (gen_rtx_USE (VOIDmode, reg));
4373 }
4374
4375 static void
4376 use_return_register (void)
4377 {
4378 diddle_return_value (do_use_return_reg, NULL);
4379 }
4380
4381 /* Possibly warn about unused parameters. */
4382 void
4383 do_warn_unused_parameter (tree fn)
4384 {
4385 tree decl;
4386
4387 for (decl = DECL_ARGUMENTS (fn);
4388 decl; decl = TREE_CHAIN (decl))
4389 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4390 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4391 && !TREE_NO_WARNING (decl))
4392 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4393 }
4394
4395 static GTY(()) rtx initial_trampoline;
4396
4397 /* Generate RTL for the end of the current function. */
4398
4399 void
4400 expand_function_end (void)
4401 {
4402 rtx clobber_after;
4403
4404 /* If arg_pointer_save_area was referenced only from a nested
4405 function, we will not have initialized it yet. Do that now. */
4406 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4407 get_arg_pointer_save_area ();
4408
4409 /* If we are doing stack checking and this function makes calls,
4410 do a stack probe at the start of the function to ensure we have enough
4411 space for another stack frame. */
4412 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4413 {
4414 rtx insn, seq;
4415
4416 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4417 if (CALL_P (insn))
4418 {
4419 start_sequence ();
4420 probe_stack_range (STACK_CHECK_PROTECT,
4421 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4422 seq = get_insns ();
4423 end_sequence ();
4424 emit_insn_before (seq, stack_check_probe_note);
4425 break;
4426 }
4427 }
4428
4429 /* End any sequences that failed to be closed due to syntax errors. */
4430 while (in_sequence_p ())
4431 end_sequence ();
4432
4433 clear_pending_stack_adjust ();
4434 do_pending_stack_adjust ();
4435
4436 /* Output a linenumber for the end of the function.
4437 SDB depends on this. */
4438 force_next_line_note ();
4439 set_curr_insn_source_location (input_location);
4440
4441 /* Before the return label (if any), clobber the return
4442 registers so that they are not propagated live to the rest of
4443 the function. This can only happen with functions that drop
4444 through; if there had been a return statement, there would
4445 have either been a return rtx, or a jump to the return label.
4446
4447 We delay the actual code generation until after
4448 current_function_return_rtx is computed. */
4449 clobber_after = get_last_insn ();
4450
4451 /* Output the label for the actual return from the function. */
4452 emit_label (return_label);
4453
4454 if (USING_SJLJ_EXCEPTIONS)
4455 {
4456 /* Let except.c know where it should emit the call to unregister
4457 the function context for sjlj exceptions. */
4458 if (flag_exceptions)
4459 sjlj_emit_function_exit_after (get_last_insn ());
4460 }
4461 else
4462 {
4463 /* We want to ensure that instructions that may trap are not
4464 moved into the epilogue by scheduling, because we don't
4465 always emit unwind information for the epilogue. */
4466 if (flag_non_call_exceptions)
4467 emit_insn (gen_blockage ());
4468 }
4469
4470 /* If this is an implementation of throw, do what's necessary to
4471 communicate between __builtin_eh_return and the epilogue. */
4472 expand_eh_return ();
4473
4474 /* If scalar return value was computed in a pseudo-reg, or was a named
4475 return value that got dumped to the stack, copy that to the hard
4476 return register. */
4477 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4478 {
4479 tree decl_result = DECL_RESULT (current_function_decl);
4480 rtx decl_rtl = DECL_RTL (decl_result);
4481
4482 if (REG_P (decl_rtl)
4483 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4484 : DECL_REGISTER (decl_result))
4485 {
4486 rtx real_decl_rtl = current_function_return_rtx;
4487
4488 /* This should be set in assign_parms. */
4489 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4490
4491 /* If this is a BLKmode structure being returned in registers,
4492 then use the mode computed in expand_return. Note that if
4493 decl_rtl is memory, then its mode may have been changed,
4494 but that current_function_return_rtx has not. */
4495 if (GET_MODE (real_decl_rtl) == BLKmode)
4496 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4497
4498 /* If a non-BLKmode return value should be padded at the least
4499 significant end of the register, shift it left by the appropriate
4500 amount. BLKmode results are handled using the group load/store
4501 machinery. */
4502 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4503 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4504 {
4505 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4506 REGNO (real_decl_rtl)),
4507 decl_rtl);
4508 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4509 }
4510 /* If the named return value was dumped to memory, then
4511 we may need to re-do the PROMOTE_MODE signed/unsigned
4512 extension. */
4513 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4514 {
4515 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4516
4517 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4518 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4519 &unsignedp, 1);
4520
4521 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4522 }
4523 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4524 {
4525 /* If expand_function_start has created a PARALLEL for decl_rtl,
4526 move the result to the real return registers. Otherwise, do
4527 a group load from decl_rtl for a named return. */
4528 if (GET_CODE (decl_rtl) == PARALLEL)
4529 emit_group_move (real_decl_rtl, decl_rtl);
4530 else
4531 emit_group_load (real_decl_rtl, decl_rtl,
4532 TREE_TYPE (decl_result),
4533 int_size_in_bytes (TREE_TYPE (decl_result)));
4534 }
4535 /* In the case of complex integer modes smaller than a word, we'll
4536 need to generate some non-trivial bitfield insertions. Do that
4537 on a pseudo and not the hard register. */
4538 else if (GET_CODE (decl_rtl) == CONCAT
4539 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4540 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4541 {
4542 int old_generating_concat_p;
4543 rtx tmp;
4544
4545 old_generating_concat_p = generating_concat_p;
4546 generating_concat_p = 0;
4547 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4548 generating_concat_p = old_generating_concat_p;
4549
4550 emit_move_insn (tmp, decl_rtl);
4551 emit_move_insn (real_decl_rtl, tmp);
4552 }
4553 else
4554 emit_move_insn (real_decl_rtl, decl_rtl);
4555 }
4556 }
4557
4558 /* If returning a structure, arrange to return the address of the value
4559 in a place where debuggers expect to find it.
4560
4561 If returning a structure PCC style,
4562 the caller also depends on this value.
4563 And current_function_returns_pcc_struct is not necessarily set. */
4564 if (current_function_returns_struct
4565 || current_function_returns_pcc_struct)
4566 {
4567 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4568 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4569 rtx outgoing;
4570
4571 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4572 type = TREE_TYPE (type);
4573 else
4574 value_address = XEXP (value_address, 0);
4575
4576 outgoing = targetm.calls.function_value (build_pointer_type (type),
4577 current_function_decl, true);
4578
4579 /* Mark this as a function return value so integrate will delete the
4580 assignment and USE below when inlining this function. */
4581 REG_FUNCTION_VALUE_P (outgoing) = 1;
4582
4583 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4584 value_address = convert_memory_address (GET_MODE (outgoing),
4585 value_address);
4586
4587 emit_move_insn (outgoing, value_address);
4588
4589 /* Show the return register used to hold the result (in this case the
4590 address of the result). */
4591 current_function_return_rtx = outgoing;
4592 }
4593
4594 /* Emit the actual code to clobber return register. */
4595 {
4596 rtx seq;
4597
4598 start_sequence ();
4599 clobber_return_register ();
4600 expand_naked_return ();
4601 seq = get_insns ();
4602 end_sequence ();
4603
4604 emit_insn_after (seq, clobber_after);
4605 }
4606
4607 /* Output the label for the naked return from the function. */
4608 emit_label (naked_return_label);
4609
4610 /* @@@ This is a kludge. We want to ensure that instructions that
4611 may trap are not moved into the epilogue by scheduling, because
4612 we don't always emit unwind information for the epilogue. */
4613 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4614 emit_insn (gen_blockage ());
4615
4616 /* If stack protection is enabled for this function, check the guard. */
4617 if (cfun->stack_protect_guard)
4618 stack_protect_epilogue ();
4619
4620 /* If we had calls to alloca, and this machine needs
4621 an accurate stack pointer to exit the function,
4622 insert some code to save and restore the stack pointer. */
4623 if (! EXIT_IGNORE_STACK
4624 && current_function_calls_alloca)
4625 {
4626 rtx tem = 0;
4627
4628 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4629 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4630 }
4631
4632 /* ??? This should no longer be necessary since stupid is no longer with
4633 us, but there are some parts of the compiler (e.g. reload_combine and
4634 sh mach_dep_reorg) that still try to compute their own lifetime info
4635 instead of using the general framework. */
4636 use_return_register ();
4637 }
4638
4639 rtx
4640 get_arg_pointer_save_area (void)
4641 {
4642 rtx ret = arg_pointer_save_area;
4643
4644 if (! ret)
4645 {
4646 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4647 arg_pointer_save_area = ret;
4648 }
4649
4650 if (! cfun->arg_pointer_save_area_init)
4651 {
4652 rtx seq;
4653
4654 /* Save the arg pointer at the beginning of the function. The
4655 generated stack slot may not be a valid memory address, so we
4656 have to check it and fix it if necessary. */
4657 start_sequence ();
4658 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4659 seq = get_insns ();
4660 end_sequence ();
4661
4662 push_topmost_sequence ();
4663 emit_insn_after (seq, entry_of_function ());
4664 pop_topmost_sequence ();
4665 }
4666
4667 return ret;
4668 }
4669 \f
4670 /* Extend a vector that records the INSN_UIDs of INSNS
4671 (a list of one or more insns). */
4672
4673 static void
4674 record_insns (rtx insns, VEC(int,heap) **vecp)
4675 {
4676 rtx tmp;
4677
4678 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4679 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4680 }
4681
4682 /* Set the locator of the insn chain starting at INSN to LOC. */
4683 static void
4684 set_insn_locators (rtx insn, int loc)
4685 {
4686 while (insn != NULL_RTX)
4687 {
4688 if (INSN_P (insn))
4689 INSN_LOCATOR (insn) = loc;
4690 insn = NEXT_INSN (insn);
4691 }
4692 }
4693
4694 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4695 be running after reorg, SEQUENCE rtl is possible. */
4696
4697 static int
4698 contains (const_rtx insn, VEC(int,heap) **vec)
4699 {
4700 int i, j;
4701
4702 if (NONJUMP_INSN_P (insn)
4703 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4704 {
4705 int count = 0;
4706 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4707 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4708 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4709 == VEC_index (int, *vec, j))
4710 count++;
4711 return count;
4712 }
4713 else
4714 {
4715 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4716 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4717 return 1;
4718 }
4719 return 0;
4720 }
4721
4722 int
4723 prologue_epilogue_contains (const_rtx insn)
4724 {
4725 if (contains (insn, &prologue))
4726 return 1;
4727 if (contains (insn, &epilogue))
4728 return 1;
4729 return 0;
4730 }
4731
4732 int
4733 sibcall_epilogue_contains (const_rtx insn)
4734 {
4735 if (sibcall_epilogue)
4736 return contains (insn, &sibcall_epilogue);
4737 return 0;
4738 }
4739
4740 #ifdef HAVE_return
4741 /* Insert gen_return at the end of block BB. This also means updating
4742 block_for_insn appropriately. */
4743
4744 static void
4745 emit_return_into_block (basic_block bb)
4746 {
4747 emit_jump_insn_after (gen_return (), BB_END (bb));
4748 }
4749 #endif /* HAVE_return */
4750
4751 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4752
4753 /* These functions convert the epilogue into a variant that does not
4754 modify the stack pointer. This is used in cases where a function
4755 returns an object whose size is not known until it is computed.
4756 The called function leaves the object on the stack, leaves the
4757 stack depressed, and returns a pointer to the object.
4758
4759 What we need to do is track all modifications and references to the
4760 stack pointer, deleting the modifications and changing the
4761 references to point to the location the stack pointer would have
4762 pointed to had the modifications taken place.
4763
4764 These functions need to be portable so we need to make as few
4765 assumptions about the epilogue as we can. However, the epilogue
4766 basically contains three things: instructions to reset the stack
4767 pointer, instructions to reload registers, possibly including the
4768 frame pointer, and an instruction to return to the caller.
4769
4770 We must be sure of what a relevant epilogue insn is doing. We also
4771 make no attempt to validate the insns we make since if they are
4772 invalid, we probably can't do anything valid. The intent is that
4773 these routines get "smarter" as more and more machines start to use
4774 them and they try operating on different epilogues.
4775
4776 We use the following structure to track what the part of the
4777 epilogue that we've already processed has done. We keep two copies
4778 of the SP equivalence, one for use during the insn we are
4779 processing and one for use in the next insn. The difference is
4780 because one part of a PARALLEL may adjust SP and the other may use
4781 it. */
4782
4783 struct epi_info
4784 {
4785 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4786 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4787 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4788 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4789 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4790 should be set to once we no longer need
4791 its value. */
4792 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4793 for registers. */
4794 };
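/* Illustrative example (made-up RTL, not from any real target): given an
   epilogue of the form

     (set (reg sp) (plus (reg fp) (const_int 8)))
     (set (reg fp) (mem (reg fp)))
     (return)

   the SP assignment is not emitted; instead sp_equiv_reg becomes fp and
   sp_offset becomes 8, later references to SP are rewritten as
   (plus (reg fp) (const_int 8)), and the RETURN is turned into an
   indirect jump through the saved return address.  */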
4795
4796 static void handle_epilogue_set (rtx, struct epi_info *);
4797 static void update_epilogue_consts (rtx, const_rtx, void *);
4798 static void emit_equiv_load (struct epi_info *);
4799
4800 /* Modify INSNS, a list of one or more epilogue insns, so that it makes
4801 no modifications to the stack pointer. Return the new list of insns. */
4802
4803 static rtx
4804 keep_stack_depressed (rtx insns)
4805 {
4806 int j;
4807 struct epi_info info;
4808 rtx insn, next;
4809
4810 /* If the epilogue is just a single instruction, it must be OK as is. */
4811 if (NEXT_INSN (insns) == NULL_RTX)
4812 return insns;
4813
4814 /* Otherwise, start a sequence, initialize the information we have, and
4815 process all the insns we were given. */
4816 start_sequence ();
4817
4818 info.sp_equiv_reg = stack_pointer_rtx;
4819 info.sp_offset = 0;
4820 info.equiv_reg_src = 0;
4821
4822 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4823 info.const_equiv[j] = 0;
4824
4825 insn = insns;
4826 next = NULL_RTX;
4827 while (insn != NULL_RTX)
4828 {
4829 next = NEXT_INSN (insn);
4830
4831 if (!INSN_P (insn))
4832 {
4833 add_insn (insn);
4834 insn = next;
4835 continue;
4836 }
4837
4838 /* If this insn references the register that SP is equivalent to and
4839 we have a pending load to that register, we must force out the load
4840 first and then indicate we no longer know what SP's equivalent is. */
4841 if (info.equiv_reg_src != 0
4842 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4843 {
4844 emit_equiv_load (&info);
4845 info.sp_equiv_reg = 0;
4846 }
4847
4848 info.new_sp_equiv_reg = info.sp_equiv_reg;
4849 info.new_sp_offset = info.sp_offset;
4850
4851 /* If this is a (RETURN) and the return address is on the stack,
4852 update the address and change to an indirect jump. */
4853 if (GET_CODE (PATTERN (insn)) == RETURN
4854 || (GET_CODE (PATTERN (insn)) == PARALLEL
4855 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4856 {
4857 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4858 rtx base = 0;
4859 HOST_WIDE_INT offset = 0;
4860 rtx jump_insn, jump_set;
4861
4862 /* If the return address is in a register, we can emit the insn
4863 unchanged. Otherwise, it must be a MEM and we see what the
4864 base register and offset are. In any case, we have to emit any
4865 pending load to the equivalent reg of SP, if any. */
4866 if (REG_P (retaddr))
4867 {
4868 emit_equiv_load (&info);
4869 add_insn (insn);
4870 insn = next;
4871 continue;
4872 }
4873 else
4874 {
4875 rtx ret_ptr;
4876 gcc_assert (MEM_P (retaddr));
4877
4878 ret_ptr = XEXP (retaddr, 0);
4879
4880 if (REG_P (ret_ptr))
4881 {
4882 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4883 offset = 0;
4884 }
4885 else
4886 {
4887 gcc_assert (GET_CODE (ret_ptr) == PLUS
4888 && REG_P (XEXP (ret_ptr, 0))
4889 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4890 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4891 offset = INTVAL (XEXP (ret_ptr, 1));
4892 }
4893 }
4894
4895 /* If the base of the location containing the return pointer
4896 is SP, we must update it with the replacement address. Otherwise,
4897 just build the necessary MEM. */
4898 retaddr = plus_constant (base, offset);
4899 if (base == stack_pointer_rtx)
4900 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4901 plus_constant (info.sp_equiv_reg,
4902 info.sp_offset));
4903
4904 retaddr = gen_rtx_MEM (Pmode, retaddr);
4905 MEM_NOTRAP_P (retaddr) = 1;
4906
4907 /* If there is a pending load to the equivalent register for SP
4908 and we reference that register, we must load our address into
4909 a scratch register and then do that load. */
4910 if (info.equiv_reg_src
4911 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4912 {
4913 unsigned int regno;
4914 rtx reg;
4915
4916 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4917 if (HARD_REGNO_MODE_OK (regno, Pmode)
4918 && !fixed_regs[regno]
4919 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4920 && !REGNO_REG_SET_P
4921 (DF_LR_IN (EXIT_BLOCK_PTR), regno)
4922 && !refers_to_regno_p (regno,
4923 end_hard_regno (Pmode, regno),
4924 info.equiv_reg_src, NULL)
4925 && info.const_equiv[regno] == 0)
4926 break;
4927
4928 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4929
4930 reg = gen_rtx_REG (Pmode, regno);
4931 emit_move_insn (reg, retaddr);
4932 retaddr = reg;
4933 }
4934
4935 emit_equiv_load (&info);
4936 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4937
4938 /* Show the SET in the above insn is a RETURN. */
4939 jump_set = single_set (jump_insn);
4940 gcc_assert (jump_set);
4941 SET_IS_RETURN_P (jump_set) = 1;
4942 }
4943
4944 /* If SP is not mentioned in the pattern and its equivalent register, if
4945 any, is not modified, just emit it. Otherwise, if neither is set,
4946 replace the reference to SP and emit the insn. If none of those are
4947 true, handle each SET individually. */
4948 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4949 && (info.sp_equiv_reg == stack_pointer_rtx
4950 || !reg_set_p (info.sp_equiv_reg, insn)))
4951 add_insn (insn);
4952 else if (! reg_set_p (stack_pointer_rtx, insn)
4953 && (info.sp_equiv_reg == stack_pointer_rtx
4954 || !reg_set_p (info.sp_equiv_reg, insn)))
4955 {
4956 int changed;
4957
4958 changed = validate_replace_rtx (stack_pointer_rtx,
4959 plus_constant (info.sp_equiv_reg,
4960 info.sp_offset),
4961 insn);
4962 gcc_assert (changed);
4963
4964 add_insn (insn);
4965 }
4966 else if (GET_CODE (PATTERN (insn)) == SET)
4967 handle_epilogue_set (PATTERN (insn), &info);
4968 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4969 {
4970 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4971 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4972 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4973 }
4974 else
4975 add_insn (insn);
4976
4977 info.sp_equiv_reg = info.new_sp_equiv_reg;
4978 info.sp_offset = info.new_sp_offset;
4979
4980 /* Now update any constants this insn sets. */
4981 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4982 insn = next;
4983 }
4984
4985 insns = get_insns ();
4986 end_sequence ();
4987 return insns;
4988 }
4989
4990 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4991 structure that contains information about what we've seen so far. We
4992 process this SET by either updating that data or by emitting one or
4993 more insns. */
4994
4995 static void
4996 handle_epilogue_set (rtx set, struct epi_info *p)
4997 {
4998 /* First handle the case where we are setting SP. Record what it is being
4999 set from, which we must be able to determine. */
5000 if (reg_set_p (stack_pointer_rtx, set))
5001 {
5002 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
5003
5004 if (GET_CODE (SET_SRC (set)) == PLUS)
5005 {
5006 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
5007 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
5008 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
5009 else
5010 {
5011 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
5012 && (REGNO (XEXP (SET_SRC (set), 1))
5013 < FIRST_PSEUDO_REGISTER)
5014 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
5015 p->new_sp_offset
5016 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
5017 }
5018 }
5019 else
5020 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
5021
5022 /* If we are adjusting SP, we adjust from the old data. */
5023 if (p->new_sp_equiv_reg == stack_pointer_rtx)
5024 {
5025 p->new_sp_equiv_reg = p->sp_equiv_reg;
5026 p->new_sp_offset += p->sp_offset;
5027 }
5028
5029 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
5030
5031 return;
5032 }
5033
5034 /* Next handle the case where we are setting SP's equivalent
5035 register. We must not already have a value to set it to. We
5036 could update, but there seems little point in handling that case.
5037 Note that we have to allow for the case where we are setting the
5038 register set in the previous part of a PARALLEL inside a single
5039 insn. But use the old offset for any updates within this insn.
5040 We must allow for the case where the register is being set in a
5041 different (usually wider) mode than Pmode. */
5042 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
5043 {
5044 gcc_assert (!p->equiv_reg_src
5045 && REG_P (p->new_sp_equiv_reg)
5046 && REG_P (SET_DEST (set))
5047 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
5048 <= BITS_PER_WORD)
5049 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
5050 p->equiv_reg_src
5051 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5052 plus_constant (p->sp_equiv_reg,
5053 p->sp_offset));
5054 }
5055
5056 /* Otherwise, replace any references to SP in the insn to its new value
5057 and emit the insn. */
5058 else
5059 {
5060 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5061 plus_constant (p->sp_equiv_reg,
5062 p->sp_offset));
5063 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5064 plus_constant (p->sp_equiv_reg,
5065 p->sp_offset));
5066 emit_insn (set);
5067 }
5068 }
5069
5070 /* Update the tracking information for registers set to constants. */
5071
5072 static void
5073 update_epilogue_consts (rtx dest, const_rtx x, void *data)
5074 {
5075 struct epi_info *p = (struct epi_info *) data;
5076 rtx new;
5077
5078 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5079 return;
5080
5081 /* If we are either clobbering a register or doing a partial set,
5082 show we don't know the value. */
5083 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5084 p->const_equiv[REGNO (dest)] = 0;
5085
5086 /* If we are setting it to a constant, record that constant. */
5087 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5088 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5089
5090 /* If this is a binary operation between a register we have been tracking
5091 and a constant, see if we can compute a new constant value. */
5092 else if (ARITHMETIC_P (SET_SRC (x))
5093 && REG_P (XEXP (SET_SRC (x), 0))
5094 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5095 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5096 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5097 && 0 != (new = simplify_binary_operation
5098 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5099 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5100 XEXP (SET_SRC (x), 1)))
5101 && GET_CODE (new) == CONST_INT)
5102 p->const_equiv[REGNO (dest)] = new;
5103
5104 /* Otherwise, we can't do anything with this value. */
5105 else
5106 p->const_equiv[REGNO (dest)] = 0;
5107 }
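/* Worked example (hypothetical register numbers): after processing

     (set (reg 2) (const_int 16))
     (set (reg 2) (plus (reg 2) (const_int 8)))

   const_equiv[2] is first (const_int 16) and then, via
   simplify_binary_operation, (const_int 24). A later CLOBBER or partial
   set of (reg 2) resets const_equiv[2] to 0.  */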
5108
5109 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5110
5111 static void
5112 emit_equiv_load (struct epi_info *p)
5113 {
5114 if (p->equiv_reg_src != 0)
5115 {
5116 rtx dest = p->sp_equiv_reg;
5117
5118 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5119 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5120 REGNO (p->sp_equiv_reg));
5121
5122 emit_move_insn (dest, p->equiv_reg_src);
5123 p->equiv_reg_src = 0;
5124 }
5125 }
5126 #endif
5127
5128 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5129 this into place with notes indicating where the prologue ends and where
5130 the epilogue begins. Update the basic block information when possible. */
5131
5132 static void
5133 thread_prologue_and_epilogue_insns (void)
5134 {
5135 int inserted = 0;
5136 edge e;
5137 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5138 rtx seq;
5139 #endif
5140 #if defined (HAVE_epilogue) || defined(HAVE_return)
5141 rtx epilogue_end = NULL_RTX;
5142 #endif
5143 edge_iterator ei;
5144
5145 #ifdef HAVE_prologue
5146 if (HAVE_prologue)
5147 {
5148 start_sequence ();
5149 seq = gen_prologue ();
5150 emit_insn (seq);
5151
5152 /* Insert an explicit USE for the frame pointer
5153 if the profiling is on and the frame pointer is required. */
5154 if (current_function_profile && frame_pointer_needed)
5155 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
5156
5157 /* Retain a map of the prologue insns. */
5158 record_insns (seq, &prologue);
5159 emit_note (NOTE_INSN_PROLOGUE_END);
5160
5161 #ifndef PROFILE_BEFORE_PROLOGUE
5162 /* Ensure that instructions are not moved into the prologue when
5163 profiling is on. The call to the profiling routine can be
5164 emitted within the live range of a call-clobbered register. */
5165 if (current_function_profile)
5166 emit_insn (gen_blockage ());
5167 #endif
5168
5169 seq = get_insns ();
5170 end_sequence ();
5171 set_insn_locators (seq, prologue_locator);
5172
5173 /* Can't deal with multiple successors of the entry block
5174 at the moment. Function should always have at least one
5175 entry point. */
5176 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5177
5178 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5179 inserted = 1;
5180 }
5181 #endif
5182
5183 /* If the exit block has no non-fake predecessors, we don't need
5184 an epilogue. */
5185 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5186 if ((e->flags & EDGE_FAKE) == 0)
5187 break;
5188 if (e == NULL)
5189 goto epilogue_done;
5190
5191 #ifdef HAVE_return
5192 if (optimize && HAVE_return)
5193 {
5194 /* If we're allowed to generate a simple return instruction,
5195 then by definition we don't need a full epilogue. Examine
5196 the block that falls through to EXIT. If it does not
5197 contain any code, examine its predecessors and try to
5198 emit (conditional) return instructions. */
5199
5200 basic_block last;
5201 rtx label;
5202
5203 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5204 if (e->flags & EDGE_FALLTHRU)
5205 break;
5206 if (e == NULL)
5207 goto epilogue_done;
5208 last = e->src;
5209
5210 /* Verify that there are no active instructions in the last block. */
5211 label = BB_END (last);
5212 while (label && !LABEL_P (label))
5213 {
5214 if (active_insn_p (label))
5215 break;
5216 label = PREV_INSN (label);
5217 }
5218
5219 if (BB_HEAD (last) == label && LABEL_P (label))
5220 {
5221 edge_iterator ei2;
5222
5223 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5224 {
5225 basic_block bb = e->src;
5226 rtx jump;
5227
5228 if (bb == ENTRY_BLOCK_PTR)
5229 {
5230 ei_next (&ei2);
5231 continue;
5232 }
5233
5234 jump = BB_END (bb);
5235 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5236 {
5237 ei_next (&ei2);
5238 continue;
5239 }
5240
5241 /* If we have an unconditional jump, we can replace that
5242 with a simple return instruction. */
5243 if (simplejump_p (jump))
5244 {
5245 emit_return_into_block (bb);
5246 delete_insn (jump);
5247 }
5248
5249 /* If we have a conditional jump, we can try to replace
5250 that with a conditional return instruction. */
5251 else if (condjump_p (jump))
5252 {
5253 if (! redirect_jump (jump, 0, 0))
5254 {
5255 ei_next (&ei2);
5256 continue;
5257 }
5258
5259 /* If this block has only one successor, it both jumps
5260 and falls through to the fallthru block, so we can't
5261 delete the edge. */
5262 if (single_succ_p (bb))
5263 {
5264 ei_next (&ei2);
5265 continue;
5266 }
5267 }
5268 else
5269 {
5270 ei_next (&ei2);
5271 continue;
5272 }
5273
5274 /* Fix up the CFG for the successful change we just made. */
5275 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5276 }
5277
5278 /* Emit a return insn for the exit fallthru block. Whether
5279 this is still reachable will be determined later. */
5280
5281 emit_barrier_after (BB_END (last));
5282 emit_return_into_block (last);
5283 epilogue_end = BB_END (last);
5284 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5285 goto epilogue_done;
5286 }
5287 }
5288 #endif
5289 /* Find the edge that falls through to EXIT. Other edges may exist
5290 due to RETURN instructions, but those don't need epilogues.
5291 There really shouldn't be a mixture -- either all should have
5292 been converted or none, however... */
5293
5294 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5295 if (e->flags & EDGE_FALLTHRU)
5296 break;
5297 if (e == NULL)
5298 goto epilogue_done;
5299
5300 #ifdef HAVE_epilogue
5301 if (HAVE_epilogue)
5302 {
5303 start_sequence ();
5304 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5305
5306 seq = gen_epilogue ();
5307
5308 #ifdef INCOMING_RETURN_ADDR_RTX
5309 /* If this function returns with the stack depressed and we can support
5310 it, massage the epilogue to actually do that. */
5311 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5312 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5313 seq = keep_stack_depressed (seq);
5314 #endif
5315
5316 emit_jump_insn (seq);
5317
5318 /* Retain a map of the epilogue insns. */
5319 record_insns (seq, &epilogue);
5320 set_insn_locators (seq, epilogue_locator);
5321
5322 seq = get_insns ();
5323 end_sequence ();
5324
5325 insert_insn_on_edge (seq, e);
5326 inserted = 1;
5327 }
5328 else
5329 #endif
5330 {
5331 basic_block cur_bb;
5332
5333 if (! next_active_insn (BB_END (e->src)))
5334 goto epilogue_done;
5335 /* We have a fall-through edge to the exit block, the source is not
5336 the last block of the function, and an assembler epilogue will be
5337 emitted at the end of the function.
5338 We can't use force_nonfallthru here, because that would try to
5339 emit a return insn. Inserting a jump 'by hand' is extremely messy,
5340 so we take advantage of cfg_layout_finalize using
5341 fixup_fallthru_exit_predecessor. */
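/* Chain the blocks in their present order through their aux fields;
   cfg_layout_finalize treats that chain as the desired block order,
   so the current layout is preserved while the fallthru exit
   predecessor is fixed up.  */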
5342 cfg_layout_initialize (0);
5343 FOR_EACH_BB (cur_bb)
5344 if (cur_bb->index >= NUM_FIXED_BLOCKS
5345 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5346 cur_bb->aux = cur_bb->next_bb;
5347 cfg_layout_finalize ();
5348 }
5349 epilogue_done:
5350
5351 if (inserted)
5352 {
5353 commit_edge_insertions ();
5354
5355 /* The epilogue insns we inserted may cause the exit edge to no longer
5356 be fallthru. */
5357 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5358 {
5359 if (((e->flags & EDGE_FALLTHRU) != 0)
5360 && returnjump_p (BB_END (e->src)))
5361 e->flags &= ~EDGE_FALLTHRU;
5362 }
5363 }
5364
5365 #ifdef HAVE_sibcall_epilogue
5366 /* Emit sibling epilogues before any sibling call sites. */
5367 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5368 {
5369 basic_block bb = e->src;
5370 rtx insn = BB_END (bb);
5371
5372 if (!CALL_P (insn)
5373 || ! SIBLING_CALL_P (insn))
5374 {
5375 ei_next (&ei);
5376 continue;
5377 }
5378
5379 start_sequence ();
5380 emit_insn (gen_sibcall_epilogue ());
5381 seq = get_insns ();
5382 end_sequence ();
5383
5384 /* Retain a map of the epilogue insns. Used in life analysis to
5385 avoid getting rid of sibcall epilogue insns. Do this before we
5386 actually emit the sequence. */
5387 record_insns (seq, &sibcall_epilogue);
5388 set_insn_locators (seq, epilogue_locator);
5389
5390 emit_insn_before (seq, insn);
5391 ei_next (&ei);
5392 }
5393 #endif
5394
5395 #ifdef HAVE_epilogue
5396 if (epilogue_end)
5397 {
5398 rtx insn, next;
5399
5400 /* Move any NOTE_INSN_FUNCTION_BEG notes that ended up after the
5401 start of the epilogue back before it, as those can be relevant
5402 for debug info generation. There is no need to be strict about
5403 the existence of such a note; the loop below simply does nothing
5404 if none is found. */
5405 for (insn = epilogue_end; insn; insn = next)
5406 {
5407 next = NEXT_INSN (insn);
5408 if (NOTE_P (insn)
5409 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5410 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5411 }
5412 }
5413 #endif
5414
5415 /* Threading the prologue and epilogue changes the artificial refs
5416 in the entry and exit blocks. */
5417 epilogue_completed = 1;
5418 df_update_entry_exit_and_calls ();
5419 }
5420
5421 /* Reposition the prologue-end and epilogue-begin notes after instruction
5422 scheduling and delayed branch scheduling. */
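/* For example, the second scheduling pass or reorg may move a body
   insn above NOTE_INSN_PROLOGUE_END, or an epilogue insn above
   NOTE_INSN_EPILOGUE_BEG; the notes then no longer bracket exactly
   the insns recorded in the prologue and epilogue maps, so move each
   note back next to those insns.  */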
5423
5424 void
5425 reposition_prologue_and_epilogue_notes (void)
5426 {
5427 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5428 rtx insn, last, note;
5429 int len;
5430
5431 if ((len = VEC_length (int, prologue)) > 0)
5432 {
5433 last = 0, note = 0;
5434
5435 /* Scan from the beginning until we reach the last prologue insn.
5436 We apparently can't depend on basic_block_{head,end} after
5437 reorg has run. */
5438 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5439 {
5440 if (NOTE_P (insn))
5441 {
5442 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5443 note = insn;
5444 }
5445 else if (contains (insn, &prologue))
5446 {
5447 last = insn;
5448 if (--len == 0)
5449 break;
5450 }
5451 }
5452
5453 if (last)
5454 {
5455 /* Find the prologue-end note if we haven't already, and
5456 move it to just after the last prologue insn. */
5457 if (note == 0)
5458 {
5459 for (note = last; (note = NEXT_INSN (note));)
5460 if (NOTE_P (note)
5461 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5462 break;
5463 }
5464
5465 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5466 if (LABEL_P (last))
5467 last = NEXT_INSN (last);
5468 reorder_insns (note, note, last);
5469 }
5470 }
5471
5472 if ((len = VEC_length (int, epilogue)) > 0)
5473 {
5474 last = 0, note = 0;
5475
5476 /* Scan from the end until we reach the first epilogue insn.
5477 We apparently can't depend on basic_block_{head,end} after
5478 reorg has run. */
5479 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5480 {
5481 if (NOTE_P (insn))
5482 {
5483 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5484 note = insn;
5485 }
5486 else if (contains (insn, &epilogue))
5487 {
5488 last = insn;
5489 if (--len == 0)
5490 break;
5491 }
5492 }
5493
5494 if (last)
5495 {
5496 /* Find the epilogue-begin note if we haven't already, and
5497 move it to just before the first epilogue insn. */
5498 if (note == 0)
5499 {
5500 for (note = insn; (note = PREV_INSN (note));)
5501 if (NOTE_P (note)
5502 && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
5503 break;
5504 }
5505
5506 if (PREV_INSN (last) != note)
5507 reorder_insns (note, note, PREV_INSN (last));
5508 }
5509 }
5510 #endif /* HAVE_prologue or HAVE_epilogue */
5511 }
5512
5513 /* Returns the name of the current function. */
5514 const char *
5515 current_function_name (void)
5516 {
5517 return lang_hooks.decl_printable_name (cfun->decl, 2);
5518 }
5519
5520 /* Returns the raw (mangled) name of the current function. */
5521 const char *
5522 current_function_assembler_name (void)
5523 {
5524 return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
5525 }
5526 \f
5527
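/* On targets that define LEAF_REGISTERS, record whether the current
   function is a leaf function that uses only leaf registers, so later
   passes and the backend can rely on that fact. */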
5528 static unsigned int
5529 rest_of_handle_check_leaf_regs (void)
5530 {
5531 #ifdef LEAF_REGISTERS
5532 current_function_uses_only_leaf_regs
5533 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5534 #endif
5535 return 0;
5536 }
5537
5538 /* Insert TYPE into the used types hash table of FUNC. */
5539 static void
5540 used_types_insert_helper (tree type, struct function *func)
5541 {
5542 if (type != NULL && func != NULL)
5543 {
5544 void **slot;
5545
5546 if (func->used_types_hash == NULL)
5547 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5548 htab_eq_pointer, NULL);
5549 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5550 if (*slot == NULL)
5551 *slot = type;
5552 }
5553 }
5554
5555 /* Given a type T, strip pointer and array layers, take the main variant, and insert it into cfun's used types hash table. */
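/* For example (an illustrative case): for a variable declared as
   "const int *a[10]", the array and pointer layers are stripped and
   the type recorded is plain "int".  */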
5556 void
5557 used_types_insert (tree t)
5558 {
5559 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5560 t = TREE_TYPE (t);
5561 t = TYPE_MAIN_VARIANT (t);
5562 if (debug_info_level > DINFO_LEVEL_NONE)
5563 used_types_insert_helper (t, cfun);
5564 }
5565
5566 struct rtl_opt_pass pass_leaf_regs =
5567 {
5568 {
5569 RTL_PASS,
5570 NULL, /* name */
5571 NULL, /* gate */
5572 rest_of_handle_check_leaf_regs, /* execute */
5573 NULL, /* sub */
5574 NULL, /* next */
5575 0, /* static_pass_number */
5576 0, /* tv_id */
5577 0, /* properties_required */
5578 0, /* properties_provided */
5579 0, /* properties_destroyed */
5580 0, /* todo_flags_start */
5581 0 /* todo_flags_finish */
5582 }
5583 };
5584
5585 static unsigned int
5586 rest_of_handle_thread_prologue_and_epilogue (void)
5587 {
5588 if (optimize)
5589 cleanup_cfg (CLEANUP_EXPENSIVE);
5590 /* On some machines, the prologue and epilogue code, or parts thereof,
5591 can be represented as RTL. Doing so lets us schedule insns between
5592 that code and the rest of the function, and also allows delayed
5593 branch scheduling to operate in the epilogue. */
5594
5595 thread_prologue_and_epilogue_insns ();
5596 return 0;
5597 }
5598
5599 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5600 {
5601 {
5602 RTL_PASS,
5603 "pro_and_epilogue", /* name */
5604 NULL, /* gate */
5605 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5606 NULL, /* sub */
5607 NULL, /* next */
5608 0, /* static_pass_number */
5609 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5610 0, /* properties_required */
5611 0, /* properties_provided */
5612 0, /* properties_destroyed */
5613 TODO_verify_flow, /* todo_flags_start */
5614 TODO_dump_func |
5615 TODO_df_verify |
5616 TODO_df_finish | TODO_verify_rtl_sharing |
5617 TODO_ggc_collect /* todo_flags_finish */
5618 }
5619 };
5620 \f
5621
5622 /* This mini-pass fixes fall-out from SSA in asm statements that have
5623 in-out constraints. Say you start with
5624
5625 orig = inout;
5626 asm ("": "+mr" (inout));
5627 use (orig);
5628
5629 which is transformed very early to use explicit output and match operands:
5630
5631 orig = inout;
5632 asm ("": "=mr" (inout) : "0" (inout));
5633 use (orig);
5634
5635 Or, after SSA and copyprop,
5636
5637 asm ("": "=mr" (inout_2) : "0" (inout_1));
5638 use (inout_1);
5639
5640 Clearly inout_2 and inout_1 can no longer be coalesced easily, as
5641 they represent two separate values, so they will get different pseudo
5642 registers during expansion. Then, since the two operands need to match
5643 per the constraints but use different pseudo registers, reload must
5644 register a reload for these operands. Reloads can only be satisfied
5645 by hard registers, not by memory, so a register is needed for this
5646 reload merely because the operands fail to match. Thus, even though
5647 the constraint allows memory for this operand, no memory can actually
5648 be used for it, and this can cause reload failures on
5649 register-starved targets.
5650
5651 So it's a symptom of reload not being able to use memory for reloads,
5652 or, alternatively, of the two operands not coming into reload as
5653 matching (in which case the pseudo could go to memory just fine, as
5654 the alternative allows it, and no reload would be necessary).
5655 We fix the latter problem here, by transforming
5656
5657 asm ("": "=mr" (inout_2) : "0" (inout_1));
5658
5659 back to
5660
5661 inout_2 = inout_1;
5662 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
5663
5664 static void
5665 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5666 {
5667 int i;
5668 bool changed = false;
5669 rtx op = SET_SRC (p_sets[0]);
5670 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5671 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
5672 bool *output_matched = alloca (noutputs * sizeof (bool));
5673
5674 memset (output_matched, 0, noutputs * sizeof (bool));
5675 for (i = 0; i < ninputs; i++)
5676 {
5677 rtx input, output, insns;
5678 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5679 char *end;
5680 int match, j;
5681
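/* A matching constraint is a decimal operand number such as "0";
   for an ordinary constraint such as "r" or "m", strtoul consumes
   no characters and the input is skipped.  */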
5682 match = strtoul (constraint, &end, 10);
5683 if (end == constraint)
5684 continue;
5685
5686 gcc_assert (match < noutputs);
5687 output = SET_DEST (p_sets[match]);
5688 input = RTVEC_ELT (inputs, i);
5689 /* Only do the transformation for pseudos. */
5690 if (! REG_P (output)
5691 || rtx_equal_p (output, input)
5692 || (GET_MODE (input) != VOIDmode
5693 && GET_MODE (input) != GET_MODE (output)))
5694 continue;
5695
5696 /* We can't do anything if the output is also used as input,
5697 as we're going to overwrite it. */
5698 for (j = 0; j < ninputs; j++)
5699 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5700 break;
5701 if (j != ninputs)
5702 continue;
5703
5704 /* Avoid changing the same input several times. For
5705 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5706 change 'in' only once (to out1), rather than changing it
5707 first to out1 and afterwards to out2. */
5708 if (i > 0)
5709 {
5710 for (j = 0; j < noutputs; j++)
5711 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5712 break;
5713 if (j != noutputs)
5714 continue;
5715 }
5716 output_matched[match] = true;
5717
5718 start_sequence ();
5719 emit_move_insn (output, input);
5720 insns = get_insns ();
5721 end_sequence ();
5722 emit_insn_before (insns, insn);
5723
5724 /* Now replace all mentions of the input with output. We can't
5725 just replace the occurrence in inputs[i], as the register might
5726 also be used in some other input (or even in an address of an
5727 output), which would mean possibly increasing the number of
5728 inputs by one (namely 'output' in addition), which might pose
5729 too complicated a problem for reload to solve. E.g. this situation:
5730
5731 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5732
5733 Here 'input' is used in two occurrences as input (once for the
5734 input operand, once for the address in the second output operand).
5735 If we replaced only the occurrence of the input operand (to
5736 make the operands match) we would be left with this:
5737
5738 output = input
5739 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5740
5741 Now we suddenly have two different input values (containing the same
5742 value, but different pseudos) where we formerly had only one.
5743 With more complicated asms this might lead to reload failures
5744 which wouldn't have happened without this pass. So, iterate over
5745 all operands and replace all occurrences of the register used. */
5746 for (j = 0; j < noutputs; j++)
5747 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
5748 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
5749 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
5750 input, output);
5751 for (j = 0; j < ninputs; j++)
5752 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
5753 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
5754 input, output);
5755
5756 changed = true;
5757 }
5758
5759 if (changed)
5760 df_insn_rescan (insn);
5761 }
5762
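/* Apply match_asm_constraints_1 to every asm insn in the function,
   inserting the copies needed so that matching-constraint operands
   refer to the same pseudo. */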
5763 static unsigned
5764 rest_of_match_asm_constraints (void)
5765 {
5766 basic_block bb;
5767 rtx insn, pat, *p_sets;
5768 int noutputs;
5769
5770 if (!cfun->has_asm_statement)
5771 return 0;
5772
5773 df_set_flags (DF_DEFER_INSN_RESCAN);
5774 FOR_EACH_BB (bb)
5775 {
5776 FOR_BB_INSNS (bb, insn)
5777 {
5778 if (!INSN_P (insn))
5779 continue;
5780
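/* An asm with a single output expands to a plain SET whose source
   is an ASM_OPERANDS; one with several outputs expands to a PARALLEL
   whose leading elements are such SETs (possibly followed by
   clobbers).  */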
5781 pat = PATTERN (insn);
5782 if (GET_CODE (pat) == PARALLEL)
5783 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
5784 else if (GET_CODE (pat) == SET)
5785 p_sets = &PATTERN (insn), noutputs = 1;
5786 else
5787 continue;
5788
5789 if (GET_CODE (*p_sets) == SET
5790 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
5791 match_asm_constraints_1 (insn, p_sets, noutputs);
5792 }
5793 }
5794
5795 return TODO_df_finish;
5796 }
5797
5798 struct rtl_opt_pass pass_match_asm_constraints =
5799 {
5800 {
5801 RTL_PASS,
5802 "asmcons", /* name */
5803 NULL, /* gate */
5804 rest_of_match_asm_constraints, /* execute */
5805 NULL, /* sub */
5806 NULL, /* next */
5807 0, /* static_pass_number */
5808 0, /* tv_id */
5809 0, /* properties_required */
5810 0, /* properties_provided */
5811 0, /* properties_destroyed */
5812 0, /* todo_flags_start */
5813 TODO_dump_func /* todo_flags_finish */
5814 }
5815 };
5816
5817
5818 #include "gt-function.h"