1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "toplev.h"
55 #include "hashtab.h"
56 #include "ggc.h"
57 #include "tm_p.h"
58 #include "integrate.h"
59 #include "langhooks.h"
60 #include "target.h"
61 #include "cfglayout.h"
62 #include "tree-gimple.h"
63 #include "tree-pass.h"
64 #include "predict.h"
65 #include "df.h"
66 #include "timevar.h"
67 #include "vecprim.h"
68
69 /* So we can assign to cfun in this file. */
70 #undef cfun
71
72 #ifndef LOCAL_ALIGNMENT
73 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
74 #endif
75
76 #ifndef STACK_ALIGNMENT_NEEDED
77 #define STACK_ALIGNMENT_NEEDED 1
78 #endif
79
80 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
81
82 /* Some systems use __main in a way incompatible with its use in gcc; in these
83 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
84 give the same symbol without quotes for an alternative entry point. You
85 must define both, or neither. */
86 #ifndef NAME__MAIN
87 #define NAME__MAIN "__main"
88 #endif
89
90 /* Round a value down to the largest multiple of the required alignment
91    that does not exceed it.  Avoid using division in case the value is
92    negative.  Assume the alignment is a power of two.  */
93 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
94
95 /* Similar, but round to the next highest integer that meets the
96 alignment. */
97 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
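/* For illustration: with ALIGN a power of two these reduce to bit masking;
   e.g. FLOOR_ROUND (13, 8) == (13 & ~7) == 8, CEIL_ROUND (13, 8) ==
   ((13 + 7) & ~7) == 16, and FLOOR_ROUND (-13, 8) == -16, i.e. rounding
   toward more negative values, which is what a downward-growing frame
   needs.  */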
98
99 /* Nonzero if function being compiled doesn't contain any calls
100 (ignoring the prologue and epilogue). This is set prior to
101 local register allocation and is valid for the remaining
102 compiler passes. */
103 int current_function_is_leaf;
104
105 /* Nonzero if function being compiled doesn't modify the stack pointer
106 (ignoring the prologue and epilogue). This is only valid after
107 pass_stack_ptr_mod has run. */
108 int current_function_sp_is_unchanging;
109
110 /* Nonzero if the function being compiled is a leaf function which only
111 uses leaf registers. This is valid after reload (specifically after
112 sched2) and is useful only if the port defines LEAF_REGISTERS. */
113 int current_function_uses_only_leaf_regs;
114
115 /* Nonzero once virtual register instantiation has been done.
116 assign_stack_local uses frame_pointer_rtx when this is nonzero.
117 calls.c:emit_library_call_value_1 uses it to set up
118 post-instantiation libcalls. */
119 int virtuals_instantiated;
120
121 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
122 static GTY(()) int funcdef_no;
123
124 /* These variables hold pointers to functions to create and destroy
125 target specific, per-function data structures. */
126 struct machine_function * (*init_machine_status) (void);
127
128 /* The currently compiled function. */
129 struct function *cfun = 0;
130
131 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
132 static VEC(int,heap) *prologue;
133 static VEC(int,heap) *epilogue;
134
135 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
136 in this function. */
137 static VEC(int,heap) *sibcall_epilogue;
138 \f
139 /* In order to evaluate some expressions, such as function calls returning
140 structures in memory, we need to temporarily allocate stack locations.
141 We record each allocated temporary in the following structure.
142
143 Associated with each temporary slot is a nesting level. When we pop up
144 one level, all temporaries associated with the previous level are freed.
145 Normally, all temporaries are freed after the execution of the statement
146 in which they were created. However, if we are inside a ({...}) grouping,
147 the result may be in a temporary and hence must be preserved. If the
148 result could be in a temporary, we preserve it if we can determine which
149 one it is in. If we cannot determine which temporary may contain the
150 result, all temporaries are preserved. A temporary is preserved by
151 pretending it was allocated at the previous nesting level.
152
153 Automatic variables are also assigned temporary slots, at the nesting
154 level where they are defined.  They are marked as "kept" so that
155 free_temp_slots will not free them. */
156
157 struct temp_slot GTY(())
158 {
159 /* Points to next temporary slot. */
160 struct temp_slot *next;
161 /* Points to previous temporary slot. */
162 struct temp_slot *prev;
163
164 /* The rtx used to reference the slot.  */
165 rtx slot;
166 /* The rtx used to represent the address if not the address of the
167 slot above. May be an EXPR_LIST if multiple addresses exist. */
168 rtx address;
169 /* The alignment (in bits) of the slot. */
170 unsigned int align;
171 /* The size, in units, of the slot. */
172 HOST_WIDE_INT size;
173 /* The type of the object in the slot, or zero if it doesn't correspond
174 to a type. We use this to determine whether a slot can be reused.
175 It can be reused if objects of the type of the new slot will always
176 conflict with objects of the type of the old slot. */
177 tree type;
178 /* Nonzero if this temporary is currently in use. */
179 char in_use;
180 /* Nonzero if this temporary has its address taken. */
181 char addr_taken;
182 /* Nesting level at which this slot is being used. */
183 int level;
184 /* Nonzero if this should survive a call to free_temp_slots. */
185 int keep;
186 /* The offset of the slot from the frame_pointer, including extra space
187 for alignment. This info is for combine_temp_slots. */
188 HOST_WIDE_INT base_offset;
189 /* The size of the slot, including extra space for alignment. This
190 info is for combine_temp_slots. */
191 HOST_WIDE_INT full_size;
192 };
193 \f
194 /* Forward declarations. */
195
196 static struct temp_slot *find_temp_slot_from_address (rtx);
197 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
198 static void pad_below (struct args_size *, enum machine_mode, tree);
199 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
200 static int all_blocks (tree, tree *);
201 static tree *get_block_vector (tree, int *);
202 extern tree debug_find_var_in_block_tree (tree, tree);
203 /* We always define `record_insns' even if it's not used so that we
204 can always export `prologue_epilogue_contains'. */
205 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
206 static int contains (const_rtx, VEC(int,heap) **);
207 #ifdef HAVE_return
208 static void emit_return_into_block (basic_block);
209 #endif
210 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
211 static rtx keep_stack_depressed (rtx);
212 #endif
213 static void prepare_function_start (void);
214 static void do_clobber_return_reg (rtx, void *);
215 static void do_use_return_reg (rtx, void *);
216 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
217 \f
218 /* Pointer to chain of `struct function' for containing functions. */
219 struct function *outer_function_chain;
220
221 /* Given a function decl for a containing function,
222 return the `struct function' for it. */
223
224 struct function *
225 find_function_data (tree decl)
226 {
227 struct function *p;
228
229 for (p = outer_function_chain; p; p = p->outer)
230 if (p->decl == decl)
231 return p;
232
233 gcc_unreachable ();
234 }
235
236 /* Save the current context for compilation of a nested function.
237 This is called from language-specific code. */
238
239 void
240 push_function_context (void)
241 {
242 if (cfun == 0)
243 allocate_struct_function (NULL, false);
244
245 cfun->outer = outer_function_chain;
246 outer_function_chain = cfun;
247 set_cfun (NULL);
248 }
249
250 /* Restore the last saved context, at the end of a nested function.
251 This function is called from language-specific code. */
252
253 void
254 pop_function_context (void)
255 {
256 struct function *p = outer_function_chain;
257
258 set_cfun (p);
259 outer_function_chain = p->outer;
260 current_function_decl = p->decl;
261
262 /* Reset variables that have known state during rtx generation. */
263 virtuals_instantiated = 0;
264 generating_concat_p = 1;
265 }
266
267 /* Clear out all parts of the state in F that can safely be discarded
268 after the function has been parsed, but not compiled, to let
269 garbage collection reclaim the memory. */
270
271 void
272 free_after_parsing (struct function *f)
273 {
274 /* f->expr->forced_labels is used by code generation. */
275 /* f->emit->regno_reg_rtx is used by code generation. */
276 /* f->varasm is used by code generation. */
277 /* f->eh->eh_return_stub_label is used by code generation. */
278
279 lang_hooks.function.final (f);
280 }
281
282 /* Clear out all parts of the state in F that can safely be discarded
283 after the function has been compiled, to let garbage collection
284 reclaim the memory. */
285
286 void
287 free_after_compilation (struct function *f)
288 {
289 VEC_free (int, heap, prologue);
290 VEC_free (int, heap, epilogue);
291 VEC_free (int, heap, sibcall_epilogue);
292 if (rtl.emit.regno_pointer_align)
293 free (rtl.emit.regno_pointer_align);
294
295 memset (&rtl, 0, sizeof (rtl));
296 f->eh = NULL;
297 f->machine = NULL;
298 f->cfg = NULL;
299
300 f->arg_offset_rtx = NULL;
301 f->return_rtx = NULL;
302 f->internal_arg_pointer = NULL;
303 f->epilogue_delay_list = NULL;
304 }
305 \f
306 /* Return size needed for stack frame based on slots so far allocated.
307 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
308 the caller may have to do that. */
309
310 HOST_WIDE_INT
311 get_frame_size (void)
312 {
313 if (FRAME_GROWS_DOWNWARD)
314 return -frame_offset;
315 else
316 return frame_offset;
317 }
318
319 /* Issue an error message and return TRUE if frame OFFSET overflows in
320 the signed target pointer arithmetic for function FUNC.  Otherwise
321 return FALSE. */
322
323 bool
324 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
325 {
326 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
327
328 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
329 /* Leave room for the fixed part of the frame. */
330 - 64 * UNITS_PER_WORD)
331 {
332 error ("%Jtotal size of local objects too large", func);
333 return TRUE;
334 }
335
336 return FALSE;
337 }
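/* For illustration: on a target where Pmode is 32 bits wide and
   UNITS_PER_WORD is 4, the limit above works out to 0x80000000 - 256
   bytes; a frame whose accumulated offset exceeds that is diagnosed
   rather than silently wrapping around.  */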
338
339 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
340 with machine mode MODE.
341
342 ALIGN controls the amount of alignment for the address of the slot:
343 0 means according to MODE,
344 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
345 -2 means use BITS_PER_UNIT,
346 positive specifies alignment boundary in bits.
347
348 We do not round to stack_boundary here. */
349
350 rtx
351 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
352 {
353 rtx x, addr;
354 int bigend_correction = 0;
355 unsigned int alignment;
356 int frame_off, frame_alignment, frame_phase;
357
358 if (align == 0)
359 {
360 tree type;
361
362 if (mode == BLKmode)
363 alignment = BIGGEST_ALIGNMENT;
364 else
365 alignment = GET_MODE_ALIGNMENT (mode);
366
367 /* Allow the target to (possibly) increase the alignment of this
368 stack slot. */
369 type = lang_hooks.types.type_for_mode (mode, 0);
370 if (type)
371 alignment = LOCAL_ALIGNMENT (type, alignment);
372
373 alignment /= BITS_PER_UNIT;
374 }
375 else if (align == -1)
376 {
377 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
378 size = CEIL_ROUND (size, alignment);
379 }
380 else if (align == -2)
381 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
382 else
383 alignment = align / BITS_PER_UNIT;
384
385 if (FRAME_GROWS_DOWNWARD)
386 frame_offset -= size;
387
388 /* Clamp the requested alignment to the preferred stack boundary.  */
389 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
390 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
391
392 if (cfun->stack_alignment_needed < alignment * BITS_PER_UNIT)
393 cfun->stack_alignment_needed = alignment * BITS_PER_UNIT;
394
395 /* Calculate how many bytes the start of local variables is off from
396 stack alignment. */
397 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
398 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
399 frame_phase = frame_off ? frame_alignment - frame_off : 0;
400
401 /* Round the frame offset to the specified alignment. The default is
402 to always honor requests to align the stack but a port may choose to
403 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
404 if (STACK_ALIGNMENT_NEEDED
405 || mode != BLKmode
406 || size != 0)
407 {
408 /* We must be careful here, since FRAME_OFFSET might be negative and
409 division with a negative dividend isn't as well defined as we might
410 like. So we instead assume that ALIGNMENT is a power of two and
411 use logical operations which are unambiguous. */
412 if (FRAME_GROWS_DOWNWARD)
413 frame_offset
414 = (FLOOR_ROUND (frame_offset - frame_phase,
415 (unsigned HOST_WIDE_INT) alignment)
416 + frame_phase);
417 else
418 frame_offset
419 = (CEIL_ROUND (frame_offset - frame_phase,
420 (unsigned HOST_WIDE_INT) alignment)
421 + frame_phase);
422 }
423
424 /* On a big-endian machine, if we are allocating more space than we will use,
425 use the least significant bytes of those that are allocated. */
426 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
427 bigend_correction = size - GET_MODE_SIZE (mode);
428
429 /* If we have already instantiated virtual registers, return the actual
430 address relative to the frame pointer. */
431 if (virtuals_instantiated)
432 addr = plus_constant (frame_pointer_rtx,
433 trunc_int_for_mode
434 (frame_offset + bigend_correction
435 + STARTING_FRAME_OFFSET, Pmode));
436 else
437 addr = plus_constant (virtual_stack_vars_rtx,
438 trunc_int_for_mode
439 (frame_offset + bigend_correction,
440 Pmode));
441
442 if (!FRAME_GROWS_DOWNWARD)
443 frame_offset += size;
444
445 x = gen_rtx_MEM (mode, addr);
446 MEM_NOTRAP_P (x) = 1;
447
448 stack_slot_list
449 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
450
451 if (frame_offset_overflow (frame_offset, current_function_decl))
452 frame_offset = 0;
453
454 return x;
455 }
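/* Typical use, roughly: a caller that needs a word-sized spill slot can do

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   which extends the frame, updates frame_offset, and returns a MEM
   addressed off virtual_stack_vars_rtx (or frame_pointer_rtx once
   virtuals_instantiated is set).  */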
456 \f
457 /* Removes temporary slot TEMP from LIST. */
458
459 static void
460 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
461 {
462 if (temp->next)
463 temp->next->prev = temp->prev;
464 if (temp->prev)
465 temp->prev->next = temp->next;
466 else
467 *list = temp->next;
468
469 temp->prev = temp->next = NULL;
470 }
471
472 /* Inserts temporary slot TEMP to LIST. */
473
474 static void
475 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
476 {
477 temp->next = *list;
478 if (*list)
479 (*list)->prev = temp;
480 temp->prev = NULL;
481 *list = temp;
482 }
483
484 /* Returns the list of used temp slots at LEVEL. */
485
486 static struct temp_slot **
487 temp_slots_at_level (int level)
488 {
489 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
490 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
491
492 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
493 }
494
495 /* Returns the maximal temporary slot level. */
496
497 static int
498 max_slot_level (void)
499 {
500 if (!used_temp_slots)
501 return -1;
502
503 return VEC_length (temp_slot_p, used_temp_slots) - 1;
504 }
505
506 /* Moves temporary slot TEMP to LEVEL. */
507
508 static void
509 move_slot_to_level (struct temp_slot *temp, int level)
510 {
511 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
512 insert_slot_to_list (temp, temp_slots_at_level (level));
513 temp->level = level;
514 }
515
516 /* Make temporary slot TEMP available. */
517
518 static void
519 make_slot_available (struct temp_slot *temp)
520 {
521 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
522 insert_slot_to_list (temp, &avail_temp_slots);
523 temp->in_use = 0;
524 temp->level = -1;
525 }
526 \f
527 /* Allocate a temporary stack slot and record it for possible later
528 reuse.
529
530 MODE is the machine mode to be given to the returned rtx.
531
532 SIZE is the size in units of the space required. We do no rounding here
533 since assign_stack_local will do any required rounding.
534
535 KEEP is 1 if this slot is to be retained after a call to
536 free_temp_slots. Automatic variables for a block are allocated
537 with this flag. KEEP values of 2 or 3 were needed respectively
538 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
539 or for SAVE_EXPRs, but they are now unused.
540
541 TYPE is the type that will be used for the stack slot. */
542
543 rtx
544 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
545 int keep, tree type)
546 {
547 unsigned int align;
548 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
549 rtx slot;
550
551 /* If SIZE is -1 it means that somebody tried to allocate a temporary
552 of a variable size. */
553 gcc_assert (size != -1);
554
555 /* These are now unused. */
556 gcc_assert (keep <= 1);
557
558 if (mode == BLKmode)
559 align = BIGGEST_ALIGNMENT;
560 else
561 align = GET_MODE_ALIGNMENT (mode);
562
563 if (! type)
564 type = lang_hooks.types.type_for_mode (mode, 0);
565
566 if (type)
567 align = LOCAL_ALIGNMENT (type, align);
568
569 /* Try to find an available, already-allocated temporary of the proper
570 mode which meets the size and alignment requirements. Choose the
571 smallest one with the closest alignment.
572
573 If assign_stack_temp is called outside of the tree->rtl expansion,
574 we cannot reuse the stack slots (that may still refer to
575 VIRTUAL_STACK_VARS_REGNUM). */
576 if (!virtuals_instantiated)
577 {
578 for (p = avail_temp_slots; p; p = p->next)
579 {
580 if (p->align >= align && p->size >= size
581 && GET_MODE (p->slot) == mode
582 && objects_must_conflict_p (p->type, type)
583 && (best_p == 0 || best_p->size > p->size
584 || (best_p->size == p->size && best_p->align > p->align)))
585 {
586 if (p->align == align && p->size == size)
587 {
588 selected = p;
589 cut_slot_from_list (selected, &avail_temp_slots);
590 best_p = 0;
591 break;
592 }
593 best_p = p;
594 }
595 }
596 }
597
598 /* Make our best, if any, the one to use. */
599 if (best_p)
600 {
601 selected = best_p;
602 cut_slot_from_list (selected, &avail_temp_slots);
603
604 /* If there are enough aligned bytes left over, make them into a new
605 temp_slot so that the extra bytes don't get wasted. Do this only
606 for BLKmode slots, so that we can be sure of the alignment. */
607 if (GET_MODE (best_p->slot) == BLKmode)
608 {
609 int alignment = best_p->align / BITS_PER_UNIT;
610 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
611
612 if (best_p->size - rounded_size >= alignment)
613 {
614 p = ggc_alloc (sizeof (struct temp_slot));
615 p->in_use = p->addr_taken = 0;
616 p->size = best_p->size - rounded_size;
617 p->base_offset = best_p->base_offset + rounded_size;
618 p->full_size = best_p->full_size - rounded_size;
619 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
620 p->align = best_p->align;
621 p->address = 0;
622 p->type = best_p->type;
623 insert_slot_to_list (p, &avail_temp_slots);
624
625 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
626 stack_slot_list);
627
628 best_p->size = rounded_size;
629 best_p->full_size = rounded_size;
630 }
631 }
632 }
633
634 /* If we still didn't find one, make a new temporary. */
635 if (selected == 0)
636 {
637 HOST_WIDE_INT frame_offset_old = frame_offset;
638
639 p = ggc_alloc (sizeof (struct temp_slot));
640
641 /* We are passing an explicit alignment request to assign_stack_local.
642 One side effect of that is assign_stack_local will not round SIZE
643 to ensure the frame offset remains suitably aligned.
644
645 So for requests which depended on the rounding of SIZE, we go ahead
646 and round it now. We also make sure ALIGNMENT is at least
647 BIGGEST_ALIGNMENT. */
648 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
649 p->slot = assign_stack_local (mode,
650 (mode == BLKmode
651 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
652 : size),
653 align);
654
655 p->align = align;
656
657 /* The following slot size computation is necessary because we don't
658 know the actual size of the temporary slot until assign_stack_local
659 has performed all the frame alignment and size rounding for the
660 requested temporary. Note that extra space added for alignment
661 can be either above or below this stack slot depending on which
662 way the frame grows. We include the extra space if and only if it
663 is above this slot. */
664 if (FRAME_GROWS_DOWNWARD)
665 p->size = frame_offset_old - frame_offset;
666 else
667 p->size = size;
668
669 /* Now define the fields used by combine_temp_slots. */
670 if (FRAME_GROWS_DOWNWARD)
671 {
672 p->base_offset = frame_offset;
673 p->full_size = frame_offset_old - frame_offset;
674 }
675 else
676 {
677 p->base_offset = frame_offset_old;
678 p->full_size = frame_offset - frame_offset_old;
679 }
680 p->address = 0;
681
682 selected = p;
683 }
684
685 p = selected;
686 p->in_use = 1;
687 p->addr_taken = 0;
688 p->type = type;
689 p->level = temp_slot_level;
690 p->keep = keep;
691
692 pp = temp_slots_at_level (p->level);
693 insert_slot_to_list (p, pp);
694
695 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
696 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
697 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
698
699 /* If we know the alias set for the memory that will be used, use
700 it. If there's no TYPE, then we don't know anything about the
701 alias set for the memory. */
702 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
703 set_mem_align (slot, align);
704
705 /* If a type is specified, set the relevant flags. */
706 if (type != 0)
707 {
708 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
709 MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
710 || TREE_CODE (type) == COMPLEX_TYPE));
711 }
712 MEM_NOTRAP_P (slot) = 1;
713
714 return slot;
715 }
716
717 /* Allocate a temporary stack slot and record it for possible later
718 reuse. First three arguments are same as in preceding function. */
719
720 rtx
721 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
722 {
723 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
724 }
725 \f
726 /* Assign a temporary.
727 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
728 and that decl should be used in error messages.  In either case, we
729 allocate space of the given type.
730 KEEP is as for assign_stack_temp.
731 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
732 it is 0 if a register is OK.
733 DONT_PROMOTE is 1 if we should not promote values in register
734 to wider modes. */
735
736 rtx
737 assign_temp (tree type_or_decl, int keep, int memory_required,
738 int dont_promote ATTRIBUTE_UNUSED)
739 {
740 tree type, decl;
741 enum machine_mode mode;
742 #ifdef PROMOTE_MODE
743 int unsignedp;
744 #endif
745
746 if (DECL_P (type_or_decl))
747 decl = type_or_decl, type = TREE_TYPE (decl);
748 else
749 decl = NULL, type = type_or_decl;
750
751 mode = TYPE_MODE (type);
752 #ifdef PROMOTE_MODE
753 unsignedp = TYPE_UNSIGNED (type);
754 #endif
755
756 if (mode == BLKmode || memory_required)
757 {
758 HOST_WIDE_INT size = int_size_in_bytes (type);
759 rtx tmp;
760
761 /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
762 problems with allocating the stack space. */
763 if (size == 0)
764 size = 1;
765
766 /* Unfortunately, we don't yet know how to allocate variable-sized
767 temporaries. However, sometimes we can find a fixed upper limit on
768 the size, so try that instead. */
769 else if (size == -1)
770 size = max_int_size_in_bytes (type);
771
772 /* The size of the temporary may be too large to fit into an integer. */
773 /* ??? Not sure this should happen except for user silliness, so limit
774 this to things that aren't compiler-generated temporaries. The
775 rest of the time we'll die in assign_stack_temp_for_type. */
776 if (decl && size == -1
777 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
778 {
779 error ("size of variable %q+D is too large", decl);
780 size = 1;
781 }
782
783 tmp = assign_stack_temp_for_type (mode, size, keep, type);
784 return tmp;
785 }
786
787 #ifdef PROMOTE_MODE
788 if (! dont_promote)
789 mode = promote_mode (type, mode, &unsignedp, 0);
790 #endif
791
792 return gen_reg_rtx (mode);
793 }
794 \f
795 /* Combine temporary stack slots which are adjacent on the stack.
796
797 This allows for better use of already allocated stack space. This is only
798 done for BLKmode slots because we can be sure that we won't have alignment
799 problems in this case. */
800
801 static void
802 combine_temp_slots (void)
803 {
804 struct temp_slot *p, *q, *next, *next_q;
805 int num_slots;
806
807 /* We can't combine slots, because the information about which slot
808 is in which alias set will be lost. */
809 if (flag_strict_aliasing)
810 return;
811
812 /* If there are a lot of temp slots, don't do anything unless
813 expensive optimizations are enabled.  */
814 if (! flag_expensive_optimizations)
815 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
816 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
817 return;
818
819 for (p = avail_temp_slots; p; p = next)
820 {
821 int delete_p = 0;
822
823 next = p->next;
824
825 if (GET_MODE (p->slot) != BLKmode)
826 continue;
827
828 for (q = p->next; q; q = next_q)
829 {
830 int delete_q = 0;
831
832 next_q = q->next;
833
834 if (GET_MODE (q->slot) != BLKmode)
835 continue;
836
837 if (p->base_offset + p->full_size == q->base_offset)
838 {
839 /* Q comes after P; combine Q into P. */
840 p->size += q->size;
841 p->full_size += q->full_size;
842 delete_q = 1;
843 }
844 else if (q->base_offset + q->full_size == p->base_offset)
845 {
846 /* P comes after Q; combine P into Q. */
847 q->size += p->size;
848 q->full_size += p->full_size;
849 delete_p = 1;
850 break;
851 }
852 if (delete_q)
853 cut_slot_from_list (q, &avail_temp_slots);
854 }
855
856 /* Either delete P or advance past it. */
857 if (delete_p)
858 cut_slot_from_list (p, &avail_temp_slots);
859 }
860 }
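/* For illustration: two freed BLKmode slots described by
   { base_offset == 0, full_size == 16 } and { base_offset == 16,
   full_size == 8 } satisfy the adjacency test above, so they are merged
   into a single 24-byte slot that a later assign_stack_temp_for_type
   call may hand back in one piece.  */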
861 \f
862 /* Find the temp slot corresponding to the object at address X. */
863
864 static struct temp_slot *
865 find_temp_slot_from_address (rtx x)
866 {
867 struct temp_slot *p;
868 rtx next;
869 int i;
870
871 for (i = max_slot_level (); i >= 0; i--)
872 for (p = *temp_slots_at_level (i); p; p = p->next)
873 {
874 if (XEXP (p->slot, 0) == x
875 || p->address == x
876 || (GET_CODE (x) == PLUS
877 && XEXP (x, 0) == virtual_stack_vars_rtx
878 && GET_CODE (XEXP (x, 1)) == CONST_INT
879 && INTVAL (XEXP (x, 1)) >= p->base_offset
880 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
881 return p;
882
883 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
884 for (next = p->address; next; next = XEXP (next, 1))
885 if (XEXP (next, 0) == x)
886 return p;
887 }
888
889 /* If we have a sum involving a register, see if it points to a temp
890 slot. */
891 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
892 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
893 return p;
894 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
895 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
896 return p;
897
898 return 0;
899 }
900
901 /* Indicate that NEW is an alternate way of referring to the temp slot
902 that previously was known by OLD. */
903
904 void
905 update_temp_slot_address (rtx old, rtx new)
906 {
907 struct temp_slot *p;
908
909 if (rtx_equal_p (old, new))
910 return;
911
912 p = find_temp_slot_from_address (old);
913
914 /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
915 register, recurse on each operand of OLD to see whether NEW points
916 into a temporary slot.  Otherwise, if both OLD and NEW are PLUS
917 expressions with an operand in common, try a recursive call on the
918 remaining operands.  */
919 if (p == 0)
920 {
921 if (GET_CODE (old) != PLUS)
922 return;
923
924 if (REG_P (new))
925 {
926 update_temp_slot_address (XEXP (old, 0), new);
927 update_temp_slot_address (XEXP (old, 1), new);
928 return;
929 }
930 else if (GET_CODE (new) != PLUS)
931 return;
932
933 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
934 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
935 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
936 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
937 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
938 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
939 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
940 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
941
942 return;
943 }
944
945 /* Otherwise add an alias for the temp's address. */
946 else if (p->address == 0)
947 p->address = new;
948 else
949 {
950 if (GET_CODE (p->address) != EXPR_LIST)
951 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
952
953 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
954 }
955 }
956
957 /* If X could be a reference to a temporary slot, mark the fact that its
958 address was taken. */
959
960 void
961 mark_temp_addr_taken (rtx x)
962 {
963 struct temp_slot *p;
964
965 if (x == 0)
966 return;
967
968 /* If X is not in memory or is at a constant address, it cannot be in
969 a temporary slot. */
970 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
971 return;
972
973 p = find_temp_slot_from_address (XEXP (x, 0));
974 if (p != 0)
975 p->addr_taken = 1;
976 }
977
978 /* If X could be a reference to a temporary slot, mark that slot as
979 belonging to the level one higher than the current level.  If X
980 matched one of our slots, just mark that one. Otherwise, we can't
981 easily predict which it is, so upgrade all of them. Kept slots
982 need not be touched.
983
984 This is called when an ({...}) construct occurs and a statement
985 returns a value in memory. */
986
987 void
988 preserve_temp_slots (rtx x)
989 {
990 struct temp_slot *p = 0, *next;
991
992 /* If there is no result, we still might have some objects whose addresses
993 were taken, so we need to make sure they stay around.  */
994 if (x == 0)
995 {
996 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
997 {
998 next = p->next;
999
1000 if (p->addr_taken)
1001 move_slot_to_level (p, temp_slot_level - 1);
1002 }
1003
1004 return;
1005 }
1006
1007 /* If X is a register that is being used as a pointer, see if we have
1008 a temporary slot we know it points to. To be consistent with
1009 the code below, we really should preserve all non-kept slots
1010 if we can't find a match, but that seems to be much too costly. */
1011 if (REG_P (x) && REG_POINTER (x))
1012 p = find_temp_slot_from_address (x);
1013
1014 /* If X is not in memory or is at a constant address, it cannot be in
1015 a temporary slot, but it can contain something whose address was
1016 taken. */
1017 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1018 {
1019 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1020 {
1021 next = p->next;
1022
1023 if (p->addr_taken)
1024 move_slot_to_level (p, temp_slot_level - 1);
1025 }
1026
1027 return;
1028 }
1029
1030 /* First see if we can find a match. */
1031 if (p == 0)
1032 p = find_temp_slot_from_address (XEXP (x, 0));
1033
1034 if (p != 0)
1035 {
1036 /* Move everything at our level whose address was taken to our new
1037 level in case we used its address. */
1038 struct temp_slot *q;
1039
1040 if (p->level == temp_slot_level)
1041 {
1042 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1043 {
1044 next = q->next;
1045
1046 if (p != q && q->addr_taken)
1047 move_slot_to_level (q, temp_slot_level - 1);
1048 }
1049
1050 move_slot_to_level (p, temp_slot_level - 1);
1051 p->addr_taken = 0;
1052 }
1053 return;
1054 }
1055
1056 /* Otherwise, preserve all non-kept slots at this level. */
1057 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1058 {
1059 next = p->next;
1060
1061 if (!p->keep)
1062 move_slot_to_level (p, temp_slot_level - 1);
1063 }
1064 }
1065
1066 /* Free all temporaries used so far. This is normally called at the
1067 end of generating code for a statement. */
1068
1069 void
1070 free_temp_slots (void)
1071 {
1072 struct temp_slot *p, *next;
1073
1074 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1075 {
1076 next = p->next;
1077
1078 if (!p->keep)
1079 make_slot_available (p);
1080 }
1081
1082 combine_temp_slots ();
1083 }
1084
1085 /* Push deeper into the nesting level for stack temporaries. */
1086
1087 void
1088 push_temp_slots (void)
1089 {
1090 temp_slot_level++;
1091 }
1092
1093 /* Pop a temporary nesting level. All slots in use in the current level
1094 are freed. */
1095
1096 void
1097 pop_temp_slots (void)
1098 {
1099 struct temp_slot *p, *next;
1100
1101 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1102 {
1103 next = p->next;
1104 make_slot_available (p);
1105 }
1106
1107 combine_temp_slots ();
1108
1109 temp_slot_level--;
1110 }
1111
1112 /* Initialize temporary slots. */
1113
1114 void
1115 init_temp_slots (void)
1116 {
1117 /* We have not allocated any temporaries yet. */
1118 avail_temp_slots = 0;
1119 used_temp_slots = 0;
1120 temp_slot_level = 0;
1121 }
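/* Typical nesting of the temp-slot API, roughly:

     push_temp_slots ();
     ... expand a statement, calling assign_stack_temp (mode, size, 0)
         for any scratch memory it needs ...
     preserve_temp_slots (result);
     free_temp_slots ();
     pop_temp_slots ();

   preserve_temp_slots keeps the slot holding RESULT alive one level up;
   slots that are neither kept nor preserved become available for reuse
   when free_temp_slots or pop_temp_slots runs.  */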
1122 \f
1123 /* These routines are responsible for converting virtual register references
1124 to the actual hard register references once RTL generation is complete.
1125
1126 The following four variables are used for communication between the
1127 routines. They contain the offsets of the virtual registers from their
1128 respective hard registers. */
1129
1130 static int in_arg_offset;
1131 static int var_offset;
1132 static int dynamic_offset;
1133 static int out_arg_offset;
1134 static int cfa_offset;
1135
1136 /* On most machines, the stack pointer register is equivalent to the bottom
1137 of the stack. */
1138
1139 #ifndef STACK_POINTER_OFFSET
1140 #define STACK_POINTER_OFFSET 0
1141 #endif
1142
1143 /* If not defined, pick an appropriate default for the offset of dynamically
1144 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1145 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1146
1147 #ifndef STACK_DYNAMIC_OFFSET
1148
1149 /* The bottom of the stack points to the actual arguments. If
1150 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1151 parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1152 stack space for register parameters is not pushed by the caller, but
1153 rather part of the fixed stack areas and hence not included in
1154 `current_function_outgoing_args_size'. Nevertheless, we must allow
1155 for it when allocating stack dynamic objects. */
1156
1157 #if defined(REG_PARM_STACK_SPACE)
1158 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1159 ((ACCUMULATE_OUTGOING_ARGS \
1160 ? (current_function_outgoing_args_size \
1161 + (OUTGOING_REG_PARM_STACK_SPACE ? 0 : REG_PARM_STACK_SPACE (FNDECL))) \
1162 : 0) + (STACK_POINTER_OFFSET))
1163 #else
1164 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1165 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1166 + (STACK_POINTER_OFFSET))
1167 #endif
1168 #endif
1169
1170 \f
1171 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1172 is a virtual register, return the equivalent hard register and set the
1173 offset indirectly through the pointer. Otherwise, return 0. */
1174
1175 static rtx
1176 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1177 {
1178 rtx new;
1179 HOST_WIDE_INT offset;
1180
1181 if (x == virtual_incoming_args_rtx)
1182 new = arg_pointer_rtx, offset = in_arg_offset;
1183 else if (x == virtual_stack_vars_rtx)
1184 new = frame_pointer_rtx, offset = var_offset;
1185 else if (x == virtual_stack_dynamic_rtx)
1186 new = stack_pointer_rtx, offset = dynamic_offset;
1187 else if (x == virtual_outgoing_args_rtx)
1188 new = stack_pointer_rtx, offset = out_arg_offset;
1189 else if (x == virtual_cfa_rtx)
1190 {
1191 #ifdef FRAME_POINTER_CFA_OFFSET
1192 new = frame_pointer_rtx;
1193 #else
1194 new = arg_pointer_rtx;
1195 #endif
1196 offset = cfa_offset;
1197 }
1198 else
1199 return NULL_RTX;
1200
1201 *poffset = offset;
1202 return new;
1203 }
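/* For illustration: with the offsets computed by instantiate_virtual_regs
   below, an address such as (plus virtual_stack_vars_rtx (const_int 8))
   is rewritten to (plus frame_pointer_rtx (const_int STARTING_FRAME_OFFSET
   + 8)), and similarly for the other virtual registers.  */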
1204
1205 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1206 Instantiate any virtual registers present inside of *LOC. The expression
1207 is simplified, as much as possible, but is not to be considered "valid"
1208 in any sense implied by the target. If any change is made, set CHANGED
1209 to true. */
1210
1211 static int
1212 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1213 {
1214 HOST_WIDE_INT offset;
1215 bool *changed = (bool *) data;
1216 rtx x, new;
1217
1218 x = *loc;
1219 if (x == 0)
1220 return 0;
1221
1222 switch (GET_CODE (x))
1223 {
1224 case REG:
1225 new = instantiate_new_reg (x, &offset);
1226 if (new)
1227 {
1228 *loc = plus_constant (new, offset);
1229 if (changed)
1230 *changed = true;
1231 }
1232 return -1;
1233
1234 case PLUS:
1235 new = instantiate_new_reg (XEXP (x, 0), &offset);
1236 if (new)
1237 {
1238 new = plus_constant (new, offset);
1239 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1240 if (changed)
1241 *changed = true;
1242 return -1;
1243 }
1244
1245 /* FIXME -- from old code */
1246 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1247 we can commute the PLUS and SUBREG because pointers into the
1248 frame are well-behaved. */
1249 break;
1250
1251 default:
1252 break;
1253 }
1254
1255 return 0;
1256 }
1257
1258 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1259 matches the predicate for insn CODE operand OPERAND. */
1260
1261 static int
1262 safe_insn_predicate (int code, int operand, rtx x)
1263 {
1264 const struct insn_operand_data *op_data;
1265
1266 if (code < 0)
1267 return true;
1268
1269 op_data = &insn_data[code].operand[operand];
1270 if (op_data->predicate == NULL)
1271 return true;
1272
1273 return op_data->predicate (x, op_data->mode);
1274 }
1275
1276 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1277 registers present inside of insn. The result will be a valid insn. */
1278
1279 static void
1280 instantiate_virtual_regs_in_insn (rtx insn)
1281 {
1282 HOST_WIDE_INT offset;
1283 int insn_code, i;
1284 bool any_change = false;
1285 rtx set, new, x, seq;
1286
1287 /* There are some special cases to be handled first. */
1288 set = single_set (insn);
1289 if (set)
1290 {
1291 /* We're allowed to assign to a virtual register. This is interpreted
1292 to mean that the underlying register gets assigned the inverse
1293 transformation. This is used, for example, in the handling of
1294 non-local gotos. */
1295 new = instantiate_new_reg (SET_DEST (set), &offset);
1296 if (new)
1297 {
1298 start_sequence ();
1299
1300 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1301 x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1302 GEN_INT (-offset));
1303 x = force_operand (x, new);
1304 if (x != new)
1305 emit_move_insn (new, x);
1306
1307 seq = get_insns ();
1308 end_sequence ();
1309
1310 emit_insn_before (seq, insn);
1311 delete_insn (insn);
1312 return;
1313 }
1314
1315 /* Handle a straight copy from a virtual register by generating a
1316 new add insn. The difference between this and falling through
1317 to the generic case is avoiding a new pseudo and eliminating a
1318 move insn in the initial rtl stream. */
1319 new = instantiate_new_reg (SET_SRC (set), &offset);
1320 if (new && offset != 0
1321 && REG_P (SET_DEST (set))
1322 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1323 {
1324 start_sequence ();
1325
1326 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1327 new, GEN_INT (offset), SET_DEST (set),
1328 1, OPTAB_LIB_WIDEN);
1329 if (x != SET_DEST (set))
1330 emit_move_insn (SET_DEST (set), x);
1331
1332 seq = get_insns ();
1333 end_sequence ();
1334
1335 emit_insn_before (seq, insn);
1336 delete_insn (insn);
1337 return;
1338 }
1339
1340 extract_insn (insn);
1341 insn_code = INSN_CODE (insn);
1342
1343 /* Handle a plus involving a virtual register by determining if the
1344 operands remain valid if they're modified in place. */
1345 if (GET_CODE (SET_SRC (set)) == PLUS
1346 && recog_data.n_operands >= 3
1347 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1348 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1349 && GET_CODE (recog_data.operand[2]) == CONST_INT
1350 && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1351 {
1352 offset += INTVAL (recog_data.operand[2]);
1353
1354 /* If the sum is zero, then replace with a plain move. */
1355 if (offset == 0
1356 && REG_P (SET_DEST (set))
1357 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1358 {
1359 start_sequence ();
1360 emit_move_insn (SET_DEST (set), new);
1361 seq = get_insns ();
1362 end_sequence ();
1363
1364 emit_insn_before (seq, insn);
1365 delete_insn (insn);
1366 return;
1367 }
1368
1369 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1370
1371 /* Using validate_change and apply_change_group here leaves
1372 recog_data in an invalid state. Since we know exactly what
1373 we want to check, do those two by hand. */
1374 if (safe_insn_predicate (insn_code, 1, new)
1375 && safe_insn_predicate (insn_code, 2, x))
1376 {
1377 *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1378 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1379 any_change = true;
1380
1381 /* Fall through into the regular operand fixup loop in
1382 order to take care of operands other than 1 and 2. */
1383 }
1384 }
1385 }
1386 else
1387 {
1388 extract_insn (insn);
1389 insn_code = INSN_CODE (insn);
1390 }
1391
1392 /* In the general case, we expect virtual registers to appear only in
1393 operands, and then only as either bare registers or inside memories. */
1394 for (i = 0; i < recog_data.n_operands; ++i)
1395 {
1396 x = recog_data.operand[i];
1397 switch (GET_CODE (x))
1398 {
1399 case MEM:
1400 {
1401 rtx addr = XEXP (x, 0);
1402 bool changed = false;
1403
1404 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1405 if (!changed)
1406 continue;
1407
1408 start_sequence ();
1409 x = replace_equiv_address (x, addr);
1410 /* It may happen that the address with the virtual reg
1411 was valid (e.g. based on the virtual stack reg, which might
1412 be acceptable to the predicates with all offsets), whereas
1413 the address now isn't anymore, for instance when the address
1414 still has an offset, but the base reg isn't virtual-stack-reg
1415 anymore. Below we would do a force_reg on the whole operand,
1416 but this insn might actually only accept memory. Hence,
1417 before doing that last resort, try to reload the address into
1418 a register, so this operand stays a MEM. */
1419 if (!safe_insn_predicate (insn_code, i, x))
1420 {
1421 addr = force_reg (GET_MODE (addr), addr);
1422 x = replace_equiv_address (x, addr);
1423 }
1424 seq = get_insns ();
1425 end_sequence ();
1426 if (seq)
1427 emit_insn_before (seq, insn);
1428 }
1429 break;
1430
1431 case REG:
1432 new = instantiate_new_reg (x, &offset);
1433 if (new == NULL)
1434 continue;
1435 if (offset == 0)
1436 x = new;
1437 else
1438 {
1439 start_sequence ();
1440
1441 /* Careful, special mode predicates may have stuff in
1442 insn_data[insn_code].operand[i].mode that isn't useful
1443 to us for computing a new value. */
1444 /* ??? Recognize address_operand and/or "p" constraints
1445 to see if (plus new offset) is valid before we put
1446 this through expand_simple_binop. */
1447 x = expand_simple_binop (GET_MODE (x), PLUS, new,
1448 GEN_INT (offset), NULL_RTX,
1449 1, OPTAB_LIB_WIDEN);
1450 seq = get_insns ();
1451 end_sequence ();
1452 emit_insn_before (seq, insn);
1453 }
1454 break;
1455
1456 case SUBREG:
1457 new = instantiate_new_reg (SUBREG_REG (x), &offset);
1458 if (new == NULL)
1459 continue;
1460 if (offset != 0)
1461 {
1462 start_sequence ();
1463 new = expand_simple_binop (GET_MODE (new), PLUS, new,
1464 GEN_INT (offset), NULL_RTX,
1465 1, OPTAB_LIB_WIDEN);
1466 seq = get_insns ();
1467 end_sequence ();
1468 emit_insn_before (seq, insn);
1469 }
1470 x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1471 GET_MODE (new), SUBREG_BYTE (x));
1472 break;
1473
1474 default:
1475 continue;
1476 }
1477
1478 /* At this point, X contains the new value for the operand.
1479 Validate the new value vs the insn predicate. Note that
1480 asm insns will have insn_code -1 here. */
1481 if (!safe_insn_predicate (insn_code, i, x))
1482 {
1483 start_sequence ();
1484 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1485 seq = get_insns ();
1486 end_sequence ();
1487 if (seq)
1488 emit_insn_before (seq, insn);
1489 }
1490
1491 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1492 any_change = true;
1493 }
1494
1495 if (any_change)
1496 {
1497 /* Propagate operand changes into the duplicates. */
1498 for (i = 0; i < recog_data.n_dups; ++i)
1499 *recog_data.dup_loc[i]
1500 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1501
1502 /* Force re-recognition of the instruction for validation. */
1503 INSN_CODE (insn) = -1;
1504 }
1505
1506 if (asm_noperands (PATTERN (insn)) >= 0)
1507 {
1508 if (!check_asm_operands (PATTERN (insn)))
1509 {
1510 error_for_asm (insn, "impossible constraint in %<asm%>");
1511 delete_insn (insn);
1512 }
1513 }
1514 else
1515 {
1516 if (recog_memoized (insn) < 0)
1517 fatal_insn_not_found (insn);
1518 }
1519 }
1520
1521 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1522 do any instantiation required. */
1523
1524 void
1525 instantiate_decl_rtl (rtx x)
1526 {
1527 rtx addr;
1528
1529 if (x == 0)
1530 return;
1531
1532 /* If this is a CONCAT, recurse for the pieces. */
1533 if (GET_CODE (x) == CONCAT)
1534 {
1535 instantiate_decl_rtl (XEXP (x, 0));
1536 instantiate_decl_rtl (XEXP (x, 1));
1537 return;
1538 }
1539
1540 /* If this is not a MEM, no need to do anything. Similarly if the
1541 address is a constant or a register that is not a virtual register. */
1542 if (!MEM_P (x))
1543 return;
1544
1545 addr = XEXP (x, 0);
1546 if (CONSTANT_P (addr)
1547 || (REG_P (addr)
1548 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1549 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1550 return;
1551
1552 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1553 }
1554
1555 /* Helper for instantiate_decls called via walk_tree: Process all decls
1556 in the given DECL_VALUE_EXPR. */
1557
1558 static tree
1559 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1560 {
1561 tree t = *tp;
1562 if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
1563 {
1564 *walk_subtrees = 0;
1565 if (DECL_P (t) && DECL_RTL_SET_P (t))
1566 instantiate_decl_rtl (DECL_RTL (t));
1567 }
1568 return NULL;
1569 }
1570
1571 /* Subroutine of instantiate_decls: Process all decls in the given
1572 BLOCK node and all its subblocks. */
1573
1574 static void
1575 instantiate_decls_1 (tree let)
1576 {
1577 tree t;
1578
1579 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1580 {
1581 if (DECL_RTL_SET_P (t))
1582 instantiate_decl_rtl (DECL_RTL (t));
1583 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1584 {
1585 tree v = DECL_VALUE_EXPR (t);
1586 walk_tree (&v, instantiate_expr, NULL, NULL);
1587 }
1588 }
1589
1590 /* Process all subblocks. */
1591 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1592 instantiate_decls_1 (t);
1593 }
1594
1595 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1596 all virtual registers in their DECL_RTL's. */
1597
1598 static void
1599 instantiate_decls (tree fndecl)
1600 {
1601 tree decl;
1602
1603 /* Process all parameters of the function. */
1604 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1605 {
1606 instantiate_decl_rtl (DECL_RTL (decl));
1607 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1608 if (DECL_HAS_VALUE_EXPR_P (decl))
1609 {
1610 tree v = DECL_VALUE_EXPR (decl);
1611 walk_tree (&v, instantiate_expr, NULL, NULL);
1612 }
1613 }
1614
1615 /* Now process all variables defined in the function or its subblocks. */
1616 instantiate_decls_1 (DECL_INITIAL (fndecl));
1617 }
1618
1619 /* Pass through the INSNS of function FNDECL and convert virtual register
1620 references to hard register references. */
1621
1622 static unsigned int
1623 instantiate_virtual_regs (void)
1624 {
1625 rtx insn;
1626
1627 /* Compute the offsets to use for this function. */
1628 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1629 var_offset = STARTING_FRAME_OFFSET;
1630 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1631 out_arg_offset = STACK_POINTER_OFFSET;
1632 #ifdef FRAME_POINTER_CFA_OFFSET
1633 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1634 #else
1635 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1636 #endif
1637
1638 /* Initialize recognition, indicating that volatile is OK. */
1639 init_recog ();
1640
1641 /* Scan through all the insns, instantiating every virtual register still
1642 present. */
1643 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1644 if (INSN_P (insn))
1645 {
1646 /* These patterns in the instruction stream can never be recognized.
1647 Fortunately, they shouldn't contain virtual registers either. */
1648 if (GET_CODE (PATTERN (insn)) == USE
1649 || GET_CODE (PATTERN (insn)) == CLOBBER
1650 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1651 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1652 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1653 continue;
1654
1655 instantiate_virtual_regs_in_insn (insn);
1656
1657 if (INSN_DELETED_P (insn))
1658 continue;
1659
1660 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1661
1662 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1663 if (GET_CODE (insn) == CALL_INSN)
1664 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1665 instantiate_virtual_regs_in_rtx, NULL);
1666 }
1667
1668 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1669 instantiate_decls (current_function_decl);
1670
1671 targetm.instantiate_decls ();
1672
1673 /* Indicate that, from now on, assign_stack_local should use
1674 frame_pointer_rtx. */
1675 virtuals_instantiated = 1;
1676 return 0;
1677 }
1678
1679 struct rtl_opt_pass pass_instantiate_virtual_regs =
1680 {
1681 {
1682 RTL_PASS,
1683 "vregs", /* name */
1684 NULL, /* gate */
1685 instantiate_virtual_regs, /* execute */
1686 NULL, /* sub */
1687 NULL, /* next */
1688 0, /* static_pass_number */
1689 0, /* tv_id */
1690 0, /* properties_required */
1691 0, /* properties_provided */
1692 0, /* properties_destroyed */
1693 0, /* todo_flags_start */
1694 TODO_dump_func /* todo_flags_finish */
1695 }
1696 };
1697
1698 \f
1699 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1700 This means a type for which function calls must pass an address to the
1701 function or get an address back from the function.
1702 EXP may be a type node or an expression (whose type is tested). */
1703
1704 int
1705 aggregate_value_p (const_tree exp, const_tree fntype)
1706 {
1707 int i, regno, nregs;
1708 rtx reg;
1709
1710 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1711
1712 /* DECL node associated with FNTYPE when relevant, which we might need to
1713 check for by-invisible-reference returns, typically for CALL_EXPR input
1714 EXPressions. */
1715 const_tree fndecl = NULL_TREE;
1716
1717 if (fntype)
1718 switch (TREE_CODE (fntype))
1719 {
1720 case CALL_EXPR:
1721 fndecl = get_callee_fndecl (fntype);
1722 fntype = fndecl ? TREE_TYPE (fndecl) : 0;
1723 break;
1724 case FUNCTION_DECL:
1725 fndecl = fntype;
1726 fntype = TREE_TYPE (fndecl);
1727 break;
1728 case FUNCTION_TYPE:
1729 case METHOD_TYPE:
1730 break;
1731 case IDENTIFIER_NODE:
1732 fntype = 0;
1733 break;
1734 default:
1735 /* We don't expect other tree codes here.  */
1736 gcc_unreachable ();
1737 }
1738
1739 if (TREE_CODE (type) == VOID_TYPE)
1740 return 0;
1741
1742 /* If the front end has decided that this needs to be passed by
1743 reference, do so. */
1744 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1745 && DECL_BY_REFERENCE (exp))
1746 return 1;
1747
1748 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1749 called function RESULT_DECL, meaning the function returns in memory by
1750 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1751 on the function type, which used to be the way to request such a return
1752 mechanism but might now be causing troubles at gimplification time if
1753 temporaries with the function type need to be created. */
1754 if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1755 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1756 return 1;
1757
1758 if (targetm.calls.return_in_memory (type, fntype))
1759 return 1;
1760 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1761 and thus can't be returned in registers. */
1762 if (TREE_ADDRESSABLE (type))
1763 return 1;
1764 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1765 return 1;
1766 /* Make sure we have suitable call-clobbered regs to return
1767 the value in; if not, we must return it in memory. */
1768 reg = hard_function_value (type, 0, fntype, 0);
1769
1770 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1771 it is OK. */
1772 if (!REG_P (reg))
1773 return 0;
1774
1775 regno = REGNO (reg);
1776 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1777 for (i = 0; i < nregs; i++)
1778 if (! call_used_regs[regno + i])
1779 return 1;
1780 return 0;
1781 }
1782 \f
1783 /* Return true if we should assign DECL a pseudo register; false if it
1784 should live on the local stack. */
1785
1786 bool
1787 use_register_for_decl (const_tree decl)
1788 {
1789 /* Honor volatile. */
1790 if (TREE_SIDE_EFFECTS (decl))
1791 return false;
1792
1793 /* Honor addressability. */
1794 if (TREE_ADDRESSABLE (decl))
1795 return false;
1796
1797 /* Only register-like things go in registers. */
1798 if (DECL_MODE (decl) == BLKmode)
1799 return false;
1800
1801 /* If -ffloat-store specified, don't put explicit float variables
1802 into registers. */
1803 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1804 propagates values across these stores, and it probably shouldn't. */
1805 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1806 return false;
1807
1808 /* If we're not interested in tracking debugging information for
1809 this decl, then we can certainly put it in a register. */
1810 if (DECL_IGNORED_P (decl))
1811 return true;
1812
1813 return (optimize || DECL_REGISTER (decl));
1814 }
1815
1816 /* Return true if TYPE should be passed by invisible reference. */
1817
1818 bool
1819 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1820 tree type, bool named_arg)
1821 {
1822 if (type)
1823 {
1824 /* If this type contains non-trivial constructors, then it is
1825 forbidden for the middle-end to create any new copies. */
1826 if (TREE_ADDRESSABLE (type))
1827 return true;
1828
1829 /* GCC post 3.4 passes *all* variable sized types by reference. */
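 /* For example, variable-sized aggregates from front ends such as Ada,
 whose TYPE_SIZE is a computed expression rather than an INTEGER_CST,
 fall under this rule. */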
1830 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1831 return true;
1832 }
1833
1834 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1835 }
1836
1837 /* Return true if TYPE, which is passed by reference, should be callee
1838 copied instead of caller copied. */
1839
1840 bool
1841 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1842 tree type, bool named_arg)
1843 {
1844 if (type && TREE_ADDRESSABLE (type))
1845 return false;
1846 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1847 }
1848
1849 /* Structures to communicate between the subroutines of assign_parms.
1850 The first holds data persistent across all parameters, the second
1851 is cleared out for each parameter. */
1852
1853 struct assign_parm_data_all
1854 {
1855 CUMULATIVE_ARGS args_so_far;
1856 struct args_size stack_args_size;
1857 tree function_result_decl;
1858 tree orig_fnargs;
1859 rtx first_conversion_insn;
1860 rtx last_conversion_insn;
1861 HOST_WIDE_INT pretend_args_size;
1862 HOST_WIDE_INT extra_pretend_bytes;
1863 int reg_parm_stack_space;
1864 };
1865
1866 struct assign_parm_data_one
1867 {
1868 tree nominal_type;
1869 tree passed_type;
1870 rtx entry_parm;
1871 rtx stack_parm;
1872 enum machine_mode nominal_mode;
1873 enum machine_mode passed_mode;
1874 enum machine_mode promoted_mode;
1875 struct locate_and_pad_arg_data locate;
1876 int partial;
1877 BOOL_BITFIELD named_arg : 1;
1878 BOOL_BITFIELD passed_pointer : 1;
1879 BOOL_BITFIELD on_stack : 1;
1880 BOOL_BITFIELD loaded_in_reg : 1;
1881 };
1882
1883 /* A subroutine of assign_parms. Initialize ALL. */
1884
1885 static void
1886 assign_parms_initialize_all (struct assign_parm_data_all *all)
1887 {
1888 tree fntype;
1889
1890 memset (all, 0, sizeof (*all));
1891
1892 fntype = TREE_TYPE (current_function_decl);
1893
1894 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1895 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1896 #else
1897 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1898 current_function_decl, -1);
1899 #endif
1900
1901 #ifdef REG_PARM_STACK_SPACE
1902 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1903 #endif
1904 }
1905
1906 /* If ARGS contains entries with complex types, split the entry into two
1907 entries of the component type. Return a new list if substitutions are
1908 needed, else the old list. */
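 /* A sketch of the effect, assuming the target's split_complex_arg hook
 accepts the type: a parameter declared as
 
 void f (_Complex double z);
 
 is rewritten below into two adjacent double PARM_DECLs (real part,
 then imaginary part); assign_parms_unsplit_complex later reassembles
 the pair into a single rtl value for Z. */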
1909
1910 static tree
1911 split_complex_args (tree args)
1912 {
1913 tree p;
1914
1915 /* Before allocating memory, check for the common case of no complex args. */
1916 for (p = args; p; p = TREE_CHAIN (p))
1917 {
1918 tree type = TREE_TYPE (p);
1919 if (TREE_CODE (type) == COMPLEX_TYPE
1920 && targetm.calls.split_complex_arg (type))
1921 goto found;
1922 }
1923 return args;
1924
1925 found:
1926 args = copy_list (args);
1927
1928 for (p = args; p; p = TREE_CHAIN (p))
1929 {
1930 tree type = TREE_TYPE (p);
1931 if (TREE_CODE (type) == COMPLEX_TYPE
1932 && targetm.calls.split_complex_arg (type))
1933 {
1934 tree decl;
1935 tree subtype = TREE_TYPE (type);
1936 bool addressable = TREE_ADDRESSABLE (p);
1937
1938 /* Rewrite the PARM_DECL's type with its component. */
1939 TREE_TYPE (p) = subtype;
1940 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1941 DECL_MODE (p) = VOIDmode;
1942 DECL_SIZE (p) = NULL;
1943 DECL_SIZE_UNIT (p) = NULL;
1944 /* If this arg must go in memory, put it in a pseudo here.
1945 We can't allow it to go in memory as per normal parms,
1946 because the usual place might not have the imag part
1947 adjacent to the real part. */
1948 DECL_ARTIFICIAL (p) = addressable;
1949 DECL_IGNORED_P (p) = addressable;
1950 TREE_ADDRESSABLE (p) = 0;
1951 layout_decl (p, 0);
1952
1953 /* Build a second synthetic decl. */
1954 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1955 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1956 DECL_ARTIFICIAL (decl) = addressable;
1957 DECL_IGNORED_P (decl) = addressable;
1958 layout_decl (decl, 0);
1959
1960 /* Splice it in; skip the new decl. */
1961 TREE_CHAIN (decl) = TREE_CHAIN (p);
1962 TREE_CHAIN (p) = decl;
1963 p = decl;
1964 }
1965 }
1966
1967 return args;
1968 }
1969
1970 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
1971 the hidden struct return argument, and (abi willing) complex args.
1972 Return the new parameter list. */
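 /* A hedged example: for
 
 struct big f (int x);
 
 on a target whose struct_value_rtx hook returns 0 (and when
 aggregate_value_p is true for the result), the list (x) becomes
 (<synthetic pointer parm>, x), the synthetic parm carrying the
 address at which the caller wants the result stored. */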
1973
1974 static tree
1975 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
1976 {
1977 tree fndecl = current_function_decl;
1978 tree fntype = TREE_TYPE (fndecl);
1979 tree fnargs = DECL_ARGUMENTS (fndecl);
1980
1981 /* If struct value address is treated as the first argument, make it so. */
1982 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
1983 && ! current_function_returns_pcc_struct
1984 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
1985 {
1986 tree type = build_pointer_type (TREE_TYPE (fntype));
1987 tree decl;
1988
1989 decl = build_decl (PARM_DECL, NULL_TREE, type);
1990 DECL_ARG_TYPE (decl) = type;
1991 DECL_ARTIFICIAL (decl) = 1;
1992 DECL_IGNORED_P (decl) = 1;
1993
1994 TREE_CHAIN (decl) = fnargs;
1995 fnargs = decl;
1996 all->function_result_decl = decl;
1997 }
1998
1999 all->orig_fnargs = fnargs;
2000
2001 /* If the target wants to split complex arguments into scalars, do so. */
2002 if (targetm.calls.split_complex_arg)
2003 fnargs = split_complex_args (fnargs);
2004
2005 return fnargs;
2006 }
2007
2008 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2009 data for the parameter. Incorporate ABI specifics such as pass-by-
2010 reference and type promotion. */
2011
2012 static void
2013 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2014 struct assign_parm_data_one *data)
2015 {
2016 tree nominal_type, passed_type;
2017 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2018
2019 memset (data, 0, sizeof (*data));
2020
2021 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2022 if (!current_function_stdarg)
2023 data->named_arg = 1; /* No variadic parms. */
2024 else if (TREE_CHAIN (parm))
2025 data->named_arg = 1; /* Not the last non-variadic parm. */
2026 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2027 data->named_arg = 1; /* Only variadic ones are unnamed. */
2028 else
2029 data->named_arg = 0; /* Treat as variadic. */
2030
2031 nominal_type = TREE_TYPE (parm);
2032 passed_type = DECL_ARG_TYPE (parm);
2033
2034 /* Look out for errors propagating this far. Also, if the parameter's
2035 type is void then its value doesn't matter. */
2036 if (TREE_TYPE (parm) == error_mark_node
2037 /* This can happen after weird syntax errors
2038 or if an enum type is defined among the parms. */
2039 || TREE_CODE (parm) != PARM_DECL
2040 || passed_type == NULL
2041 || VOID_TYPE_P (nominal_type))
2042 {
2043 nominal_type = passed_type = void_type_node;
2044 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2045 goto egress;
2046 }
2047
2048 /* Find mode of arg as it is passed, and mode of arg as it should be
2049 during execution of this function. */
2050 passed_mode = TYPE_MODE (passed_type);
2051 nominal_mode = TYPE_MODE (nominal_type);
2052
2053 /* If the parm is to be passed as a transparent union, use the type of
2054 the first field for the tests below. We have already verified that
2055 the modes are the same. */
2056 if (TREE_CODE (passed_type) == UNION_TYPE
2057 && TYPE_TRANSPARENT_UNION (passed_type))
2058 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2059
2060 /* See if this arg was passed by invisible reference. */
2061 if (pass_by_reference (&all->args_so_far, passed_mode,
2062 passed_type, data->named_arg))
2063 {
2064 passed_type = nominal_type = build_pointer_type (passed_type);
2065 data->passed_pointer = true;
2066 passed_mode = nominal_mode = Pmode;
2067 }
2068
2069 /* Find mode as it is passed by the ABI. */
2070 promoted_mode = passed_mode;
2071 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2072 {
2073 int unsignedp = TYPE_UNSIGNED (passed_type);
2074 promoted_mode = promote_mode (passed_type, promoted_mode,
2075 &unsignedp, 1);
2076 }
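 /* E.g. on a target whose promote_function_args hook returns true, a
 'short' argument (HImode passed_mode) typically gets a promoted_mode
 of SImode here; the precise mode is whatever promote_mode chooses
 for the target. */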
2077
2078 egress:
2079 data->nominal_type = nominal_type;
2080 data->passed_type = passed_type;
2081 data->nominal_mode = nominal_mode;
2082 data->passed_mode = passed_mode;
2083 data->promoted_mode = promoted_mode;
2084 }
2085
2086 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2087
2088 static void
2089 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2090 struct assign_parm_data_one *data, bool no_rtl)
2091 {
2092 int varargs_pretend_bytes = 0;
2093
2094 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2095 data->promoted_mode,
2096 data->passed_type,
2097 &varargs_pretend_bytes, no_rtl);
2098
2099 /* If the back-end has requested extra stack space, record how much is
2100 needed. Do not change pretend_args_size otherwise since it may be
2101 nonzero from an earlier partial argument. */
2102 if (varargs_pretend_bytes > 0)
2103 all->pretend_args_size = varargs_pretend_bytes;
2104 }
2105
2106 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2107 the incoming location of the current parameter. */
2108
2109 static void
2110 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2111 struct assign_parm_data_one *data)
2112 {
2113 HOST_WIDE_INT pretend_bytes = 0;
2114 rtx entry_parm;
2115 bool in_regs;
2116
2117 if (data->promoted_mode == VOIDmode)
2118 {
2119 data->entry_parm = data->stack_parm = const0_rtx;
2120 return;
2121 }
2122
2123 #ifdef FUNCTION_INCOMING_ARG
2124 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2125 data->passed_type, data->named_arg);
2126 #else
2127 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2128 data->passed_type, data->named_arg);
2129 #endif
2130
2131 if (entry_parm == 0)
2132 data->promoted_mode = data->passed_mode;
2133
2134 /* Determine parm's home in the stack, in case it arrives in the stack
2135 or we should pretend it did. Compute the stack position and rtx where
2136 the argument arrives and its size.
2137
2138 There is one complexity here: If this was a parameter that would
2139 have been passed in registers, but wasn't only because it is
2140 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2141 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2142 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2143 as it was the previous time. */
2144 in_regs = entry_parm != 0;
2145 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2146 in_regs = true;
2147 #endif
2148 if (!in_regs && !data->named_arg)
2149 {
2150 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2151 {
2152 rtx tem;
2153 #ifdef FUNCTION_INCOMING_ARG
2154 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2155 data->passed_type, true);
2156 #else
2157 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2158 data->passed_type, true);
2159 #endif
2160 in_regs = tem != NULL;
2161 }
2162 }
2163
2164 /* If this parameter was passed both in registers and in the stack, use
2165 the copy on the stack. */
2166 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2167 data->passed_type))
2168 entry_parm = 0;
2169
2170 if (entry_parm)
2171 {
2172 int partial;
2173
2174 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2175 data->promoted_mode,
2176 data->passed_type,
2177 data->named_arg);
2178 data->partial = partial;
2179
2180 /* The caller might already have allocated stack space for the
2181 register parameters. */
2182 if (partial != 0 && all->reg_parm_stack_space == 0)
2183 {
2184 /* Part of this argument is passed in registers and part
2185 is passed on the stack. Ask the prologue code to extend
2186 the stack part so that we can recreate the full value.
2187
2188 PRETEND_BYTES is the size of the registers we need to store.
2189 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2190 stack space that the prologue should allocate.
2191
2192 Internally, gcc assumes that the argument pointer is aligned
2193 to STACK_BOUNDARY bits. This is used both for alignment
2194 optimizations (see init_emit) and to locate arguments that are
2195 aligned to more than PARM_BOUNDARY bits. We must preserve this
2196 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2197 a stack boundary. */
2198
2199 /* We assume at most one partial arg, and it must be the first
2200 argument on the stack. */
2201 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2202
2203 pretend_bytes = partial;
2204 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
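 /* Worked example, assuming STACK_BYTES == 16: a partial argument
 with 4 bytes passed in registers gives pretend_bytes == 4 and a
 pretend_args_size of 16, i.e. the prologue reserves a full
 stack-boundary chunk for the register part. */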
2205
2206 /* We want to align relative to the actual stack pointer, so
2207 don't include this in the stack size until later. */
2208 all->extra_pretend_bytes = all->pretend_args_size;
2209 }
2210 }
2211
2212 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2213 entry_parm ? data->partial : 0, current_function_decl,
2214 &all->stack_args_size, &data->locate);
2215
2216 /* Adjust offsets to include the pretend args. */
2217 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2218 data->locate.slot_offset.constant += pretend_bytes;
2219 data->locate.offset.constant += pretend_bytes;
2220
2221 data->entry_parm = entry_parm;
2222 }
2223
2224 /* A subroutine of assign_parms. If there is actually space on the stack
2225 for this parm, count it in stack_args_size and return true. */
2226
2227 static bool
2228 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2229 struct assign_parm_data_one *data)
2230 {
2231 /* Trivially true if we've no incoming register. */
2232 if (data->entry_parm == NULL)
2233 ;
2234 /* Also true if we're partially in registers and partially not,
2235 since we've arranged to drop the entire argument on the stack. */
2236 else if (data->partial != 0)
2237 ;
2238 /* Also true if the target says that it's passed in both registers
2239 and on the stack. */
2240 else if (GET_CODE (data->entry_parm) == PARALLEL
2241 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2242 ;
2243 /* Also true if the target says that there's stack allocated for
2244 all register parameters. */
2245 else if (all->reg_parm_stack_space > 0)
2246 ;
2247 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2248 else
2249 return false;
2250
2251 all->stack_args_size.constant += data->locate.size.constant;
2252 if (data->locate.size.var)
2253 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2254
2255 return true;
2256 }
2257
2258 /* A subroutine of assign_parms. Given that this parameter is allocated
2259 stack space by the ABI, find it. */
2260
2261 static void
2262 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2263 {
2264 rtx offset_rtx, stack_parm;
2265 unsigned int align, boundary;
2266
2267 /* If we're passing this arg using a reg, make its stack home the
2268 aligned stack slot. */
2269 if (data->entry_parm)
2270 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2271 else
2272 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2273
2274 stack_parm = current_function_internal_arg_pointer;
2275 if (offset_rtx != const0_rtx)
2276 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2277 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2278
2279 set_mem_attributes (stack_parm, parm, 1);
2280
2281 boundary = data->locate.boundary;
2282 align = BITS_PER_UNIT;
2283
2284 /* If we're padding upward, we know that the alignment of the slot
2285 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2286 intentionally forcing upward padding. Otherwise we have to come
2287 up with a guess at the alignment based on OFFSET_RTX. */
2288 if (data->locate.where_pad != downward || data->entry_parm)
2289 align = boundary;
2290 else if (GET_CODE (offset_rtx) == CONST_INT)
2291 {
2292 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2293 align = align & -align;
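 /* align & -align isolates the lowest set bit, i.e. the largest
 power of two dividing both the byte offset and the boundary.
 E.g. a 4 byte offset with a 64-bit boundary gives (32 | 64) == 96,
 whose lowest set bit is 32, so the slot is known to be 32-bit
 aligned. */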
2294 }
2295 set_mem_align (stack_parm, align);
2296
2297 if (data->entry_parm)
2298 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2299
2300 data->stack_parm = stack_parm;
2301 }
2302
2303 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2304 always valid and contiguous. */
2305
2306 static void
2307 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2308 {
2309 rtx entry_parm = data->entry_parm;
2310 rtx stack_parm = data->stack_parm;
2311
2312 /* If this parm was passed part in regs and part in memory, pretend it
2313 arrived entirely in memory by pushing the register-part onto the stack.
2314 In the special case of a DImode or DFmode that is split, we could put
2315 it together in a pseudoreg directly, but for now that's not worth
2316 bothering with. */
2317 if (data->partial != 0)
2318 {
2319 /* Handle calls that pass values in multiple non-contiguous
2320 locations. The Irix 6 ABI has examples of this. */
2321 if (GET_CODE (entry_parm) == PARALLEL)
2322 emit_group_store (validize_mem (stack_parm), entry_parm,
2323 data->passed_type,
2324 int_size_in_bytes (data->passed_type));
2325 else
2326 {
2327 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2328 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2329 data->partial / UNITS_PER_WORD);
2330 }
2331
2332 entry_parm = stack_parm;
2333 }
2334
2335 /* If we didn't decide this parm came in a register, by default it came
2336 on the stack. */
2337 else if (entry_parm == NULL)
2338 entry_parm = stack_parm;
2339
2340 /* When an argument is passed in multiple locations, we can't make use
2341 of this information, but we can save some copying if the whole argument
2342 is passed in a single register. */
2343 else if (GET_CODE (entry_parm) == PARALLEL
2344 && data->nominal_mode != BLKmode
2345 && data->passed_mode != BLKmode)
2346 {
2347 size_t i, len = XVECLEN (entry_parm, 0);
2348
2349 for (i = 0; i < len; i++)
2350 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2351 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2352 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2353 == data->passed_mode)
2354 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2355 {
2356 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2357 break;
2358 }
2359 }
2360
2361 data->entry_parm = entry_parm;
2362 }
2363
2364 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2365 always valid and properly aligned. */
2366
2367 static void
2368 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2369 {
2370 rtx stack_parm = data->stack_parm;
2371
2372 /* If we can't trust the parm stack slot to be aligned enough for its
2373 ultimate type, don't use that slot after entry. We'll make another
2374 stack slot, if we need one. */
2375 if (stack_parm
2376 && ((STRICT_ALIGNMENT
2377 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2378 || (data->nominal_type
2379 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2380 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2381 stack_parm = NULL;
2382
2383 /* If parm was passed in memory, and we need to convert it on entry,
2384 don't store it back in that same slot. */
2385 else if (data->entry_parm == stack_parm
2386 && data->nominal_mode != BLKmode
2387 && data->nominal_mode != data->passed_mode)
2388 stack_parm = NULL;
2389
2390 /* If stack protection is in effect for this function, don't leave any
2391 pointers in their passed stack slots. */
2392 else if (cfun->stack_protect_guard
2393 && (flag_stack_protect == 2
2394 || data->passed_pointer
2395 || POINTER_TYPE_P (data->nominal_type)))
2396 stack_parm = NULL;
2397
2398 data->stack_parm = stack_parm;
2399 }
2400
2401 /* A subroutine of assign_parms. Return true if the current parameter
2402 should be stored as a BLKmode in the current frame. */
2403
2404 static bool
2405 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2406 {
2407 if (data->nominal_mode == BLKmode)
2408 return true;
2409 if (GET_CODE (data->entry_parm) == PARALLEL)
2410 return true;
2411
2412 #ifdef BLOCK_REG_PADDING
2413 /* Only assign_parm_setup_block knows how to deal with register arguments
2414 that are padded at the least significant end. */
2415 if (REG_P (data->entry_parm)
2416 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2417 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2418 == (BYTES_BIG_ENDIAN ? upward : downward)))
2419 return true;
2420 #endif
2421
2422 return false;
2423 }
2424
2425 /* A subroutine of assign_parms. Arrange for the parameter to be
2426 present and valid in DATA->STACK_RTL. */
2427
2428 static void
2429 assign_parm_setup_block (struct assign_parm_data_all *all,
2430 tree parm, struct assign_parm_data_one *data)
2431 {
2432 rtx entry_parm = data->entry_parm;
2433 rtx stack_parm = data->stack_parm;
2434 HOST_WIDE_INT size;
2435 HOST_WIDE_INT size_stored;
2436 rtx orig_entry_parm = entry_parm;
2437
2438 if (GET_CODE (entry_parm) == PARALLEL)
2439 entry_parm = emit_group_move_into_temps (entry_parm);
2440
2441 /* If we have a non-block object that's nevertheless passed in parts,
2442 reconstitute it with register operations rather than on the stack. */
2443 if (GET_CODE (entry_parm) == PARALLEL
2444 && data->nominal_mode != BLKmode)
2445 {
2446 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2447
2448 if ((XVECLEN (entry_parm, 0) > 1
2449 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2450 && use_register_for_decl (parm))
2451 {
2452 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2453
2454 push_to_sequence2 (all->first_conversion_insn,
2455 all->last_conversion_insn);
2456
2457 /* For values returned in multiple registers, handle possible
2458 incompatible calls to emit_group_store.
2459
2460 For example, the following would be invalid, and would have to
2461 be fixed by the conditional below:
2462
2463 emit_group_store ((reg:SF), (parallel:DF))
2464 emit_group_store ((reg:SI), (parallel:DI))
2465
2466 An example of this is doubles in e500 v2:
2467 (parallel:DF (expr_list (reg:SI) (const_int 0))
2468 (expr_list (reg:SI) (const_int 4))). */
2469 if (data->nominal_mode != data->passed_mode)
2470 {
2471 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2472 emit_group_store (t, entry_parm, NULL_TREE,
2473 GET_MODE_SIZE (GET_MODE (entry_parm)));
2474 convert_move (parmreg, t, 0);
2475 }
2476 else
2477 emit_group_store (parmreg, entry_parm, data->nominal_type,
2478 int_size_in_bytes (data->nominal_type));
2479
2480 all->first_conversion_insn = get_insns ();
2481 all->last_conversion_insn = get_last_insn ();
2482 end_sequence ();
2483
2484 SET_DECL_RTL (parm, parmreg);
2485 return;
2486 }
2487 }
2488
2489 size = int_size_in_bytes (data->passed_type);
2490 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2491 if (stack_parm == 0)
2492 {
2493 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2494 stack_parm = assign_stack_local (BLKmode, size_stored,
2495 DECL_ALIGN (parm));
2496 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2497 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2498 set_mem_attributes (stack_parm, parm, 1);
2499 }
2500
2501 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2502 calls that pass values in multiple non-contiguous locations. */
2503 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2504 {
2505 rtx mem;
2506
2507 /* Note that we will be storing an integral number of words.
2508 So we have to be careful to ensure that we allocate an
2509 integral number of words. We do this above when we call
2510 assign_stack_local if space was not allocated in the argument
2511 list. If it was, this will not work if PARM_BOUNDARY is not
2512 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2513 if it becomes a problem. The exception is when BLKmode arrives
2514 with arguments not conforming to word_mode. */
2515
2516 if (data->stack_parm == 0)
2517 ;
2518 else if (GET_CODE (entry_parm) == PARALLEL)
2519 ;
2520 else
2521 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2522
2523 mem = validize_mem (stack_parm);
2524
2525 /* Handle values in multiple non-contiguous locations. */
2526 if (GET_CODE (entry_parm) == PARALLEL)
2527 {
2528 push_to_sequence2 (all->first_conversion_insn,
2529 all->last_conversion_insn);
2530 emit_group_store (mem, entry_parm, data->passed_type, size);
2531 all->first_conversion_insn = get_insns ();
2532 all->last_conversion_insn = get_last_insn ();
2533 end_sequence ();
2534 }
2535
2536 else if (size == 0)
2537 ;
2538
2539 /* If SIZE is that of a mode no bigger than a word, just use
2540 that mode's store operation. */
2541 else if (size <= UNITS_PER_WORD)
2542 {
2543 enum machine_mode mode
2544 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2545
2546 if (mode != BLKmode
2547 #ifdef BLOCK_REG_PADDING
2548 && (size == UNITS_PER_WORD
2549 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2550 != (BYTES_BIG_ENDIAN ? upward : downward)))
2551 #endif
2552 )
2553 {
2554 rtx reg;
2555
2556 /* We are really truncating a word_mode value containing
2557 SIZE bytes into a value of mode MODE. If such an
2558 operation requires no actual instructions, we can refer
2559 to the value directly in mode MODE, otherwise we must
2560 start with the register in word_mode and explicitly
2561 convert it. */
2562 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2563 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2564 else
2565 {
2566 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2567 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2568 }
2569 emit_move_insn (change_address (mem, mode, 0), reg);
2570 }
2571
2572 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2573 machine must be aligned to the left before storing
2574 to memory. Note that the previous test doesn't
2575 handle all cases (e.g. SIZE == 3). */
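 /* For instance, with 4-byte words and SIZE == 3 the shift count
 below is (4 - 3) * 8 == 8 bits, which moves the three significant
 bytes to the most-significant end of the word before the
 word_mode store. (UNITS_PER_WORD is of course target-defined.) */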
2576 else if (size != UNITS_PER_WORD
2577 #ifdef BLOCK_REG_PADDING
2578 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2579 == downward)
2580 #else
2581 && BYTES_BIG_ENDIAN
2582 #endif
2583 )
2584 {
2585 rtx tem, x;
2586 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2587 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2588
2589 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2590 build_int_cst (NULL_TREE, by),
2591 NULL_RTX, 1);
2592 tem = change_address (mem, word_mode, 0);
2593 emit_move_insn (tem, x);
2594 }
2595 else
2596 move_block_from_reg (REGNO (entry_parm), mem,
2597 size_stored / UNITS_PER_WORD);
2598 }
2599 else
2600 move_block_from_reg (REGNO (entry_parm), mem,
2601 size_stored / UNITS_PER_WORD);
2602 }
2603 else if (data->stack_parm == 0)
2604 {
2605 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2606 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2607 BLOCK_OP_NORMAL);
2608 all->first_conversion_insn = get_insns ();
2609 all->last_conversion_insn = get_last_insn ();
2610 end_sequence ();
2611 }
2612
2613 data->stack_parm = stack_parm;
2614 SET_DECL_RTL (parm, stack_parm);
2615 }
2616
2617 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2618 parameter. Get it there. Perform all ABI specified conversions. */
2619
2620 static void
2621 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2622 struct assign_parm_data_one *data)
2623 {
2624 rtx parmreg;
2625 enum machine_mode promoted_nominal_mode;
2626 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2627 bool did_conversion = false;
2628
2629 /* Store the parm in a pseudoregister during the function, but we may
2630 need to do it in a wider mode. */
2631
2632 /* This is not really promoting for a call. However we need to be
2633 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2634 promoted_nominal_mode
2635 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2636
2637 parmreg = gen_reg_rtx (promoted_nominal_mode);
2638
2639 if (!DECL_ARTIFICIAL (parm))
2640 mark_user_reg (parmreg);
2641
2642 /* If this was an item that we received a pointer to,
2643 set DECL_RTL appropriately. */
2644 if (data->passed_pointer)
2645 {
2646 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2647 set_mem_attributes (x, parm, 1);
2648 SET_DECL_RTL (parm, x);
2649 }
2650 else
2651 SET_DECL_RTL (parm, parmreg);
2652
2653 /* Copy the value into the register. */
2654 if (data->nominal_mode != data->passed_mode
2655 || promoted_nominal_mode != data->promoted_mode)
2656 {
2657 int save_tree_used;
2658
2659 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2660 mode, by the caller. We now have to convert it to
2661 NOMINAL_MODE, if different. However, PARMREG may be in
2662 a different mode than NOMINAL_MODE if it is being stored
2663 promoted.
2664
2665 If ENTRY_PARM is a hard register, it might be in a register
2666 not valid for operating in its mode (e.g., an odd-numbered
2667 register for a DFmode). In that case, moves are the only
2668 thing valid, so we can't do a convert from there. This
2669 occurs when the calling sequence allows such misaligned
2670 usages.
2671
2672 In addition, the conversion may involve a call, which could
2673 clobber parameters which haven't been copied to pseudo
2674 registers yet. Therefore, we must first copy the parm to
2675 a pseudo reg here, and save the conversion until after all
2676 parameters have been moved. */
2677
2678 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2679
2680 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2681
2682 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2683 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2684
2685 if (GET_CODE (tempreg) == SUBREG
2686 && GET_MODE (tempreg) == data->nominal_mode
2687 && REG_P (SUBREG_REG (tempreg))
2688 && data->nominal_mode == data->passed_mode
2689 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2690 && GET_MODE_SIZE (GET_MODE (tempreg))
2691 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2692 {
2693 /* The argument is already sign/zero extended, so note it
2694 into the subreg. */
2695 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2696 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2697 }
2698
2699 /* TREE_USED gets set erroneously during expand_assignment. */
2700 save_tree_used = TREE_USED (parm);
2701 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2702 TREE_USED (parm) = save_tree_used;
2703 all->first_conversion_insn = get_insns ();
2704 all->last_conversion_insn = get_last_insn ();
2705 end_sequence ();
2706
2707 did_conversion = true;
2708 }
2709 else
2710 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2711
2712 /* If we were passed a pointer but the actual value can safely live
2713 in a register, put it in one. */
2714 if (data->passed_pointer
2715 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2716 /* If by-reference argument was promoted, demote it. */
2717 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2718 || use_register_for_decl (parm)))
2719 {
2720 /* We can't use nominal_mode, because it will have been set to
2721 Pmode above. We must use the actual mode of the parm. */
2722 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2723 mark_user_reg (parmreg);
2724
2725 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2726 {
2727 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2728 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2729
2730 push_to_sequence2 (all->first_conversion_insn,
2731 all->last_conversion_insn);
2732 emit_move_insn (tempreg, DECL_RTL (parm));
2733 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2734 emit_move_insn (parmreg, tempreg);
2735 all->first_conversion_insn = get_insns ();
2736 all->last_conversion_insn = get_last_insn ();
2737 end_sequence ();
2738
2739 did_conversion = true;
2740 }
2741 else
2742 emit_move_insn (parmreg, DECL_RTL (parm));
2743
2744 SET_DECL_RTL (parm, parmreg);
2745
2746 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2747 now the parm. */
2748 data->stack_parm = NULL;
2749 }
2750
2751 /* Mark the register as eliminable if we did no conversion and it was
2752 copied from memory at a fixed offset, and the arg pointer was not
2753 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2754 offset formed an invalid address, such memory-equivalences as we
2755 make here would screw up life analysis for it. */
2756 if (data->nominal_mode == data->passed_mode
2757 && !did_conversion
2758 && data->stack_parm != 0
2759 && MEM_P (data->stack_parm)
2760 && data->locate.offset.var == 0
2761 && reg_mentioned_p (virtual_incoming_args_rtx,
2762 XEXP (data->stack_parm, 0)))
2763 {
2764 rtx linsn = get_last_insn ();
2765 rtx sinsn, set;
2766
2767 /* Mark complex types separately. */
2768 if (GET_CODE (parmreg) == CONCAT)
2769 {
2770 enum machine_mode submode
2771 = GET_MODE_INNER (GET_MODE (parmreg));
2772 int regnor = REGNO (XEXP (parmreg, 0));
2773 int regnoi = REGNO (XEXP (parmreg, 1));
2774 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2775 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2776 GET_MODE_SIZE (submode));
2777
2778 /* Scan backwards for the set of the real and
2779 imaginary parts. */
2780 for (sinsn = linsn; sinsn != 0;
2781 sinsn = prev_nonnote_insn (sinsn))
2782 {
2783 set = single_set (sinsn);
2784 if (set == 0)
2785 continue;
2786
2787 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2788 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2789 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2790 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2791 }
2792 }
2793 else if ((set = single_set (linsn)) != 0
2794 && SET_DEST (set) == parmreg)
2795 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2796 }
2797
2798 /* For pointer data type, suggest pointer register. */
2799 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2800 mark_reg_pointer (parmreg,
2801 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2802 }
2803
2804 /* A subroutine of assign_parms. Allocate stack space to hold the current
2805 parameter. Get it there. Perform all ABI specified conversions. */
2806
2807 static void
2808 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2809 struct assign_parm_data_one *data)
2810 {
2811 /* Value must be stored in the stack slot STACK_PARM during function
2812 execution. */
2813 bool to_conversion = false;
2814
2815 if (data->promoted_mode != data->nominal_mode)
2816 {
2817 /* Conversion is required. */
2818 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2819
2820 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2821
2822 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2823 to_conversion = true;
2824
2825 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2826 TYPE_UNSIGNED (TREE_TYPE (parm)));
2827
2828 if (data->stack_parm)
2829 /* ??? This may need a big-endian conversion on sparc64. */
2830 data->stack_parm
2831 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2832 }
2833
2834 if (data->entry_parm != data->stack_parm)
2835 {
2836 rtx src, dest;
2837
2838 if (data->stack_parm == 0)
2839 {
2840 data->stack_parm
2841 = assign_stack_local (GET_MODE (data->entry_parm),
2842 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2843 TYPE_ALIGN (data->passed_type));
2844 set_mem_attributes (data->stack_parm, parm, 1);
2845 }
2846
2847 dest = validize_mem (data->stack_parm);
2848 src = validize_mem (data->entry_parm);
2849
2850 if (MEM_P (src))
2851 {
2852 /* Use a block move to handle potentially misaligned entry_parm. */
2853 if (!to_conversion)
2854 push_to_sequence2 (all->first_conversion_insn,
2855 all->last_conversion_insn);
2856 to_conversion = true;
2857
2858 emit_block_move (dest, src,
2859 GEN_INT (int_size_in_bytes (data->passed_type)),
2860 BLOCK_OP_NORMAL);
2861 }
2862 else
2863 emit_move_insn (dest, src);
2864 }
2865
2866 if (to_conversion)
2867 {
2868 all->first_conversion_insn = get_insns ();
2869 all->last_conversion_insn = get_last_insn ();
2870 end_sequence ();
2871 }
2872
2873 SET_DECL_RTL (parm, data->stack_parm);
2874 }
2875
2876 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2877 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2878
2879 static void
2880 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2881 {
2882 tree parm;
2883 tree orig_fnargs = all->orig_fnargs;
2884
2885 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2886 {
2887 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2888 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2889 {
2890 rtx tmp, real, imag;
2891 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2892
2893 real = DECL_RTL (fnargs);
2894 imag = DECL_RTL (TREE_CHAIN (fnargs));
2895 if (inner != GET_MODE (real))
2896 {
2897 real = gen_lowpart_SUBREG (inner, real);
2898 imag = gen_lowpart_SUBREG (inner, imag);
2899 }
2900
2901 if (TREE_ADDRESSABLE (parm))
2902 {
2903 rtx rmem, imem;
2904 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2905
2906 /* split_complex_arg put the real and imag parts in
2907 pseudos. Move them to memory. */
2908 tmp = assign_stack_local (DECL_MODE (parm), size,
2909 TYPE_ALIGN (TREE_TYPE (parm)));
2910 set_mem_attributes (tmp, parm, 1);
2911 rmem = adjust_address_nv (tmp, inner, 0);
2912 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2913 push_to_sequence2 (all->first_conversion_insn,
2914 all->last_conversion_insn);
2915 emit_move_insn (rmem, real);
2916 emit_move_insn (imem, imag);
2917 all->first_conversion_insn = get_insns ();
2918 all->last_conversion_insn = get_last_insn ();
2919 end_sequence ();
2920 }
2921 else
2922 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2923 SET_DECL_RTL (parm, tmp);
2924
2925 real = DECL_INCOMING_RTL (fnargs);
2926 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2927 if (inner != GET_MODE (real))
2928 {
2929 real = gen_lowpart_SUBREG (inner, real);
2930 imag = gen_lowpart_SUBREG (inner, imag);
2931 }
2932 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2933 set_decl_incoming_rtl (parm, tmp, false);
2934 fnargs = TREE_CHAIN (fnargs);
2935 }
2936 else
2937 {
2938 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2939 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
2940
2941 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2942 instead of the copy of decl, i.e. FNARGS. */
2943 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2944 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2945 }
2946
2947 fnargs = TREE_CHAIN (fnargs);
2948 }
2949 }
2950
2951 /* Assign RTL expressions to the function's parameters. This may involve
2952 copying them into registers and using those registers as the DECL_RTL. */
2953
2954 static void
2955 assign_parms (tree fndecl)
2956 {
2957 struct assign_parm_data_all all;
2958 tree fnargs, parm;
2959
2960 current_function_internal_arg_pointer
2961 = targetm.calls.internal_arg_pointer ();
2962
2963 assign_parms_initialize_all (&all);
2964 fnargs = assign_parms_augmented_arg_list (&all);
2965
2966 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2967 {
2968 struct assign_parm_data_one data;
2969
2970 /* Extract the type of PARM; adjust it according to ABI. */
2971 assign_parm_find_data_types (&all, parm, &data);
2972
2973 /* Early out for errors and void parameters. */
2974 if (data.passed_mode == VOIDmode)
2975 {
2976 SET_DECL_RTL (parm, const0_rtx);
2977 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2978 continue;
2979 }
2980
2981 if (current_function_stdarg && !TREE_CHAIN (parm))
2982 assign_parms_setup_varargs (&all, &data, false);
2983
2984 /* Find out where the parameter arrives in this function. */
2985 assign_parm_find_entry_rtl (&all, &data);
2986
2987 /* Find out where stack space for this parameter might be. */
2988 if (assign_parm_is_stack_parm (&all, &data))
2989 {
2990 assign_parm_find_stack_rtl (parm, &data);
2991 assign_parm_adjust_entry_rtl (&data);
2992 }
2993
2994 /* Record permanently how this parm was passed. */
2995 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
2996
2997 /* Update info on where next arg arrives in registers. */
2998 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
2999 data.passed_type, data.named_arg);
3000
3001 assign_parm_adjust_stack_rtl (&data);
3002
3003 if (assign_parm_setup_block_p (&data))
3004 assign_parm_setup_block (&all, parm, &data);
3005 else if (data.passed_pointer || use_register_for_decl (parm))
3006 assign_parm_setup_reg (&all, parm, &data);
3007 else
3008 assign_parm_setup_stack (&all, parm, &data);
3009 }
3010
3011 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3012 assign_parms_unsplit_complex (&all, fnargs);
3013
3014 /* Output all parameter conversion instructions (possibly including calls)
3015 now that all parameters have been copied out of hard registers. */
3016 emit_insn (all.first_conversion_insn);
3017
3018 /* If we are receiving a struct value address as the first argument, set up
3019 the RTL for the function result. As this might require code to convert
3020 the transmitted address to Pmode, we do this here to ensure that possible
3021 preliminary conversions of the address have been emitted already. */
3022 if (all.function_result_decl)
3023 {
3024 tree result = DECL_RESULT (current_function_decl);
3025 rtx addr = DECL_RTL (all.function_result_decl);
3026 rtx x;
3027
3028 if (DECL_BY_REFERENCE (result))
3029 x = addr;
3030 else
3031 {
3032 addr = convert_memory_address (Pmode, addr);
3033 x = gen_rtx_MEM (DECL_MODE (result), addr);
3034 set_mem_attributes (x, result, 1);
3035 }
3036 SET_DECL_RTL (result, x);
3037 }
3038
3039 /* We have aligned all the args, so add space for the pretend args. */
3040 current_function_pretend_args_size = all.pretend_args_size;
3041 all.stack_args_size.constant += all.extra_pretend_bytes;
3042 current_function_args_size = all.stack_args_size.constant;
3043
3044 /* Adjust function incoming argument size for alignment and
3045 minimum length. */
3046
3047 #ifdef REG_PARM_STACK_SPACE
3048 current_function_args_size = MAX (current_function_args_size,
3049 REG_PARM_STACK_SPACE (fndecl));
3050 #endif
3051
3052 current_function_args_size = CEIL_ROUND (current_function_args_size,
3053 PARM_BOUNDARY / BITS_PER_UNIT);
3054
3055 #ifdef ARGS_GROW_DOWNWARD
3056 current_function_arg_offset_rtx
3057 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3058 : expand_expr (size_diffop (all.stack_args_size.var,
3059 size_int (-all.stack_args_size.constant)),
3060 NULL_RTX, VOIDmode, 0));
3061 #else
3062 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3063 #endif
3064
3065 /* See how many bytes, if any, of its args a function should try to pop
3066 on return. */
3067
3068 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3069 current_function_args_size);
3070
3071 /* For a stdarg.h function, save info about
3072 regs and stack space used by the named args. */
3073
3074 current_function_args_info = all.args_so_far;
3075
3076 /* Set the rtx used for the function return value. Put this in its
3077 own variable so any optimizers that need this information don't have
3078 to include tree.h. Do this here so it gets done when an inlined
3079 function gets output. */
3080
3081 current_function_return_rtx
3082 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3083 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3084
3085 /* If scalar return value was computed in a pseudo-reg, or was a named
3086 return value that got dumped to the stack, copy that to the hard
3087 return register. */
3088 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3089 {
3090 tree decl_result = DECL_RESULT (fndecl);
3091 rtx decl_rtl = DECL_RTL (decl_result);
3092
3093 if (REG_P (decl_rtl)
3094 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3095 : DECL_REGISTER (decl_result))
3096 {
3097 rtx real_decl_rtl;
3098
3099 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3100 fndecl, true);
3101 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3102 /* The delay slot scheduler assumes that current_function_return_rtx
3103 holds the hard register containing the return value, not a
3104 temporary pseudo. */
3105 current_function_return_rtx = real_decl_rtl;
3106 }
3107 }
3108 }
3109
3110 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3111 For all seen types, gimplify their sizes. */
3112
3113 static tree
3114 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3115 {
3116 tree t = *tp;
3117
3118 *walk_subtrees = 0;
3119 if (TYPE_P (t))
3120 {
3121 if (POINTER_TYPE_P (t))
3122 *walk_subtrees = 1;
3123 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3124 && !TYPE_SIZES_GIMPLIFIED (t))
3125 {
3126 gimplify_type_sizes (t, (tree *) data);
3127 *walk_subtrees = 1;
3128 }
3129 }
3130
3131 return NULL;
3132 }
3133
3134 /* Gimplify the parameter list for current_function_decl. This involves
3135 evaluating SAVE_EXPRs of variable sized parameters and generating code
3136 to implement callee-copies reference parameters. Returns a list of
3137 statements to add to the beginning of the function, or NULL if nothing
3138 to do. */
3139
3140 tree
3141 gimplify_parameters (void)
3142 {
3143 struct assign_parm_data_all all;
3144 tree fnargs, parm, stmts = NULL;
3145
3146 assign_parms_initialize_all (&all);
3147 fnargs = assign_parms_augmented_arg_list (&all);
3148
3149 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3150 {
3151 struct assign_parm_data_one data;
3152
3153 /* Extract the type of PARM; adjust it according to ABI. */
3154 assign_parm_find_data_types (&all, parm, &data);
3155
3156 /* Early out for errors and void parameters. */
3157 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3158 continue;
3159
3160 /* Update info on where next arg arrives in registers. */
3161 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3162 data.passed_type, data.named_arg);
3163
3164 /* ??? Once upon a time variable_size stuffed parameter list
3165 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3166 turned out to be less than manageable in the gimple world.
3167 Now we have to hunt them down ourselves. */
3168 walk_tree_without_duplicates (&data.passed_type,
3169 gimplify_parm_type, &stmts);
3170
3171 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3172 {
3173 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3174 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3175 }
3176
3177 if (data.passed_pointer)
3178 {
3179 tree type = TREE_TYPE (data.passed_type);
3180 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3181 type, data.named_arg))
3182 {
3183 tree local, t;
3184
3185 /* For constant sized objects, this is trivial; for
3186 variable-sized objects, we have to play games. */
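 /* The "game" below: allocate the callee copy with __builtin_alloca
 at function entry, keep its address in a temporary, and make the
 PARM_DECL's DECL_VALUE_EXPR an indirection through that address. */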
3187 if (TREE_CONSTANT (DECL_SIZE (parm)))
3188 {
3189 local = create_tmp_var (type, get_name (parm));
3190 DECL_IGNORED_P (local) = 0;
3191 }
3192 else
3193 {
3194 tree ptr_type, addr;
3195
3196 ptr_type = build_pointer_type (type);
3197 addr = create_tmp_var (ptr_type, get_name (parm));
3198 DECL_IGNORED_P (addr) = 0;
3199 local = build_fold_indirect_ref (addr);
3200
3201 t = built_in_decls[BUILT_IN_ALLOCA];
3202 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3203 t = fold_convert (ptr_type, t);
3204 t = build_gimple_modify_stmt (addr, t);
3205 gimplify_and_add (t, &stmts);
3206 }
3207
3208 t = build_gimple_modify_stmt (local, parm);
3209 gimplify_and_add (t, &stmts);
3210
3211 SET_DECL_VALUE_EXPR (parm, local);
3212 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3213 }
3214 }
3215 }
3216
3217 return stmts;
3218 }
3219 \f
3220 /* Compute the size and offset from the start of the stacked arguments for a
3221 parm passed in mode PASSED_MODE and with type TYPE.
3222
3223 INITIAL_OFFSET_PTR points to the current offset into the stacked
3224 arguments.
3225
3226 The starting offset and size for this parm are returned in
3227 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3228 nonzero, the offset is that of stack slot, which is returned in
3229 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3230 padding required from the initial offset ptr to the stack slot.
3231
3232 IN_REGS is nonzero if the argument will be passed in registers. It will
3233 never be set if REG_PARM_STACK_SPACE is not defined.
3234
3235 FNDECL is the function in which the argument was defined.
3236
3237 There are two types of rounding that are done. The first, controlled by
3238 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3239 list to be aligned to the specific boundary (in bits). This rounding
3240 affects the initial and starting offsets, but not the argument size.
3241
3242 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3243 optionally rounds the size of the parm to PARM_BOUNDARY. The
3244 initial offset is not affected by this rounding, while the size always
3245 is and the starting offset may be. */
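 /* A hedged example of the first rounding, assuming PARM_BOUNDARY is
 32 bits and no STACK_POINTER_OFFSET: with a running offset of 4
 bytes, a parm whose FUNCTION_ARG_BOUNDARY is 64 bits is placed at
 offset 8, and the 4 bytes of skipped space are reported in
 LOCATE->ALIGNMENT_PAD. */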
3246
3247 /* LOCATE->OFFSET will be negative for the ARGS_GROW_DOWNWARD case;
3248 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3249 callers pass in the total size of args so far as
3250 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3251
3252 void
3253 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3254 int partial, tree fndecl ATTRIBUTE_UNUSED,
3255 struct args_size *initial_offset_ptr,
3256 struct locate_and_pad_arg_data *locate)
3257 {
3258 tree sizetree;
3259 enum direction where_pad;
3260 unsigned int boundary;
3261 int reg_parm_stack_space = 0;
3262 int part_size_in_regs;
3263
3264 #ifdef REG_PARM_STACK_SPACE
3265 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3266
3267 /* If we have found a stack parm before we reach the end of the
3268 area reserved for registers, skip that area. */
3269 if (! in_regs)
3270 {
3271 if (reg_parm_stack_space > 0)
3272 {
3273 if (initial_offset_ptr->var)
3274 {
3275 initial_offset_ptr->var
3276 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3277 ssize_int (reg_parm_stack_space));
3278 initial_offset_ptr->constant = 0;
3279 }
3280 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3281 initial_offset_ptr->constant = reg_parm_stack_space;
3282 }
3283 }
3284 #endif /* REG_PARM_STACK_SPACE */
3285
3286 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3287
3288 sizetree
3289 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3290 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3291 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3292 locate->where_pad = where_pad;
3293 locate->boundary = boundary;
3294
3295 /* Remember if the outgoing parameter requires extra alignment on the
3296 calling function side. */
3297 if (boundary > PREFERRED_STACK_BOUNDARY)
3298 boundary = PREFERRED_STACK_BOUNDARY;
3299 if (cfun->stack_alignment_needed < boundary)
3300 cfun->stack_alignment_needed = boundary;
3301
3302 #ifdef ARGS_GROW_DOWNWARD
3303 locate->slot_offset.constant = -initial_offset_ptr->constant;
3304 if (initial_offset_ptr->var)
3305 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3306 initial_offset_ptr->var);
3307
3308 {
3309 tree s2 = sizetree;
3310 if (where_pad != none
3311 && (!host_integerp (sizetree, 1)
3312 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3313 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3314 SUB_PARM_SIZE (locate->slot_offset, s2);
3315 }
3316
3317 locate->slot_offset.constant += part_size_in_regs;
3318
3319 if (!in_regs
3320 #ifdef REG_PARM_STACK_SPACE
3321 || REG_PARM_STACK_SPACE (fndecl) > 0
3322 #endif
3323 )
3324 pad_to_arg_alignment (&locate->slot_offset, boundary,
3325 &locate->alignment_pad);
3326
3327 locate->size.constant = (-initial_offset_ptr->constant
3328 - locate->slot_offset.constant);
3329 if (initial_offset_ptr->var)
3330 locate->size.var = size_binop (MINUS_EXPR,
3331 size_binop (MINUS_EXPR,
3332 ssize_int (0),
3333 initial_offset_ptr->var),
3334 locate->slot_offset.var);
3335
3336 /* Pad_below needs the pre-rounded size to know how much to pad
3337 below. */
3338 locate->offset = locate->slot_offset;
3339 if (where_pad == downward)
3340 pad_below (&locate->offset, passed_mode, sizetree);
3341
3342 #else /* !ARGS_GROW_DOWNWARD */
3343 if (!in_regs
3344 #ifdef REG_PARM_STACK_SPACE
3345 || REG_PARM_STACK_SPACE (fndecl) > 0
3346 #endif
3347 )
3348 pad_to_arg_alignment (initial_offset_ptr, boundary,
3349 &locate->alignment_pad);
3350 locate->slot_offset = *initial_offset_ptr;
3351
3352 #ifdef PUSH_ROUNDING
3353 if (passed_mode != BLKmode)
3354 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3355 #endif
3356
3357 /* Pad_below needs the pre-rounded size to know how much to pad below
3358 so this must be done before rounding up. */
3359 locate->offset = locate->slot_offset;
3360 if (where_pad == downward)
3361 pad_below (&locate->offset, passed_mode, sizetree);
3362
3363 if (where_pad != none
3364 && (!host_integerp (sizetree, 1)
3365 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3366 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3367
3368 ADD_PARM_SIZE (locate->size, sizetree);
3369
3370 locate->size.constant -= part_size_in_regs;
3371 #endif /* ARGS_GROW_DOWNWARD */
3372 }
3373
3374 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3375 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
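 /* Example, assuming STACK_POINTER_OFFSET == 0 and upward-growing args:
 an offset constant of 20 bytes padded to a 128-bit (16-byte)
 boundary becomes 32; when BOUNDARY > PARM_BOUNDARY the 12 bytes of
 padding are also recorded in *ALIGNMENT_PAD. */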
3376
3377 static void
3378 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3379 struct args_size *alignment_pad)
3380 {
3381 tree save_var = NULL_TREE;
3382 HOST_WIDE_INT save_constant = 0;
3383 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3384 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3385
3386 #ifdef SPARC_STACK_BOUNDARY_HACK
3387 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3388 the real alignment of %sp. However, when it does this, the
3389 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3390 if (SPARC_STACK_BOUNDARY_HACK)
3391 sp_offset = 0;
3392 #endif
3393
3394 if (boundary > PARM_BOUNDARY)
3395 {
3396 save_var = offset_ptr->var;
3397 save_constant = offset_ptr->constant;
3398 }
3399
3400 alignment_pad->var = NULL_TREE;
3401 alignment_pad->constant = 0;
3402
3403 if (boundary > BITS_PER_UNIT)
3404 {
3405 if (offset_ptr->var)
3406 {
3407 tree sp_offset_tree = ssize_int (sp_offset);
3408 tree offset = size_binop (PLUS_EXPR,
3409 ARGS_SIZE_TREE (*offset_ptr),
3410 sp_offset_tree);
3411 #ifdef ARGS_GROW_DOWNWARD
3412 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3413 #else
3414 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3415 #endif
3416
3417 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3418 /* ARGS_SIZE_TREE includes constant term. */
3419 offset_ptr->constant = 0;
3420 if (boundary > PARM_BOUNDARY)
3421 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3422 save_var);
3423 }
3424 else
3425 {
3426 offset_ptr->constant = -sp_offset +
3427 #ifdef ARGS_GROW_DOWNWARD
3428 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3429 #else
3430 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3431 #endif
3432 if (boundary > PARM_BOUNDARY)
3433 alignment_pad->constant = offset_ptr->constant - save_constant;
3434 }
3435 }
3436 }
3437
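 /* Add to *OFFSET_PTR the padding placed below an argument that is
 padded downward: the difference between the argument's size and
 that size rounded up to PARM_BOUNDARY. E.g. with a 32-bit
 PARM_BOUNDARY, an HImode (16-bit, 2-byte) argument adds
 4 - 2 == 2 bytes to the offset. */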
3438 static void
3439 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3440 {
3441 if (passed_mode != BLKmode)
3442 {
3443 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3444 offset_ptr->constant
3445 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3446 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3447 - GET_MODE_SIZE (passed_mode));
3448 }
3449 else
3450 {
3451 if (TREE_CODE (sizetree) != INTEGER_CST
3452 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3453 {
3454 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3455 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3456 /* Add it in. */
3457 ADD_PARM_SIZE (*offset_ptr, s2);
3458 SUB_PARM_SIZE (*offset_ptr, sizetree);
3459 }
3460 }
3461 }
3462 \f
3463
3464 /* True if register REGNO was alive at a place where `setjmp' was
3465 called and was set more than once or is an argument. Such regs may
3466 be clobbered by `longjmp'. */
3467
3468 static bool
3469 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3470 {
3471 /* There appear to be cases where some local vars never reach the
3472 backend but have bogus regnos. */
3473 if (regno >= max_reg_num ())
3474 return false;
3475
3476 return ((REG_N_SETS (regno) > 1
3477 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3478 && REGNO_REG_SET_P (setjmp_crosses, regno));
3479 }
3480
3481 /* Walk the tree of blocks describing the binding levels within a
3482    function and warn about variables that might be killed by setjmp or
3483    vfork.  This is done after flow analysis, but before register
3484    allocation, since register allocation will turn the pseudo-regs
3485    into hard regs.  */
3486
3487 static void
3488 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3489 {
3490 tree decl, sub;
3491
3492 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3493 {
3494 if (TREE_CODE (decl) == VAR_DECL
3495 && DECL_RTL_SET_P (decl)
3496 && REG_P (DECL_RTL (decl))
3497 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3498 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3499 " %<longjmp%> or %<vfork%>", decl);
3500 }
3501
3502 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3503 setjmp_vars_warning (setjmp_crosses, sub);
3504 }
3505
3506 /* Do the appropriate part of setjmp_vars_warning
3507 but for arguments instead of local variables. */
3508
3509 static void
3510 setjmp_args_warning (bitmap setjmp_crosses)
3511 {
3512 tree decl;
3513 for (decl = DECL_ARGUMENTS (current_function_decl);
3514 decl; decl = TREE_CHAIN (decl))
3515 if (DECL_RTL (decl) != 0
3516 && REG_P (DECL_RTL (decl))
3517 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3518 warning (OPT_Wclobbered,
3519 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3520 decl);
3521 }
3522
3523 /* Generate warning messages for variables live across setjmp. */
3524
3525 void
3526 generate_setjmp_warnings (void)
3527 {
3528 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3529
3530 if (n_basic_blocks == NUM_FIXED_BLOCKS
3531 || bitmap_empty_p (setjmp_crosses))
3532 return;
3533
3534 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3535 setjmp_args_warning (setjmp_crosses);
3536 }
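
/* As an illustration only (not code that is compiled here): with
   -Wclobbered, a function such as

       jmp_buf buf;
       int f (int arg) { if (setjmp (buf)) return arg; g (); return 0; }

   may draw "argument 'arg' might be clobbered by 'longjmp' or 'vfork'"
   when ARG is kept in a call-clobbered register across the setjmp call.  */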
3537
3538 \f
3539 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3540 and create duplicate blocks. */
3541 /* ??? Need an option to either create block fragments or to create
3542 abstract origin duplicates of a source block. It really depends
3543 on what optimization has been performed. */
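/* For example, if basic-block reordering or hot/cold partitioning splits a
   lexical block into two discontiguous address ranges, the second
   NOTE_INSN_BLOCK_BEG seen for it below gets a copied block whose
   BLOCK_FRAGMENT_ORIGIN points back at the original.  */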
3544
3545 void
3546 reorder_blocks (void)
3547 {
3548 tree block = DECL_INITIAL (current_function_decl);
3549 VEC(tree,heap) *block_stack;
3550
3551 if (block == NULL_TREE)
3552 return;
3553
3554 block_stack = VEC_alloc (tree, heap, 10);
3555
3556 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3557 clear_block_marks (block);
3558
3559 /* Prune the old trees away, so that they don't get in the way. */
3560 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3561 BLOCK_CHAIN (block) = NULL_TREE;
3562
3563 /* Recreate the block tree from the note nesting. */
3564 reorder_blocks_1 (get_insns (), block, &block_stack);
3565 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3566
3567 VEC_free (tree, heap, block_stack);
3568 }
3569
3570 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3571
3572 void
3573 clear_block_marks (tree block)
3574 {
3575 while (block)
3576 {
3577 TREE_ASM_WRITTEN (block) = 0;
3578 clear_block_marks (BLOCK_SUBBLOCKS (block));
3579 block = BLOCK_CHAIN (block);
3580 }
3581 }
3582
3583 static void
3584 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3585 {
3586 rtx insn;
3587
3588 for (insn = insns; insn; insn = NEXT_INSN (insn))
3589 {
3590 if (NOTE_P (insn))
3591 {
3592 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3593 {
3594 tree block = NOTE_BLOCK (insn);
3595 tree origin;
3596
3597 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3598 ? BLOCK_FRAGMENT_ORIGIN (block)
3599 : block);
3600
3601 /* If we have seen this block before, that means it now
3602 spans multiple address regions. Create a new fragment. */
3603 if (TREE_ASM_WRITTEN (block))
3604 {
3605 tree new_block = copy_node (block);
3606
3607 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3608 BLOCK_FRAGMENT_CHAIN (new_block)
3609 = BLOCK_FRAGMENT_CHAIN (origin);
3610 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3611
3612 NOTE_BLOCK (insn) = new_block;
3613 block = new_block;
3614 }
3615
3616 BLOCK_SUBBLOCKS (block) = 0;
3617 TREE_ASM_WRITTEN (block) = 1;
3618 /* When there's only one block for the entire function,
3619                 current_block == block and we mustn't do this; it
3620                 would cause infinite recursion.  */
3621 if (block != current_block)
3622 {
3623 if (block != origin)
3624 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3625
3626 BLOCK_SUPERCONTEXT (block) = current_block;
3627 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3628 BLOCK_SUBBLOCKS (current_block) = block;
3629 current_block = origin;
3630 }
3631 VEC_safe_push (tree, heap, *p_block_stack, block);
3632 }
3633 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3634 {
3635 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3636 BLOCK_SUBBLOCKS (current_block)
3637 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3638 current_block = BLOCK_SUPERCONTEXT (current_block);
3639 }
3640 }
3641 }
3642 }
3643
3644 /* Reverse the order of elements in the chain T of blocks,
3645 and return the new head of the chain (old last element). */
3646
3647 tree
3648 blocks_nreverse (tree t)
3649 {
3650 tree prev = 0, decl, next;
3651 for (decl = t; decl; decl = next)
3652 {
3653 next = BLOCK_CHAIN (decl);
3654 BLOCK_CHAIN (decl) = prev;
3655 prev = decl;
3656 }
3657 return prev;
3658 }
3659
3660 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3661 non-NULL, list them all into VECTOR, in a depth-first preorder
3662 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3663 blocks. */
3664
3665 static int
3666 all_blocks (tree block, tree *vector)
3667 {
3668 int n_blocks = 0;
3669
3670 while (block)
3671 {
3672 TREE_ASM_WRITTEN (block) = 0;
3673
3674 /* Record this block. */
3675 if (vector)
3676 vector[n_blocks] = block;
3677
3678 ++n_blocks;
3679
3680 /* Record the subblocks, and their subblocks... */
3681 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3682 vector ? vector + n_blocks : 0);
3683 block = BLOCK_CHAIN (block);
3684 }
3685
3686 return n_blocks;
3687 }
3688
3689 /* Return a vector containing all the blocks rooted at BLOCK. The
3690 number of elements in the vector is stored in N_BLOCKS_P. The
3691 vector is dynamically allocated; it is the caller's responsibility
3692 to call `free' on the pointer returned. */
3693
3694 static tree *
3695 get_block_vector (tree block, int *n_blocks_p)
3696 {
3697 tree *block_vector;
3698
3699 *n_blocks_p = all_blocks (block, NULL);
3700 block_vector = XNEWVEC (tree, *n_blocks_p);
3701 all_blocks (block, block_vector);
3702
3703 return block_vector;
3704 }
3705
3706 static GTY(()) int next_block_index = 2;
3707
3708 /* Set BLOCK_NUMBER for all the blocks in FN. */
3709
3710 void
3711 number_blocks (tree fn)
3712 {
3713 int i;
3714 int n_blocks;
3715 tree *block_vector;
3716
3717 /* For SDB and XCOFF debugging output, we start numbering the blocks
3718 from 1 within each function, rather than keeping a running
3719 count. */
3720 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3721 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3722 next_block_index = 1;
3723 #endif
3724
3725 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3726
3727 /* The top-level BLOCK isn't numbered at all. */
3728 for (i = 1; i < n_blocks; ++i)
3729 /* We number the blocks from two. */
3730 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3731
3732 free (block_vector);
3733
3734 return;
3735 }
3736
3737 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3738
3739 tree
3740 debug_find_var_in_block_tree (tree var, tree block)
3741 {
3742 tree t;
3743
3744 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3745 if (t == var)
3746 return block;
3747
3748 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3749 {
3750 tree ret = debug_find_var_in_block_tree (var, t);
3751 if (ret)
3752 return ret;
3753 }
3754
3755 return NULL_TREE;
3756 }
3757 \f
3758 /* Keep track of whether we're in a dummy function context. If we are,
3759 we don't want to invoke the set_current_function hook, because we'll
3760 get into trouble if the hook calls target_reinit () recursively or
3761 when the initial initialization is not yet complete. */
3762
3763 static bool in_dummy_function;
3764
3765 /* Invoke the target hook when setting cfun. */
3766
3767 static void
3768 invoke_set_current_function_hook (tree fndecl)
3769 {
3770 if (!in_dummy_function)
3771 targetm.set_current_function (fndecl);
3772 }
3773
3774 /* cfun should never be set directly; use this function. */
3775
3776 void
3777 set_cfun (struct function *new_cfun)
3778 {
3779 if (cfun != new_cfun)
3780 {
3781 cfun = new_cfun;
3782 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
3783 }
3784 }
3785
3786 /* Keep track of the cfun stack. */
3787
3788 typedef struct function *function_p;
3789
3790 DEF_VEC_P(function_p);
3791 DEF_VEC_ALLOC_P(function_p,heap);
3792
3793 /* Initialized with NOGC, making this poisonous to the garbage collector. */
3794
3795 static VEC(function_p,heap) *cfun_stack;
3796
3797 /* We save the value of in_system_header here when pushing the first
3798 function on the cfun stack, and we restore it from here when
3799 popping the last function. */
3800
3801 static bool saved_in_system_header;
3802
3803 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
3804
3805 void
3806 push_cfun (struct function *new_cfun)
3807 {
3808 if (cfun == NULL)
3809 saved_in_system_header = in_system_header;
3810 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3811 if (new_cfun)
3812 in_system_header = DECL_IN_SYSTEM_HEADER (new_cfun->decl);
3813 set_cfun (new_cfun);
3814 }
3815
3816 /* Pop cfun from the stack. */
3817
3818 void
3819 pop_cfun (void)
3820 {
3821 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
3822 in_system_header = ((new_cfun == NULL) ? saved_in_system_header
3823 : DECL_IN_SYSTEM_HEADER (new_cfun->decl));
3824 set_cfun (new_cfun);
3825 }
3826
3827 /* Return the current funcdef number and increment it.  */
3828 int
3829 get_next_funcdef_no (void)
3830 {
3831 return funcdef_no++;
3832 }
3833
3834 /* Allocate a function structure for FNDECL and set its contents
3835 to the defaults. Set cfun to the newly-allocated object.
3836 Some of the helper functions invoked during initialization assume
3837 that cfun has already been set. Therefore, assign the new object
3838 directly into cfun and invoke the back end hook explicitly at the
3839 very end, rather than initializing a temporary and calling set_cfun
3840 on it.
3841
3842 ABSTRACT_P is true if this is a function that will never be seen by
3843 the middle-end. Such functions are front-end concepts (like C++
3844 function templates) that do not correspond directly to functions
3845 placed in object files. */
3846
3847 void
3848 allocate_struct_function (tree fndecl, bool abstract_p)
3849 {
3850 tree result;
3851 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3852
3853 cfun = ggc_alloc_cleared (sizeof (struct function));
3854
3855 cfun->stack_alignment_needed = STACK_BOUNDARY;
3856 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3857
3858 current_function_funcdef_no = get_next_funcdef_no ();
3859
3860 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3861
3862 init_eh_for_function ();
3863
3864 lang_hooks.function.init (cfun);
3865 if (init_machine_status)
3866 cfun->machine = (*init_machine_status) ();
3867
3868 if (fndecl != NULL)
3869 {
3870 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3871 cfun->decl = fndecl;
3872
3873 result = DECL_RESULT (fndecl);
3874 if (!abstract_p && aggregate_value_p (result, fndecl))
3875 {
3876 #ifdef PCC_STATIC_STRUCT_RETURN
3877 current_function_returns_pcc_struct = 1;
3878 #endif
3879 current_function_returns_struct = 1;
3880 }
3881
3882 current_function_stdarg
3883 = (fntype
3884 && TYPE_ARG_TYPES (fntype) != 0
3885 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3886 != void_type_node));
3887
3888 /* Assume all registers in stdarg functions need to be saved. */
3889 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3890 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3891 }
3892
3893 invoke_set_current_function_hook (fndecl);
3894 }
3895
3896 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
3897 instead of just setting it. */
3898
3899 void
3900 push_struct_function (tree fndecl)
3901 {
3902 if (cfun == NULL)
3903 saved_in_system_header = in_system_header;
3904 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3905 if (fndecl)
3906 in_system_header = DECL_IN_SYSTEM_HEADER (fndecl);
3907 allocate_struct_function (fndecl, false);
3908 }
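
/* A typical use of push_struct_function / pop_cfun (an illustrative
   sketch only):

       push_struct_function (fndecl);
       ... do work that needs cfun set to FNDECL's struct function ...
       pop_cfun ();

   Popping restores the previously active function, including the saved
   value of in_system_header.  */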
3909
3910 /* Reset cfun, and other non-struct-function variables to defaults as
3911 appropriate for emitting rtl at the start of a function. */
3912
3913 static void
3914 prepare_function_start (void)
3915 {
3916 gcc_assert (!rtl.emit.x_last_insn);
3917 init_emit ();
3918 init_varasm_status ();
3919 init_expr ();
3920
3921 cse_not_expected = ! optimize;
3922
3923 /* Caller save not needed yet. */
3924 caller_save_needed = 0;
3925
3926 /* We haven't done register allocation yet. */
3927 reg_renumber = 0;
3928
3929 /* Indicate that we have not instantiated virtual registers yet. */
3930 virtuals_instantiated = 0;
3931
3932 /* Indicate that we want CONCATs now. */
3933 generating_concat_p = 1;
3934
3935 /* Indicate we have no need of a frame pointer yet. */
3936 frame_pointer_needed = 0;
3937 }
3938
3939 /* Initialize the rtl expansion mechanism so that we can do simple things
3940 like generate sequences. This is used to provide a context during global
3941 initialization of some passes. You must call expand_dummy_function_end
3942 to exit this context. */
3943
3944 void
3945 init_dummy_function_start (void)
3946 {
3947 gcc_assert (!in_dummy_function);
3948 in_dummy_function = true;
3949 push_struct_function (NULL_TREE);
3950 prepare_function_start ();
3951 }
3952
3953 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3954 and initialize static variables for generating RTL for the statements
3955 of the function. */
3956
3957 void
3958 init_function_start (tree subr)
3959 {
3960 if (subr && DECL_STRUCT_FUNCTION (subr))
3961 set_cfun (DECL_STRUCT_FUNCTION (subr));
3962 else
3963 allocate_struct_function (subr, false);
3964 prepare_function_start ();
3965
3966 /* Warn if this value is an aggregate type,
3967 regardless of which calling convention we are using for it. */
3968 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3969 warning (OPT_Waggregate_return, "function returns an aggregate");
3970 }
3971
3972 /* Make sure all values used by the optimization passes have sane
3973 defaults. */
3974 unsigned int
3975 init_function_for_compilation (void)
3976 {
3977 reg_renumber = 0;
3978
3979 /* No prologue/epilogue insns yet. Make sure that these vectors are
3980 empty. */
3981 gcc_assert (VEC_length (int, prologue) == 0);
3982 gcc_assert (VEC_length (int, epilogue) == 0);
3983 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3984 return 0;
3985 }
3986
3987 struct rtl_opt_pass pass_init_function =
3988 {
3989 {
3990 RTL_PASS,
3991 NULL, /* name */
3992 NULL, /* gate */
3993 init_function_for_compilation, /* execute */
3994 NULL, /* sub */
3995 NULL, /* next */
3996 0, /* static_pass_number */
3997 0, /* tv_id */
3998 0, /* properties_required */
3999 0, /* properties_provided */
4000 0, /* properties_destroyed */
4001 0, /* todo_flags_start */
4002 0 /* todo_flags_finish */
4003 }
4004 };
4005
4006
4007 void
4008 expand_main_function (void)
4009 {
4010 #if (defined(INVOKE__main) \
4011 || (!defined(HAS_INIT_SECTION) \
4012 && !defined(INIT_SECTION_ASM_OP) \
4013 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4014 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4015 #endif
4016 }
4017 \f
4018 /* Expand code to initialize the stack_protect_guard. This is invoked at
4019 the beginning of a function to be protected. */
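/* Conceptually this emits the equivalent of "<frame guard slot> = guard"
   at function entry, where the guard is typically the target-provided
   global __stack_chk_guard; stack_protect_epilogue below emits the
   matching comparison and the call to the failure routine.  */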
4020
4021 #ifndef HAVE_stack_protect_set
4022 # define HAVE_stack_protect_set 0
4023 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4024 #endif
4025
4026 void
4027 stack_protect_prologue (void)
4028 {
4029 tree guard_decl = targetm.stack_protect_guard ();
4030 rtx x, y;
4031
4032 /* Avoid expand_expr here, because we don't want guard_decl pulled
4033 into registers unless absolutely necessary. And we know that
4034 cfun->stack_protect_guard is a local stack slot, so this skips
4035 all the fluff. */
4036 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4037 y = validize_mem (DECL_RTL (guard_decl));
4038
4039 /* Allow the target to copy from Y to X without leaking Y into a
4040 register. */
4041 if (HAVE_stack_protect_set)
4042 {
4043 rtx insn = gen_stack_protect_set (x, y);
4044 if (insn)
4045 {
4046 emit_insn (insn);
4047 return;
4048 }
4049 }
4050
4051 /* Otherwise do a straight move. */
4052 emit_move_insn (x, y);
4053 }
4054
4055 /* Expand code to verify the stack_protect_guard. This is invoked at
4056 the end of a function to be protected. */
4057
4058 #ifndef HAVE_stack_protect_test
4059 # define HAVE_stack_protect_test 0
4060 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4061 #endif
4062
4063 void
4064 stack_protect_epilogue (void)
4065 {
4066 tree guard_decl = targetm.stack_protect_guard ();
4067 rtx label = gen_label_rtx ();
4068 rtx x, y, tmp;
4069
4070 /* Avoid expand_expr here, because we don't want guard_decl pulled
4071 into registers unless absolutely necessary. And we know that
4072 cfun->stack_protect_guard is a local stack slot, so this skips
4073 all the fluff. */
4074 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4075 y = validize_mem (DECL_RTL (guard_decl));
4076
4077 /* Allow the target to compare Y with X without leaking either into
4078 a register. */
4079 switch (HAVE_stack_protect_test != 0)
4080 {
4081 case 1:
4082 tmp = gen_stack_protect_test (x, y, label);
4083 if (tmp)
4084 {
4085 emit_insn (tmp);
4086 break;
4087 }
4088 /* FALLTHRU */
4089
4090 default:
4091 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4092 break;
4093 }
4094
4095 /* The noreturn predictor has been moved to the tree level. The rtl-level
4096 predictors estimate this branch about 20%, which isn't enough to get
4097 things moved out of line. Since this is the only extant case of adding
4098      a noreturn function at the rtl level, it doesn't seem worth doing
4099      anything except adding the prediction by hand.  */
4100 tmp = get_last_insn ();
4101 if (JUMP_P (tmp))
4102 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4103
4104 expand_expr_stmt (targetm.stack_protect_fail ());
4105 emit_label (label);
4106 }
4107 \f
4108 /* Start the RTL for a new function, and set variables used for
4109 emitting RTL.
4110    SUBR is the FUNCTION_DECL node.  */
4113
4114 void
4115 expand_function_start (tree subr)
4116 {
4117 /* Make sure volatile mem refs aren't considered
4118 valid operands of arithmetic insns. */
4119 init_recog_no_volatile ();
4120
4121 current_function_profile
4122 = (profile_flag
4123 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4124
4125 current_function_limit_stack
4126 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4127
4128 /* Make the label for return statements to jump to. Do not special
4129 case machines with special return instructions -- they will be
4130 handled later during jump, ifcvt, or epilogue creation. */
4131 return_label = gen_label_rtx ();
4132
4133 /* Initialize rtx used to return the value. */
4134 /* Do this before assign_parms so that we copy the struct value address
4135 before any library calls that assign parms might generate. */
4136
4137 /* Decide whether to return the value in memory or in a register. */
4138 if (aggregate_value_p (DECL_RESULT (subr), subr))
4139 {
4140 /* Returning something that won't go in a register. */
4141 rtx value_address = 0;
4142
4143 #ifdef PCC_STATIC_STRUCT_RETURN
4144 if (current_function_returns_pcc_struct)
4145 {
4146 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4147 value_address = assemble_static_space (size);
4148 }
4149 else
4150 #endif
4151 {
4152 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4153 /* Expect to be passed the address of a place to store the value.
4154 If it is passed as an argument, assign_parms will take care of
4155 it. */
4156 if (sv)
4157 {
4158 value_address = gen_reg_rtx (Pmode);
4159 emit_move_insn (value_address, sv);
4160 }
4161 }
4162 if (value_address)
4163 {
4164 rtx x = value_address;
4165 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4166 {
4167 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4168 set_mem_attributes (x, DECL_RESULT (subr), 1);
4169 }
4170 SET_DECL_RTL (DECL_RESULT (subr), x);
4171 }
4172 }
4173 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4174 /* If return mode is void, this decl rtl should not be used. */
4175 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4176 else
4177 {
4178 /* Compute the return values into a pseudo reg, which we will copy
4179 into the true return register after the cleanups are done. */
4180 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4181 if (TYPE_MODE (return_type) != BLKmode
4182 && targetm.calls.return_in_msb (return_type))
4183 /* expand_function_end will insert the appropriate padding in
4184 this case. Use the return value's natural (unpadded) mode
4185 within the function proper. */
4186 SET_DECL_RTL (DECL_RESULT (subr),
4187 gen_reg_rtx (TYPE_MODE (return_type)));
4188 else
4189 {
4190 /* In order to figure out what mode to use for the pseudo, we
4191 figure out what the mode of the eventual return register will
4192 actually be, and use that. */
4193 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4194
4195 /* Structures that are returned in registers are not
4196 aggregate_value_p, so we may see a PARALLEL or a REG. */
4197 if (REG_P (hard_reg))
4198 SET_DECL_RTL (DECL_RESULT (subr),
4199 gen_reg_rtx (GET_MODE (hard_reg)));
4200 else
4201 {
4202 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4203 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4204 }
4205 }
4206
4207 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4208 result to the real return register(s). */
4209 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4210 }
4211
4212 /* Initialize rtx for parameters and local variables.
4213 In some cases this requires emitting insns. */
4214 assign_parms (subr);
4215
4216 /* If function gets a static chain arg, store it. */
4217 if (cfun->static_chain_decl)
4218 {
4219 tree parm = cfun->static_chain_decl;
4220 rtx local = gen_reg_rtx (Pmode);
4221
4222 set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
4223 SET_DECL_RTL (parm, local);
4224 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4225
4226 emit_move_insn (local, static_chain_incoming_rtx);
4227 }
4228
4229 /* If the function receives a non-local goto, then store the
4230 bits we need to restore the frame pointer. */
4231 if (cfun->nonlocal_goto_save_area)
4232 {
4233 tree t_save;
4234 rtx r_save;
4235
4236       /* ??? We need to do this save early.  Unfortunately, this point
4237          is reached before the frame variable gets declared.  Help out...  */
4238 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4239 if (!DECL_RTL_SET_P (var))
4240 expand_decl (var);
4241
4242 t_save = build4 (ARRAY_REF, ptr_type_node,
4243 cfun->nonlocal_goto_save_area,
4244 integer_zero_node, NULL_TREE, NULL_TREE);
4245 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4246 r_save = convert_memory_address (Pmode, r_save);
4247
4248 emit_move_insn (r_save, virtual_stack_vars_rtx);
4249 update_nonlocal_goto_save_area ();
4250 }
4251
4252 /* The following was moved from init_function_start.
4253 The move is supposed to make sdb output more accurate. */
4254 /* Indicate the beginning of the function body,
4255 as opposed to parm setup. */
4256 emit_note (NOTE_INSN_FUNCTION_BEG);
4257
4258 gcc_assert (NOTE_P (get_last_insn ()));
4259
4260 parm_birth_insn = get_last_insn ();
4261
4262 if (current_function_profile)
4263 {
4264 #ifdef PROFILE_HOOK
4265 PROFILE_HOOK (current_function_funcdef_no);
4266 #endif
4267 }
4268
4269 /* After the display initializations is where the stack checking
4270 probe should go. */
4271   if (flag_stack_check)
4272 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4273
4274 /* Make sure there is a line number after the function entry setup code. */
4275 force_next_line_note ();
4276 }
4277 \f
4278 /* Undo the effects of init_dummy_function_start. */
4279 void
4280 expand_dummy_function_end (void)
4281 {
4282 gcc_assert (in_dummy_function);
4283
4284 /* End any sequences that failed to be closed due to syntax errors. */
4285 while (in_sequence_p ())
4286 end_sequence ();
4287
4288 /* Outside function body, can't compute type's actual size
4289 until next function's body starts. */
4290
4291 free_after_parsing (cfun);
4292 free_after_compilation (cfun);
4293 pop_cfun ();
4294 in_dummy_function = false;
4295 }
4296
4297 /* Call DOIT for each hard register used as a return value from
4298 the current function. */
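/* For example, a value returned partly in several hard registers yields a
   PARALLEL, and DOIT is invoked once for each constituent hard register;
   a value returned in a single register invokes DOIT just once.  */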
4299
4300 void
4301 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4302 {
4303 rtx outgoing = current_function_return_rtx;
4304
4305 if (! outgoing)
4306 return;
4307
4308 if (REG_P (outgoing))
4309 (*doit) (outgoing, arg);
4310 else if (GET_CODE (outgoing) == PARALLEL)
4311 {
4312 int i;
4313
4314 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4315 {
4316 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4317
4318 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4319 (*doit) (x, arg);
4320 }
4321 }
4322 }
4323
4324 static void
4325 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4326 {
4327 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4328 }
4329
4330 void
4331 clobber_return_register (void)
4332 {
4333 diddle_return_value (do_clobber_return_reg, NULL);
4334
4335   /* In case we do use a pseudo to return the value, clobber it too.  */
4336 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4337 {
4338 tree decl_result = DECL_RESULT (current_function_decl);
4339 rtx decl_rtl = DECL_RTL (decl_result);
4340 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4341 {
4342 do_clobber_return_reg (decl_rtl, NULL);
4343 }
4344 }
4345 }
4346
4347 static void
4348 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4349 {
4350 emit_insn (gen_rtx_USE (VOIDmode, reg));
4351 }
4352
4353 static void
4354 use_return_register (void)
4355 {
4356 diddle_return_value (do_use_return_reg, NULL);
4357 }
4358
4359 /* Possibly warn about unused parameters. */
4360 void
4361 do_warn_unused_parameter (tree fn)
4362 {
4363 tree decl;
4364
4365 for (decl = DECL_ARGUMENTS (fn);
4366 decl; decl = TREE_CHAIN (decl))
4367 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4368 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4369 && !TREE_NO_WARNING (decl))
4370 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4371 }
4372
4373 static GTY(()) rtx initial_trampoline;
4374
4375 /* Generate RTL for the end of the current function. */
4376
4377 void
4378 expand_function_end (void)
4379 {
4380 rtx clobber_after;
4381
4382 /* If arg_pointer_save_area was referenced only from a nested
4383 function, we will not have initialized it yet. Do that now. */
4384 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4385 get_arg_pointer_save_area ();
4386
4387 /* If we are doing stack checking and this function makes calls,
4388 do a stack probe at the start of the function to ensure we have enough
4389 space for another stack frame. */
4390 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4391 {
4392 rtx insn, seq;
4393
4394 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4395 if (CALL_P (insn))
4396 {
4397 start_sequence ();
4398 probe_stack_range (STACK_CHECK_PROTECT,
4399 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4400 seq = get_insns ();
4401 end_sequence ();
4402 emit_insn_before (seq, stack_check_probe_note);
4403 break;
4404 }
4405 }
4406
4407 /* End any sequences that failed to be closed due to syntax errors. */
4408 while (in_sequence_p ())
4409 end_sequence ();
4410
4411 clear_pending_stack_adjust ();
4412 do_pending_stack_adjust ();
4413
4414   /* Output a line number for the end of the function.
4415 SDB depends on this. */
4416 force_next_line_note ();
4417 set_curr_insn_source_location (input_location);
4418
4419 /* Before the return label (if any), clobber the return
4420 registers so that they are not propagated live to the rest of
4421 the function. This can only happen with functions that drop
4422 through; if there had been a return statement, there would
4423 have either been a return rtx, or a jump to the return label.
4424
4425      We delay actual code generation until after current_function_return_rtx
4426      is computed.  */
4427 clobber_after = get_last_insn ();
4428
4429 /* Output the label for the actual return from the function. */
4430 emit_label (return_label);
4431
4432 if (USING_SJLJ_EXCEPTIONS)
4433 {
4434 /* Let except.c know where it should emit the call to unregister
4435 the function context for sjlj exceptions. */
4436 if (flag_exceptions)
4437 sjlj_emit_function_exit_after (get_last_insn ());
4438 }
4439 else
4440 {
4441 /* We want to ensure that instructions that may trap are not
4442 moved into the epilogue by scheduling, because we don't
4443 always emit unwind information for the epilogue. */
4444 if (flag_non_call_exceptions)
4445 emit_insn (gen_blockage ());
4446 }
4447
4448 /* If this is an implementation of throw, do what's necessary to
4449 communicate between __builtin_eh_return and the epilogue. */
4450 expand_eh_return ();
4451
4452 /* If scalar return value was computed in a pseudo-reg, or was a named
4453 return value that got dumped to the stack, copy that to the hard
4454 return register. */
4455 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4456 {
4457 tree decl_result = DECL_RESULT (current_function_decl);
4458 rtx decl_rtl = DECL_RTL (decl_result);
4459
4460 if (REG_P (decl_rtl)
4461 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4462 : DECL_REGISTER (decl_result))
4463 {
4464 rtx real_decl_rtl = current_function_return_rtx;
4465
4466 /* This should be set in assign_parms. */
4467 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4468
4469 /* If this is a BLKmode structure being returned in registers,
4470 then use the mode computed in expand_return. Note that if
4471 decl_rtl is memory, then its mode may have been changed,
4472 but that current_function_return_rtx has not. */
4473 if (GET_MODE (real_decl_rtl) == BLKmode)
4474 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4475
4476 /* If a non-BLKmode return value should be padded at the least
4477 significant end of the register, shift it left by the appropriate
4478 amount. BLKmode results are handled using the group load/store
4479 machinery. */
4480 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4481 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4482 {
4483 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4484 REGNO (real_decl_rtl)),
4485 decl_rtl);
4486 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4487 }
4488 /* If a named return value dumped decl_return to memory, then
4489 we may need to re-do the PROMOTE_MODE signed/unsigned
4490 extension. */
4491 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4492 {
4493 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4494
4495 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4496 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4497 &unsignedp, 1);
4498
4499 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4500 }
4501 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4502 {
4503 /* If expand_function_start has created a PARALLEL for decl_rtl,
4504 move the result to the real return registers. Otherwise, do
4505 a group load from decl_rtl for a named return. */
4506 if (GET_CODE (decl_rtl) == PARALLEL)
4507 emit_group_move (real_decl_rtl, decl_rtl);
4508 else
4509 emit_group_load (real_decl_rtl, decl_rtl,
4510 TREE_TYPE (decl_result),
4511 int_size_in_bytes (TREE_TYPE (decl_result)));
4512 }
4513 /* In the case of complex integer modes smaller than a word, we'll
4514 need to generate some non-trivial bitfield insertions. Do that
4515 on a pseudo and not the hard register. */
4516 else if (GET_CODE (decl_rtl) == CONCAT
4517 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4518 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4519 {
4520 int old_generating_concat_p;
4521 rtx tmp;
4522
4523 old_generating_concat_p = generating_concat_p;
4524 generating_concat_p = 0;
4525 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4526 generating_concat_p = old_generating_concat_p;
4527
4528 emit_move_insn (tmp, decl_rtl);
4529 emit_move_insn (real_decl_rtl, tmp);
4530 }
4531 else
4532 emit_move_insn (real_decl_rtl, decl_rtl);
4533 }
4534 }
4535
4536 /* If returning a structure, arrange to return the address of the value
4537 in a place where debuggers expect to find it.
4538
4539 If returning a structure PCC style,
4540 the caller also depends on this value.
4541 And current_function_returns_pcc_struct is not necessarily set. */
4542 if (current_function_returns_struct
4543 || current_function_returns_pcc_struct)
4544 {
4545 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4546 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4547 rtx outgoing;
4548
4549 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4550 type = TREE_TYPE (type);
4551 else
4552 value_address = XEXP (value_address, 0);
4553
4554 outgoing = targetm.calls.function_value (build_pointer_type (type),
4555 current_function_decl, true);
4556
4557 /* Mark this as a function return value so integrate will delete the
4558 assignment and USE below when inlining this function. */
4559 REG_FUNCTION_VALUE_P (outgoing) = 1;
4560
4561 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4562 value_address = convert_memory_address (GET_MODE (outgoing),
4563 value_address);
4564
4565 emit_move_insn (outgoing, value_address);
4566
4567 /* Show return register used to hold result (in this case the address
4568        of the result).  */
4569 current_function_return_rtx = outgoing;
4570 }
4571
4572 /* Emit the actual code to clobber return register. */
4573 {
4574 rtx seq;
4575
4576 start_sequence ();
4577 clobber_return_register ();
4578 expand_naked_return ();
4579 seq = get_insns ();
4580 end_sequence ();
4581
4582 emit_insn_after (seq, clobber_after);
4583 }
4584
4585 /* Output the label for the naked return from the function. */
4586 emit_label (naked_return_label);
4587
4588 /* @@@ This is a kludge. We want to ensure that instructions that
4589 may trap are not moved into the epilogue by scheduling, because
4590 we don't always emit unwind information for the epilogue. */
4591 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4592 emit_insn (gen_blockage ());
4593
4594 /* If stack protection is enabled for this function, check the guard. */
4595 if (cfun->stack_protect_guard)
4596 stack_protect_epilogue ();
4597
4598 /* If we had calls to alloca, and this machine needs
4599 an accurate stack pointer to exit the function,
4600 insert some code to save and restore the stack pointer. */
4601 if (! EXIT_IGNORE_STACK
4602 && current_function_calls_alloca)
4603 {
4604 rtx tem = 0;
4605
4606 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4607 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4608 }
4609
4610 /* ??? This should no longer be necessary since stupid is no longer with
4611      us, but there are some parts of the compiler (e.g. reload_combine, and
4612      sh mach_dep_reorg) that still try to compute their own lifetime info
4613 instead of using the general framework. */
4614 use_return_register ();
4615 }
4616
4617 rtx
4618 get_arg_pointer_save_area (void)
4619 {
4620 rtx ret = arg_pointer_save_area;
4621
4622 if (! ret)
4623 {
4624 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4625 arg_pointer_save_area = ret;
4626 }
4627
4628 if (! cfun->arg_pointer_save_area_init)
4629 {
4630 rtx seq;
4631
4632 /* Save the arg pointer at the beginning of the function. The
4633 generated stack slot may not be a valid memory address, so we
4634 have to check it and fix it if necessary. */
4635 start_sequence ();
4636 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4637 seq = get_insns ();
4638 end_sequence ();
4639
4640 push_topmost_sequence ();
4641 emit_insn_after (seq, entry_of_function ());
4642 pop_topmost_sequence ();
4643 }
4644
4645 return ret;
4646 }
4647 \f
4648 /* Extend a vector that records the INSN_UIDs of INSNS
4649 (a list of one or more insns). */
4650
4651 static void
4652 record_insns (rtx insns, VEC(int,heap) **vecp)
4653 {
4654 rtx tmp;
4655
4656 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4657 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4658 }
4659
4660 /* Set the locator of the insn chain starting at INSN to LOC. */
4661 static void
4662 set_insn_locators (rtx insn, int loc)
4663 {
4664 while (insn != NULL_RTX)
4665 {
4666 if (INSN_P (insn))
4667 INSN_LOCATOR (insn) = loc;
4668 insn = NEXT_INSN (insn);
4669 }
4670 }
4671
4672 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4673 be running after reorg, SEQUENCE rtl is possible. */
4674
4675 static int
4676 contains (const_rtx insn, VEC(int,heap) **vec)
4677 {
4678 int i, j;
4679
4680 if (NONJUMP_INSN_P (insn)
4681 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4682 {
4683 int count = 0;
4684 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4685 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4686 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4687 == VEC_index (int, *vec, j))
4688 count++;
4689 return count;
4690 }
4691 else
4692 {
4693 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4694 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4695 return 1;
4696 }
4697 return 0;
4698 }
4699
4700 int
4701 prologue_epilogue_contains (const_rtx insn)
4702 {
4703 if (contains (insn, &prologue))
4704 return 1;
4705 if (contains (insn, &epilogue))
4706 return 1;
4707 return 0;
4708 }
4709
4710 int
4711 sibcall_epilogue_contains (const_rtx insn)
4712 {
4713 if (sibcall_epilogue)
4714 return contains (insn, &sibcall_epilogue);
4715 return 0;
4716 }
4717
4718 #ifdef HAVE_return
4719 /* Insert gen_return at the end of block BB. This also means updating
4720 block_for_insn appropriately. */
4721
4722 static void
4723 emit_return_into_block (basic_block bb)
4724 {
4725 emit_jump_insn_after (gen_return (), BB_END (bb));
4726 }
4727 #endif /* HAVE_return */
4728
4729 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4730
4731 /* These functions convert the epilogue into a variant that does not
4732 modify the stack pointer. This is used in cases where a function
4733 returns an object whose size is not known until it is computed.
4734 The called function leaves the object on the stack, leaves the
4735 stack depressed, and returns a pointer to the object.
4736
4737 What we need to do is track all modifications and references to the
4738 stack pointer, deleting the modifications and changing the
4739 references to point to the location the stack pointer would have
4740 pointed to had the modifications taken place.
4741
4742 These functions need to be portable so we need to make as few
4743 assumptions about the epilogue as we can. However, the epilogue
4744 basically contains three things: instructions to reset the stack
4745 pointer, instructions to reload registers, possibly including the
4746 frame pointer, and an instruction to return to the caller.
4747
4748 We must be sure of what a relevant epilogue insn is doing. We also
4749 make no attempt to validate the insns we make since if they are
4750 invalid, we probably can't do anything valid. The intent is that
4751 these routines get "smarter" as more and more machines start to use
4752 them and they try operating on different epilogues.
4753
4754 We use the following structure to track what the part of the
4755 epilogue that we've already processed has done. We keep two copies
4756 of the SP equivalence, one for use during the insn we are
4757 processing and one for use in the next insn. The difference is
4758 because one part of a PARALLEL may adjust SP and the other may use
4759 it. */
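/* As an example of this bookkeeping (an illustration only): given an
   epilogue of the form

       (set (reg sp) (plus (reg fp) (const_int 16)))
       (set (reg fp) (mem (reg sp)))
       (return)

   we record FP as SP_EQUIV_REG with SP_OFFSET 16, rewrite later references
   to SP in terms of FP+16, and defer the reload of FP (via EQUIV_REG_SRC)
   until just before the indirect jump that replaces the return.  */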
4760
4761 struct epi_info
4762 {
4763 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4764 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4765 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4766 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4767 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4768 should be set to once we no longer need
4769 its value. */
4770 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4771 for registers. */
4772 };
4773
4774 static void handle_epilogue_set (rtx, struct epi_info *);
4775 static void update_epilogue_consts (rtx, const_rtx, void *);
4776 static void emit_equiv_load (struct epi_info *);
4777
4778 /* Modify INSNS, a list of one or more insns that is part of the epilogue,
4779    so that it makes no stack pointer modifications.  Return the new list.  */
4780
4781 static rtx
4782 keep_stack_depressed (rtx insns)
4783 {
4784 int j;
4785 struct epi_info info;
4786 rtx insn, next;
4787
4788 /* If the epilogue is just a single instruction, it must be OK as is. */
4789 if (NEXT_INSN (insns) == NULL_RTX)
4790 return insns;
4791
4792 /* Otherwise, start a sequence, initialize the information we have, and
4793 process all the insns we were given. */
4794 start_sequence ();
4795
4796 info.sp_equiv_reg = stack_pointer_rtx;
4797 info.sp_offset = 0;
4798 info.equiv_reg_src = 0;
4799
4800 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4801 info.const_equiv[j] = 0;
4802
4803 insn = insns;
4804 next = NULL_RTX;
4805 while (insn != NULL_RTX)
4806 {
4807 next = NEXT_INSN (insn);
4808
4809 if (!INSN_P (insn))
4810 {
4811 add_insn (insn);
4812 insn = next;
4813 continue;
4814 }
4815
4816 /* If this insn references the register that SP is equivalent to and
4817 we have a pending load to that register, we must force out the load
4818 first and then indicate we no longer know what SP's equivalent is. */
4819 if (info.equiv_reg_src != 0
4820 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4821 {
4822 emit_equiv_load (&info);
4823 info.sp_equiv_reg = 0;
4824 }
4825
4826 info.new_sp_equiv_reg = info.sp_equiv_reg;
4827 info.new_sp_offset = info.sp_offset;
4828
4829 /* If this is a (RETURN) and the return address is on the stack,
4830 update the address and change to an indirect jump. */
4831 if (GET_CODE (PATTERN (insn)) == RETURN
4832 || (GET_CODE (PATTERN (insn)) == PARALLEL
4833 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4834 {
4835 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4836 rtx base = 0;
4837 HOST_WIDE_INT offset = 0;
4838 rtx jump_insn, jump_set;
4839
4840 /* If the return address is in a register, we can emit the insn
4841 unchanged. Otherwise, it must be a MEM and we see what the
4842 base register and offset are. In any case, we have to emit any
4843 pending load to the equivalent reg of SP, if any. */
4844 if (REG_P (retaddr))
4845 {
4846 emit_equiv_load (&info);
4847 add_insn (insn);
4848 insn = next;
4849 continue;
4850 }
4851 else
4852 {
4853 rtx ret_ptr;
4854 gcc_assert (MEM_P (retaddr));
4855
4856 ret_ptr = XEXP (retaddr, 0);
4857
4858 if (REG_P (ret_ptr))
4859 {
4860 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4861 offset = 0;
4862 }
4863 else
4864 {
4865 gcc_assert (GET_CODE (ret_ptr) == PLUS
4866 && REG_P (XEXP (ret_ptr, 0))
4867 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4868 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4869 offset = INTVAL (XEXP (ret_ptr, 1));
4870 }
4871 }
4872
4873 /* If the base of the location containing the return pointer
4874 is SP, we must update it with the replacement address. Otherwise,
4875 just build the necessary MEM. */
4876 retaddr = plus_constant (base, offset);
4877 if (base == stack_pointer_rtx)
4878 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4879 plus_constant (info.sp_equiv_reg,
4880 info.sp_offset));
4881
4882 retaddr = gen_rtx_MEM (Pmode, retaddr);
4883 MEM_NOTRAP_P (retaddr) = 1;
4884
4885 /* If there is a pending load to the equivalent register for SP
4886 and we reference that register, we must load our address into
4887 a scratch register and then do that load. */
4888 if (info.equiv_reg_src
4889 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4890 {
4891 unsigned int regno;
4892 rtx reg;
4893
4894 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4895 if (HARD_REGNO_MODE_OK (regno, Pmode)
4896 && !fixed_regs[regno]
4897 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4898 && !REGNO_REG_SET_P
4899 (DF_LR_IN (EXIT_BLOCK_PTR), regno)
4900 && !refers_to_regno_p (regno,
4901 end_hard_regno (Pmode, regno),
4902 info.equiv_reg_src, NULL)
4903 && info.const_equiv[regno] == 0)
4904 break;
4905
4906 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4907
4908 reg = gen_rtx_REG (Pmode, regno);
4909 emit_move_insn (reg, retaddr);
4910 retaddr = reg;
4911 }
4912
4913 emit_equiv_load (&info);
4914 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4915
4916 /* Show the SET in the above insn is a RETURN. */
4917 jump_set = single_set (jump_insn);
4918 gcc_assert (jump_set);
4919 SET_IS_RETURN_P (jump_set) = 1;
4920 }
4921
4922 /* If SP is not mentioned in the pattern and its equivalent register, if
4923 any, is not modified, just emit it. Otherwise, if neither is set,
4924 replace the reference to SP and emit the insn. If none of those are
4925 true, handle each SET individually. */
4926 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4927 && (info.sp_equiv_reg == stack_pointer_rtx
4928 || !reg_set_p (info.sp_equiv_reg, insn)))
4929 add_insn (insn);
4930 else if (! reg_set_p (stack_pointer_rtx, insn)
4931 && (info.sp_equiv_reg == stack_pointer_rtx
4932 || !reg_set_p (info.sp_equiv_reg, insn)))
4933 {
4934 int changed;
4935
4936 changed = validate_replace_rtx (stack_pointer_rtx,
4937 plus_constant (info.sp_equiv_reg,
4938 info.sp_offset),
4939 insn);
4940 gcc_assert (changed);
4941
4942 add_insn (insn);
4943 }
4944 else if (GET_CODE (PATTERN (insn)) == SET)
4945 handle_epilogue_set (PATTERN (insn), &info);
4946 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4947 {
4948 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4949 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4950 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4951 }
4952 else
4953 add_insn (insn);
4954
4955 info.sp_equiv_reg = info.new_sp_equiv_reg;
4956 info.sp_offset = info.new_sp_offset;
4957
4958 /* Now update any constants this insn sets. */
4959 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4960 insn = next;
4961 }
4962
4963 insns = get_insns ();
4964 end_sequence ();
4965 return insns;
4966 }
4967
4968 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4969 structure that contains information about what we've seen so far. We
4970 process this SET by either updating that data or by emitting one or
4971 more insns. */
4972
4973 static void
4974 handle_epilogue_set (rtx set, struct epi_info *p)
4975 {
4976 /* First handle the case where we are setting SP. Record what it is being
4977      set from, which we must be able to determine.  */
4978 if (reg_set_p (stack_pointer_rtx, set))
4979 {
4980 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4981
4982 if (GET_CODE (SET_SRC (set)) == PLUS)
4983 {
4984 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4985 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4986 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4987 else
4988 {
4989 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4990 && (REGNO (XEXP (SET_SRC (set), 1))
4991 < FIRST_PSEUDO_REGISTER)
4992 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4993 p->new_sp_offset
4994 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4995 }
4996 }
4997 else
4998 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4999
5000 /* If we are adjusting SP, we adjust from the old data. */
5001 if (p->new_sp_equiv_reg == stack_pointer_rtx)
5002 {
5003 p->new_sp_equiv_reg = p->sp_equiv_reg;
5004 p->new_sp_offset += p->sp_offset;
5005 }
5006
5007 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
5008
5009 return;
5010 }
5011
5012 /* Next handle the case where we are setting SP's equivalent
5013 register. We must not already have a value to set it to. We
5014 could update, but there seems little point in handling that case.
5015 Note that we have to allow for the case where we are setting the
5016 register set in the previous part of a PARALLEL inside a single
5017 insn. But use the old offset for any updates within this insn.
5018 We must allow for the case where the register is being set in a
5019      different (usually wider) mode than Pmode.  */
5020 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
5021 {
5022 gcc_assert (!p->equiv_reg_src
5023 && REG_P (p->new_sp_equiv_reg)
5024 && REG_P (SET_DEST (set))
5025 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
5026 <= BITS_PER_WORD)
5027 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
5028 p->equiv_reg_src
5029 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5030 plus_constant (p->sp_equiv_reg,
5031 p->sp_offset));
5032 }
5033
5034 /* Otherwise, replace any references to SP in the insn to its new value
5035 and emit the insn. */
5036 else
5037 {
5038 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5039 plus_constant (p->sp_equiv_reg,
5040 p->sp_offset));
5041 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5042 plus_constant (p->sp_equiv_reg,
5043 p->sp_offset));
5044 emit_insn (set);
5045 }
5046 }
5047
5048 /* Update the tracking information for registers set to constants. */
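/* For example, after (set (reg r) (const_int 16)) we record 16 in
   const_equiv[R], so that a later (set (reg sp) (plus (reg fp) (reg r)))
   can be handled as adjusting SP by a known constant.  */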
5049
5050 static void
5051 update_epilogue_consts (rtx dest, const_rtx x, void *data)
5052 {
5053 struct epi_info *p = (struct epi_info *) data;
5054 rtx new;
5055
5056 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5057 return;
5058
5059 /* If we are either clobbering a register or doing a partial set,
5060 show we don't know the value. */
5061 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5062 p->const_equiv[REGNO (dest)] = 0;
5063
5064 /* If we are setting it to a constant, record that constant. */
5065 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5066 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5067
5068 /* If this is a binary operation between a register we have been tracking
5069 and a constant, see if we can compute a new constant value. */
5070 else if (ARITHMETIC_P (SET_SRC (x))
5071 && REG_P (XEXP (SET_SRC (x), 0))
5072 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5073 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5074 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5075 && 0 != (new = simplify_binary_operation
5076 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5077 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5078 XEXP (SET_SRC (x), 1)))
5079 && GET_CODE (new) == CONST_INT)
5080 p->const_equiv[REGNO (dest)] = new;
5081
5082 /* Otherwise, we can't do anything with this value. */
5083 else
5084 p->const_equiv[REGNO (dest)] = 0;
5085 }
5086
5087 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5088
5089 static void
5090 emit_equiv_load (struct epi_info *p)
5091 {
5092 if (p->equiv_reg_src != 0)
5093 {
5094 rtx dest = p->sp_equiv_reg;
5095
5096 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5097 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5098 REGNO (p->sp_equiv_reg));
5099
5100 emit_move_insn (dest, p->equiv_reg_src);
5101 p->equiv_reg_src = 0;
5102 }
5103 }
5104 #endif
5105
5106 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5107 this into place with notes indicating where the prologue ends and where
5108 the epilogue begins. Update the basic block information when possible. */
5109
5110 static void
5111 thread_prologue_and_epilogue_insns (void)
5112 {
5113 int inserted = 0;
5114 edge e;
5115 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5116 rtx seq;
5117 #endif
5118 #if defined (HAVE_epilogue) || defined(HAVE_return)
5119 rtx epilogue_end = NULL_RTX;
5120 #endif
5121 edge_iterator ei;
5122
5123 #ifdef HAVE_prologue
5124 if (HAVE_prologue)
5125 {
5126 start_sequence ();
5127 seq = gen_prologue ();
5128 emit_insn (seq);
5129
5130 /* Insert an explicit USE for the frame pointer
5131 if the profiling is on and the frame pointer is required. */
5132 if (current_function_profile && frame_pointer_needed)
5133 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
5134
5135 /* Retain a map of the prologue insns. */
5136 record_insns (seq, &prologue);
5137 emit_note (NOTE_INSN_PROLOGUE_END);
5138
5139 #ifndef PROFILE_BEFORE_PROLOGUE
5140 /* Ensure that instructions are not moved into the prologue when
5141 profiling is on. The call to the profiling routine can be
5142 emitted within the live range of a call-clobbered register. */
5143 if (current_function_profile)
5144 emit_insn (gen_blockage ());
5145 #endif
5146
5147 seq = get_insns ();
5148 end_sequence ();
5149 set_insn_locators (seq, prologue_locator);
5150
5151 /* Can't deal with multiple successors of the entry block
5152 at the moment. Function should always have at least one
5153 entry point. */
5154 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5155
5156 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5157 inserted = 1;
5158 }
5159 #endif
5160
5161 /* If the exit block has no non-fake predecessors, we don't need
5162 an epilogue. */
5163 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5164 if ((e->flags & EDGE_FAKE) == 0)
5165 break;
5166 if (e == NULL)
5167 goto epilogue_done;
5168
5169 #ifdef HAVE_return
5170 if (optimize && HAVE_return)
5171 {
5172 /* If we're allowed to generate a simple return instruction,
5173 then by definition we don't need a full epilogue. Examine
5174 the block that falls through to EXIT. If it does not
5175 contain any code, examine its predecessors and try to
5176 emit (conditional) return instructions. */
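      /* For instance, an unconditional jump into that empty block is
         replaced outright by a return insn, while a conditional jump becomes
         a conditional return where redirect_jump allows it; each affected
         edge is then redirected straight to the exit block.  */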
5177
5178 basic_block last;
5179 rtx label;
5180
5181 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5182 if (e->flags & EDGE_FALLTHRU)
5183 break;
5184 if (e == NULL)
5185 goto epilogue_done;
5186 last = e->src;
5187
5188 /* Verify that there are no active instructions in the last block. */
5189 label = BB_END (last);
5190 while (label && !LABEL_P (label))
5191 {
5192 if (active_insn_p (label))
5193 break;
5194 label = PREV_INSN (label);
5195 }
5196
5197 if (BB_HEAD (last) == label && LABEL_P (label))
5198 {
5199 edge_iterator ei2;
5200
5201 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5202 {
5203 basic_block bb = e->src;
5204 rtx jump;
5205
5206 if (bb == ENTRY_BLOCK_PTR)
5207 {
5208 ei_next (&ei2);
5209 continue;
5210 }
5211
5212 jump = BB_END (bb);
5213 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5214 {
5215 ei_next (&ei2);
5216 continue;
5217 }
5218
5219 /* If we have an unconditional jump, we can replace that
5220 with a simple return instruction. */
5221 if (simplejump_p (jump))
5222 {
5223 emit_return_into_block (bb);
5224 delete_insn (jump);
5225 }
5226
5227 /* If we have a conditional jump, we can try to replace
5228 that with a conditional return instruction. */
5229 else if (condjump_p (jump))
5230 {
5231 if (! redirect_jump (jump, 0, 0))
5232 {
5233 ei_next (&ei2);
5234 continue;
5235 }
5236
5237 /* If this block has only one successor, it both jumps
5238 and falls through to the fallthru block, so we can't
5239 delete the edge. */
5240 if (single_succ_p (bb))
5241 {
5242 ei_next (&ei2);
5243 continue;
5244 }
5245 }
5246 else
5247 {
5248 ei_next (&ei2);
5249 continue;
5250 }
5251
5252 /* Fix up the CFG for the successful change we just made. */
5253 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5254 }
5255
5256 /* Emit a return insn for the exit fallthru block. Whether
5257 this is still reachable will be determined later. */
5258
5259 emit_barrier_after (BB_END (last));
5260 emit_return_into_block (last);
5261 epilogue_end = BB_END (last);
5262 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5263 goto epilogue_done;
5264 }
5265 }
5266 #endif
5267 /* Find the edge that falls through to EXIT. Other edges may exist
5268 due to RETURN instructions, but those don't need epilogues.
5269 There really shouldn't be a mixture -- either all should have
5270 been converted or none, but handle whichever we find. */
5271
5272 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5273 if (e->flags & EDGE_FALLTHRU)
5274 break;
5275 if (e == NULL)
5276 goto epilogue_done;
5277
5278 #ifdef HAVE_epilogue
5279 if (HAVE_epilogue)
5280 {
5281 start_sequence ();
5282 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5283
5284 seq = gen_epilogue ();
5285
5286 #ifdef INCOMING_RETURN_ADDR_RTX
5287 /* If this function returns with the stack depressed and we can support
5288 it, massage the epilogue to actually do that. */
5289 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5290 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5291 seq = keep_stack_depressed (seq);
5292 #endif
5293
5294 emit_jump_insn (seq);
5295
5296 /* Retain a map of the epilogue insns. */
5297 record_insns (seq, &epilogue);
5298 set_insn_locators (seq, epilogue_locator);
5299
5300 seq = get_insns ();
5301 end_sequence ();
5302
5303 insert_insn_on_edge (seq, e);
5304 inserted = 1;
5305 }
5306 else
5307 #endif
5308 {
5309 basic_block cur_bb;
5310
5311 if (! next_active_insn (BB_END (e->src)))
5312 goto epilogue_done;
5313 /* We have a fall-through edge to the exit block, the source is not
5314 at the end of the function, and there will be an assembler epilogue
5315 at the end of the function.
5316 We can't use force_nonfallthru here, because that would try to
5317 use return. Inserting a jump 'by hand' is extremely messy, so
5318 we take advantage of cfg_layout_finalize using
5319 fixup_fallthru_exit_predecessor. */
5320 cfg_layout_initialize (0);
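/* Chain the basic blocks in their existing order through their AUX
fields; cfg_layout_finalize then keeps that order and, as noted
above, lets fixup_fallthru_exit_predecessor add the jump we need.  */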
5321 FOR_EACH_BB (cur_bb)
5322 if (cur_bb->index >= NUM_FIXED_BLOCKS
5323 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5324 cur_bb->aux = cur_bb->next_bb;
5325 cfg_layout_finalize ();
5326 }
5327 epilogue_done:
5328
5329 if (inserted)
5330 {
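/* commit_edge_insertions materializes the sequences queued above with
insert_insn_on_edge, splitting edges and creating new basic blocks
where necessary.  */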
5331 commit_edge_insertions ();
5332
5333 /* The epilogue insns we inserted may cause the exit edge to no longer
5334 be fallthru. */
5335 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5336 {
5337 if (((e->flags & EDGE_FALLTHRU) != 0)
5338 && returnjump_p (BB_END (e->src)))
5339 e->flags &= ~EDGE_FALLTHRU;
5340 }
5341 }
5342
5343 #ifdef HAVE_sibcall_epilogue
5344 /* Emit sibling epilogues before any sibling call sites. */
5345 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5346 {
5347 basic_block bb = e->src;
5348 rtx insn = BB_END (bb);
5349
5350 if (!CALL_P (insn)
5351 || ! SIBLING_CALL_P (insn))
5352 {
5353 ei_next (&ei);
5354 continue;
5355 }
5356
5357 start_sequence ();
5358 emit_insn (gen_sibcall_epilogue ());
5359 seq = get_insns ();
5360 end_sequence ();
5361
5362 /* Retain a map of the epilogue insns. Used in life analysis to
5363 avoid getting rid of sibcall epilogue insns. Do this before we
5364 actually emit the sequence. */
5365 record_insns (seq, &sibcall_epilogue);
5366 set_insn_locators (seq, epilogue_locator);
5367
5368 emit_insn_before (seq, insn);
5369 ei_next (&ei);
5370 }
5371 #endif
5372
5373 #ifdef HAVE_epilogue
5374 if (epilogue_end)
5375 {
5376 rtx insn, next;
5377
5378 /* Move any NOTE_INSN_FUNCTION_BEG notes that ended up after the
5379 start of the epilogue back before it.  There is no need, however,
5380 to insist that such a note exist; it is simply moved when found,
5381 since NOTE_INSN_FUNCTION_BEG can be relevant for debug
5382 info generation. */
5383 for (insn = epilogue_end; insn; insn = next)
5384 {
5385 next = NEXT_INSN (insn);
5386 if (NOTE_P (insn)
5387 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5388 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5389 }
5390 }
5391 #endif
5392
5393 /* Threading the prologue and epilogue changes the artificial refs
5394 in the entry and exit blocks. */
5395 epilogue_completed = 1;
5396 df_update_entry_exit_and_calls ();
5397 }
5398
5399 /* Reposition the prologue-end and epilogue-begin notes after instruction
5400 scheduling and delayed branch scheduling. */
5401
5402 void
5403 reposition_prologue_and_epilogue_notes (void)
5404 {
5405 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5406 rtx insn, last, note;
5407 int len;
5408
5409 if ((len = VEC_length (int, prologue)) > 0)
5410 {
5411 last = 0, note = 0;
5412
5413 /* Scan from the beginning until we reach the last prologue insn.
5414 We apparently can't depend on basic_block_{head,end} after
5415 reorg has run. */
5416 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5417 {
5418 if (NOTE_P (insn))
5419 {
5420 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5421 note = insn;
5422 }
5423 else if (contains (insn, &prologue))
5424 {
5425 last = insn;
5426 if (--len == 0)
5427 break;
5428 }
5429 }
5430
5431 if (last)
5432 {
5433 /* Find the prologue-end note if we haven't already, and
5434 move it to just after the last prologue insn. */
5435 if (note == 0)
5436 {
5437 for (note = last; (note = NEXT_INSN (note));)
5438 if (NOTE_P (note)
5439 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5440 break;
5441 }
5442
5443 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5444 if (LABEL_P (last))
5445 last = NEXT_INSN (last);
5446 reorder_insns (note, note, last);
5447 }
5448 }
5449
5450 if ((len = VEC_length (int, epilogue)) > 0)
5451 {
5452 last = 0, note = 0;
5453
5454 /* Scan from the end until we reach the first epilogue insn.
5455 We apparently can't depend on basic_block_{head,end} after
5456 reorg has run. */
5457 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5458 {
5459 if (NOTE_P (insn))
5460 {
5461 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5462 note = insn;
5463 }
5464 else if (contains (insn, &epilogue))
5465 {
5466 last = insn;
5467 if (--len == 0)
5468 break;
5469 }
5470 }
5471
5472 if (last)
5473 {
5474 /* Find the epilogue-begin note if we haven't already, and
5475 move it to just before the first epilogue insn. */
5476 if (note == 0)
5477 {
5478 for (note = insn; (note = PREV_INSN (note));)
5479 if (NOTE_P (note)
5480 && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
5481 break;
5482 }
5483
5484 if (PREV_INSN (last) != note)
5485 reorder_insns (note, note, PREV_INSN (last));
5486 }
5487 }
5488 #endif /* HAVE_prologue or HAVE_epilogue */
5489 }
5490
5491 /* Returns the name of the current function. */
5492 const char *
5493 current_function_name (void)
5494 {
5495 return lang_hooks.decl_printable_name (cfun->decl, 2);
5496 }
5497
5498 /* Returns the raw (mangled) name of the current function. */
5499 const char *
5500 current_function_assembler_name (void)
5501 {
5502 return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
5503 }
5504 \f
5505
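/* Decide whether the current function may be treated as a leaf
function by the target: when optimizing, it must make no calls and
use only the registers the target's LEAF_REGISTERS macro makes
available to leaf functions.  */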
5506 static unsigned int
5507 rest_of_handle_check_leaf_regs (void)
5508 {
5509 #ifdef LEAF_REGISTERS
5510 current_function_uses_only_leaf_regs
5511 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5512 #endif
5513 return 0;
5514 }
5515
5516 /* Insert a TYPE into the used types hash table of CFUN. */
5517 static void
5518 used_types_insert_helper (tree type, struct function *func)
5519 {
5520 if (type != NULL && func != NULL)
5521 {
5522 void **slot;
5523
5524 if (func->used_types_hash == NULL)
5525 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5526 htab_eq_pointer, NULL);
5527 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5528 if (*slot == NULL)
5529 *slot = type;
5530 }
5531 }
5532
5533 /* Given a type, insert it into the used-types hash table of cfun. */
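/* For instance, given a variable declared as "struct s *a[10]", the
loop below strips the array and pointer types, so what actually gets
recorded is the main variant of "struct s" itself.  */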
5534 void
5535 used_types_insert (tree t)
5536 {
5537 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5538 t = TREE_TYPE (t);
5539 t = TYPE_MAIN_VARIANT (t);
5540 if (debug_info_level > DINFO_LEVEL_NONE)
5541 used_types_insert_helper (t, cfun);
5542 }
5543
5544 struct rtl_opt_pass pass_leaf_regs =
5545 {
5546 {
5547 RTL_PASS,
5548 NULL, /* name */
5549 NULL, /* gate */
5550 rest_of_handle_check_leaf_regs, /* execute */
5551 NULL, /* sub */
5552 NULL, /* next */
5553 0, /* static_pass_number */
5554 0, /* tv_id */
5555 0, /* properties_required */
5556 0, /* properties_provided */
5557 0, /* properties_destroyed */
5558 0, /* todo_flags_start */
5559 0 /* todo_flags_finish */
5560 }
5561 };
5562
5563 static unsigned int
5564 rest_of_handle_thread_prologue_and_epilogue (void)
5565 {
5566 if (optimize)
5567 cleanup_cfg (CLEANUP_EXPENSIVE);
5568 /* On some machines, the prologue and epilogue code, or parts thereof,
5569 can be represented as RTL. Doing so lets us schedule insns between
5570 it and the rest of the code and also allows delayed branch
5571 scheduling to operate in the epilogue. */
5572
5573 thread_prologue_and_epilogue_insns ();
5574 return 0;
5575 }
5576
5577 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5578 {
5579 {
5580 RTL_PASS,
5581 "pro_and_epilogue", /* name */
5582 NULL, /* gate */
5583 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5584 NULL, /* sub */
5585 NULL, /* next */
5586 0, /* static_pass_number */
5587 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5588 0, /* properties_required */
5589 0, /* properties_provided */
5590 0, /* properties_destroyed */
5591 TODO_verify_flow, /* todo_flags_start */
5592 TODO_dump_func |
5593 TODO_df_verify |
5594 TODO_df_finish | TODO_verify_rtl_sharing |
5595 TODO_ggc_collect /* todo_flags_finish */
5596 }
5597 };
5598 \f
5599
5600 /* This mini-pass fixes fall-out from SSA in asm statements that have
5601 in-out constraints. Say you start with
5602
5603 orig = inout;
5604 asm ("": "+mr" (inout));
5605 use (orig);
5606
5607 which is transformed very early to use explicit output and match operands:
5608
5609 orig = inout;
5610 asm ("": "=mr" (inout) : "0" (inout));
5611 use (orig);
5612
5613 Or, after SSA and copyprop,
5614
5615 asm ("": "=mr" (inout_2) : "0" (inout_1));
5616 use (inout_1);
5617
5618 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5619 they represent two separate values, so they will get different pseudo
5620 registers during expansion. Then, since the two operands need to match
5621 per the constraints, but use different pseudo registers, reload can
5622 only register a reload for these operands. But reloads can only be
5623 satisfied by hardregs, not by memory, so we need a register for this
5624 reload, just because we are presented with non-matching operands.
5625 So, even though we allow memory for this operand, no memory can be
5626 used for it, just because the two operands don't match. This can
5627 cause reload failures on register-starved targets.
5628
5629 So it's a symptom of reload not being able to use memory for reloads
5630 or, alternatively, it's also a symptom of both operands not coming into
5631 reload as matching (in which case the pseudo could go to memory just
5632 fine, as the alternative allows it, and no reload would be necessary).
5633 We fix the latter problem here, by transforming
5634
5635 asm ("": "=mr" (inout_2) : "0" (inout_1));
5636
5637 back to
5638
5639 inout_2 = inout_1;
5640 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
5641
5642 static void
5643 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5644 {
5645 int i;
5646 bool changed = false;
5647 rtx op = SET_SRC (p_sets[0]);
5648 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5649 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
5650 bool *output_matched = alloca (noutputs * sizeof (bool));
5651
5652 memset (output_matched, 0, noutputs * sizeof (bool));
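/* output_matched[J] is set once output J has been used as the
replacement for some matched input, so that the loop below never
rewrites the same input twice.  */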
5653 for (i = 0; i < ninputs; i++)
5654 {
5655 rtx input, output, insns;
5656 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5657 char *end;
5658 int match, j;
5659
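/* Only matching constraints are of interest here: they consist of a
decimal operand number such as "0".  For any other constraint
(e.g. "r" or "m") strtoul parses nothing and the operand is skipped.  */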
5660 match = strtoul (constraint, &end, 10);
5661 if (end == constraint)
5662 continue;
5663
5664 gcc_assert (match < noutputs);
5665 output = SET_DEST (p_sets[match]);
5666 input = RTVEC_ELT (inputs, i);
5667 /* Only do the transformation for pseudos. */
5668 if (! REG_P (output)
5669 || rtx_equal_p (output, input)
5670 || (GET_MODE (input) != VOIDmode
5671 && GET_MODE (input) != GET_MODE (output)))
5672 continue;
5673
5674 /* We can't do anything if the output is also used as input,
5675 as we're going to overwrite it. */
5676 for (j = 0; j < ninputs; j++)
5677 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5678 break;
5679 if (j != ninputs)
5680 continue;
5681
5682 /* Avoid changing the same input several times. For
5683 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5684 change `in' only once (to out1), rather than changing it
5685 first to out1 and afterwards to out2. */
5686 if (i > 0)
5687 {
5688 for (j = 0; j < noutputs; j++)
5689 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5690 break;
5691 if (j != noutputs)
5692 continue;
5693 }
5694 output_matched[match] = true;
5695
5696 start_sequence ();
5697 emit_move_insn (output, input);
5698 insns = get_insns ();
5699 end_sequence ();
5700 emit_insn_before (insns, insn);
5701
5702 /* Now replace all mentions of the input with output. We can't
5703 just replace the occurrence in inputs[i], as the register might
5704 also be used in some other input (or even in an address of an
5705 output), which would mean possibly increasing the number of
5706 inputs by one (namely 'output' in addition), which might pose
5707 too complicated a problem for reload to solve. E.g. this situation:
5708
5709 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5710
5711 Here 'input' is used in two occurrences as input (once for the
5712 input operand, once for the address in the second output operand).
5713 If we replaced only the occurrence of the input operand (to
5714 make the matching) we would be left with this:
5715
5716 output = input
5717 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5718
5719 Now we suddenly have two different input values (containing the same
5720 value, but different pseudos) where we formerly had only one.
5721 With more complicated asms this might lead to reload failures
5722 which wouldn't have happened without this pass. So, iterate over
5723 all operands and replace all occurrences of the register used. */
5724 for (j = 0; j < noutputs; j++)
5725 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
5726 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
5727 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
5728 input, output);
5729 for (j = 0; j < ninputs; j++)
5730 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
5731 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
5732 input, output);
5733
5734 changed = true;
5735 }
5736
5737 if (changed)
5738 df_insn_rescan (insn);
5739 }
5740
5741 static unsigned
5742 rest_of_match_asm_constraints (void)
5743 {
5744 basic_block bb;
5745 rtx insn, pat, *p_sets;
5746 int noutputs;
5747
5748 if (!cfun->has_asm_statement)
5749 return 0;
5750
5751 df_set_flags (DF_DEFER_INSN_RESCAN);
5752 FOR_EACH_BB (bb)
5753 {
5754 FOR_BB_INSNS (bb, insn)
5755 {
5756 if (!INSN_P (insn))
5757 continue;
5758
5759 pat = PATTERN (insn);
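/* An asm with more than one output, or with clobbers, is wrapped in a
PARALLEL whose leading elements are the output SETs; a single-output
asm without clobbers is a plain SET.  The check below then weeds out
patterns that are not asms at all.  */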
5760 if (GET_CODE (pat) == PARALLEL)
5761 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
5762 else if (GET_CODE (pat) == SET)
5763 p_sets = &PATTERN (insn), noutputs = 1;
5764 else
5765 continue;
5766
5767 if (GET_CODE (*p_sets) == SET
5768 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
5769 match_asm_constraints_1 (insn, p_sets, noutputs);
5770 }
5771 }
5772
5773 return TODO_df_finish;
5774 }
5775
5776 struct rtl_opt_pass pass_match_asm_constraints =
5777 {
5778 {
5779 RTL_PASS,
5780 "asmcons", /* name */
5781 NULL, /* gate */
5782 rest_of_match_asm_constraints, /* execute */
5783 NULL, /* sub */
5784 NULL, /* next */
5785 0, /* static_pass_number */
5786 0, /* tv_id */
5787 0, /* properties_required */
5788 0, /* properties_provided */
5789 0, /* properties_destroyed */
5790 0, /* todo_flags_start */
5791 TODO_dump_func /* todo_flags_finish */
5792 }
5793 };
5794
5795
5796 #include "gt-function.h"