function.c (allocate_struct_function): Do not set current_function_returns_pointer.
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "toplev.h"
55 #include "hashtab.h"
56 #include "ggc.h"
57 #include "tm_p.h"
58 #include "integrate.h"
59 #include "langhooks.h"
60 #include "target.h"
61 #include "cfglayout.h"
62 #include "tree-gimple.h"
63 #include "tree-pass.h"
64 #include "predict.h"
65 #include "df.h"
66 #include "timevar.h"
67 #include "vecprim.h"
68
69 #ifndef LOCAL_ALIGNMENT
70 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
71 #endif
72
73 #ifndef STACK_ALIGNMENT_NEEDED
74 #define STACK_ALIGNMENT_NEEDED 1
75 #endif
76
77 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
78
79 /* Some systems use __main in a way incompatible with its use in gcc; in these
80 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
81 give the same symbol without quotes for an alternative entry point. You
82 must define both, or neither. */
83 #ifndef NAME__MAIN
84 #define NAME__MAIN "__main"
85 #endif
86
87 /* Round a value down to the nearest multiple of the required alignment
88    that does not exceed it.  Avoid using division in case the value is
89 negative. Assume the alignment is a power of two. */
90 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
91
92 /* Similar, but round to the next highest integer that meets the
93 alignment. */
94 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
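
/* For example, with a power-of-two ALIGN these reduce to plain bit
   masking, which stays well defined for negative VALUEs:

     CEIL_ROUND (13, 8)   == (13 + 7) & ~7 == 16
     FLOOR_ROUND (13, 8)  == 13 & ~7       == 8
     FLOOR_ROUND (-13, 8) == -13 & ~7      == -16

   FLOOR_ROUND thus always moves toward more negative values, which is
   what the FRAME_GROWS_DOWNWARD code below relies on.  */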
95
96 /* Nonzero if function being compiled doesn't contain any calls
97 (ignoring the prologue and epilogue). This is set prior to
98 local register allocation and is valid for the remaining
99 compiler passes. */
100 int current_function_is_leaf;
101
102 /* Nonzero if function being compiled doesn't modify the stack pointer
103 (ignoring the prologue and epilogue). This is only valid after
104 pass_stack_ptr_mod has run. */
105 int current_function_sp_is_unchanging;
106
107 /* Nonzero if the function being compiled is a leaf function which only
108 uses leaf registers. This is valid after reload (specifically after
109 sched2) and is useful only if the port defines LEAF_REGISTERS. */
110 int current_function_uses_only_leaf_regs;
111
112 /* Nonzero once virtual register instantiation has been done.
113 assign_stack_local uses frame_pointer_rtx when this is nonzero.
114 calls.c:emit_library_call_value_1 uses it to set up
115 post-instantiation libcalls. */
116 int virtuals_instantiated;
117
118 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
119 static GTY(()) int funcdef_no;
120
121 /* These variables hold pointers to functions to create and destroy
122 target specific, per-function data structures. */
123 struct machine_function * (*init_machine_status) (void);
124
125 /* The currently compiled function. */
126 struct function *cfun = 0;
127
128 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
129 static VEC(int,heap) *prologue;
130 static VEC(int,heap) *epilogue;
131
132 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
133 in this function. */
134 static VEC(int,heap) *sibcall_epilogue;
135 \f
136 /* In order to evaluate some expressions, such as function calls returning
137 structures in memory, we need to temporarily allocate stack locations.
138 We record each allocated temporary in the following structure.
139
140 Associated with each temporary slot is a nesting level. When we pop up
141 one level, all temporaries associated with the previous level are freed.
142 Normally, all temporaries are freed after the execution of the statement
143 in which they were created. However, if we are inside a ({...}) grouping,
144 the result may be in a temporary and hence must be preserved. If the
145 result could be in a temporary, we preserve it if we can determine which
146 one it is in. If we cannot determine which temporary may contain the
147 result, all temporaries are preserved. A temporary is preserved by
148 pretending it was allocated at the previous nesting level.
149
150 Automatic variables are also assigned temporary slots, at the nesting
151 level where they are defined. They are marked as "kept" so that
152 free_temp_slots will not free them. */
153
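/* As an illustrative sketch (not code from this file), the nesting
   discipline described above corresponds to a usage pattern like the
   following, where T and RESULT stand for local rtx variables and
   RESULT holds whatever rtx carries the statement's value:

     push_temp_slots ();
     t = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
     ... expand the statement using T ...
     preserve_temp_slots (result);
     pop_temp_slots ();

   Slots still at the inner level when pop_temp_slots runs become
   available for reuse; preserved or "kept" slots survive.  */
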
154 struct temp_slot GTY(())
155 {
156 /* Points to next temporary slot. */
157 struct temp_slot *next;
158 /* Points to previous temporary slot. */
159 struct temp_slot *prev;
160
161 /* The rtx used to reference the slot. */
162 rtx slot;
163 /* The rtx used to represent the address if not the address of the
164 slot above. May be an EXPR_LIST if multiple addresses exist. */
165 rtx address;
166 /* The alignment (in bits) of the slot. */
167 unsigned int align;
168 /* The size, in units, of the slot. */
169 HOST_WIDE_INT size;
170 /* The type of the object in the slot, or zero if it doesn't correspond
171 to a type. We use this to determine whether a slot can be reused.
172 It can be reused if objects of the type of the new slot will always
173 conflict with objects of the type of the old slot. */
174 tree type;
175 /* Nonzero if this temporary is currently in use. */
176 char in_use;
177 /* Nonzero if this temporary has its address taken. */
178 char addr_taken;
179 /* Nesting level at which this slot is being used. */
180 int level;
181 /* Nonzero if this should survive a call to free_temp_slots. */
182 int keep;
183 /* The offset of the slot from the frame_pointer, including extra space
184 for alignment. This info is for combine_temp_slots. */
185 HOST_WIDE_INT base_offset;
186 /* The size of the slot, including extra space for alignment. This
187 info is for combine_temp_slots. */
188 HOST_WIDE_INT full_size;
189 };
190 \f
191 /* Forward declarations. */
192
193 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
194 struct function *);
195 static struct temp_slot *find_temp_slot_from_address (rtx);
196 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
197 static void pad_below (struct args_size *, enum machine_mode, tree);
198 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
199 static int all_blocks (tree, tree *);
200 static tree *get_block_vector (tree, int *);
201 extern tree debug_find_var_in_block_tree (tree, tree);
202 /* We always define `record_insns' even if it's not used so that we
203 can always export `prologue_epilogue_contains'. */
204 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
205 static int contains (const_rtx, VEC(int,heap) **);
206 #ifdef HAVE_return
207 static void emit_return_into_block (basic_block);
208 #endif
209 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
210 static rtx keep_stack_depressed (rtx);
211 #endif
212 static void prepare_function_start (tree);
213 static void do_clobber_return_reg (rtx, void *);
214 static void do_use_return_reg (rtx, void *);
215 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
216 \f
217 /* Pointer to chain of `struct function' for containing functions. */
218 struct function *outer_function_chain;
219
220 /* Given a function decl for a containing function,
221 return the `struct function' for it. */
222
223 struct function *
224 find_function_data (tree decl)
225 {
226 struct function *p;
227
228 for (p = outer_function_chain; p; p = p->outer)
229 if (p->decl == decl)
230 return p;
231
232 gcc_unreachable ();
233 }
234
235 /* Save the current context for compilation of a nested function.
236 This is called from language-specific code. The caller should use
237 the enter_nested langhook to save any language-specific state,
238 since this function knows only about language-independent
239 variables. */
240
241 void
242 push_function_context_to (tree context ATTRIBUTE_UNUSED)
243 {
244 struct function *p;
245
246 if (cfun == 0)
247 init_dummy_function_start ();
248 p = cfun;
249
250 p->outer = outer_function_chain;
251 outer_function_chain = p;
252
253 lang_hooks.function.enter_nested (p);
254
255 cfun = 0;
256 }
257
258 void
259 push_function_context (void)
260 {
261 push_function_context_to (current_function_decl);
262 }
263
264 /* Restore the last saved context, at the end of a nested function.
265 This function is called from language-specific code. */
266
267 void
268 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
269 {
270 struct function *p = outer_function_chain;
271
272 cfun = p;
273 outer_function_chain = p->outer;
274
275 current_function_decl = p->decl;
276
277 lang_hooks.function.leave_nested (p);
278
279 /* Reset variables that have known state during rtx generation. */
280 virtuals_instantiated = 0;
281 generating_concat_p = 1;
282 }
283
284 void
285 pop_function_context (void)
286 {
287 pop_function_context_from (current_function_decl);
288 }
289
290 /* Clear out all parts of the state in F that can safely be discarded
291 after the function has been parsed, but not compiled, to let
292 garbage collection reclaim the memory. */
293
294 void
295 free_after_parsing (struct function *f)
296 {
297 /* f->expr->forced_labels is used by code generation. */
298 /* f->emit->regno_reg_rtx is used by code generation. */
299 /* f->varasm is used by code generation. */
300 /* f->eh->eh_return_stub_label is used by code generation. */
301
302 lang_hooks.function.final (f);
303 }
304
305 /* Clear out all parts of the state in F that can safely be discarded
306 after the function has been compiled, to let garbage collection
307 reclaim the memory. */
308
309 void
310 free_after_compilation (struct function *f)
311 {
312 VEC_free (int, heap, prologue);
313 VEC_free (int, heap, epilogue);
314 VEC_free (int, heap, sibcall_epilogue);
315
316 f->eh = NULL;
317 f->expr = NULL;
318 f->emit = NULL;
319 f->varasm = NULL;
320 f->machine = NULL;
321 f->cfg = NULL;
322
323 f->x_avail_temp_slots = NULL;
324 f->x_used_temp_slots = NULL;
325 f->arg_offset_rtx = NULL;
326 f->return_rtx = NULL;
327 f->internal_arg_pointer = NULL;
328 f->x_nonlocal_goto_handler_labels = NULL;
329 f->x_return_label = NULL;
330 f->x_naked_return_label = NULL;
331 f->x_stack_slot_list = NULL;
332 f->x_stack_check_probe_note = NULL;
333 f->x_arg_pointer_save_area = NULL;
334 f->x_parm_birth_insn = NULL;
335 f->epilogue_delay_list = NULL;
336 }
337 \f
338 /* Allocate fixed slots in the stack frame of the current function. */
339
340 /* Return size needed for stack frame based on slots so far allocated in
341 function F.
342 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
343 the caller may have to do that. */
344
345 static HOST_WIDE_INT
346 get_func_frame_size (struct function *f)
347 {
348 if (FRAME_GROWS_DOWNWARD)
349 return -f->x_frame_offset;
350 else
351 return f->x_frame_offset;
352 }
353
354 /* Return size needed for stack frame based on slots so far allocated.
355 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
356 the caller may have to do that. */
357
358 HOST_WIDE_INT
359 get_frame_size (void)
360 {
361 return get_func_frame_size (cfun);
362 }
363
364 /* Issue an error message and return TRUE if frame OFFSET overflows in
365 the signed target pointer arithmetic for function FUNC. Otherwise
366 return FALSE. */
367
368 bool
369 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
370 {
371 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
372
373 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
374 /* Leave room for the fixed part of the frame. */
375 - 64 * UNITS_PER_WORD)
376 {
377 error ("%Jtotal size of local objects too large", func);
378 return TRUE;
379 }
380
381 return FALSE;
382 }
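
/* For instance, assuming a 32-bit Pmode and 4-byte words, the check
   above rejects any frame larger than 2^31 - 256 bytes, keeping 64
   words of headroom for the fixed part of the frame.  */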
383
384 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
385 with machine mode MODE.
386
387 ALIGN controls the amount of alignment for the address of the slot:
388 0 means according to MODE,
389 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
390 -2 means use BITS_PER_UNIT,
391 positive specifies alignment boundary in bits.
392
393 We do not round to stack_boundary here.
394
395 FUNCTION specifies the function to allocate in. */
396
397 static rtx
398 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
399 struct function *function)
400 {
401 rtx x, addr;
402 int bigend_correction = 0;
403 unsigned int alignment;
404 int frame_off, frame_alignment, frame_phase;
405
406 if (align == 0)
407 {
408 tree type;
409
410 if (mode == BLKmode)
411 alignment = BIGGEST_ALIGNMENT;
412 else
413 alignment = GET_MODE_ALIGNMENT (mode);
414
415 /* Allow the target to (possibly) increase the alignment of this
416 stack slot. */
417 type = lang_hooks.types.type_for_mode (mode, 0);
418 if (type)
419 alignment = LOCAL_ALIGNMENT (type, alignment);
420
421 alignment /= BITS_PER_UNIT;
422 }
423 else if (align == -1)
424 {
425 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
426 size = CEIL_ROUND (size, alignment);
427 }
428 else if (align == -2)
429 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
430 else
431 alignment = align / BITS_PER_UNIT;
432
433 if (FRAME_GROWS_DOWNWARD)
434 function->x_frame_offset -= size;
435
436 /* Cap the requested alignment at the preferred stack boundary; we
437 cannot do better than that. */
437 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
438 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
439
440 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
441 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
442
443 /* Calculate how many bytes the start of local variables is off from
444 stack alignment. */
445 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
446 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
447 frame_phase = frame_off ? frame_alignment - frame_off : 0;
448
449 /* Round the frame offset to the specified alignment. The default is
450 to always honor requests to align the stack but a port may choose to
451 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
452 if (STACK_ALIGNMENT_NEEDED
453 || mode != BLKmode
454 || size != 0)
455 {
456 /* We must be careful here, since FRAME_OFFSET might be negative and
457 division with a negative dividend isn't as well defined as we might
458 like. So we instead assume that ALIGNMENT is a power of two and
459 use logical operations which are unambiguous. */
460 if (FRAME_GROWS_DOWNWARD)
461 function->x_frame_offset
462 = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
463 (unsigned HOST_WIDE_INT) alignment)
464 + frame_phase);
465 else
466 function->x_frame_offset
467 = (CEIL_ROUND (function->x_frame_offset - frame_phase,
468 (unsigned HOST_WIDE_INT) alignment)
469 + frame_phase);
470 }
471
472 /* On a big-endian machine, if we are allocating more space than we will use,
473 use the least significant bytes of those that are allocated. */
474 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
475 bigend_correction = size - GET_MODE_SIZE (mode);
476
477 /* If we have already instantiated virtual registers, return the actual
478 address relative to the frame pointer. */
479 if (function == cfun && virtuals_instantiated)
480 addr = plus_constant (frame_pointer_rtx,
481 trunc_int_for_mode
482 (frame_offset + bigend_correction
483 + STARTING_FRAME_OFFSET, Pmode));
484 else
485 addr = plus_constant (virtual_stack_vars_rtx,
486 trunc_int_for_mode
487 (function->x_frame_offset + bigend_correction,
488 Pmode));
489
490 if (!FRAME_GROWS_DOWNWARD)
491 function->x_frame_offset += size;
492
493 x = gen_rtx_MEM (mode, addr);
494 MEM_NOTRAP_P (x) = 1;
495
496 function->x_stack_slot_list
497 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
498
499 if (frame_offset_overflow (function->x_frame_offset, function->decl))
500 function->x_frame_offset = 0;
501
502 return x;
503 }
504
505 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
506 current function. */
507
508 rtx
509 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
510 {
511 return assign_stack_local_1 (mode, size, align, cfun);
512 }
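
/* Illustrative calls showing the ALIGN convention documented at
   assign_stack_local_1 (these examples are not taken from this file;
   SIZE and LEN stand for whatever byte counts the caller needs):

     assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
	slot aligned according to SImode, possibly raised by
	LOCAL_ALIGNMENT

     assign_stack_local (BLKmode, size, -1);
	SIZE rounded up to a multiple of BIGGEST_ALIGNMENT and the
	slot aligned to it

     assign_stack_local (BLKmode, len, -2);
	byte-aligned slot of LEN bytes, no extra padding  */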
513
514 \f
515 /* Removes temporary slot TEMP from LIST. */
516
517 static void
518 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
519 {
520 if (temp->next)
521 temp->next->prev = temp->prev;
522 if (temp->prev)
523 temp->prev->next = temp->next;
524 else
525 *list = temp->next;
526
527 temp->prev = temp->next = NULL;
528 }
529
530 /* Inserts temporary slot TEMP to LIST. */
531
532 static void
533 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
534 {
535 temp->next = *list;
536 if (*list)
537 (*list)->prev = temp;
538 temp->prev = NULL;
539 *list = temp;
540 }
541
542 /* Returns the list of used temp slots at LEVEL. */
543
544 static struct temp_slot **
545 temp_slots_at_level (int level)
546 {
547 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
548 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
549
550 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
551 }
552
553 /* Returns the maximal temporary slot level. */
554
555 static int
556 max_slot_level (void)
557 {
558 if (!used_temp_slots)
559 return -1;
560
561 return VEC_length (temp_slot_p, used_temp_slots) - 1;
562 }
563
564 /* Moves temporary slot TEMP to LEVEL. */
565
566 static void
567 move_slot_to_level (struct temp_slot *temp, int level)
568 {
569 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
570 insert_slot_to_list (temp, temp_slots_at_level (level));
571 temp->level = level;
572 }
573
574 /* Make temporary slot TEMP available. */
575
576 static void
577 make_slot_available (struct temp_slot *temp)
578 {
579 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
580 insert_slot_to_list (temp, &avail_temp_slots);
581 temp->in_use = 0;
582 temp->level = -1;
583 }
584 \f
585 /* Allocate a temporary stack slot and record it for possible later
586 reuse.
587
588 MODE is the machine mode to be given to the returned rtx.
589
590 SIZE is the size in units of the space required. We do no rounding here
591 since assign_stack_local will do any required rounding.
592
593 KEEP is 1 if this slot is to be retained after a call to
594 free_temp_slots. Automatic variables for a block are allocated
595 with this flag. KEEP values of 2 or 3 were needed respectively
596 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
597 or for SAVE_EXPRs, but they are now unused.
598
599 TYPE is the type that will be used for the stack slot. */
600
601 rtx
602 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
603 int keep, tree type)
604 {
605 unsigned int align;
606 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
607 rtx slot;
608
609 /* If SIZE is -1 it means that somebody tried to allocate a temporary
610 of a variable size. */
611 gcc_assert (size != -1);
612
613 /* These are now unused. */
614 gcc_assert (keep <= 1);
615
616 if (mode == BLKmode)
617 align = BIGGEST_ALIGNMENT;
618 else
619 align = GET_MODE_ALIGNMENT (mode);
620
621 if (! type)
622 type = lang_hooks.types.type_for_mode (mode, 0);
623
624 if (type)
625 align = LOCAL_ALIGNMENT (type, align);
626
627 /* Try to find an available, already-allocated temporary of the proper
628 mode which meets the size and alignment requirements. Choose the
629 smallest one with the closest alignment.
630
631 If assign_stack_temp is called outside of the tree->rtl expansion,
632 we cannot reuse the stack slots (that may still refer to
633 VIRTUAL_STACK_VARS_REGNUM). */
634 if (!virtuals_instantiated)
635 {
636 for (p = avail_temp_slots; p; p = p->next)
637 {
638 if (p->align >= align && p->size >= size
639 && GET_MODE (p->slot) == mode
640 && objects_must_conflict_p (p->type, type)
641 && (best_p == 0 || best_p->size > p->size
642 || (best_p->size == p->size && best_p->align > p->align)))
643 {
644 if (p->align == align && p->size == size)
645 {
646 selected = p;
647 cut_slot_from_list (selected, &avail_temp_slots);
648 best_p = 0;
649 break;
650 }
651 best_p = p;
652 }
653 }
654 }
655
656 /* Make our best, if any, the one to use. */
657 if (best_p)
658 {
659 selected = best_p;
660 cut_slot_from_list (selected, &avail_temp_slots);
661
662 /* If there are enough aligned bytes left over, make them into a new
663 temp_slot so that the extra bytes don't get wasted. Do this only
664 for BLKmode slots, so that we can be sure of the alignment. */
665 if (GET_MODE (best_p->slot) == BLKmode)
666 {
667 int alignment = best_p->align / BITS_PER_UNIT;
668 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
669
670 if (best_p->size - rounded_size >= alignment)
671 {
672 p = ggc_alloc (sizeof (struct temp_slot));
673 p->in_use = p->addr_taken = 0;
674 p->size = best_p->size - rounded_size;
675 p->base_offset = best_p->base_offset + rounded_size;
676 p->full_size = best_p->full_size - rounded_size;
677 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
678 p->align = best_p->align;
679 p->address = 0;
680 p->type = best_p->type;
681 insert_slot_to_list (p, &avail_temp_slots);
682
683 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
684 stack_slot_list);
685
686 best_p->size = rounded_size;
687 best_p->full_size = rounded_size;
688 }
689 }
690 }
691
692 /* If we still didn't find one, make a new temporary. */
693 if (selected == 0)
694 {
695 HOST_WIDE_INT frame_offset_old = frame_offset;
696
697 p = ggc_alloc (sizeof (struct temp_slot));
698
699 /* We are passing an explicit alignment request to assign_stack_local.
700 One side effect of that is assign_stack_local will not round SIZE
701 to ensure the frame offset remains suitably aligned.
702
703 So for requests which depended on the rounding of SIZE, we go ahead
704 and round it now. We also make sure ALIGNMENT is at least
705 BIGGEST_ALIGNMENT. */
706 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
707 p->slot = assign_stack_local (mode,
708 (mode == BLKmode
709 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
710 : size),
711 align);
712
713 p->align = align;
714
715 /* The following slot size computation is necessary because we don't
716 know the actual size of the temporary slot until assign_stack_local
717 has performed all the frame alignment and size rounding for the
718 requested temporary. Note that extra space added for alignment
719 can be either above or below this stack slot depending on which
720 way the frame grows. We include the extra space if and only if it
721 is above this slot. */
722 if (FRAME_GROWS_DOWNWARD)
723 p->size = frame_offset_old - frame_offset;
724 else
725 p->size = size;
726
727 /* Now define the fields used by combine_temp_slots. */
728 if (FRAME_GROWS_DOWNWARD)
729 {
730 p->base_offset = frame_offset;
731 p->full_size = frame_offset_old - frame_offset;
732 }
733 else
734 {
735 p->base_offset = frame_offset_old;
736 p->full_size = frame_offset - frame_offset_old;
737 }
738 p->address = 0;
739
740 selected = p;
741 }
742
743 p = selected;
744 p->in_use = 1;
745 p->addr_taken = 0;
746 p->type = type;
747 p->level = temp_slot_level;
748 p->keep = keep;
749
750 pp = temp_slots_at_level (p->level);
751 insert_slot_to_list (p, pp);
752
753 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
754 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
755 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
756
757 /* If we know the alias set for the memory that will be used, use
758 it. If there's no TYPE, then we don't know anything about the
759 alias set for the memory. */
760 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
761 set_mem_align (slot, align);
762
763 /* If a type is specified, set the relevant flags. */
764 if (type != 0)
765 {
766 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
767 MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
768 || TREE_CODE (type) == COMPLEX_TYPE));
769 }
770 MEM_NOTRAP_P (slot) = 1;
771
772 return slot;
773 }
774
775 /* Allocate a temporary stack slot and record it for possible later
776 reuse. The first three arguments are the same as in the preceding
777 function. */
777
778 rtx
779 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
780 {
781 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
782 }
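
/* For example (illustrative only; T1 and T2 are placeholder local
   names), two consecutive statements that each need a DImode scratch
   may end up sharing one stack slot:

     t1 = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
     ... emit code using T1 ...
     free_temp_slots ();
     t2 = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);

   T2 may address the same bytes as T1, but it is a fresh MEM rtx, so
   MEM flags set on T1 are not inherited by accident.  */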
783 \f
784 /* Assign a temporary.
785 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
786 and so it should be used in error messages. In either case, we
787 allocate a temporary of the given type.
788 KEEP is as for assign_stack_temp.
789 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
790 it is 0 if a register is OK.
791 DONT_PROMOTE is 1 if we should not promote values in register
792 to wider modes. */
793
794 rtx
795 assign_temp (tree type_or_decl, int keep, int memory_required,
796 int dont_promote ATTRIBUTE_UNUSED)
797 {
798 tree type, decl;
799 enum machine_mode mode;
800 #ifdef PROMOTE_MODE
801 int unsignedp;
802 #endif
803
804 if (DECL_P (type_or_decl))
805 decl = type_or_decl, type = TREE_TYPE (decl);
806 else
807 decl = NULL, type = type_or_decl;
808
809 mode = TYPE_MODE (type);
810 #ifdef PROMOTE_MODE
811 unsignedp = TYPE_UNSIGNED (type);
812 #endif
813
814 if (mode == BLKmode || memory_required)
815 {
816 HOST_WIDE_INT size = int_size_in_bytes (type);
817 rtx tmp;
818
819 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
820 problems with allocating the stack space. */
821 if (size == 0)
822 size = 1;
823
824 /* Unfortunately, we don't yet know how to allocate variable-sized
825 temporaries. However, sometimes we can find a fixed upper limit on
826 the size, so try that instead. */
827 else if (size == -1)
828 size = max_int_size_in_bytes (type);
829
830 /* The size of the temporary may be too large to fit into an integer. */
831 /* ??? Not sure this should happen except for user silliness, so limit
832 this to things that aren't compiler-generated temporaries. The
833 rest of the time we'll die in assign_stack_temp_for_type. */
834 if (decl && size == -1
835 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
836 {
837 error ("size of variable %q+D is too large", decl);
838 size = 1;
839 }
840
841 tmp = assign_stack_temp_for_type (mode, size, keep, type);
842 return tmp;
843 }
844
845 #ifdef PROMOTE_MODE
846 if (! dont_promote)
847 mode = promote_mode (type, mode, &unsignedp, 0);
848 #endif
849
850 return gen_reg_rtx (mode);
851 }
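
/* Two illustrative calls (STRUCT_TYPE and INT_TYPE are placeholder
   tree nodes, not names from this file):

     assign_temp (struct_type, 0, 1, 0);
	BLKmode, so this returns a stack MEM

     assign_temp (int_type, 0, 0, 0);
	returns a fresh pseudo REG, possibly in a mode widened by
	PROMOTE_MODE  */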
852 \f
853 /* Combine temporary stack slots which are adjacent on the stack.
854
855 This allows for better use of already allocated stack space. This is only
856 done for BLKmode slots because we can be sure that we won't have alignment
857 problems in this case. */
858
859 static void
860 combine_temp_slots (void)
861 {
862 struct temp_slot *p, *q, *next, *next_q;
863 int num_slots;
864
865 /* We can't combine slots, because the information about which slot
866 is in which alias set will be lost. */
867 if (flag_strict_aliasing)
868 return;
869
870 /* If there are a lot of temp slots, don't do anything unless
871 high levels of optimization are enabled. */
872 if (! flag_expensive_optimizations)
873 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
874 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
875 return;
876
877 for (p = avail_temp_slots; p; p = next)
878 {
879 int delete_p = 0;
880
881 next = p->next;
882
883 if (GET_MODE (p->slot) != BLKmode)
884 continue;
885
886 for (q = p->next; q; q = next_q)
887 {
888 int delete_q = 0;
889
890 next_q = q->next;
891
892 if (GET_MODE (q->slot) != BLKmode)
893 continue;
894
895 if (p->base_offset + p->full_size == q->base_offset)
896 {
897 /* Q comes after P; combine Q into P. */
898 p->size += q->size;
899 p->full_size += q->full_size;
900 delete_q = 1;
901 }
902 else if (q->base_offset + q->full_size == p->base_offset)
903 {
904 /* P comes after Q; combine P into Q. */
905 q->size += p->size;
906 q->full_size += p->full_size;
907 delete_p = 1;
908 break;
909 }
910 if (delete_q)
911 cut_slot_from_list (q, &avail_temp_slots);
912 }
913
914 /* Either delete P or advance past it. */
915 if (delete_p)
916 cut_slot_from_list (p, &avail_temp_slots);
917 }
918 }
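
/* E.g. two freed BLKmode slots with (base_offset, full_size) of
   (16, 8) and (24, 8) are adjacent, so the loop above merges them
   into a single slot at (16, 16) that can satisfy a later 16-byte
   request.  */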
919 \f
920 /* Find the temp slot corresponding to the object at address X. */
921
922 static struct temp_slot *
923 find_temp_slot_from_address (rtx x)
924 {
925 struct temp_slot *p;
926 rtx next;
927 int i;
928
929 for (i = max_slot_level (); i >= 0; i--)
930 for (p = *temp_slots_at_level (i); p; p = p->next)
931 {
932 if (XEXP (p->slot, 0) == x
933 || p->address == x
934 || (GET_CODE (x) == PLUS
935 && XEXP (x, 0) == virtual_stack_vars_rtx
936 && GET_CODE (XEXP (x, 1)) == CONST_INT
937 && INTVAL (XEXP (x, 1)) >= p->base_offset
938 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
939 return p;
940
941 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
942 for (next = p->address; next; next = XEXP (next, 1))
943 if (XEXP (next, 0) == x)
944 return p;
945 }
946
947 /* If we have a sum involving a register, see if it points to a temp
948 slot. */
949 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
950 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
951 return p;
952 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
953 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
954 return p;
955
956 return 0;
957 }
958
959 /* Indicate that NEW is an alternate way of referring to the temp slot
960 that previously was known by OLD. */
961
962 void
963 update_temp_slot_address (rtx old, rtx new)
964 {
965 struct temp_slot *p;
966
967 if (rtx_equal_p (old, new))
968 return;
969
970 p = find_temp_slot_from_address (old);
971
972 /* If we didn't find one, see if OLD is a PLUS. If so, and NEW
973 is a register, see if one operand of the PLUS is a temporary
974 location. If so, NEW points into it. Otherwise, if both OLD and
975 NEW are a PLUS and there is a register in common between them,
976 try a recursive call on those values. */
977 if (p == 0)
978 {
979 if (GET_CODE (old) != PLUS)
980 return;
981
982 if (REG_P (new))
983 {
984 update_temp_slot_address (XEXP (old, 0), new);
985 update_temp_slot_address (XEXP (old, 1), new);
986 return;
987 }
988 else if (GET_CODE (new) != PLUS)
989 return;
990
991 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
992 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
993 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
994 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
995 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
996 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
997 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
998 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
999
1000 return;
1001 }
1002
1003 /* Otherwise add an alias for the temp's address. */
1004 else if (p->address == 0)
1005 p->address = new;
1006 else
1007 {
1008 if (GET_CODE (p->address) != EXPR_LIST)
1009 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1010
1011 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1012 }
1013 }
1014
1015 /* If X could be a reference to a temporary slot, mark the fact that its
1016 address was taken. */
1017
1018 void
1019 mark_temp_addr_taken (rtx x)
1020 {
1021 struct temp_slot *p;
1022
1023 if (x == 0)
1024 return;
1025
1026 /* If X is not in memory or is at a constant address, it cannot be in
1027 a temporary slot. */
1028 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1029 return;
1030
1031 p = find_temp_slot_from_address (XEXP (x, 0));
1032 if (p != 0)
1033 p->addr_taken = 1;
1034 }
1035
1036 /* If X could be a reference to a temporary slot, mark that slot as
1037 belonging to the level one higher than the current level. If X
1038 matched one of our slots, just mark that one. Otherwise, we can't
1039 easily predict which it is, so upgrade all of them. Kept slots
1040 need not be touched.
1041
1042 This is called when an ({...}) construct occurs and a statement
1043 returns a value in memory. */
1044
1045 void
1046 preserve_temp_slots (rtx x)
1047 {
1048 struct temp_slot *p = 0, *next;
1049
1050 /* If there is no result, we still might have some objects whose addresses
1051 were taken, so we need to make sure they stay around. */
1052 if (x == 0)
1053 {
1054 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1055 {
1056 next = p->next;
1057
1058 if (p->addr_taken)
1059 move_slot_to_level (p, temp_slot_level - 1);
1060 }
1061
1062 return;
1063 }
1064
1065 /* If X is a register that is being used as a pointer, see if we have
1066 a temporary slot we know it points to. To be consistent with
1067 the code below, we really should preserve all non-kept slots
1068 if we can't find a match, but that seems to be much too costly. */
1069 if (REG_P (x) && REG_POINTER (x))
1070 p = find_temp_slot_from_address (x);
1071
1072 /* If X is not in memory or is at a constant address, it cannot be in
1073 a temporary slot, but it can contain something whose address was
1074 taken. */
1075 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1076 {
1077 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1078 {
1079 next = p->next;
1080
1081 if (p->addr_taken)
1082 move_slot_to_level (p, temp_slot_level - 1);
1083 }
1084
1085 return;
1086 }
1087
1088 /* First see if we can find a match. */
1089 if (p == 0)
1090 p = find_temp_slot_from_address (XEXP (x, 0));
1091
1092 if (p != 0)
1093 {
1094 /* Move everything at our level whose address was taken to our new
1095 level in case we used its address. */
1096 struct temp_slot *q;
1097
1098 if (p->level == temp_slot_level)
1099 {
1100 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1101 {
1102 next = q->next;
1103
1104 if (p != q && q->addr_taken)
1105 move_slot_to_level (q, temp_slot_level - 1);
1106 }
1107
1108 move_slot_to_level (p, temp_slot_level - 1);
1109 p->addr_taken = 0;
1110 }
1111 return;
1112 }
1113
1114 /* Otherwise, preserve all non-kept slots at this level. */
1115 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1116 {
1117 next = p->next;
1118
1119 if (!p->keep)
1120 move_slot_to_level (p, temp_slot_level - 1);
1121 }
1122 }
1123
1124 /* Free all temporaries used so far. This is normally called at the
1125 end of generating code for a statement. */
1126
1127 void
1128 free_temp_slots (void)
1129 {
1130 struct temp_slot *p, *next;
1131
1132 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1133 {
1134 next = p->next;
1135
1136 if (!p->keep)
1137 make_slot_available (p);
1138 }
1139
1140 combine_temp_slots ();
1141 }
1142
1143 /* Push deeper into the nesting level for stack temporaries. */
1144
1145 void
1146 push_temp_slots (void)
1147 {
1148 temp_slot_level++;
1149 }
1150
1151 /* Pop a temporary nesting level. All slots in use in the current level
1152 are freed. */
1153
1154 void
1155 pop_temp_slots (void)
1156 {
1157 struct temp_slot *p, *next;
1158
1159 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1160 {
1161 next = p->next;
1162 make_slot_available (p);
1163 }
1164
1165 combine_temp_slots ();
1166
1167 temp_slot_level--;
1168 }
1169
1170 /* Initialize temporary slots. */
1171
1172 void
1173 init_temp_slots (void)
1174 {
1175 /* We have not allocated any temporaries yet. */
1176 avail_temp_slots = 0;
1177 used_temp_slots = 0;
1178 temp_slot_level = 0;
1179 }
1180 \f
1181 /* These routines are responsible for converting virtual register references
1182 to the actual hard register references once RTL generation is complete.
1183
1184 The following four variables are used for communication between the
1185 routines. They contain the offsets of the virtual registers from their
1186 respective hard registers. */
1187
1188 static int in_arg_offset;
1189 static int var_offset;
1190 static int dynamic_offset;
1191 static int out_arg_offset;
1192 static int cfa_offset;
1193
1194 /* On most machines, the stack pointer register is equivalent to the bottom
1195 of the stack. */
1196
1197 #ifndef STACK_POINTER_OFFSET
1198 #define STACK_POINTER_OFFSET 0
1199 #endif
1200
1201 /* If not defined, pick an appropriate default for the offset of dynamically
1202 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1203 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1204
1205 #ifndef STACK_DYNAMIC_OFFSET
1206
1207 /* The bottom of the stack points to the actual arguments. If
1208 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1209 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1210 stack space for register parameters is not pushed by the caller but is
1211 rather part of the fixed stack area and hence not included in
1212 `current_function_outgoing_args_size'. Nevertheless, we must allow
1213 for it when allocating stack dynamic objects. */
1214
1215 #if defined(REG_PARM_STACK_SPACE)
1216 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1217 ((ACCUMULATE_OUTGOING_ARGS \
1218 ? (current_function_outgoing_args_size \
1219 + (OUTGOING_REG_PARM_STACK_SPACE ? 0 : REG_PARM_STACK_SPACE (FNDECL))) \
1220 : 0) + (STACK_POINTER_OFFSET))
1221 #else
1222 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1223 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1224 + (STACK_POINTER_OFFSET))
1225 #endif
1226 #endif
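
/* For example, on a target where ACCUMULATE_OUTGOING_ARGS holds,
   current_function_outgoing_args_size is 32, STACK_POINTER_OFFSET is
   8, and no REG_PARM_STACK_SPACE applies (all assumed numbers),
   dynamically allocated objects start 40 bytes above the stack
   pointer.  */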
1227
1228 \f
1229 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1230 is a virtual register, return the equivalent hard register and set the
1231 offset indirectly through the pointer. Otherwise, return 0. */
1232
1233 static rtx
1234 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1235 {
1236 rtx new;
1237 HOST_WIDE_INT offset;
1238
1239 if (x == virtual_incoming_args_rtx)
1240 new = arg_pointer_rtx, offset = in_arg_offset;
1241 else if (x == virtual_stack_vars_rtx)
1242 new = frame_pointer_rtx, offset = var_offset;
1243 else if (x == virtual_stack_dynamic_rtx)
1244 new = stack_pointer_rtx, offset = dynamic_offset;
1245 else if (x == virtual_outgoing_args_rtx)
1246 new = stack_pointer_rtx, offset = out_arg_offset;
1247 else if (x == virtual_cfa_rtx)
1248 {
1249 #ifdef FRAME_POINTER_CFA_OFFSET
1250 new = frame_pointer_rtx;
1251 #else
1252 new = arg_pointer_rtx;
1253 #endif
1254 offset = cfa_offset;
1255 }
1256 else
1257 return NULL_RTX;
1258
1259 *poffset = offset;
1260 return new;
1261 }
1262
1263 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1264 Instantiate any virtual registers present inside of *LOC. The expression
1265 is simplified, as much as possible, but is not to be considered "valid"
1266 in any sense implied by the target. If any change is made, set CHANGED
1267 to true. */
1268
1269 static int
1270 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1271 {
1272 HOST_WIDE_INT offset;
1273 bool *changed = (bool *) data;
1274 rtx x, new;
1275
1276 x = *loc;
1277 if (x == 0)
1278 return 0;
1279
1280 switch (GET_CODE (x))
1281 {
1282 case REG:
1283 new = instantiate_new_reg (x, &offset);
1284 if (new)
1285 {
1286 *loc = plus_constant (new, offset);
1287 if (changed)
1288 *changed = true;
1289 }
1290 return -1;
1291
1292 case PLUS:
1293 new = instantiate_new_reg (XEXP (x, 0), &offset);
1294 if (new)
1295 {
1296 new = plus_constant (new, offset);
1297 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1298 if (changed)
1299 *changed = true;
1300 return -1;
1301 }
1302
1303 /* FIXME -- from old code */
1304 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1305 we can commute the PLUS and SUBREG because pointers into the
1306 frame are well-behaved. */
1307 break;
1308
1309 default:
1310 break;
1311 }
1312
1313 return 0;
1314 }
1315
1316 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1317 matches the predicate for insn CODE operand OPERAND. */
1318
1319 static int
1320 safe_insn_predicate (int code, int operand, rtx x)
1321 {
1322 const struct insn_operand_data *op_data;
1323
1324 if (code < 0)
1325 return true;
1326
1327 op_data = &insn_data[code].operand[operand];
1328 if (op_data->predicate == NULL)
1329 return true;
1330
1331 return op_data->predicate (x, op_data->mode);
1332 }
1333
1334 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1335 registers present inside of INSN. The result will be a valid insn. */
1336
1337 static void
1338 instantiate_virtual_regs_in_insn (rtx insn)
1339 {
1340 HOST_WIDE_INT offset;
1341 int insn_code, i;
1342 bool any_change = false;
1343 rtx set, new, x, seq;
1344
1345 /* There are some special cases to be handled first. */
1346 set = single_set (insn);
1347 if (set)
1348 {
1349 /* We're allowed to assign to a virtual register. This is interpreted
1350 to mean that the underlying register gets assigned the inverse
1351 transformation. This is used, for example, in the handling of
1352 non-local gotos. */
1353 new = instantiate_new_reg (SET_DEST (set), &offset);
1354 if (new)
1355 {
1356 start_sequence ();
1357
1358 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1359 x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1360 GEN_INT (-offset));
1361 x = force_operand (x, new);
1362 if (x != new)
1363 emit_move_insn (new, x);
1364
1365 seq = get_insns ();
1366 end_sequence ();
1367
1368 emit_insn_before (seq, insn);
1369 delete_insn (insn);
1370 return;
1371 }
1372
1373 /* Handle a straight copy from a virtual register by generating a
1374 new add insn. The difference between this and falling through
1375 to the generic case is avoiding a new pseudo and eliminating a
1376 move insn in the initial rtl stream. */
1377 new = instantiate_new_reg (SET_SRC (set), &offset);
1378 if (new && offset != 0
1379 && REG_P (SET_DEST (set))
1380 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1381 {
1382 start_sequence ();
1383
1384 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1385 new, GEN_INT (offset), SET_DEST (set),
1386 1, OPTAB_LIB_WIDEN);
1387 if (x != SET_DEST (set))
1388 emit_move_insn (SET_DEST (set), x);
1389
1390 seq = get_insns ();
1391 end_sequence ();
1392
1393 emit_insn_before (seq, insn);
1394 delete_insn (insn);
1395 return;
1396 }
1397
1398 extract_insn (insn);
1399 insn_code = INSN_CODE (insn);
1400
1401 /* Handle a plus involving a virtual register by determining if the
1402 operands remain valid if they're modified in place. */
1403 if (GET_CODE (SET_SRC (set)) == PLUS
1404 && recog_data.n_operands >= 3
1405 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1406 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1407 && GET_CODE (recog_data.operand[2]) == CONST_INT
1408 && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
1409 {
1410 offset += INTVAL (recog_data.operand[2]);
1411
1412 /* If the sum is zero, then replace with a plain move. */
1413 if (offset == 0
1414 && REG_P (SET_DEST (set))
1415 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1416 {
1417 start_sequence ();
1418 emit_move_insn (SET_DEST (set), new);
1419 seq = get_insns ();
1420 end_sequence ();
1421
1422 emit_insn_before (seq, insn);
1423 delete_insn (insn);
1424 return;
1425 }
1426
1427 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1428
1429 /* Using validate_change and apply_change_group here leaves
1430 recog_data in an invalid state. Since we know exactly what
1431 we want to check, do those two by hand. */
1432 if (safe_insn_predicate (insn_code, 1, new)
1433 && safe_insn_predicate (insn_code, 2, x))
1434 {
1435 *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1436 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1437 any_change = true;
1438
1439 /* Fall through into the regular operand fixup loop in
1440 order to take care of operands other than 1 and 2. */
1441 }
1442 }
1443 }
1444 else
1445 {
1446 extract_insn (insn);
1447 insn_code = INSN_CODE (insn);
1448 }
1449
1450 /* In the general case, we expect virtual registers to appear only in
1451 operands, and then only as either bare registers or inside memories. */
1452 for (i = 0; i < recog_data.n_operands; ++i)
1453 {
1454 x = recog_data.operand[i];
1455 switch (GET_CODE (x))
1456 {
1457 case MEM:
1458 {
1459 rtx addr = XEXP (x, 0);
1460 bool changed = false;
1461
1462 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1463 if (!changed)
1464 continue;
1465
1466 start_sequence ();
1467 x = replace_equiv_address (x, addr);
1468 seq = get_insns ();
1469 end_sequence ();
1470 if (seq)
1471 emit_insn_before (seq, insn);
1472 }
1473 break;
1474
1475 case REG:
1476 new = instantiate_new_reg (x, &offset);
1477 if (new == NULL)
1478 continue;
1479 if (offset == 0)
1480 x = new;
1481 else
1482 {
1483 start_sequence ();
1484
1485 /* Careful, special mode predicates may have stuff in
1486 insn_data[insn_code].operand[i].mode that isn't useful
1487 to us for computing a new value. */
1488 /* ??? Recognize address_operand and/or "p" constraints
1489 to see if (plus new offset) is a valid address before we put
1490 this through expand_simple_binop. */
1491 x = expand_simple_binop (GET_MODE (x), PLUS, new,
1492 GEN_INT (offset), NULL_RTX,
1493 1, OPTAB_LIB_WIDEN);
1494 seq = get_insns ();
1495 end_sequence ();
1496 emit_insn_before (seq, insn);
1497 }
1498 break;
1499
1500 case SUBREG:
1501 new = instantiate_new_reg (SUBREG_REG (x), &offset);
1502 if (new == NULL)
1503 continue;
1504 if (offset != 0)
1505 {
1506 start_sequence ();
1507 new = expand_simple_binop (GET_MODE (new), PLUS, new,
1508 GEN_INT (offset), NULL_RTX,
1509 1, OPTAB_LIB_WIDEN);
1510 seq = get_insns ();
1511 end_sequence ();
1512 emit_insn_before (seq, insn);
1513 }
1514 x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1515 GET_MODE (new), SUBREG_BYTE (x));
1516 break;
1517
1518 default:
1519 continue;
1520 }
1521
1522 /* At this point, X contains the new value for the operand.
1523 Validate the new value vs the insn predicate. Note that
1524 asm insns will have insn_code -1 here. */
1525 if (!safe_insn_predicate (insn_code, i, x))
1526 {
1527 start_sequence ();
1528 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1529 seq = get_insns ();
1530 end_sequence ();
1531 if (seq)
1532 emit_insn_before (seq, insn);
1533 }
1534
1535 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1536 any_change = true;
1537 }
1538
1539 if (any_change)
1540 {
1541 /* Propagate operand changes into the duplicates. */
1542 for (i = 0; i < recog_data.n_dups; ++i)
1543 *recog_data.dup_loc[i]
1544 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1545
1546 /* Force re-recognition of the instruction for validation. */
1547 INSN_CODE (insn) = -1;
1548 }
1549
1550 if (asm_noperands (PATTERN (insn)) >= 0)
1551 {
1552 if (!check_asm_operands (PATTERN (insn)))
1553 {
1554 error_for_asm (insn, "impossible constraint in %<asm%>");
1555 delete_insn (insn);
1556 }
1557 }
1558 else
1559 {
1560 if (recog_memoized (insn) < 0)
1561 fatal_insn_not_found (insn);
1562 }
1563 }
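
/* Concretely (an illustrative example, with var_offset assumed to be
   -16), an insn of the form

     (set (reg:SI 90) (plus:SI (reg:SI virtual-stack-vars) (const_int 8)))

   is rewritten in place as

     (set (reg:SI 90) (plus:SI (reg:SI frame-pointer) (const_int -8)))

   provided the new operands still satisfy the insn's predicates;
   otherwise the additions are emitted as separate insns before it.  */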
1564
1565 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1566 do any instantiation required. */
1567
1568 static void
1569 instantiate_decl (rtx x)
1570 {
1571 rtx addr;
1572
1573 if (x == 0)
1574 return;
1575
1576 /* If this is a CONCAT, recurse for the pieces. */
1577 if (GET_CODE (x) == CONCAT)
1578 {
1579 instantiate_decl (XEXP (x, 0));
1580 instantiate_decl (XEXP (x, 1));
1581 return;
1582 }
1583
1584 /* If this is not a MEM, no need to do anything. Similarly if the
1585 address is a constant or a register that is not a virtual register. */
1586 if (!MEM_P (x))
1587 return;
1588
1589 addr = XEXP (x, 0);
1590 if (CONSTANT_P (addr)
1591 || (REG_P (addr)
1592 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1593 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1594 return;
1595
1596 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1597 }
1598
1599 /* Helper for instantiate_decls called via walk_tree: Process all decls
1600 in the given DECL_VALUE_EXPR. */
1601
1602 static tree
1603 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1604 {
1605 tree t = *tp;
1606 if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
1607 {
1608 *walk_subtrees = 0;
1609 if (DECL_P (t) && DECL_RTL_SET_P (t))
1610 instantiate_decl (DECL_RTL (t));
1611 }
1612 return NULL;
1613 }
1614
1615 /* Subroutine of instantiate_decls: Process all decls in the given
1616 BLOCK node and all its subblocks. */
1617
1618 static void
1619 instantiate_decls_1 (tree let)
1620 {
1621 tree t;
1622
1623 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1624 {
1625 if (DECL_RTL_SET_P (t))
1626 instantiate_decl (DECL_RTL (t));
1627 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1628 {
1629 tree v = DECL_VALUE_EXPR (t);
1630 walk_tree (&v, instantiate_expr, NULL, NULL);
1631 }
1632 }
1633
1634 /* Process all subblocks. */
1635 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1636 instantiate_decls_1 (t);
1637 }
1638
1639 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1640 all virtual registers in their DECL_RTL's. */
1641
1642 static void
1643 instantiate_decls (tree fndecl)
1644 {
1645 tree decl;
1646
1647 /* Process all parameters of the function. */
1648 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
1649 {
1650 instantiate_decl (DECL_RTL (decl));
1651 instantiate_decl (DECL_INCOMING_RTL (decl));
1652 if (DECL_HAS_VALUE_EXPR_P (decl))
1653 {
1654 tree v = DECL_VALUE_EXPR (decl);
1655 walk_tree (&v, instantiate_expr, NULL, NULL);
1656 }
1657 }
1658
1659 /* Now process all variables defined in the function or its subblocks. */
1660 instantiate_decls_1 (DECL_INITIAL (fndecl));
1661 }
1662
1663 /* Pass through the INSNS of function FNDECL and convert virtual register
1664 references to hard register references. */
1665
1666 static unsigned int
1667 instantiate_virtual_regs (void)
1668 {
1669 rtx insn;
1670
1671 /* Compute the offsets to use for this function. */
1672 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1673 var_offset = STARTING_FRAME_OFFSET;
1674 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1675 out_arg_offset = STACK_POINTER_OFFSET;
1676 #ifdef FRAME_POINTER_CFA_OFFSET
1677 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1678 #else
1679 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1680 #endif
1681
1682 /* Initialize recognition, indicating that volatile is OK. */
1683 init_recog ();
1684
1685 /* Scan through all the insns, instantiating every virtual register still
1686 present. */
1687 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1688 if (INSN_P (insn))
1689 {
1690 /* These patterns in the instruction stream can never be recognized.
1691 Fortunately, they shouldn't contain virtual registers either. */
1692 if (GET_CODE (PATTERN (insn)) == USE
1693 || GET_CODE (PATTERN (insn)) == CLOBBER
1694 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1695 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1696 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1697 continue;
1698
1699 instantiate_virtual_regs_in_insn (insn);
1700
1701 if (INSN_DELETED_P (insn))
1702 continue;
1703
1704 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1705
1706 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1707 if (GET_CODE (insn) == CALL_INSN)
1708 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1709 instantiate_virtual_regs_in_rtx, NULL);
1710 }
1711
1712 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1713 instantiate_decls (current_function_decl);
1714
1715 /* Indicate that, from now on, assign_stack_local should use
1716 frame_pointer_rtx. */
1717 virtuals_instantiated = 1;
1718 return 0;
1719 }
1720
1721 struct tree_opt_pass pass_instantiate_virtual_regs =
1722 {
1723 "vregs", /* name */
1724 NULL, /* gate */
1725 instantiate_virtual_regs, /* execute */
1726 NULL, /* sub */
1727 NULL, /* next */
1728 0, /* static_pass_number */
1729 0, /* tv_id */
1730 0, /* properties_required */
1731 0, /* properties_provided */
1732 0, /* properties_destroyed */
1733 0, /* todo_flags_start */
1734 TODO_dump_func, /* todo_flags_finish */
1735 0 /* letter */
1736 };
1737
1738 \f
1739 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1740 This means a type for which function calls must pass an address to the
1741 function or get an address back from the function.
1742 EXP may be a type node or an expression (whose type is tested). */
1743
1744 int
1745 aggregate_value_p (const_tree exp, const_tree fntype)
1746 {
1747 int i, regno, nregs;
1748 rtx reg;
1749
1750 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1751
1752 /* DECL node associated with FNTYPE when relevant, which we might need to
1753 check for by-invisible-reference returns, typically for CALL_EXPR input
1754 EXPressions. */
1755 const_tree fndecl = NULL_TREE;
1756
1757 if (fntype)
1758 switch (TREE_CODE (fntype))
1759 {
1760 case CALL_EXPR:
1761 fndecl = get_callee_fndecl (fntype);
1762 fntype = fndecl ? TREE_TYPE (fndecl) : 0;
1763 break;
1764 case FUNCTION_DECL:
1765 fndecl = fntype;
1766 fntype = TREE_TYPE (fndecl);
1767 break;
1768 case FUNCTION_TYPE:
1769 case METHOD_TYPE:
1770 break;
1771 case IDENTIFIER_NODE:
1772 fntype = 0;
1773 break;
1774 default:
1775 /* We don't expect other tree codes here. */
1776 gcc_unreachable ();
1777 }
1778
1779 if (TREE_CODE (type) == VOID_TYPE)
1780 return 0;
1781
1782 /* If the front end has decided that this needs to be passed by
1783 reference, do so. */
1784 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1785 && DECL_BY_REFERENCE (exp))
1786 return 1;
1787
1788 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1789 called function RESULT_DECL, meaning the function returns in memory by
1790 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1791 on the function type, which used to be the way to request such a return
1792 mechanism but might now be causing troubles at gimplification time if
1793 temporaries with the function type need to be created. */
1794 if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1795 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
1796 return 1;
1797
1798 if (targetm.calls.return_in_memory (type, fntype))
1799 return 1;
1800 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1801 and thus can't be returned in registers. */
1802 if (TREE_ADDRESSABLE (type))
1803 return 1;
1804 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1805 return 1;
1806 /* Make sure we have suitable call-clobbered regs to return
1807 the value in; if not, we must return it in memory. */
1808 reg = hard_function_value (type, 0, fntype, 0);
1809
1810 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1811 it is OK. */
1812 if (!REG_P (reg))
1813 return 0;
1814
1815 regno = REGNO (reg);
1816 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
1817 for (i = 0; i < nregs; i++)
1818 if (! call_used_regs[regno + i])
1819 return 1;
1820 return 0;
1821 }
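
/* Illustration of the predicate above (editor's sketch; the result is
   target-dependent).  On a typical ABI where small scalars come back in
   registers and large aggregates do not, one would expect

     struct big { char c[64]; };
     struct big f (void);      aggregate_value_p (result, f) == 1
     int g (void);             aggregate_value_p (result, g) == 0

   with the first case forcing the caller to pass the address of a
   return slot.  */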
1822 \f
1823 /* Return true if we should assign DECL a pseudo register; false if it
1824 should live on the local stack. */
1825
1826 bool
1827 use_register_for_decl (const_tree decl)
1828 {
1829 /* Honor volatile. */
1830 if (TREE_SIDE_EFFECTS (decl))
1831 return false;
1832
1833 /* Honor addressability. */
1834 if (TREE_ADDRESSABLE (decl))
1835 return false;
1836
1837 /* Only register-like things go in registers. */
1838 if (DECL_MODE (decl) == BLKmode)
1839 return false;
1840
1841 /* If -ffloat-store specified, don't put explicit float variables
1842 into registers. */
1843 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1844 propagates values across these stores, and it probably shouldn't. */
1845 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1846 return false;
1847
1848 /* If we're not interested in tracking debugging information for
1849 this decl, then we can certainly put it in a register. */
1850 if (DECL_IGNORED_P (decl))
1851 return true;
1852
1853 return (optimize || DECL_REGISTER (decl));
1854 }
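
/* A sketch of the decision above (editor's example; assume -O0 and no
   special attributes unless noted):

     volatile int v;            TREE_SIDE_EFFECTS     -> stack
     int a; ... &a ...;         TREE_ADDRESSABLE      -> stack
     struct { int x[4]; } s;    BLKmode               -> stack
     register int r;            DECL_REGISTER         -> pseudo register
     int i;                     plain, not optimizing -> stack  */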
1855
1856 /* Return true if TYPE should be passed by invisible reference. */
1857
1858 bool
1859 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1860 tree type, bool named_arg)
1861 {
1862 if (type)
1863 {
1864 /* If this type contains non-trivial constructors, then it is
1865 forbidden for the middle-end to create any new copies. */
1866 if (TREE_ADDRESSABLE (type))
1867 return true;
1868
1869 /* GCC post 3.4 passes *all* variable sized types by reference. */
1870 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1871 return true;
1872 }
1873
1874 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1875 }
1876
1877 /* Return true if TYPE, which is passed by reference, should be callee
1878 copied instead of caller copied. */
1879
1880 bool
1881 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1882 tree type, bool named_arg)
1883 {
1884 if (type && TREE_ADDRESSABLE (type))
1885 return false;
1886 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1887 }
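
/* For instance (editor's example): a C++ front end marks a class with a
   non-trivial copy constructor TREE_ADDRESSABLE, so for

     struct T { T (const T &); };
     void f (T t);

   pass_by_reference is true for T regardless of the target hook, and
   reference_callee_copied refuses a callee copy for it, since the
   middle-end may not create new copies of such objects.  */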
1888
1889 /* Structures to communicate between the subroutines of assign_parms.
1890 The first holds data persistent across all parameters, the second
1891 is cleared out for each parameter. */
1892
1893 struct assign_parm_data_all
1894 {
1895 CUMULATIVE_ARGS args_so_far;
1896 struct args_size stack_args_size;
1897 tree function_result_decl;
1898 tree orig_fnargs;
1899 rtx first_conversion_insn;
1900 rtx last_conversion_insn;
1901 HOST_WIDE_INT pretend_args_size;
1902 HOST_WIDE_INT extra_pretend_bytes;
1903 int reg_parm_stack_space;
1904 };
1905
1906 struct assign_parm_data_one
1907 {
1908 tree nominal_type;
1909 tree passed_type;
1910 rtx entry_parm;
1911 rtx stack_parm;
1912 enum machine_mode nominal_mode;
1913 enum machine_mode passed_mode;
1914 enum machine_mode promoted_mode;
1915 struct locate_and_pad_arg_data locate;
1916 int partial;
1917 BOOL_BITFIELD named_arg : 1;
1918 BOOL_BITFIELD passed_pointer : 1;
1919 BOOL_BITFIELD on_stack : 1;
1920 BOOL_BITFIELD loaded_in_reg : 1;
1921 };
1922
1923 /* A subroutine of assign_parms. Initialize ALL. */
1924
1925 static void
1926 assign_parms_initialize_all (struct assign_parm_data_all *all)
1927 {
1928 tree fntype;
1929
1930 memset (all, 0, sizeof (*all));
1931
1932 fntype = TREE_TYPE (current_function_decl);
1933
1934 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1935 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1936 #else
1937 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1938 current_function_decl, -1);
1939 #endif
1940
1941 #ifdef REG_PARM_STACK_SPACE
1942 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1943 #endif
1944 }
1945
1946 /* If ARGS contains entries with complex types, split the entry into two
1947 entries of the component type. Return a new list if substitutions are
1948 needed, else the old list. */
1949
1950 static tree
1951 split_complex_args (tree args)
1952 {
1953 tree p;
1954
1955 /* Before allocating memory, check for the common case of no complex. */
1956 for (p = args; p; p = TREE_CHAIN (p))
1957 {
1958 tree type = TREE_TYPE (p);
1959 if (TREE_CODE (type) == COMPLEX_TYPE
1960 && targetm.calls.split_complex_arg (type))
1961 goto found;
1962 }
1963 return args;
1964
1965 found:
1966 args = copy_list (args);
1967
1968 for (p = args; p; p = TREE_CHAIN (p))
1969 {
1970 tree type = TREE_TYPE (p);
1971 if (TREE_CODE (type) == COMPLEX_TYPE
1972 && targetm.calls.split_complex_arg (type))
1973 {
1974 tree decl;
1975 tree subtype = TREE_TYPE (type);
1976 bool addressable = TREE_ADDRESSABLE (p);
1977
1978 /* Rewrite the PARM_DECL's type with its component. */
1979 TREE_TYPE (p) = subtype;
1980 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1981 DECL_MODE (p) = VOIDmode;
1982 DECL_SIZE (p) = NULL;
1983 DECL_SIZE_UNIT (p) = NULL;
1984 /* If this arg must go in memory, put it in a pseudo here.
1985 We can't allow it to go in memory as per normal parms,
1986 because the usual place might not have the imag part
1987 adjacent to the real part. */
1988 DECL_ARTIFICIAL (p) = addressable;
1989 DECL_IGNORED_P (p) = addressable;
1990 TREE_ADDRESSABLE (p) = 0;
1991 layout_decl (p, 0);
1992
1993 /* Build a second synthetic decl. */
1994 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1995 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1996 DECL_ARTIFICIAL (decl) = addressable;
1997 DECL_IGNORED_P (decl) = addressable;
1998 layout_decl (decl, 0);
1999
2000 /* Splice it in; skip the new decl. */
2001 TREE_CHAIN (decl) = TREE_CHAIN (p);
2002 TREE_CHAIN (p) = decl;
2003 p = decl;
2004 }
2005 }
2006
2007 return args;
2008 }
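
/* Example of the rewrite performed above, assuming a target whose
   split_complex_arg hook accepts complex double:

     void f (_Complex double z);

   is treated during RTL expansion as if its parameter list were

     double z_real; double z_imag;

   (names illustrative only; the second PARM_DECL built above is
   synthetic and unnamed).  */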
2009
2010 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2011 the hidden struct return argument, and (abi willing) complex args.
2012 Return the new parameter list. */
2013
2014 static tree
2015 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2016 {
2017 tree fndecl = current_function_decl;
2018 tree fntype = TREE_TYPE (fndecl);
2019 tree fnargs = DECL_ARGUMENTS (fndecl);
2020
2021 /* If struct value address is treated as the first argument, make it so. */
2022 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2023 && ! current_function_returns_pcc_struct
2024 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2025 {
2026 tree type = build_pointer_type (TREE_TYPE (fntype));
2027 tree decl;
2028
2029 decl = build_decl (PARM_DECL, NULL_TREE, type);
2030 DECL_ARG_TYPE (decl) = type;
2031 DECL_ARTIFICIAL (decl) = 1;
2032 DECL_IGNORED_P (decl) = 1;
2033
2034 TREE_CHAIN (decl) = fnargs;
2035 fnargs = decl;
2036 all->function_result_decl = decl;
2037 }
2038
2039 all->orig_fnargs = fnargs;
2040
2041 /* If the target wants to split complex arguments into scalars, do so. */
2042 if (targetm.calls.split_complex_arg)
2043 fnargs = split_complex_args (fnargs);
2044
2045 return fnargs;
2046 }
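
/* Example (editor's sketch): when the struct value address is treated
   as the first argument (struct_value_rtx == 0), a definition such as

     struct big f (int x) { ... }

   is expanded as if it had been written

     void f (struct big *retaddr, int x)

   where RETADDR stands for the artificial PARM_DECL created above and
   recorded in all->function_result_decl.  */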
2047
2048 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2049 data for the parameter. Incorporate ABI specifics such as pass-by-
2050 reference and type promotion. */
2051
2052 static void
2053 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2054 struct assign_parm_data_one *data)
2055 {
2056 tree nominal_type, passed_type;
2057 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2058
2059 memset (data, 0, sizeof (*data));
2060
2061 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2062 if (!current_function_stdarg)
2063 data->named_arg = 1; /* No variadic parms. */
2064 else if (TREE_CHAIN (parm))
2065 data->named_arg = 1; /* Not the last non-variadic parm. */
2066 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2067 data->named_arg = 1; /* Only variadic ones are unnamed. */
2068 else
2069 data->named_arg = 0; /* Treat as variadic. */
2070
2071 nominal_type = TREE_TYPE (parm);
2072 passed_type = DECL_ARG_TYPE (parm);
2073
2074 /* Look out for errors propagating this far. Also, if the parameter's
2075 type is void then its value doesn't matter. */
2076 if (TREE_TYPE (parm) == error_mark_node
2077 /* This can happen after weird syntax errors
2078 or if an enum type is defined among the parms. */
2079 || TREE_CODE (parm) != PARM_DECL
2080 || passed_type == NULL
2081 || VOID_TYPE_P (nominal_type))
2082 {
2083 nominal_type = passed_type = void_type_node;
2084 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2085 goto egress;
2086 }
2087
2088 /* Find mode of arg as it is passed, and mode of arg as it should be
2089 during execution of this function. */
2090 passed_mode = TYPE_MODE (passed_type);
2091 nominal_mode = TYPE_MODE (nominal_type);
2092
2093 /* If the parm is to be passed as a transparent union, use the type of
2094 the first field for the tests below. We have already verified that
2095 the modes are the same. */
2096 if (TREE_CODE (passed_type) == UNION_TYPE
2097 && TYPE_TRANSPARENT_UNION (passed_type))
2098 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2099
2100 /* See if this arg was passed by invisible reference. */
2101 if (pass_by_reference (&all->args_so_far, passed_mode,
2102 passed_type, data->named_arg))
2103 {
2104 passed_type = nominal_type = build_pointer_type (passed_type);
2105 data->passed_pointer = true;
2106 passed_mode = nominal_mode = Pmode;
2107 }
2108
2109 /* Find mode as it is passed by the ABI. */
2110 promoted_mode = passed_mode;
2111 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2112 {
2113 int unsignedp = TYPE_UNSIGNED (passed_type);
2114 promoted_mode = promote_mode (passed_type, promoted_mode,
2115 &unsignedp, 1);
2116 }
2117
2118 egress:
2119 data->nominal_type = nominal_type;
2120 data->passed_type = passed_type;
2121 data->nominal_mode = nominal_mode;
2122 data->passed_mode = passed_mode;
2123 data->promoted_mode = promoted_mode;
2124 }
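
/* A sketch of the three modes computed above (editor's example),
   assuming a prototyped function on a target whose
   promote_function_args hook is true and which widens sub-word
   integers to SImode:

     void f (short s);
       nominal_mode  == HImode   (how S lives inside the body)
       passed_mode   == HImode   (TYPE_MODE of the passed type)
       promoted_mode == SImode   (how the ABI actually passes it)

   For a pass-by-reference parameter, the nominal and passed modes are
   instead forced to Pmode above.  */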
2125
2126 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2127
2128 static void
2129 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2130 struct assign_parm_data_one *data, bool no_rtl)
2131 {
2132 int varargs_pretend_bytes = 0;
2133
2134 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2135 data->promoted_mode,
2136 data->passed_type,
2137 &varargs_pretend_bytes, no_rtl);
2138
2139 /* If the back-end has requested extra stack space, record how much is
2140 needed. Do not change pretend_args_size otherwise since it may be
2141 nonzero from an earlier partial argument. */
2142 if (varargs_pretend_bytes > 0)
2143 all->pretend_args_size = varargs_pretend_bytes;
2144 }
2145
2146 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2147 the incoming location of the current parameter. */
2148
2149 static void
2150 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2151 struct assign_parm_data_one *data)
2152 {
2153 HOST_WIDE_INT pretend_bytes = 0;
2154 rtx entry_parm;
2155 bool in_regs;
2156
2157 if (data->promoted_mode == VOIDmode)
2158 {
2159 data->entry_parm = data->stack_parm = const0_rtx;
2160 return;
2161 }
2162
2163 #ifdef FUNCTION_INCOMING_ARG
2164 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2165 data->passed_type, data->named_arg);
2166 #else
2167 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2168 data->passed_type, data->named_arg);
2169 #endif
2170
2171 if (entry_parm == 0)
2172 data->promoted_mode = data->passed_mode;
2173
2174 /* Determine parm's home in the stack, in case it arrives in the stack
2175 or we should pretend it did. Compute the stack position and rtx where
2176 the argument arrives and its size.
2177
2178 There is one complexity here: If this was a parameter that would
2179 have been passed in registers, but wasn't only because it is
2180 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2181 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2182 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2183 as it was the previous time. */
2184 in_regs = entry_parm != 0;
2185 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2186 in_regs = true;
2187 #endif
2188 if (!in_regs && !data->named_arg)
2189 {
2190 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2191 {
2192 rtx tem;
2193 #ifdef FUNCTION_INCOMING_ARG
2194 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2195 data->passed_type, true);
2196 #else
2197 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2198 data->passed_type, true);
2199 #endif
2200 in_regs = tem != NULL;
2201 }
2202 }
2203
2204 /* If this parameter was passed both in registers and in the stack, use
2205 the copy on the stack. */
2206 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2207 data->passed_type))
2208 entry_parm = 0;
2209
2210 if (entry_parm)
2211 {
2212 int partial;
2213
2214 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2215 data->promoted_mode,
2216 data->passed_type,
2217 data->named_arg);
2218 data->partial = partial;
2219
2220 /* The caller might already have allocated stack space for the
2221 register parameters. */
2222 if (partial != 0 && all->reg_parm_stack_space == 0)
2223 {
2224 /* Part of this argument is passed in registers and part
2225 is passed on the stack. Ask the prologue code to extend
2226 the stack part so that we can recreate the full value.
2227
2228 PRETEND_BYTES is the size of the registers we need to store.
2229 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2230 stack space that the prologue should allocate.
2231
2232 Internally, gcc assumes that the argument pointer is aligned
2233 to STACK_BOUNDARY bits. This is used both for alignment
2234 optimizations (see init_emit) and to locate arguments that are
2235 aligned to more than PARM_BOUNDARY bits. We must preserve this
2236 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2237 a stack boundary. */
2238
2239 /* We assume at most one partial arg, and it must be the first
2240 argument on the stack. */
2241 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2242
2243 pretend_bytes = partial;
2244 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2245
2246 /* We want to align relative to the actual stack pointer, so
2247 don't include this in the stack size until later. */
2248 all->extra_pretend_bytes = all->pretend_args_size;
2249 }
2250 }
2251
2252 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2253 entry_parm ? data->partial : 0, current_function_decl,
2254 &all->stack_args_size, &data->locate);
2255
2256 /* Adjust offsets to include the pretend args. */
2257 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2258 data->locate.slot_offset.constant += pretend_bytes;
2259 data->locate.offset.constant += pretend_bytes;
2260
2261 data->entry_parm = entry_parm;
2262 }
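
/* Worked example of the pretend-args arithmetic above, assuming
   UNITS_PER_WORD == 4, STACK_BYTES == 16 and no REG_PARM_STACK_SPACE:
   if arg_partial_bytes reports that 8 bytes of the argument arrive in
   registers, then pretend_bytes = 8 and

     all->pretend_args_size = CEIL_ROUND (8, 16) = 16

   so the prologue allocates one full 16-byte chunk into which the
   register half can be stored adjacent to the stack half, preserving
   the STACK_BOUNDARY alignment of the argument pointer.  */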
2263
2264 /* A subroutine of assign_parms. If there is actually space on the stack
2265 for this parm, count it in stack_args_size and return true. */
2266
2267 static bool
2268 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2269 struct assign_parm_data_one *data)
2270 {
2271 /* Trivially true if we've no incoming register. */
2272 if (data->entry_parm == NULL)
2273 ;
2274 /* Also true if we're partially in registers and partially not,
2275 since we've arranged to drop the entire argument on the stack. */
2276 else if (data->partial != 0)
2277 ;
2278 /* Also true if the target says that it's passed in both registers
2279 and on the stack. */
2280 else if (GET_CODE (data->entry_parm) == PARALLEL
2281 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2282 ;
2283 /* Also true if the target says that there's stack allocated for
2284 all register parameters. */
2285 else if (all->reg_parm_stack_space > 0)
2286 ;
2287 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2288 else
2289 return false;
2290
2291 all->stack_args_size.constant += data->locate.size.constant;
2292 if (data->locate.size.var)
2293 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2294
2295 return true;
2296 }
2297
2298 /* A subroutine of assign_parms. Given that this parameter is allocated
2299 stack space by the ABI, find it. */
2300
2301 static void
2302 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2303 {
2304 rtx offset_rtx, stack_parm;
2305 unsigned int align, boundary;
2306
2307 /* If we're passing this arg using a reg, make its stack home the
2308 aligned stack slot. */
2309 if (data->entry_parm)
2310 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2311 else
2312 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2313
2314 stack_parm = current_function_internal_arg_pointer;
2315 if (offset_rtx != const0_rtx)
2316 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2317 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2318
2319 set_mem_attributes (stack_parm, parm, 1);
2320
2321 boundary = data->locate.boundary;
2322 align = BITS_PER_UNIT;
2323
2324 /* If we're padding upward, we know that the alignment of the slot
2325 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2326 intentionally forcing upward padding. Otherwise we have to come
2327 up with a guess at the alignment based on OFFSET_RTX. */
2328 if (data->locate.where_pad != downward || data->entry_parm)
2329 align = boundary;
2330 else if (GET_CODE (offset_rtx) == CONST_INT)
2331 {
2332 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2333 align = align & -align;
2334 }
2335 set_mem_align (stack_parm, align);
2336
2337 if (data->entry_parm)
2338 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2339
2340 data->stack_parm = stack_parm;
2341 }
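
/* The alignment guess above, worked through: with boundary == 64 bits
   and offset_rtx == (const_int 4),

     align = 4 * BITS_PER_UNIT | 64 = 32 | 64 = 96
     align & -align                = 32

   i.e. isolating the lowest set bit yields 32 bits, the strongest
   alignment the slot can be assumed to have at that offset.  */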
2342
2343 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2344 always valid and contiguous. */
2345
2346 static void
2347 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2348 {
2349 rtx entry_parm = data->entry_parm;
2350 rtx stack_parm = data->stack_parm;
2351
2352 /* If this parm was passed part in regs and part in memory, pretend it
2353 arrived entirely in memory by pushing the register-part onto the stack.
2354 In the special case of a DImode or DFmode that is split, we could put
2355 it together in a pseudoreg directly, but for now that's not worth
2356 bothering with. */
2357 if (data->partial != 0)
2358 {
2359 /* Handle calls that pass values in multiple non-contiguous
2360 locations. The Irix 6 ABI has examples of this. */
2361 if (GET_CODE (entry_parm) == PARALLEL)
2362 emit_group_store (validize_mem (stack_parm), entry_parm,
2363 data->passed_type,
2364 int_size_in_bytes (data->passed_type));
2365 else
2366 {
2367 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2368 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2369 data->partial / UNITS_PER_WORD);
2370 }
2371
2372 entry_parm = stack_parm;
2373 }
2374
2375 /* If we didn't decide this parm came in a register, by default it came
2376 on the stack. */
2377 else if (entry_parm == NULL)
2378 entry_parm = stack_parm;
2379
2380 /* When an argument is passed in multiple locations, we can't make use
2381 of this information, but we can save some copying if the whole argument
2382 is passed in a single register. */
2383 else if (GET_CODE (entry_parm) == PARALLEL
2384 && data->nominal_mode != BLKmode
2385 && data->passed_mode != BLKmode)
2386 {
2387 size_t i, len = XVECLEN (entry_parm, 0);
2388
2389 for (i = 0; i < len; i++)
2390 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2391 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2392 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2393 == data->passed_mode)
2394 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2395 {
2396 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2397 break;
2398 }
2399 }
2400
2401 data->entry_parm = entry_parm;
2402 }
2403
2404 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2405 always valid and properly aligned. */
2406
2407 static void
2408 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2409 {
2410 rtx stack_parm = data->stack_parm;
2411
2412 /* If we can't trust the parm stack slot to be aligned enough for its
2413 ultimate type, don't use that slot after entry. We'll make another
2414 stack slot, if we need one. */
2415 if (stack_parm
2416 && ((STRICT_ALIGNMENT
2417 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2418 || (data->nominal_type
2419 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2420 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2421 stack_parm = NULL;
2422
2423 /* If parm was passed in memory, and we need to convert it on entry,
2424 don't store it back in that same slot. */
2425 else if (data->entry_parm == stack_parm
2426 && data->nominal_mode != BLKmode
2427 && data->nominal_mode != data->passed_mode)
2428 stack_parm = NULL;
2429
2430 /* If stack protection is in effect for this function, don't leave any
2431 pointers in their passed stack slots. */
2432 else if (cfun->stack_protect_guard
2433 && (flag_stack_protect == 2
2434 || data->passed_pointer
2435 || POINTER_TYPE_P (data->nominal_type)))
2436 stack_parm = NULL;
2437
2438 data->stack_parm = stack_parm;
2439 }
2440
2441 /* A subroutine of assign_parms. Return true if the current parameter
2442 should be stored as a BLKmode in the current frame. */
2443
2444 static bool
2445 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2446 {
2447 if (data->nominal_mode == BLKmode)
2448 return true;
2449 if (GET_CODE (data->entry_parm) == PARALLEL)
2450 return true;
2451
2452 #ifdef BLOCK_REG_PADDING
2453 /* Only assign_parm_setup_block knows how to deal with register arguments
2454 that are padded at the least significant end. */
2455 if (REG_P (data->entry_parm)
2456 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2457 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2458 == (BYTES_BIG_ENDIAN ? upward : downward)))
2459 return true;
2460 #endif
2461
2462 return false;
2463 }
2464
2465 /* A subroutine of assign_parms. Arrange for the parameter to be
2466 present and valid in DATA->STACK_RTL. */
2467
2468 static void
2469 assign_parm_setup_block (struct assign_parm_data_all *all,
2470 tree parm, struct assign_parm_data_one *data)
2471 {
2472 rtx entry_parm = data->entry_parm;
2473 rtx stack_parm = data->stack_parm;
2474 HOST_WIDE_INT size;
2475 HOST_WIDE_INT size_stored;
2476 rtx orig_entry_parm = entry_parm;
2477
2478 if (GET_CODE (entry_parm) == PARALLEL)
2479 entry_parm = emit_group_move_into_temps (entry_parm);
2480
2481 /* If we've a non-block object that's nevertheless passed in parts,
2482 reconstitute it in register operations rather than on the stack. */
2483 if (GET_CODE (entry_parm) == PARALLEL
2484 && data->nominal_mode != BLKmode)
2485 {
2486 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2487
2488 if ((XVECLEN (entry_parm, 0) > 1
2489 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2490 && use_register_for_decl (parm))
2491 {
2492 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2493
2494 push_to_sequence2 (all->first_conversion_insn,
2495 all->last_conversion_insn);
2496
2497 /* For values returned in multiple registers, handle possible
2498 incompatible calls to emit_group_store.
2499
2500 For example, the following would be invalid, and would have to
2501 be fixed by the conditional below:
2502
2503 emit_group_store ((reg:SF), (parallel:DF))
2504 emit_group_store ((reg:SI), (parallel:DI))
2505
2506 An example of this is doubles in e500 v2:
2507 (parallel:DF (expr_list (reg:SI) (const_int 0))
2508 (expr_list (reg:SI) (const_int 4))). */
2509 if (data->nominal_mode != data->passed_mode)
2510 {
2511 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2512 emit_group_store (t, entry_parm, NULL_TREE,
2513 GET_MODE_SIZE (GET_MODE (entry_parm)));
2514 convert_move (parmreg, t, 0);
2515 }
2516 else
2517 emit_group_store (parmreg, entry_parm, data->nominal_type,
2518 int_size_in_bytes (data->nominal_type));
2519
2520 all->first_conversion_insn = get_insns ();
2521 all->last_conversion_insn = get_last_insn ();
2522 end_sequence ();
2523
2524 SET_DECL_RTL (parm, parmreg);
2525 return;
2526 }
2527 }
2528
2529 size = int_size_in_bytes (data->passed_type);
2530 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2531 if (stack_parm == 0)
2532 {
2533 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2534 stack_parm = assign_stack_local (BLKmode, size_stored,
2535 DECL_ALIGN (parm));
2536 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2537 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2538 set_mem_attributes (stack_parm, parm, 1);
2539 }
2540
2541 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2542 calls that pass values in multiple non-contiguous locations. */
2543 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2544 {
2545 rtx mem;
2546
2547 /* Note that we will be storing an integral number of words.
2548 So we have to be careful to ensure that we allocate an
2549 integral number of words. We do this above when we call
2550 assign_stack_local if space was not allocated in the argument
2551 list. If it was, this will not work if PARM_BOUNDARY is not
2552 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2553 if it becomes a problem. Exception is when BLKmode arrives
2554 with arguments not conforming to word_mode. */
2555
2556 if (data->stack_parm == 0)
2557 ;
2558 else if (GET_CODE (entry_parm) == PARALLEL)
2559 ;
2560 else
2561 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2562
2563 mem = validize_mem (stack_parm);
2564
2565 /* Handle values in multiple non-contiguous locations. */
2566 if (GET_CODE (entry_parm) == PARALLEL)
2567 {
2568 push_to_sequence2 (all->first_conversion_insn,
2569 all->last_conversion_insn);
2570 emit_group_store (mem, entry_parm, data->passed_type, size);
2571 all->first_conversion_insn = get_insns ();
2572 all->last_conversion_insn = get_last_insn ();
2573 end_sequence ();
2574 }
2575
2576 else if (size == 0)
2577 ;
2578
2579 /* If SIZE is that of a mode no bigger than a word, just use
2580 that mode's store operation. */
2581 else if (size <= UNITS_PER_WORD)
2582 {
2583 enum machine_mode mode
2584 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2585
2586 if (mode != BLKmode
2587 #ifdef BLOCK_REG_PADDING
2588 && (size == UNITS_PER_WORD
2589 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2590 != (BYTES_BIG_ENDIAN ? upward : downward)))
2591 #endif
2592 )
2593 {
2594 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2595 emit_move_insn (change_address (mem, mode, 0), reg);
2596 }
2597
2598 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2599 machine must be aligned to the left before storing
2600 to memory. Note that the previous test doesn't
2601 handle all cases (e.g. SIZE == 3). */
2602 else if (size != UNITS_PER_WORD
2603 #ifdef BLOCK_REG_PADDING
2604 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2605 == downward)
2606 #else
2607 && BYTES_BIG_ENDIAN
2608 #endif
2609 )
2610 {
2611 rtx tem, x;
2612 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2613 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2614
2615 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2616 build_int_cst (NULL_TREE, by),
2617 NULL_RTX, 1);
2618 tem = change_address (mem, word_mode, 0);
2619 emit_move_insn (tem, x);
2620 }
2621 else
2622 move_block_from_reg (REGNO (entry_parm), mem,
2623 size_stored / UNITS_PER_WORD);
2624 }
2625 else
2626 move_block_from_reg (REGNO (entry_parm), mem,
2627 size_stored / UNITS_PER_WORD);
2628 }
2629 else if (data->stack_parm == 0)
2630 {
2631 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2632 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2633 BLOCK_OP_NORMAL);
2634 all->first_conversion_insn = get_insns ();
2635 all->last_conversion_insn = get_last_insn ();
2636 end_sequence ();
2637 }
2638
2639 data->stack_parm = stack_parm;
2640 SET_DECL_RTL (parm, stack_parm);
2641 }
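
/* Worked example of the big-endian left-justification above (editor's
   sketch): a 3-byte BLKmode argument arriving in a register on a
   32-bit big-endian target that pads downward gives

     by = (UNITS_PER_WORD - size) * BITS_PER_UNIT = (4 - 3) * 8 = 8

   so the incoming word is shifted left by 8 bits before the word_mode
   store, putting the 3 significant bytes at the start of the stack
   slot.  */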
2642
2643 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2644 parameter. Get it there. Perform all ABI specified conversions. */
2645
2646 static void
2647 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2648 struct assign_parm_data_one *data)
2649 {
2650 rtx parmreg;
2651 enum machine_mode promoted_nominal_mode;
2652 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2653 bool did_conversion = false;
2654
2655 /* Store the parm in a pseudoregister during the function, but we may
2656 need to do it in a wider mode. */
2657
2658 /* This is not really promoting for a call. However we need to be
2659 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2660 promoted_nominal_mode
2661 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2662
2663 parmreg = gen_reg_rtx (promoted_nominal_mode);
2664
2665 if (!DECL_ARTIFICIAL (parm))
2666 mark_user_reg (parmreg);
2667
2668 /* If this was an item that we received a pointer to,
2669 set DECL_RTL appropriately. */
2670 if (data->passed_pointer)
2671 {
2672 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2673 set_mem_attributes (x, parm, 1);
2674 SET_DECL_RTL (parm, x);
2675 }
2676 else
2677 SET_DECL_RTL (parm, parmreg);
2678
2679 /* Copy the value into the register. */
2680 if (data->nominal_mode != data->passed_mode
2681 || promoted_nominal_mode != data->promoted_mode)
2682 {
2683 int save_tree_used;
2684
2685 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2686 mode, by the caller. We now have to convert it to
2687 NOMINAL_MODE, if different. However, PARMREG may be in
2688 a different mode than NOMINAL_MODE if it is being stored
2689 promoted.
2690
2691 If ENTRY_PARM is a hard register, it might be in a register
2692 not valid for operating in its mode (e.g., an odd-numbered
2693 register for a DFmode). In that case, moves are the only
2694 thing valid, so we can't do a convert from there. This
2695 occurs when the calling sequence allows such misaligned
2696 usage.
2697
2698 In addition, the conversion may involve a call, which could
2699 clobber parameters which haven't been copied to pseudo
2700 registers yet. Therefore, we must first copy the parm to
2701 a pseudo reg here, and save the conversion until after all
2702 parameters have been moved. */
2703
2704 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2705
2706 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2707
2708 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2709 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2710
2711 if (GET_CODE (tempreg) == SUBREG
2712 && GET_MODE (tempreg) == data->nominal_mode
2713 && REG_P (SUBREG_REG (tempreg))
2714 && data->nominal_mode == data->passed_mode
2715 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2716 && GET_MODE_SIZE (GET_MODE (tempreg))
2717 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2718 {
2719 /* The argument is already sign/zero extended, so note it
2720 into the subreg. */
2721 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2722 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2723 }
2724
2725 /* TREE_USED gets set erroneously during expand_assignment. */
2726 save_tree_used = TREE_USED (parm);
2727 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2728 TREE_USED (parm) = save_tree_used;
2729 all->first_conversion_insn = get_insns ();
2730 all->last_conversion_insn = get_last_insn ();
2731 end_sequence ();
2732
2733 did_conversion = true;
2734 }
2735 else
2736 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2737
2738 /* If we were passed a pointer but the actual value can safely live
2739 in a register, put it in one. */
2740 if (data->passed_pointer
2741 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2742 /* If by-reference argument was promoted, demote it. */
2743 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2744 || use_register_for_decl (parm)))
2745 {
2746 /* We can't use nominal_mode, because it will have been set to
2747 Pmode above. We must use the actual mode of the parm. */
2748 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2749 mark_user_reg (parmreg);
2750
2751 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2752 {
2753 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2754 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2755
2756 push_to_sequence2 (all->first_conversion_insn,
2757 all->last_conversion_insn);
2758 emit_move_insn (tempreg, DECL_RTL (parm));
2759 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2760 emit_move_insn (parmreg, tempreg);
2761 all->first_conversion_insn = get_insns ();
2762 all->last_conversion_insn = get_last_insn ();
2763 end_sequence ();
2764
2765 did_conversion = true;
2766 }
2767 else
2768 emit_move_insn (parmreg, DECL_RTL (parm));
2769
2770 SET_DECL_RTL (parm, parmreg);
2771
2772 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2773 now the parm. */
2774 data->stack_parm = NULL;
2775 }
2776
2777 /* Mark the register as eliminable if we did no conversion and it was
2778 copied from memory at a fixed offset, and the arg pointer was not
2779 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2780 offset formed an invalid address, such memory-equivalences as we
2781 make here would screw up life analysis for it. */
2782 if (data->nominal_mode == data->passed_mode
2783 && !did_conversion
2784 && data->stack_parm != 0
2785 && MEM_P (data->stack_parm)
2786 && data->locate.offset.var == 0
2787 && reg_mentioned_p (virtual_incoming_args_rtx,
2788 XEXP (data->stack_parm, 0)))
2789 {
2790 rtx linsn = get_last_insn ();
2791 rtx sinsn, set;
2792
2793 /* Mark complex types separately. */
2794 if (GET_CODE (parmreg) == CONCAT)
2795 {
2796 enum machine_mode submode
2797 = GET_MODE_INNER (GET_MODE (parmreg));
2798 int regnor = REGNO (XEXP (parmreg, 0));
2799 int regnoi = REGNO (XEXP (parmreg, 1));
2800 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2801 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2802 GET_MODE_SIZE (submode));
2803
2804 /* Scan backwards for the set of the real and
2805 imaginary parts. */
2806 for (sinsn = linsn; sinsn != 0;
2807 sinsn = prev_nonnote_insn (sinsn))
2808 {
2809 set = single_set (sinsn);
2810 if (set == 0)
2811 continue;
2812
2813 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2814 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2815 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2816 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2817 }
2818 }
2819 else if ((set = single_set (linsn)) != 0
2820 && SET_DEST (set) == parmreg)
2821 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2822 }
2823
2824 /* For pointer data type, suggest pointer register. */
2825 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2826 mark_reg_pointer (parmreg,
2827 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2828 }
2829
2830 /* A subroutine of assign_parms. Allocate stack space to hold the current
2831 parameter. Get it there. Perform all ABI specified conversions. */
2832
2833 static void
2834 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2835 struct assign_parm_data_one *data)
2836 {
2837 /* Value must be stored in the stack slot STACK_PARM during function
2838 execution. */
2839 bool to_conversion = false;
2840
2841 if (data->promoted_mode != data->nominal_mode)
2842 {
2843 /* Conversion is required. */
2844 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2845
2846 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2847
2848 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2849 to_conversion = true;
2850
2851 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2852 TYPE_UNSIGNED (TREE_TYPE (parm)));
2853
2854 if (data->stack_parm)
2855 /* ??? This may need a big-endian conversion on sparc64. */
2856 data->stack_parm
2857 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2858 }
2859
2860 if (data->entry_parm != data->stack_parm)
2861 {
2862 rtx src, dest;
2863
2864 if (data->stack_parm == 0)
2865 {
2866 data->stack_parm
2867 = assign_stack_local (GET_MODE (data->entry_parm),
2868 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2869 TYPE_ALIGN (data->passed_type));
2870 set_mem_attributes (data->stack_parm, parm, 1);
2871 }
2872
2873 dest = validize_mem (data->stack_parm);
2874 src = validize_mem (data->entry_parm);
2875
2876 if (MEM_P (src))
2877 {
2878 /* Use a block move to handle potentially misaligned entry_parm. */
2879 if (!to_conversion)
2880 push_to_sequence2 (all->first_conversion_insn,
2881 all->last_conversion_insn);
2882 to_conversion = true;
2883
2884 emit_block_move (dest, src,
2885 GEN_INT (int_size_in_bytes (data->passed_type)),
2886 BLOCK_OP_NORMAL);
2887 }
2888 else
2889 emit_move_insn (dest, src);
2890 }
2891
2892 if (to_conversion)
2893 {
2894 all->first_conversion_insn = get_insns ();
2895 all->last_conversion_insn = get_last_insn ();
2896 end_sequence ();
2897 }
2898
2899 SET_DECL_RTL (parm, data->stack_parm);
2900 }
2901
2902 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2903 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2904
2905 static void
2906 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2907 {
2908 tree parm;
2909 tree orig_fnargs = all->orig_fnargs;
2910
2911 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2912 {
2913 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2914 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2915 {
2916 rtx tmp, real, imag;
2917 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2918
2919 real = DECL_RTL (fnargs);
2920 imag = DECL_RTL (TREE_CHAIN (fnargs));
2921 if (inner != GET_MODE (real))
2922 {
2923 real = gen_lowpart_SUBREG (inner, real);
2924 imag = gen_lowpart_SUBREG (inner, imag);
2925 }
2926
2927 if (TREE_ADDRESSABLE (parm))
2928 {
2929 rtx rmem, imem;
2930 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2931
2932 /* split_complex_arg put the real and imag parts in
2933 pseudos. Move them to memory. */
2934 tmp = assign_stack_local (DECL_MODE (parm), size,
2935 TYPE_ALIGN (TREE_TYPE (parm)));
2936 set_mem_attributes (tmp, parm, 1);
2937 rmem = adjust_address_nv (tmp, inner, 0);
2938 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2939 push_to_sequence2 (all->first_conversion_insn,
2940 all->last_conversion_insn);
2941 emit_move_insn (rmem, real);
2942 emit_move_insn (imem, imag);
2943 all->first_conversion_insn = get_insns ();
2944 all->last_conversion_insn = get_last_insn ();
2945 end_sequence ();
2946 }
2947 else
2948 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2949 SET_DECL_RTL (parm, tmp);
2950
2951 real = DECL_INCOMING_RTL (fnargs);
2952 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2953 if (inner != GET_MODE (real))
2954 {
2955 real = gen_lowpart_SUBREG (inner, real);
2956 imag = gen_lowpart_SUBREG (inner, imag);
2957 }
2958 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2959 set_decl_incoming_rtl (parm, tmp);
2960 fnargs = TREE_CHAIN (fnargs);
2961 }
2962 else
2963 {
2964 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2965 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2966
2967 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2968 instead of the copy of decl, i.e. FNARGS. */
2969 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2970 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2971 }
2972
2973 fnargs = TREE_CHAIN (fnargs);
2974 }
2975 }
2976
2977 /* Assign RTL expressions to the function's parameters. This may involve
2978 copying them into registers and using those registers as the DECL_RTL. */
2979
2980 static void
2981 assign_parms (tree fndecl)
2982 {
2983 struct assign_parm_data_all all;
2984 tree fnargs, parm;
2985
2986 current_function_internal_arg_pointer
2987 = targetm.calls.internal_arg_pointer ();
2988
2989 assign_parms_initialize_all (&all);
2990 fnargs = assign_parms_augmented_arg_list (&all);
2991
2992 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2993 {
2994 struct assign_parm_data_one data;
2995
2996 /* Extract the type of PARM; adjust it according to ABI. */
2997 assign_parm_find_data_types (&all, parm, &data);
2998
2999 /* Early out for errors and void parameters. */
3000 if (data.passed_mode == VOIDmode)
3001 {
3002 SET_DECL_RTL (parm, const0_rtx);
3003 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3004 continue;
3005 }
3006
3007 if (current_function_stdarg && !TREE_CHAIN (parm))
3008 assign_parms_setup_varargs (&all, &data, false);
3009
3010 /* Find out where the parameter arrives in this function. */
3011 assign_parm_find_entry_rtl (&all, &data);
3012
3013 /* Find out where stack space for this parameter might be. */
3014 if (assign_parm_is_stack_parm (&all, &data))
3015 {
3016 assign_parm_find_stack_rtl (parm, &data);
3017 assign_parm_adjust_entry_rtl (&data);
3018 }
3019
3020 /* Record permanently how this parm was passed. */
3021 set_decl_incoming_rtl (parm, data.entry_parm);
3022
3023 /* Update info on where next arg arrives in registers. */
3024 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3025 data.passed_type, data.named_arg);
3026
3027 assign_parm_adjust_stack_rtl (&data);
3028
3029 if (assign_parm_setup_block_p (&data))
3030 assign_parm_setup_block (&all, parm, &data);
3031 else if (data.passed_pointer || use_register_for_decl (parm))
3032 assign_parm_setup_reg (&all, parm, &data);
3033 else
3034 assign_parm_setup_stack (&all, parm, &data);
3035 }
3036
3037 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3038 assign_parms_unsplit_complex (&all, fnargs);
3039
3040 /* Output all parameter conversion instructions (possibly including calls)
3041 now that all parameters have been copied out of hard registers. */
3042 emit_insn (all.first_conversion_insn);
3043
3044 /* If we are receiving a struct value address as the first argument, set up
3045 the RTL for the function result. As this might require code to convert
3046 the transmitted address to Pmode, we do this here to ensure that possible
3047 preliminary conversions of the address have been emitted already. */
3048 if (all.function_result_decl)
3049 {
3050 tree result = DECL_RESULT (current_function_decl);
3051 rtx addr = DECL_RTL (all.function_result_decl);
3052 rtx x;
3053
3054 if (DECL_BY_REFERENCE (result))
3055 x = addr;
3056 else
3057 {
3058 addr = convert_memory_address (Pmode, addr);
3059 x = gen_rtx_MEM (DECL_MODE (result), addr);
3060 set_mem_attributes (x, result, 1);
3061 }
3062 SET_DECL_RTL (result, x);
3063 }
3064
3065 /* We have aligned all the args, so add space for the pretend args. */
3066 current_function_pretend_args_size = all.pretend_args_size;
3067 all.stack_args_size.constant += all.extra_pretend_bytes;
3068 current_function_args_size = all.stack_args_size.constant;
3069
3070 /* Adjust function incoming argument size for alignment and
3071 minimum length. */
3072
3073 #ifdef REG_PARM_STACK_SPACE
3074 current_function_args_size = MAX (current_function_args_size,
3075 REG_PARM_STACK_SPACE (fndecl));
3076 #endif
3077
3078 current_function_args_size = CEIL_ROUND (current_function_args_size,
3079 PARM_BOUNDARY / BITS_PER_UNIT);
3080
3081 #ifdef ARGS_GROW_DOWNWARD
3082 current_function_arg_offset_rtx
3083 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3084 : expand_expr (size_diffop (all.stack_args_size.var,
3085 size_int (-all.stack_args_size.constant)),
3086 NULL_RTX, VOIDmode, 0));
3087 #else
3088 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3089 #endif
3090
3091 /* See how many bytes, if any, of its args a function should try to pop
3092 on return. */
3093
3094 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3095 current_function_args_size);
3096
3097 /* For stdarg.h function, save info about
3098 regs and stack space used by the named args. */
3099
3100 current_function_args_info = all.args_so_far;
3101
3102 /* Set the rtx used for the function return value. Put this in its
3103 own variable so any optimizers that need this information don't have
3104 to include tree.h. Do this here so it gets done when an inlined
3105 function gets output. */
3106
3107 current_function_return_rtx
3108 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3109 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3110
3111 /* If scalar return value was computed in a pseudo-reg, or was a named
3112 return value that got dumped to the stack, copy that to the hard
3113 return register. */
3114 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3115 {
3116 tree decl_result = DECL_RESULT (fndecl);
3117 rtx decl_rtl = DECL_RTL (decl_result);
3118
3119 if (REG_P (decl_rtl)
3120 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3121 : DECL_REGISTER (decl_result))
3122 {
3123 rtx real_decl_rtl;
3124
3125 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3126 fndecl, true);
3127 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3128 /* The delay slot scheduler assumes that current_function_return_rtx
3129 holds the hard register containing the return value, not a
3130 temporary pseudo. */
3131 current_function_return_rtx = real_decl_rtl;
3132 }
3133 }
3134 }
3135
3136 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3137 For all seen types, gimplify their sizes. */
3138
3139 static tree
3140 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3141 {
3142 tree t = *tp;
3143
3144 *walk_subtrees = 0;
3145 if (TYPE_P (t))
3146 {
3147 if (POINTER_TYPE_P (t))
3148 *walk_subtrees = 1;
3149 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3150 && !TYPE_SIZES_GIMPLIFIED (t))
3151 {
3152 gimplify_type_sizes (t, (tree *) data);
3153 *walk_subtrees = 1;
3154 }
3155 }
3156
3157 return NULL;
3158 }
3159
3160 /* Gimplify the parameter list for current_function_decl. This involves
3161 evaluating SAVE_EXPRs of variable sized parameters and generating code
3162 to implement callee-copied reference parameters. Returns a list of
3163 statements to add to the beginning of the function, or NULL if nothing
3164 to do. */
3165
3166 tree
3167 gimplify_parameters (void)
3168 {
3169 struct assign_parm_data_all all;
3170 tree fnargs, parm, stmts = NULL;
3171
3172 assign_parms_initialize_all (&all);
3173 fnargs = assign_parms_augmented_arg_list (&all);
3174
3175 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3176 {
3177 struct assign_parm_data_one data;
3178
3179 /* Extract the type of PARM; adjust it according to ABI. */
3180 assign_parm_find_data_types (&all, parm, &data);
3181
3182 /* Early out for errors and void parameters. */
3183 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3184 continue;
3185
3186 /* Update info on where next arg arrives in registers. */
3187 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3188 data.passed_type, data.named_arg);
3189
3190 /* ??? Once upon a time variable_size stuffed parameter list
3191 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3192 turned out to be less than manageable in the gimple world.
3193 Now we have to hunt them down ourselves. */
3194 walk_tree_without_duplicates (&data.passed_type,
3195 gimplify_parm_type, &stmts);
3196
3197 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3198 {
3199 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3200 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3201 }
3202
3203 if (data.passed_pointer)
3204 {
3205 tree type = TREE_TYPE (data.passed_type);
3206 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3207 type, data.named_arg))
3208 {
3209 tree local, t;
3210
3211 /* For constant sized objects, this is trivial; for
3212 variable-sized objects, we have to play games. */
3213 if (TREE_CONSTANT (DECL_SIZE (parm)))
3214 {
3215 local = create_tmp_var (type, get_name (parm));
3216 DECL_IGNORED_P (local) = 0;
3217 }
3218 else
3219 {
3220 tree ptr_type, addr;
3221
3222 ptr_type = build_pointer_type (type);
3223 addr = create_tmp_var (ptr_type, get_name (parm));
3224 DECL_IGNORED_P (addr) = 0;
3225 local = build_fold_indirect_ref (addr);
3226
3227 t = built_in_decls[BUILT_IN_ALLOCA];
3228 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3229 t = fold_convert (ptr_type, t);
3230 t = build_gimple_modify_stmt (addr, t);
3231 gimplify_and_add (t, &stmts);
3232 }
3233
3234 t = build_gimple_modify_stmt (local, parm);
3235 gimplify_and_add (t, &stmts);
3236
3237 SET_DECL_VALUE_EXPR (parm, local);
3238 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3239 }
3240 }
3241 }
3242
3243 return stmts;
3244 }
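
/* Sketch of what the callee-copy path above generates for a
   variable-sized reference parameter (pseudo-GIMPLE; names
   illustrative):

     addr  = (T *) __builtin_alloca (DECL_SIZE_UNIT (parm));
     *addr = parm;                   copy into the fresh local

   after which PARM is accessed through *ADDR by way of its
   DECL_VALUE_EXPR.  For a constant-sized parameter, a plain temporary
   variable is used instead of the alloca.  */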
3245 \f
3246 /* Compute the size and offset from the start of the stacked arguments for a
3247 parm passed in mode PASSED_MODE and with type TYPE.
3248
3249 INITIAL_OFFSET_PTR points to the current offset into the stacked
3250 arguments.
3251
3252 The starting offset and size for this parm are returned in
3253 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3254 nonzero, the offset is that of the stack slot, which is returned in
3255 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3256 padding required from the initial offset ptr to the stack slot.
3257
3258 IN_REGS is nonzero if the argument will be passed in registers. It will
3259 never be set if REG_PARM_STACK_SPACE is not defined.
3260
3261 FNDECL is the function in which the argument was defined.
3262
3263 There are two types of rounding that are done. The first, controlled by
3264 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3265 list to be aligned to the specific boundary (in bits). This rounding
3266 affects the initial and starting offsets, but not the argument size.
3267
3268 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3269 optionally rounds the size of the parm to PARM_BOUNDARY. The
3270 initial offset is not affected by this rounding, while the size always
3271 is and the starting offset may be. */
3272
3273 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3274 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3275 callers pass in the total size of args so far as
3276 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3277
3278 void
3279 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3280 int partial, tree fndecl ATTRIBUTE_UNUSED,
3281 struct args_size *initial_offset_ptr,
3282 struct locate_and_pad_arg_data *locate)
3283 {
3284 tree sizetree;
3285 enum direction where_pad;
3286 unsigned int boundary;
3287 int reg_parm_stack_space = 0;
3288 int part_size_in_regs;
3289
3290 #ifdef REG_PARM_STACK_SPACE
3291 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3292
3293 /* If we have found a stack parm before we reach the end of the
3294 area reserved for registers, skip that area. */
3295 if (! in_regs)
3296 {
3297 if (reg_parm_stack_space > 0)
3298 {
3299 if (initial_offset_ptr->var)
3300 {
3301 initial_offset_ptr->var
3302 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3303 ssize_int (reg_parm_stack_space));
3304 initial_offset_ptr->constant = 0;
3305 }
3306 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3307 initial_offset_ptr->constant = reg_parm_stack_space;
3308 }
3309 }
3310 #endif /* REG_PARM_STACK_SPACE */
3311
3312 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3313
3314 sizetree
3315 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3316 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3317 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3318 locate->where_pad = where_pad;
3319 locate->boundary = boundary;
3320
3321 /* Remember if the outgoing parameter requires extra alignment on the
3322 calling function side. */
3323 if (boundary > PREFERRED_STACK_BOUNDARY)
3324 boundary = PREFERRED_STACK_BOUNDARY;
3325 if (cfun->stack_alignment_needed < boundary)
3326 cfun->stack_alignment_needed = boundary;
3327
3328 #ifdef ARGS_GROW_DOWNWARD
3329 locate->slot_offset.constant = -initial_offset_ptr->constant;
3330 if (initial_offset_ptr->var)
3331 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3332 initial_offset_ptr->var);
3333
3334 {
3335 tree s2 = sizetree;
3336 if (where_pad != none
3337 && (!host_integerp (sizetree, 1)
3338 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3339 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3340 SUB_PARM_SIZE (locate->slot_offset, s2);
3341 }
3342
3343 locate->slot_offset.constant += part_size_in_regs;
3344
3345 if (!in_regs
3346 #ifdef REG_PARM_STACK_SPACE
3347 || REG_PARM_STACK_SPACE (fndecl) > 0
3348 #endif
3349 )
3350 pad_to_arg_alignment (&locate->slot_offset, boundary,
3351 &locate->alignment_pad);
3352
3353 locate->size.constant = (-initial_offset_ptr->constant
3354 - locate->slot_offset.constant);
3355 if (initial_offset_ptr->var)
3356 locate->size.var = size_binop (MINUS_EXPR,
3357 size_binop (MINUS_EXPR,
3358 ssize_int (0),
3359 initial_offset_ptr->var),
3360 locate->slot_offset.var);
3361
3362 /* Pad_below needs the pre-rounded size to know how much to pad
3363 below. */
3364 locate->offset = locate->slot_offset;
3365 if (where_pad == downward)
3366 pad_below (&locate->offset, passed_mode, sizetree);
3367
3368 #else /* !ARGS_GROW_DOWNWARD */
3369 if (!in_regs
3370 #ifdef REG_PARM_STACK_SPACE
3371 || REG_PARM_STACK_SPACE (fndecl) > 0
3372 #endif
3373 )
3374 pad_to_arg_alignment (initial_offset_ptr, boundary,
3375 &locate->alignment_pad);
3376 locate->slot_offset = *initial_offset_ptr;
3377
3378 #ifdef PUSH_ROUNDING
3379 if (passed_mode != BLKmode)
3380 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3381 #endif
3382
3383 /* Pad_below needs the pre-rounded size to know how much to pad below
3384 so this must be done before rounding up. */
3385 locate->offset = locate->slot_offset;
3386 if (where_pad == downward)
3387 pad_below (&locate->offset, passed_mode, sizetree);
3388
3389 if (where_pad != none
3390 && (!host_integerp (sizetree, 1)
3391 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3392 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3393
3394 ADD_PARM_SIZE (locate->size, sizetree);
3395
3396 locate->size.constant -= part_size_in_regs;
3397 #endif /* ARGS_GROW_DOWNWARD */
3398 }
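
/* Small example of the upward-growing path (editor's sketch; assume
   PARM_BOUNDARY == 32, FUNCTION_ARG_BOUNDARY == 32 bits,
   STACK_POINTER_OFFSET == 0, no REG_PARM_STACK_SPACE, and a
   stack-passed parameter): with *INITIAL_OFFSET_PTR at 6 bytes, an int
   parameter is placed at

     slot_offset = CEIL_ROUND (6, 4) = 8,   size = 4

   and the caller's running offset continues from 12.  */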
3399
3400 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3401 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3402
3403 static void
3404 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3405 struct args_size *alignment_pad)
3406 {
3407 tree save_var = NULL_TREE;
3408 HOST_WIDE_INT save_constant = 0;
3409 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3410 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3411
3412 #ifdef SPARC_STACK_BOUNDARY_HACK
3413 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3414 the real alignment of %sp. However, when it does this, the
3415 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3416 if (SPARC_STACK_BOUNDARY_HACK)
3417 sp_offset = 0;
3418 #endif
3419
3420 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3421 {
3422 save_var = offset_ptr->var;
3423 save_constant = offset_ptr->constant;
3424 }
3425
3426 alignment_pad->var = NULL_TREE;
3427 alignment_pad->constant = 0;
3428
3429 if (boundary > BITS_PER_UNIT)
3430 {
3431 if (offset_ptr->var)
3432 {
3433 tree sp_offset_tree = ssize_int (sp_offset);
3434 tree offset = size_binop (PLUS_EXPR,
3435 ARGS_SIZE_TREE (*offset_ptr),
3436 sp_offset_tree);
3437 #ifdef ARGS_GROW_DOWNWARD
3438 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3439 #else
3440 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3441 #endif
3442
3443 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3444 /* ARGS_SIZE_TREE includes constant term. */
3445 offset_ptr->constant = 0;
3446 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3447 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3448 save_var);
3449 }
3450 else
3451 {
3452 offset_ptr->constant = -sp_offset +
3453 #ifdef ARGS_GROW_DOWNWARD
3454 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3455 #else
3456 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3457 #endif
3458 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3459 alignment_pad->constant = offset_ptr->constant - save_constant;
3460 }
3461 }
3462 }
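
/* Concretely, for the constant branch above with sp_offset == 0 and
   boundary_in_bytes == 8: an offset of 13 becomes

     CEIL_ROUND (13, 8)  = (13 + 7) & ~7 = 16   (args grow upward)
     FLOOR_ROUND (13, 8) = 13 & ~7       = 8    (args grow downward)

   and a nonzero STACK_POINTER_OFFSET is added in before rounding and
   subtracted back out afterwards.  */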
3463
3464 static void
3465 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3466 {
3467 if (passed_mode != BLKmode)
3468 {
3469 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3470 offset_ptr->constant
3471 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3472 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3473 - GET_MODE_SIZE (passed_mode));
3474 }
3475 else
3476 {
3477 if (TREE_CODE (sizetree) != INTEGER_CST
3478 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3479 {
3480 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3481 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3482 /* Add it in. */
3483 ADD_PARM_SIZE (*offset_ptr, s2);
3484 SUB_PARM_SIZE (*offset_ptr, sizetree);
3485 }
3486 }
3487 }
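/* For illustration, the non-BLKmode branch above on a hypothetical
   target with PARM_BOUNDARY == 32 and BITS_PER_UNIT == 8: an HImode
   argument (16 bits, 2 bytes) gets

     ((16 + 32 - 1) / 32 * 32) / 8 - 2  ==  4 - 2  ==  2

   bytes of padding added below it, so the value sits at the top of its
   4-byte slot.  A mode whose bitsize is already a multiple of
   PARM_BOUNDARY adds nothing.  */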
3488 \f
3489
3490 /* True if register REGNO was alive at a place where `setjmp' was
3491 called and was set more than once or is an argument. Such regs may
3492 be clobbered by `longjmp'. */
3493
3494 static bool
3495 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3496 {
3497 /* There appear to be cases where some local vars never reach the
3498 backend but have bogus regnos. */
3499 if (regno >= max_reg_num ())
3500 return false;
3501
3502 return ((REG_N_SETS (regno) > 1
3503 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3504 && REGNO_REG_SET_P (setjmp_crosses, regno));
3505 }
3506
3507 /* Walk the tree of blocks describing the binding levels within a
3508    function and warn about variables that might be killed by setjmp or
3509    vfork.  This is done after flow analysis, before register
3510    allocation, since register allocation maps the pseudo-regs to hard
3511    regs.  */
3512
3513 static void
3514 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3515 {
3516 tree decl, sub;
3517
3518 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3519 {
3520 if (TREE_CODE (decl) == VAR_DECL
3521 && DECL_RTL_SET_P (decl)
3522 && REG_P (DECL_RTL (decl))
3523 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3524 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3525 " %<longjmp%> or %<vfork%>", decl);
3526 }
3527
3528 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3529 setjmp_vars_warning (setjmp_crosses, sub);
3530 }
3531
3532 /* Do the appropriate part of setjmp_vars_warning
3533 but for arguments instead of local variables. */
3534
3535 static void
3536 setjmp_args_warning (bitmap setjmp_crosses)
3537 {
3538 tree decl;
3539 for (decl = DECL_ARGUMENTS (current_function_decl);
3540 decl; decl = TREE_CHAIN (decl))
3541 if (DECL_RTL (decl) != 0
3542 && REG_P (DECL_RTL (decl))
3543 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3544 warning (OPT_Wclobbered,
3545 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3546 decl);
3547 }
3548
3549 /* Generate warning messages for variables live across setjmp. */
3550
3551 void
3552 generate_setjmp_warnings (void)
3553 {
3554 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3555
3556 if (n_basic_blocks == NUM_FIXED_BLOCKS
3557 || bitmap_empty_p (setjmp_crosses))
3558 return;
3559
3560 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3561 setjmp_args_warning (setjmp_crosses);
3562 }
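/* A minimal user-level example of what these warnings diagnose (a
   sketch, not part of GCC itself).  Compiled with -O -Wclobbered, the
   variable I may be kept in a call-clobbered register across setjmp,
   so its value after a longjmp is unpredictable:

     #include <setjmp.h>
     static jmp_buf env;
     int f (void)
     {
       int i = 0;                     ... set once here ...
       if (setjmp (env) == 0)
         {
           i = 1;                     ... and again here ...
           longjmp (env, 1);
         }
       return i;                      ... may see either value ...
     }

   regno_clobbered_at_setjmp holds for I's pseudo because it is set
   more than once and is live across the setjmp call.  */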
3563
3564 \f
3565 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3566 and create duplicate blocks. */
3567 /* ??? Need an option to either create block fragments or to create
3568 abstract origin duplicates of a source block. It really depends
3569 on what optimization has been performed. */
3570
3571 void
3572 reorder_blocks (void)
3573 {
3574 tree block = DECL_INITIAL (current_function_decl);
3575 VEC(tree,heap) *block_stack;
3576
3577 if (block == NULL_TREE)
3578 return;
3579
3580 block_stack = VEC_alloc (tree, heap, 10);
3581
3582 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3583 clear_block_marks (block);
3584
3585 /* Prune the old trees away, so that they don't get in the way. */
3586 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3587 BLOCK_CHAIN (block) = NULL_TREE;
3588
3589 /* Recreate the block tree from the note nesting. */
3590 reorder_blocks_1 (get_insns (), block, &block_stack);
3591 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3592
3593 VEC_free (tree, heap, block_stack);
3594 }
3595
3596 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3597
3598 void
3599 clear_block_marks (tree block)
3600 {
3601 while (block)
3602 {
3603 TREE_ASM_WRITTEN (block) = 0;
3604 clear_block_marks (BLOCK_SUBBLOCKS (block));
3605 block = BLOCK_CHAIN (block);
3606 }
3607 }
3608
3609 static void
3610 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3611 {
3612 rtx insn;
3613
3614 for (insn = insns; insn; insn = NEXT_INSN (insn))
3615 {
3616 if (NOTE_P (insn))
3617 {
3618 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3619 {
3620 tree block = NOTE_BLOCK (insn);
3621 tree origin;
3622
3623 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3624 ? BLOCK_FRAGMENT_ORIGIN (block)
3625 : block);
3626
3627 /* If we have seen this block before, that means it now
3628 spans multiple address regions. Create a new fragment. */
3629 if (TREE_ASM_WRITTEN (block))
3630 {
3631 tree new_block = copy_node (block);
3632
3633 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3634 BLOCK_FRAGMENT_CHAIN (new_block)
3635 = BLOCK_FRAGMENT_CHAIN (origin);
3636 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3637
3638 NOTE_BLOCK (insn) = new_block;
3639 block = new_block;
3640 }
3641
3642 BLOCK_SUBBLOCKS (block) = 0;
3643 TREE_ASM_WRITTEN (block) = 1;
3644 /* When there's only one block for the entire function,
3645    current_block == block and we mustn't do this; it
3646    would cause infinite recursion.  */
3647 if (block != current_block)
3648 {
3649 if (block != origin)
3650 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3651
3652 BLOCK_SUPERCONTEXT (block) = current_block;
3653 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3654 BLOCK_SUBBLOCKS (current_block) = block;
3655 current_block = origin;
3656 }
3657 VEC_safe_push (tree, heap, *p_block_stack, block);
3658 }
3659 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3660 {
3661 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3662 BLOCK_SUBBLOCKS (current_block)
3663 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3664 current_block = BLOCK_SUPERCONTEXT (current_block);
3665 }
3666 }
3667 }
3668 }
3669
3670 /* Reverse the order of elements in the chain T of blocks,
3671 and return the new head of the chain (old last element). */
3672
3673 tree
3674 blocks_nreverse (tree t)
3675 {
3676 tree prev = 0, decl, next;
3677 for (decl = t; decl; decl = next)
3678 {
3679 next = BLOCK_CHAIN (decl);
3680 BLOCK_CHAIN (decl) = prev;
3681 prev = decl;
3682 }
3683 return prev;
3684 }
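/* For example, a chain linked through BLOCK_CHAIN as

     A -> B -> C -> NULL_TREE

   has its links rewritten in place and C is returned:

     C -> B -> A -> NULL_TREE

   which is why callers such as reorder_blocks reassign the result
   (e.g. to BLOCK_SUBBLOCKS) instead of keeping the old head.  */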
3685
3686 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3687 non-NULL, list them all into VECTOR, in a depth-first preorder
3688 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3689 blocks. */
3690
3691 static int
3692 all_blocks (tree block, tree *vector)
3693 {
3694 int n_blocks = 0;
3695
3696 while (block)
3697 {
3698 TREE_ASM_WRITTEN (block) = 0;
3699
3700 /* Record this block. */
3701 if (vector)
3702 vector[n_blocks] = block;
3703
3704 ++n_blocks;
3705
3706 /* Record the subblocks, and their subblocks... */
3707 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3708 vector ? vector + n_blocks : 0);
3709 block = BLOCK_CHAIN (block);
3710 }
3711
3712 return n_blocks;
3713 }
3714
3715 /* Return a vector containing all the blocks rooted at BLOCK. The
3716 number of elements in the vector is stored in N_BLOCKS_P. The
3717 vector is dynamically allocated; it is the caller's responsibility
3718 to call `free' on the pointer returned. */
3719
3720 static tree *
3721 get_block_vector (tree block, int *n_blocks_p)
3722 {
3723 tree *block_vector;
3724
3725 *n_blocks_p = all_blocks (block, NULL);
3726 block_vector = XNEWVEC (tree, *n_blocks_p);
3727 all_blocks (block, block_vector);
3728
3729 return block_vector;
3730 }
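/* Typical use (a sketch mirroring number_blocks below; the vector is
   heap-allocated, so the caller must free it):

     int i, n_blocks;
     tree *vec = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
     for (i = 0; i < n_blocks; ++i)
       ... inspect vec[i] ...
     free (vec);
 */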
3731
3732 static GTY(()) int next_block_index = 2;
3733
3734 /* Set BLOCK_NUMBER for all the blocks in FN. */
3735
3736 void
3737 number_blocks (tree fn)
3738 {
3739 int i;
3740 int n_blocks;
3741 tree *block_vector;
3742
3743 /* For SDB and XCOFF debugging output, we start numbering the blocks
3744 from 1 within each function, rather than keeping a running
3745 count. */
3746 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3747 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3748 next_block_index = 1;
3749 #endif
3750
3751 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3752
3753 /* The top-level BLOCK isn't numbered at all. */
3754 for (i = 1; i < n_blocks; ++i)
3755 /* We number the blocks from next_block_index (normally two).  */
3756 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3757
3758 free (block_vector);
3759
3760 return;
3761 }
3762
3763 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3764
3765 tree
3766 debug_find_var_in_block_tree (tree var, tree block)
3767 {
3768 tree t;
3769
3770 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3771 if (t == var)
3772 return block;
3773
3774 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3775 {
3776 tree ret = debug_find_var_in_block_tree (var, t);
3777 if (ret)
3778 return ret;
3779 }
3780
3781 return NULL_TREE;
3782 }
3783 \f
3784
3785 /* Return the current value of funcdef_no and increment it.  */
3786 int
3787 get_next_funcdef_no (void)
3788 {
3789 return funcdef_no++;
3790 }
3791
3792 /* Allocate a function structure for FNDECL and set its contents
3793 to the defaults. */
3794
3795 void
3796 allocate_struct_function (tree fndecl)
3797 {
3798 tree result;
3799 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3800
3801 cfun = ggc_alloc_cleared (sizeof (struct function));
3802
3803 cfun->stack_alignment_needed = STACK_BOUNDARY;
3804 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3805
3806 current_function_funcdef_no = get_next_funcdef_no ();
3807
3808 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3809
3810 init_eh_for_function ();
3811
3812 lang_hooks.function.init (cfun);
3813 if (init_machine_status)
3814 cfun->machine = (*init_machine_status) ();
3815
3816 if (fndecl == NULL)
3817 return;
3818
3819 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3820 cfun->decl = fndecl;
3821
3822 result = DECL_RESULT (fndecl);
3823 if (aggregate_value_p (result, fndecl))
3824 {
3825 #ifdef PCC_STATIC_STRUCT_RETURN
3826 current_function_returns_pcc_struct = 1;
3827 #endif
3828 current_function_returns_struct = 1;
3829 }
3830
3831 current_function_stdarg
3832 = (fntype
3833 && TYPE_ARG_TYPES (fntype) != 0
3834 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3835 != void_type_node));
3836
3837 /* Assume all registers in stdarg functions need to be saved. */
3838 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3839 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3840 }
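/* A note on the stdarg test above: for a prototype such as
   `int f (int x, ...)', TYPE_ARG_TYPES ends with the entry for X
   rather than with void_type_node, so current_function_stdarg is set.
   For `int f (int x)' the list is terminated by void_type_node and it
   is not set; an unprototyped `int f ()' has a null TYPE_ARG_TYPES and
   is likewise not treated as stdarg.  */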
3841
3842 /* Reset cfun, and other non-struct-function variables to defaults as
3843 appropriate for emitting rtl at the start of a function. */
3844
3845 static void
3846 prepare_function_start (tree fndecl)
3847 {
3848 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3849 cfun = DECL_STRUCT_FUNCTION (fndecl);
3850 else
3851 allocate_struct_function (fndecl);
3852 init_emit ();
3853 init_varasm_status (cfun);
3854 init_expr ();
3855
3856 cse_not_expected = ! optimize;
3857
3858 /* Caller save not needed yet. */
3859 caller_save_needed = 0;
3860
3861 /* We haven't done register allocation yet. */
3862 reg_renumber = 0;
3863
3864 /* Indicate that we have not instantiated virtual registers yet. */
3865 virtuals_instantiated = 0;
3866
3867 /* Indicate that we want CONCATs now. */
3868 generating_concat_p = 1;
3869
3870 /* Indicate we have no need of a frame pointer yet. */
3871 frame_pointer_needed = 0;
3872 }
3873
3874 /* Initialize the rtl expansion mechanism so that we can do simple things
3875 like generate sequences. This is used to provide a context during global
3876 initialization of some passes. */
3877 void
3878 init_dummy_function_start (void)
3879 {
3880 prepare_function_start (NULL);
3881 }
3882
3883 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3884 and initialize static variables for generating RTL for the statements
3885 of the function. */
3886
3887 void
3888 init_function_start (tree subr)
3889 {
3890 prepare_function_start (subr);
3891
3892   /* Warn if the return value is an aggregate type,
3893      regardless of which calling convention we are using for it.  */
3894 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3895 warning (OPT_Waggregate_return, "function returns an aggregate");
3896 }
3897
3898 /* Make sure all values used by the optimization passes have sane
3899 defaults. */
3900 unsigned int
3901 init_function_for_compilation (void)
3902 {
3903 reg_renumber = 0;
3904
3905 /* No prologue/epilogue insns yet. Make sure that these vectors are
3906 empty. */
3907 gcc_assert (VEC_length (int, prologue) == 0);
3908 gcc_assert (VEC_length (int, epilogue) == 0);
3909 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3910 return 0;
3911 }
3912
3913 struct tree_opt_pass pass_init_function =
3914 {
3915 NULL, /* name */
3916 NULL, /* gate */
3917 init_function_for_compilation, /* execute */
3918 NULL, /* sub */
3919 NULL, /* next */
3920 0, /* static_pass_number */
3921 0, /* tv_id */
3922 0, /* properties_required */
3923 0, /* properties_provided */
3924 0, /* properties_destroyed */
3925 0, /* todo_flags_start */
3926 0, /* todo_flags_finish */
3927 0 /* letter */
3928 };
3929
3930
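/* Emit a call to __main at the start of `main', on targets where
   static constructors are run from __main rather than from an init
   section.  */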
3931 void
3932 expand_main_function (void)
3933 {
3934 #if (defined(INVOKE__main) \
3935 || (!defined(HAS_INIT_SECTION) \
3936 && !defined(INIT_SECTION_ASM_OP) \
3937 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3938 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3939 #endif
3940 }
3941 \f
3942 /* Expand code to initialize the stack_protect_guard. This is invoked at
3943 the beginning of a function to be protected. */
3944
3945 #ifndef HAVE_stack_protect_set
3946 # define HAVE_stack_protect_set 0
3947 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
3948 #endif
3949
3950 void
3951 stack_protect_prologue (void)
3952 {
3953 tree guard_decl = targetm.stack_protect_guard ();
3954 rtx x, y;
3955
3956 /* Avoid expand_expr here, because we don't want guard_decl pulled
3957 into registers unless absolutely necessary. And we know that
3958 cfun->stack_protect_guard is a local stack slot, so this skips
3959 all the fluff. */
3960 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
3961 y = validize_mem (DECL_RTL (guard_decl));
3962
3963 /* Allow the target to copy from Y to X without leaking Y into a
3964 register. */
3965 if (HAVE_stack_protect_set)
3966 {
3967 rtx insn = gen_stack_protect_set (x, y);
3968 if (insn)
3969 {
3970 emit_insn (insn);
3971 return;
3972 }
3973 }
3974
3975 /* Otherwise do a straight move. */
3976 emit_move_insn (x, y);
3977 }
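/* Conceptually, the fallback move above stores the guard value into
   the local guard slot (a sketch; the guard symbol, modes and
   addresses are target-dependent, and on most targets the move is
   split through a scratch register):

     (set (mem:SI (plus (reg fp) (const_int -4)))        ; guard slot X
          (mem:SI (symbol_ref "__stack_chk_guard")))     ; guard value Y

   A stack_protect_set pattern lets the target do the copy without
   leaving the guard value live in a general register afterwards.  */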
3978
3979 /* Expand code to verify the stack_protect_guard. This is invoked at
3980 the end of a function to be protected. */
3981
3982 #ifndef HAVE_stack_protect_test
3983 # define HAVE_stack_protect_test 0
3984 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
3985 #endif
3986
3987 void
3988 stack_protect_epilogue (void)
3989 {
3990 tree guard_decl = targetm.stack_protect_guard ();
3991 rtx label = gen_label_rtx ();
3992 rtx x, y, tmp;
3993
3994 /* Avoid expand_expr here, because we don't want guard_decl pulled
3995 into registers unless absolutely necessary. And we know that
3996 cfun->stack_protect_guard is a local stack slot, so this skips
3997 all the fluff. */
3998 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
3999 y = validize_mem (DECL_RTL (guard_decl));
4000
4001 /* Allow the target to compare Y with X without leaking either into
4002 a register. */
4003 switch (HAVE_stack_protect_test != 0)
4004 {
4005 case 1:
4006 tmp = gen_stack_protect_test (x, y, label);
4007 if (tmp)
4008 {
4009 emit_insn (tmp);
4010 break;
4011 }
4012 /* FALLTHRU */
4013
4014 default:
4015 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4016 break;
4017 }
4018
4019 /* The noreturn predictor has been moved to the tree level. The rtl-level
4020    predictors estimate the probability of this branch at about 20%, which
4021    isn't enough to get things moved out of line.  Since this is the only
4022    extant case of adding a noreturn function at the rtl level, it doesn't
4023    seem worth doing anything except adding the prediction by hand.  */
4024 tmp = get_last_insn ();
4025 if (JUMP_P (tmp))
4026 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4027
4028 expand_expr_stmt (targetm.stack_protect_fail ());
4029 emit_label (label);
4030 }
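/* In the fallback case the expansion above is, schematically (a
   sketch; the actual fail hook and guard symbol are target- and
   libc-dependent):

     if (guard_slot == guard_value)    ... emit_cmp_and_jump_insns ...
       goto label;                     ... predicted taken ...
     __stack_chk_fail ();              ... targetm.stack_protect_fail ...
   label:

   so the failure call sits on the fall-through path, and the hand-added
   PRED_NORETURN prediction lets it be laid out out of line.  */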
4031 \f
4032 /* Start the RTL for a new function, and set variables used for
4033 emitting RTL.
4034    SUBR is the FUNCTION_DECL node.  */
4037
4038 void
4039 expand_function_start (tree subr)
4040 {
4041 /* Make sure volatile mem refs aren't considered
4042 valid operands of arithmetic insns. */
4043 init_recog_no_volatile ();
4044
4045 current_function_profile
4046 = (profile_flag
4047 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4048
4049 current_function_limit_stack
4050 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4051
4052 /* Make the label for return statements to jump to. Do not special
4053 case machines with special return instructions -- they will be
4054 handled later during jump, ifcvt, or epilogue creation. */
4055 return_label = gen_label_rtx ();
4056
4057 /* Initialize rtx used to return the value. */
4058 /* Do this before assign_parms so that we copy the struct value address
4059 before any library calls that assign parms might generate. */
4060
4061 /* Decide whether to return the value in memory or in a register. */
4062 if (aggregate_value_p (DECL_RESULT (subr), subr))
4063 {
4064 /* Returning something that won't go in a register. */
4065 rtx value_address = 0;
4066
4067 #ifdef PCC_STATIC_STRUCT_RETURN
4068 if (current_function_returns_pcc_struct)
4069 {
4070 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4071 value_address = assemble_static_space (size);
4072 }
4073 else
4074 #endif
4075 {
4076 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4077 /* Expect to be passed the address of a place to store the value.
4078 If it is passed as an argument, assign_parms will take care of
4079 it. */
4080 if (sv)
4081 {
4082 value_address = gen_reg_rtx (Pmode);
4083 emit_move_insn (value_address, sv);
4084 }
4085 }
4086 if (value_address)
4087 {
4088 rtx x = value_address;
4089 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4090 {
4091 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4092 set_mem_attributes (x, DECL_RESULT (subr), 1);
4093 }
4094 SET_DECL_RTL (DECL_RESULT (subr), x);
4095 }
4096 }
4097 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4098 /* If return mode is void, this decl rtl should not be used. */
4099 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4100 else
4101 {
4102 /* Compute the return values into a pseudo reg, which we will copy
4103 into the true return register after the cleanups are done. */
4104 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4105 if (TYPE_MODE (return_type) != BLKmode
4106 && targetm.calls.return_in_msb (return_type))
4107 /* expand_function_end will insert the appropriate padding in
4108 this case. Use the return value's natural (unpadded) mode
4109 within the function proper. */
4110 SET_DECL_RTL (DECL_RESULT (subr),
4111 gen_reg_rtx (TYPE_MODE (return_type)));
4112 else
4113 {
4114      /* To choose a mode for the pseudo, we determine what the mode
4115         of the eventual return register will actually be, and use
4116         that.  */
4117 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4118
4119 /* Structures that are returned in registers are not
4120 aggregate_value_p, so we may see a PARALLEL or a REG. */
4121 if (REG_P (hard_reg))
4122 SET_DECL_RTL (DECL_RESULT (subr),
4123 gen_reg_rtx (GET_MODE (hard_reg)));
4124 else
4125 {
4126 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4127 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4128 }
4129 }
4130
4131 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4132 result to the real return register(s). */
4133 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4134 }
4135
4136 /* Initialize rtx for parameters and local variables.
4137 In some cases this requires emitting insns. */
4138 assign_parms (subr);
4139
4140 /* If function gets a static chain arg, store it. */
4141 if (cfun->static_chain_decl)
4142 {
4143 tree parm = cfun->static_chain_decl;
4144 rtx local = gen_reg_rtx (Pmode);
4145
4146 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4147 SET_DECL_RTL (parm, local);
4148 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4149
4150 emit_move_insn (local, static_chain_incoming_rtx);
4151 }
4152
4153 /* If the function receives a non-local goto, then store the
4154 bits we need to restore the frame pointer. */
4155 if (cfun->nonlocal_goto_save_area)
4156 {
4157 tree t_save;
4158 rtx r_save;
4159
4160      /* ??? We need to do this save early.  Unfortunately, this point is
4161         before the frame variable gets declared.  Help out...  */
4162 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
4163
4164 t_save = build4 (ARRAY_REF, ptr_type_node,
4165 cfun->nonlocal_goto_save_area,
4166 integer_zero_node, NULL_TREE, NULL_TREE);
4167 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4168 r_save = convert_memory_address (Pmode, r_save);
4169
4170 emit_move_insn (r_save, virtual_stack_vars_rtx);
4171 update_nonlocal_goto_save_area ();
4172 }
4173
4174 /* The following was moved from init_function_start.
4175 The move is supposed to make sdb output more accurate. */
4176 /* Indicate the beginning of the function body,
4177 as opposed to parm setup. */
4178 emit_note (NOTE_INSN_FUNCTION_BEG);
4179
4180 gcc_assert (NOTE_P (get_last_insn ()));
4181
4182 parm_birth_insn = get_last_insn ();
4183
4184 if (current_function_profile)
4185 {
4186 #ifdef PROFILE_HOOK
4187 PROFILE_HOOK (current_function_funcdef_no);
4188 #endif
4189 }
4190
4191   /* The stack checking probe should go here, after the display
4192      initializations.  */
4193   if (flag_stack_check)
4194 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4195
4196 /* Make sure there is a line number after the function entry setup code. */
4197 force_next_line_note ();
4198 }
4199 \f
4200 /* Undo the effects of init_dummy_function_start. */
4201 void
4202 expand_dummy_function_end (void)
4203 {
4204 /* End any sequences that failed to be closed due to syntax errors. */
4205 while (in_sequence_p ())
4206 end_sequence ();
4207
4208 /* Outside function body, can't compute type's actual size
4209 until next function's body starts. */
4210
4211 free_after_parsing (cfun);
4212 free_after_compilation (cfun);
4213 cfun = 0;
4214 }
4215
4216 /* Call DOIT for each hard register used as a return value from
4217 the current function. */
4218
4219 void
4220 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4221 {
4222 rtx outgoing = current_function_return_rtx;
4223
4224 if (! outgoing)
4225 return;
4226
4227 if (REG_P (outgoing))
4228 (*doit) (outgoing, arg);
4229 else if (GET_CODE (outgoing) == PARALLEL)
4230 {
4231 int i;
4232
4233 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4234 {
4235 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4236
4237 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4238 (*doit) (x, arg);
4239 }
4240 }
4241 }
4242
4243 static void
4244 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4245 {
4246 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
4247 }
4248
4249 void
4250 clobber_return_register (void)
4251 {
4252 diddle_return_value (do_clobber_return_reg, NULL);
4253
4254   /* In case we do use a pseudo to return the value, clobber it too.  */
4255 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4256 {
4257 tree decl_result = DECL_RESULT (current_function_decl);
4258 rtx decl_rtl = DECL_RTL (decl_result);
4259 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4260 {
4261 do_clobber_return_reg (decl_rtl, NULL);
4262 }
4263 }
4264 }
4265
4266 static void
4267 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4268 {
4269 emit_insn (gen_rtx_USE (VOIDmode, reg));
4270 }
4271
4272 static void
4273 use_return_register (void)
4274 {
4275 diddle_return_value (do_use_return_reg, NULL);
4276 }
4277
4278 /* Possibly warn about unused parameters. */
4279 void
4280 do_warn_unused_parameter (tree fn)
4281 {
4282 tree decl;
4283
4284 for (decl = DECL_ARGUMENTS (fn);
4285 decl; decl = TREE_CHAIN (decl))
4286 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4287 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4288 && !TREE_NO_WARNING (decl))
4289 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4290 }
4291
4292 static GTY(()) rtx initial_trampoline;
4293
4294 /* Generate RTL for the end of the current function. */
4295
4296 void
4297 expand_function_end (void)
4298 {
4299 rtx clobber_after;
4300
4301 /* If arg_pointer_save_area was referenced only from a nested
4302 function, we will not have initialized it yet. Do that now. */
4303 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4304 get_arg_pointer_save_area (cfun);
4305
4306 /* If we are doing stack checking and this function makes calls,
4307 do a stack probe at the start of the function to ensure we have enough
4308 space for another stack frame. */
4309 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4310 {
4311 rtx insn, seq;
4312
4313 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4314 if (CALL_P (insn))
4315 {
4316 start_sequence ();
4317 probe_stack_range (STACK_CHECK_PROTECT,
4318 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4319 seq = get_insns ();
4320 end_sequence ();
4321 emit_insn_before (seq, stack_check_probe_note);
4322 break;
4323 }
4324 }
4325
4326 /* Possibly warn about unused parameters.
4327      When the front end does unit-at-a-time, the warning has already
4328      been issued at finalization time.  */
4329 if (warn_unused_parameter
4330 && !lang_hooks.callgraph.expand_function)
4331 do_warn_unused_parameter (current_function_decl);
4332
4333 /* End any sequences that failed to be closed due to syntax errors. */
4334 while (in_sequence_p ())
4335 end_sequence ();
4336
4337 clear_pending_stack_adjust ();
4338 do_pending_stack_adjust ();
4339
4340   /* Output a line number for the end of the function.
4341 SDB depends on this. */
4342 force_next_line_note ();
4343 set_curr_insn_source_location (input_location);
4344
4345 /* Before the return label (if any), clobber the return
4346 registers so that they are not propagated live to the rest of
4347 the function. This can only happen with functions that drop
4348 through; if there had been a return statement, there would
4349 have either been a return rtx, or a jump to the return label.
4350
4351      We delay actual code generation until after the
4352      current_function_value_rtx is computed.  */
4353 clobber_after = get_last_insn ();
4354
4355 /* Output the label for the actual return from the function. */
4356 emit_label (return_label);
4357
4358 if (USING_SJLJ_EXCEPTIONS)
4359 {
4360 /* Let except.c know where it should emit the call to unregister
4361 the function context for sjlj exceptions. */
4362 if (flag_exceptions)
4363 sjlj_emit_function_exit_after (get_last_insn ());
4364 }
4365 else
4366 {
4367 /* We want to ensure that instructions that may trap are not
4368 moved into the epilogue by scheduling, because we don't
4369 always emit unwind information for the epilogue. */
4370 if (flag_non_call_exceptions)
4371 emit_insn (gen_blockage ());
4372 }
4373
4374 /* If this is an implementation of throw, do what's necessary to
4375 communicate between __builtin_eh_return and the epilogue. */
4376 expand_eh_return ();
4377
4378 /* If scalar return value was computed in a pseudo-reg, or was a named
4379 return value that got dumped to the stack, copy that to the hard
4380 return register. */
4381 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4382 {
4383 tree decl_result = DECL_RESULT (current_function_decl);
4384 rtx decl_rtl = DECL_RTL (decl_result);
4385
4386 if (REG_P (decl_rtl)
4387 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4388 : DECL_REGISTER (decl_result))
4389 {
4390 rtx real_decl_rtl = current_function_return_rtx;
4391
4392 /* This should be set in assign_parms. */
4393 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4394
4395 /* If this is a BLKmode structure being returned in registers,
4396 then use the mode computed in expand_return. Note that if
4397 decl_rtl is memory, then its mode may have been changed,
4398 but that current_function_return_rtx has not. */
4399 if (GET_MODE (real_decl_rtl) == BLKmode)
4400 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4401
4402 /* If a non-BLKmode return value should be padded at the least
4403 significant end of the register, shift it left by the appropriate
4404 amount. BLKmode results are handled using the group load/store
4405 machinery. */
4406 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4407 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4408 {
4409 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4410 REGNO (real_decl_rtl)),
4411 decl_rtl);
4412 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4413 }
4414 /* If a named return value dumped decl_return to memory, then
4415 we may need to re-do the PROMOTE_MODE signed/unsigned
4416 extension. */
4417 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4418 {
4419 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4420
4421 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4422 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4423 &unsignedp, 1);
4424
4425 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4426 }
4427 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4428 {
4429 /* If expand_function_start has created a PARALLEL for decl_rtl,
4430 move the result to the real return registers. Otherwise, do
4431 a group load from decl_rtl for a named return. */
4432 if (GET_CODE (decl_rtl) == PARALLEL)
4433 emit_group_move (real_decl_rtl, decl_rtl);
4434 else
4435 emit_group_load (real_decl_rtl, decl_rtl,
4436 TREE_TYPE (decl_result),
4437 int_size_in_bytes (TREE_TYPE (decl_result)));
4438 }
4439 /* In the case of complex integer modes smaller than a word, we'll
4440 need to generate some non-trivial bitfield insertions. Do that
4441 on a pseudo and not the hard register. */
4442 else if (GET_CODE (decl_rtl) == CONCAT
4443 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4444 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4445 {
4446 int old_generating_concat_p;
4447 rtx tmp;
4448
4449 old_generating_concat_p = generating_concat_p;
4450 generating_concat_p = 0;
4451 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4452 generating_concat_p = old_generating_concat_p;
4453
4454 emit_move_insn (tmp, decl_rtl);
4455 emit_move_insn (real_decl_rtl, tmp);
4456 }
4457 else
4458 emit_move_insn (real_decl_rtl, decl_rtl);
4459 }
4460 }
4461
4462 /* If returning a structure, arrange to return the address of the value
4463 in a place where debuggers expect to find it.
4464
4465 If returning a structure PCC style,
4466 the caller also depends on this value.
4467 And current_function_returns_pcc_struct is not necessarily set. */
4468 if (current_function_returns_struct
4469 || current_function_returns_pcc_struct)
4470 {
4471 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4472 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4473 rtx outgoing;
4474
4475 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4476 type = TREE_TYPE (type);
4477 else
4478 value_address = XEXP (value_address, 0);
4479
4480 outgoing = targetm.calls.function_value (build_pointer_type (type),
4481 current_function_decl, true);
4482
4483 /* Mark this as a function return value so integrate will delete the
4484 assignment and USE below when inlining this function. */
4485 REG_FUNCTION_VALUE_P (outgoing) = 1;
4486
4487 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4488 value_address = convert_memory_address (GET_MODE (outgoing),
4489 value_address);
4490
4491 emit_move_insn (outgoing, value_address);
4492
4493      /* Show the return register used to hold the result (in this case the
4494         address of the result).  */
4495 current_function_return_rtx = outgoing;
4496 }
4497
4498 /* Emit the actual code to clobber return register. */
4499 {
4500 rtx seq;
4501
4502 start_sequence ();
4503 clobber_return_register ();
4504 expand_naked_return ();
4505 seq = get_insns ();
4506 end_sequence ();
4507
4508 emit_insn_after (seq, clobber_after);
4509 }
4510
4511 /* Output the label for the naked return from the function. */
4512 emit_label (naked_return_label);
4513
4514 /* @@@ This is a kludge. We want to ensure that instructions that
4515 may trap are not moved into the epilogue by scheduling, because
4516 we don't always emit unwind information for the epilogue. */
4517 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4518 emit_insn (gen_blockage ());
4519
4520 /* If stack protection is enabled for this function, check the guard. */
4521 if (cfun->stack_protect_guard)
4522 stack_protect_epilogue ();
4523
4524 /* If we had calls to alloca, and this machine needs
4525 an accurate stack pointer to exit the function,
4526 insert some code to save and restore the stack pointer. */
4527 if (! EXIT_IGNORE_STACK
4528 && current_function_calls_alloca)
4529 {
4530 rtx tem = 0;
4531
4532 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4533 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4534 }
4535
4536   /* ??? This should no longer be necessary since stupid.c (the old
4537      register allocator) is no longer with us, but some parts of the
4538      compiler (e.g. reload_combine, and sh mach_dep_reorg) still compute
4539      their own lifetime info instead of using the general framework.  */
4540 use_return_register ();
4541 }
4542
4543 rtx
4544 get_arg_pointer_save_area (struct function *f)
4545 {
4546 rtx ret = f->x_arg_pointer_save_area;
4547
4548 if (! ret)
4549 {
4550 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4551 f->x_arg_pointer_save_area = ret;
4552 }
4553
4554 if (f == cfun && ! f->arg_pointer_save_area_init)
4555 {
4556 rtx seq;
4557
4558 /* Save the arg pointer at the beginning of the function. The
4559 generated stack slot may not be a valid memory address, so we
4560 have to check it and fix it if necessary. */
4561 start_sequence ();
4562 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4563 seq = get_insns ();
4564 end_sequence ();
4565
4566 push_topmost_sequence ();
4567 emit_insn_after (seq, entry_of_function ());
4568 pop_topmost_sequence ();
4569 }
4570
4571 return ret;
4572 }
4573 \f
4574 /* Extend a vector that records the INSN_UIDs of INSNS
4575 (a list of one or more insns). */
4576
4577 static void
4578 record_insns (rtx insns, VEC(int,heap) **vecp)
4579 {
4580 rtx tmp;
4581
4582 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4583 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4584 }
4585
4586 /* Set the locator of the insn chain starting at INSN to LOC. */
4587 static void
4588 set_insn_locators (rtx insn, int loc)
4589 {
4590 while (insn != NULL_RTX)
4591 {
4592 if (INSN_P (insn))
4593 INSN_LOCATOR (insn) = loc;
4594 insn = NEXT_INSN (insn);
4595 }
4596 }
4597
4598 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4599 be running after reorg, SEQUENCE rtl is possible. */
4600
4601 static int
4602 contains (const_rtx insn, VEC(int,heap) **vec)
4603 {
4604 int i, j;
4605
4606 if (NONJUMP_INSN_P (insn)
4607 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4608 {
4609 int count = 0;
4610 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4611 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4612 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4613 == VEC_index (int, *vec, j))
4614 count++;
4615 return count;
4616 }
4617 else
4618 {
4619 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4620 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4621 return 1;
4622 }
4623 return 0;
4624 }
4625
4626 int
4627 prologue_epilogue_contains (const_rtx insn)
4628 {
4629 if (contains (insn, &prologue))
4630 return 1;
4631 if (contains (insn, &epilogue))
4632 return 1;
4633 return 0;
4634 }
4635
4636 int
4637 sibcall_epilogue_contains (const_rtx insn)
4638 {
4639 if (sibcall_epilogue)
4640 return contains (insn, &sibcall_epilogue);
4641 return 0;
4642 }
4643
4644 #ifdef HAVE_return
4645 /* Insert gen_return at the end of block BB. This also means updating
4646 block_for_insn appropriately. */
4647
4648 static void
4649 emit_return_into_block (basic_block bb)
4650 {
4651 emit_jump_insn_after (gen_return (), BB_END (bb));
4652 }
4653 #endif /* HAVE_return */
4654
4655 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4656
4657 /* These functions convert the epilogue into a variant that does not
4658 modify the stack pointer. This is used in cases where a function
4659 returns an object whose size is not known until it is computed.
4660 The called function leaves the object on the stack, leaves the
4661 stack depressed, and returns a pointer to the object.
4662
4663 What we need to do is track all modifications and references to the
4664 stack pointer, deleting the modifications and changing the
4665 references to point to the location the stack pointer would have
4666 pointed to had the modifications taken place.
4667
4668 These functions need to be portable so we need to make as few
4669 assumptions about the epilogue as we can. However, the epilogue
4670 basically contains three things: instructions to reset the stack
4671 pointer, instructions to reload registers, possibly including the
4672 frame pointer, and an instruction to return to the caller.
4673
4674 We must be sure of what a relevant epilogue insn is doing. We also
4675 make no attempt to validate the insns we make since if they are
4676 invalid, we probably can't do anything valid. The intent is that
4677 these routines get "smarter" as more and more machines start to use
4678 them and they try operating on different epilogues.
4679
4680 We use the following structure to track what the part of the
4681 epilogue that we've already processed has done. We keep two copies
4682 of the SP equivalence, one for use during the insn we are
4683 processing and one for use in the next insn. The difference is
4684 because one part of a PARALLEL may adjust SP and the other may use
4685 it. */
4686
4687 struct epi_info
4688 {
4689 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4690 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4691 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4692 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4693 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4694 should be set to once we no longer need
4695 its value. */
4696 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4697 for registers. */
4698 };
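/* A pseudo-RTL sketch of the transformation (hypothetical epilogue;
   register names are illustrative only):

     (set (reg sp) (plus (reg fp) (const_int 16)))
         ... not emitted; recorded as sp_equiv_reg = fp, sp_offset = 16 ...
     (set (reg r1) (mem (plus (reg sp) (const_int 4))))
         ... emitted with SP replaced, i.e. a load from
             (plus (reg fp) (const_int 20)) ...

   so later insns see the address SP would have had, while SP itself is
   left depressed.  */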
4699
4700 static void handle_epilogue_set (rtx, struct epi_info *);
4701 static void update_epilogue_consts (rtx, const_rtx, void *);
4702 static void emit_equiv_load (struct epi_info *);
4703
4704 /* Modify INSNS, a list of one or more insns that is part of the epilogue,
4705    to make no modifications to the stack pointer.  Return the new list of insns.  */
4706
4707 static rtx
4708 keep_stack_depressed (rtx insns)
4709 {
4710 int j;
4711 struct epi_info info;
4712 rtx insn, next;
4713
4714 /* If the epilogue is just a single instruction, it must be OK as is. */
4715 if (NEXT_INSN (insns) == NULL_RTX)
4716 return insns;
4717
4718 /* Otherwise, start a sequence, initialize the information we have, and
4719 process all the insns we were given. */
4720 start_sequence ();
4721
4722 info.sp_equiv_reg = stack_pointer_rtx;
4723 info.sp_offset = 0;
4724 info.equiv_reg_src = 0;
4725
4726 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4727 info.const_equiv[j] = 0;
4728
4729 insn = insns;
4730 next = NULL_RTX;
4731 while (insn != NULL_RTX)
4732 {
4733 next = NEXT_INSN (insn);
4734
4735 if (!INSN_P (insn))
4736 {
4737 add_insn (insn);
4738 insn = next;
4739 continue;
4740 }
4741
4742 /* If this insn references the register that SP is equivalent to and
4743 we have a pending load to that register, we must force out the load
4744 first and then indicate we no longer know what SP's equivalent is. */
4745 if (info.equiv_reg_src != 0
4746 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4747 {
4748 emit_equiv_load (&info);
4749 info.sp_equiv_reg = 0;
4750 }
4751
4752 info.new_sp_equiv_reg = info.sp_equiv_reg;
4753 info.new_sp_offset = info.sp_offset;
4754
4755 /* If this is a (RETURN) and the return address is on the stack,
4756 update the address and change to an indirect jump. */
4757 if (GET_CODE (PATTERN (insn)) == RETURN
4758 || (GET_CODE (PATTERN (insn)) == PARALLEL
4759 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4760 {
4761 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4762 rtx base = 0;
4763 HOST_WIDE_INT offset = 0;
4764 rtx jump_insn, jump_set;
4765
4766 /* If the return address is in a register, we can emit the insn
4767 unchanged. Otherwise, it must be a MEM and we see what the
4768 base register and offset are. In any case, we have to emit any
4769 pending load to the equivalent reg of SP, if any. */
4770 if (REG_P (retaddr))
4771 {
4772 emit_equiv_load (&info);
4773 add_insn (insn);
4774 insn = next;
4775 continue;
4776 }
4777 else
4778 {
4779 rtx ret_ptr;
4780 gcc_assert (MEM_P (retaddr));
4781
4782 ret_ptr = XEXP (retaddr, 0);
4783
4784 if (REG_P (ret_ptr))
4785 {
4786 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
4787 offset = 0;
4788 }
4789 else
4790 {
4791 gcc_assert (GET_CODE (ret_ptr) == PLUS
4792 && REG_P (XEXP (ret_ptr, 0))
4793 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4794 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4795 offset = INTVAL (XEXP (ret_ptr, 1));
4796 }
4797 }
4798
4799 /* If the base of the location containing the return pointer
4800 is SP, we must update it with the replacement address. Otherwise,
4801 just build the necessary MEM. */
4802 retaddr = plus_constant (base, offset);
4803 if (base == stack_pointer_rtx)
4804 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4805 plus_constant (info.sp_equiv_reg,
4806 info.sp_offset));
4807
4808 retaddr = gen_rtx_MEM (Pmode, retaddr);
4809 MEM_NOTRAP_P (retaddr) = 1;
4810
4811 /* If there is a pending load to the equivalent register for SP
4812 and we reference that register, we must load our address into
4813 a scratch register and then do that load. */
4814 if (info.equiv_reg_src
4815 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
4816 {
4817 unsigned int regno;
4818 rtx reg;
4819
4820 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4821 if (HARD_REGNO_MODE_OK (regno, Pmode)
4822 && !fixed_regs[regno]
4823 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4824 && !REGNO_REG_SET_P
4825 (DF_LR_IN (EXIT_BLOCK_PTR), regno)
4826 && !refers_to_regno_p (regno,
4827 end_hard_regno (Pmode, regno),
4828 info.equiv_reg_src, NULL)
4829 && info.const_equiv[regno] == 0)
4830 break;
4831
4832 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4833
4834 reg = gen_rtx_REG (Pmode, regno);
4835 emit_move_insn (reg, retaddr);
4836 retaddr = reg;
4837 }
4838
4839 emit_equiv_load (&info);
4840 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4841
4842 	  /* Show that the SET in the above insn is a RETURN.  */
4843 jump_set = single_set (jump_insn);
4844 gcc_assert (jump_set);
4845 SET_IS_RETURN_P (jump_set) = 1;
4846 }
4847
4848 /* If SP is not mentioned in the pattern and its equivalent register, if
4849 any, is not modified, just emit it. Otherwise, if neither is set,
4850 replace the reference to SP and emit the insn. If none of those are
4851 true, handle each SET individually. */
4852 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4853 && (info.sp_equiv_reg == stack_pointer_rtx
4854 || !reg_set_p (info.sp_equiv_reg, insn)))
4855 add_insn (insn);
4856 else if (! reg_set_p (stack_pointer_rtx, insn)
4857 && (info.sp_equiv_reg == stack_pointer_rtx
4858 || !reg_set_p (info.sp_equiv_reg, insn)))
4859 {
4860 int changed;
4861
4862 changed = validate_replace_rtx (stack_pointer_rtx,
4863 plus_constant (info.sp_equiv_reg,
4864 info.sp_offset),
4865 insn);
4866 gcc_assert (changed);
4867
4868 add_insn (insn);
4869 }
4870 else if (GET_CODE (PATTERN (insn)) == SET)
4871 handle_epilogue_set (PATTERN (insn), &info);
4872 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4873 {
4874 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4875 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4876 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
4877 }
4878 else
4879 add_insn (insn);
4880
4881 info.sp_equiv_reg = info.new_sp_equiv_reg;
4882 info.sp_offset = info.new_sp_offset;
4883
4884 /* Now update any constants this insn sets. */
4885 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4886 insn = next;
4887 }
4888
4889 insns = get_insns ();
4890 end_sequence ();
4891 return insns;
4892 }
4893
4894 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4895 structure that contains information about what we've seen so far. We
4896 process this SET by either updating that data or by emitting one or
4897 more insns. */
4898
4899 static void
4900 handle_epilogue_set (rtx set, struct epi_info *p)
4901 {
4902 /* First handle the case where we are setting SP. Record what it is being
4903      set from, which we must be able to determine.  */
4904 if (reg_set_p (stack_pointer_rtx, set))
4905 {
4906 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
4907
4908 if (GET_CODE (SET_SRC (set)) == PLUS)
4909 {
4910 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4911 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4912 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4913 else
4914 {
4915 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4916 && (REGNO (XEXP (SET_SRC (set), 1))
4917 < FIRST_PSEUDO_REGISTER)
4918 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4919 p->new_sp_offset
4920 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4921 }
4922 }
4923 else
4924 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4925
4926 /* If we are adjusting SP, we adjust from the old data. */
4927 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4928 {
4929 p->new_sp_equiv_reg = p->sp_equiv_reg;
4930 p->new_sp_offset += p->sp_offset;
4931 }
4932
4933 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4934
4935 return;
4936 }
4937
4938 /* Next handle the case where we are setting SP's equivalent
4939 register. We must not already have a value to set it to. We
4940 could update, but there seems little point in handling that case.
4941 Note that we have to allow for the case where we are setting the
4942 register set in the previous part of a PARALLEL inside a single
4943 insn. But use the old offset for any updates within this insn.
4944 We must allow for the case where the register is being set in a
4945      different (usually wider) mode than Pmode.  */
4946 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4947 {
4948 gcc_assert (!p->equiv_reg_src
4949 && REG_P (p->new_sp_equiv_reg)
4950 && REG_P (SET_DEST (set))
4951 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4952 <= BITS_PER_WORD)
4953 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
4954 p->equiv_reg_src
4955 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4956 plus_constant (p->sp_equiv_reg,
4957 p->sp_offset));
4958 }
4959
4960 /* Otherwise, replace any references to SP in the insn to its new value
4961 and emit the insn. */
4962 else
4963 {
4964 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4965 plus_constant (p->sp_equiv_reg,
4966 p->sp_offset));
4967 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
4968 plus_constant (p->sp_equiv_reg,
4969 p->sp_offset));
4970 emit_insn (set);
4971 }
4972 }
4973
4974 /* Update the tracking information for registers set to constants. */
4975
4976 static void
4977 update_epilogue_consts (rtx dest, const_rtx x, void *data)
4978 {
4979 struct epi_info *p = (struct epi_info *) data;
4980 rtx new;
4981
4982 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
4983 return;
4984
4985 /* If we are either clobbering a register or doing a partial set,
4986 show we don't know the value. */
4987 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
4988 p->const_equiv[REGNO (dest)] = 0;
4989
4990 /* If we are setting it to a constant, record that constant. */
4991 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
4992 p->const_equiv[REGNO (dest)] = SET_SRC (x);
4993
4994 /* If this is a binary operation between a register we have been tracking
4995 and a constant, see if we can compute a new constant value. */
4996 else if (ARITHMETIC_P (SET_SRC (x))
4997 && REG_P (XEXP (SET_SRC (x), 0))
4998 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
4999 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5000 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5001 && 0 != (new = simplify_binary_operation
5002 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5003 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5004 XEXP (SET_SRC (x), 1)))
5005 && GET_CODE (new) == CONST_INT)
5006 p->const_equiv[REGNO (dest)] = new;
5007
5008 /* Otherwise, we can't do anything with this value. */
5009 else
5010 p->const_equiv[REGNO (dest)] = 0;
5011 }
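/* A worked example (hypothetical hard register numbers): given the
   epilogue insns

     (set (reg 3) (const_int 8))                  ... const_equiv[3] = 8
     (set (reg 3) (plus (reg 3) (const_int 4)))   ... const_equiv[3] = 12

   a later SP adjustment of the form (set (reg sp) (plus (reg sp)
   (reg 3))) can then be tracked by handle_epilogue_set as a constant
   offset of 12.  */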
5012
5013 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5014
5015 static void
5016 emit_equiv_load (struct epi_info *p)
5017 {
5018 if (p->equiv_reg_src != 0)
5019 {
5020 rtx dest = p->sp_equiv_reg;
5021
5022 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5023 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5024 REGNO (p->sp_equiv_reg));
5025
5026 emit_move_insn (dest, p->equiv_reg_src);
5027 p->equiv_reg_src = 0;
5028 }
5029 }
5030 #endif
5031
5032 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5033 this into place with notes indicating where the prologue ends and where
5034 the epilogue begins. Update the basic block information when possible. */
5035
5036 static void
5037 thread_prologue_and_epilogue_insns (void)
5038 {
5039 int inserted = 0;
5040 edge e;
5041 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5042 rtx seq;
5043 #endif
5044 #if defined (HAVE_epilogue) || defined(HAVE_return)
5045 rtx epilogue_end = NULL_RTX;
5046 #endif
5047 edge_iterator ei;
5048
5049 #ifdef HAVE_prologue
5050 if (HAVE_prologue)
5051 {
5052 start_sequence ();
5053 seq = gen_prologue ();
5054 emit_insn (seq);
5055
5056 /* Insert an explicit USE for the frame pointer
5057 if the profiling is on and the frame pointer is required. */
5058 if (current_function_profile && frame_pointer_needed)
5059 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
5060
5061 /* Retain a map of the prologue insns. */
5062 record_insns (seq, &prologue);
5063 emit_note (NOTE_INSN_PROLOGUE_END);
5064
5065 #ifndef PROFILE_BEFORE_PROLOGUE
5066 /* Ensure that instructions are not moved into the prologue when
5067 profiling is on. The call to the profiling routine can be
5068 emitted within the live range of a call-clobbered register. */
5069 if (current_function_profile)
5070 emit_insn (gen_blockage ());
5071 #endif
5072
5073 seq = get_insns ();
5074 end_sequence ();
5075 set_insn_locators (seq, prologue_locator);
5076
5077 /* Can't deal with multiple successors of the entry block
5078 at the moment. Function should always have at least one
5079 entry point. */
5080 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5081
5082 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5083 inserted = 1;
5084 }
5085 #endif
5086
5087 /* If the exit block has no non-fake predecessors, we don't need
5088 an epilogue. */
5089 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5090 if ((e->flags & EDGE_FAKE) == 0)
5091 break;
5092 if (e == NULL)
5093 goto epilogue_done;
5094
5095 #ifdef HAVE_return
5096 if (optimize && HAVE_return)
5097 {
5098 /* If we're allowed to generate a simple return instruction,
5099 then by definition we don't need a full epilogue. Examine
5100 the block that falls through to EXIT. If it does not
5101 contain any code, examine its predecessors and try to
5102 emit (conditional) return instructions. */
5103
5104 basic_block last;
5105 rtx label;
5106
5107 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5108 if (e->flags & EDGE_FALLTHRU)
5109 break;
5110 if (e == NULL)
5111 goto epilogue_done;
5112 last = e->src;
5113
5114 /* Verify that there are no active instructions in the last block. */
5115 label = BB_END (last);
5116 while (label && !LABEL_P (label))
5117 {
5118 if (active_insn_p (label))
5119 break;
5120 label = PREV_INSN (label);
5121 }
5122
5123 if (BB_HEAD (last) == label && LABEL_P (label))
5124 {
5125 edge_iterator ei2;
5126
5127 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5128 {
5129 basic_block bb = e->src;
5130 rtx jump;
5131
5132 if (bb == ENTRY_BLOCK_PTR)
5133 {
5134 ei_next (&ei2);
5135 continue;
5136 }
5137
5138 jump = BB_END (bb);
5139 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5140 {
5141 ei_next (&ei2);
5142 continue;
5143 }
5144
5145 /* If we have an unconditional jump, we can replace that
5146 with a simple return instruction. */
5147 if (simplejump_p (jump))
5148 {
5149 emit_return_into_block (bb);
5150 delete_insn (jump);
5151 }
5152
5153 /* If we have a conditional jump, we can try to replace
5154 that with a conditional return instruction. */
5155 else if (condjump_p (jump))
5156 {
5157 if (! redirect_jump (jump, 0, 0))
5158 {
5159 ei_next (&ei2);
5160 continue;
5161 }
5162
5163 /* If this block has only one successor, it both jumps
5164 and falls through to the fallthru block, so we can't
5165 delete the edge. */
5166 if (single_succ_p (bb))
5167 {
5168 ei_next (&ei2);
5169 continue;
5170 }
5171 }
5172 else
5173 {
5174 ei_next (&ei2);
5175 continue;
5176 }
5177
5178 /* Fix up the CFG for the successful change we just made. */
5179 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5180 }
5181
5182 /* Emit a return insn for the exit fallthru block. Whether
5183 this is still reachable will be determined later. */
5184
5185 emit_barrier_after (BB_END (last));
5186 emit_return_into_block (last);
5187 epilogue_end = BB_END (last);
5188 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5189 goto epilogue_done;
5190 }
5191 }
5192 #endif
5193 /* Find the edge that falls through to EXIT. Other edges may exist
5194 due to RETURN instructions, but those don't need epilogues.
5195 There really shouldn't be a mixture -- either all should have
5196 been converted or none, however... */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);

      seq = gen_epilogue ();

#ifdef INCOMING_RETURN_ADDR_RTX
      /* If this function returns with the stack depressed and we can support
	 it, massage the epilogue to actually do that.  */
      if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
	seq = keep_stack_depressed (seq);
#endif

      emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, &epilogue);
      set_insn_locators (seq, epilogue_locator);

      seq = get_insns ();
      end_sequence ();

      insert_insn_on_edge (seq, e);
      inserted = 1;
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (e->src)))
	goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
	 at the end of the function, and there will be an assembler epilogue
	 at the end of the function.
	 We can't use force_nonfallthru here, because that would try to
	 use return.  Inserting a jump 'by hand' is extremely messy, so
	 we take advantage of cfg_layout_finalize using
	 fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
	if (cur_bb->index >= NUM_FIXED_BLOCKS
	    && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
	  cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
    }
 epilogue_done:

  if (inserted)
    {
      commit_edge_insertions ();

      /* The epilogue insns we inserted may cause the exit edge to no longer
	 be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  if (((e->flags & EDGE_FALLTHRU) != 0)
	      && returnjump_p (BB_END (e->src)))
	    e->flags &= ~EDGE_FALLTHRU;
	}
    }

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);

      if (!CALL_P (insn)
	  || ! SIBLING_CALL_P (insn))
	{
	  ei_next (&ei);
	  continue;
	}

      start_sequence ();
      emit_insn (gen_sibcall_epilogue ());
      seq = get_insns ();
      end_sequence ();

      /* Retain a map of the epilogue insns.  Used in life analysis to
	 avoid getting rid of sibcall epilogue insns.  Do this before we
	 actually emit the sequence.  */
      record_insns (seq, &sibcall_epilogue);
      set_insn_locators (seq, epilogue_locator);

      emit_insn_before (seq, insn);
      ei_next (&ei);
    }
#endif

#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any NOTE_INSN_FUNCTION_BEG note that appears
	 after the epilogue, as such notes can be relevant for debug
	 info generation.  There is no need, however, to be strict
	 about the existence of such a note.  */
      for (insn = epilogue_end; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
	}
    }
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  rtx insn, last, note;
  int len;

  if ((len = VEC_length (int, prologue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the beginning until we reach the last prologue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, &prologue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the prologue-end note if we haven't already, and
	     move it to just after the last prologue insn.  */
	  if (note == 0)
	    {
	      for (note = last; (note = NEXT_INSN (note));)
		if (NOTE_P (note)
		    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		  break;
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }

  if ((len = VEC_length (int, epilogue)) > 0)
    {
      last = 0, note = 0;

      /* Scan from the end until we reach the first epilogue insn.
	 We apparently can't depend on basic_block_{head,end} after
	 reorg has run.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		note = insn;
	    }
	  else if (contains (insn, &epilogue))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  /* Find the epilogue-begin note if we haven't already, and
	     move it to just before the first epilogue insn.  */
	  if (note == 0)
	    {
	      for (note = insn; (note = PREV_INSN (note));)
		if (NOTE_P (note)
		    && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
		  break;
	    }

	  if (PREV_INSN (last) != note)
	    reorder_insns (note, note, PREV_INSN (last));
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
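
/* An illustrative note (not from the original sources): on a target
   with delay slots, the delayed branch scheduler may pull a prologue
   insn past the NOTE_INSN_PROLOGUE_END note, or hoist an epilogue insn
   ahead of NOTE_INSN_EPILOGUE_BEG.  The pass above rescans the insn
   stream and moves each note back to the true prologue/epilogue
   boundary so debug info keeps marking those boundaries correctly.  */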

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return lang_hooks.decl_printable_name (cfun->decl, 2);
}

/* Returns the raw (mangled) name of the current function.  */
const char *
current_function_assembler_name (void)
{
  return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
}
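
/* A minimal usage sketch (illustrative only): these accessors are
   handy in debugging dumps, e.g.

     fprintf (dump_file, "processing %s (%s)\n",
	      current_function_name (),
	      current_function_assembler_name ());

   where dump_file is assumed to be an open FILE * dump stream.  */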
\f

static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  current_function_uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
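
/* An illustrative, target-specific note (not from the original
   sources): on a target such as SPARC that defines LEAF_REGISTERS, a
   function that makes no calls and touches only the leaf register set
   can be emitted without a register window save/restore, so setting
   current_function_uses_only_leaf_regs here lets the backend shrink
   the prologue and epilogue accordingly.  */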

/* Insert a TYPE into the used types hash table of CFUN.  */
static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
	func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
						 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
	*slot = type;
    }
}
/* Given a type, insert it into the used-types hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    t = TREE_TYPE (t);
  t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    used_types_insert_helper (t, cfun);
}
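
/* For example (an illustrative sketch, not from the original sources),
   given a declaration such as

     struct point *grid[16];

   used_types_insert peels off the ARRAY_TYPE and POINTER_TYPE wrappers
   and records only the main variant of `struct point', so each base
   type is entered into the hash table once, regardless of how it is
   wrapped -- and only when debug info generation is enabled.  */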

struct tree_opt_pass pass_leaf_regs =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};

static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);
  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */

  thread_prologue_and_epilogue_insns ();
  return 0;
}

struct tree_opt_pass pass_thread_prologue_and_epilogue =
{
  "pro_and_epilogue",                   /* name */
  NULL,                                 /* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_verify_flow,                     /* todo_flags_start */
  TODO_dump_func |
  TODO_df_verify |
  TODO_df_finish |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'w'                                   /* letter */
};
\f

/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hard registers, not by memory, so we need a register for
   this reload, just because we are presented with non-matching operands.
   So, even though the constraint allows memory for this operand, none
   can actually be used, simply because the two operands don't match;
   this can cause reload failures on register-starved targets.

   This is thus a symptom of reload being unable to use memory for
   reloads, but it is also a symptom of the two operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
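
/* For example (an illustrative C-level reproducer, not from the
   original sources), the situation above can arise from code like

     long
     f (long x)
     {
       long orig = x;
       asm ("" : "+mr" (x));
       return orig + x;
     }

   where `orig' keeps the pre-asm value live across the asm, so the two
   matching asm operands are expanded into different pseudos.  */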

static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);

  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match;

      /* A matching constraint is a decimal operand number; if the
	 constraint doesn't start with a digit, this input doesn't
	 match an output and can be skipped.  */
      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* There is nothing to fix if input and output are already the
	 same rtx, and nothing we can do if their modes conflict.  */
      if (rtx_equal_p (output, input)
	  || (GET_MODE (input) != VOIDmode
	      && GET_MODE (input) != GET_MODE (output)))
	continue;

      /* Copy the input's value into the output operand and make the
	 asm read it back from there, so both matching operands refer
	 to the same rtx.  */
      start_sequence ();
      emit_move_insn (copy_rtx (output), input);
      RTVEC_ELT (inputs, i) = copy_rtx (output);
      insns = get_insns ();
      end_sequence ();

      emit_insn_before (insns, insn);
      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}

static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!cfun->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}

struct tree_opt_pass pass_match_asm_constraints =
{
  "asmcons",                            /* name */
  NULL,                                 /* gate */
  rest_of_match_asm_constraints,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};


#include "gt-function.h"