1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010, 2011, 2012 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl-error.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "hashtab.h"
55 #include "ggc.h"
56 #include "tm_p.h"
57 #include "integrate.h"
58 #include "langhooks.h"
59 #include "target.h"
60 #include "common/common-target.h"
61 #include "cfglayout.h"
62 #include "gimple.h"
63 #include "tree-pass.h"
64 #include "predict.h"
65 #include "df.h"
66 #include "timevar.h"
67 #include "vecprim.h"
68 #include "params.h"
69 #include "bb-reorder.h"
70
71 /* So we can assign to cfun in this file. */
72 #undef cfun
73
74 #ifndef STACK_ALIGNMENT_NEEDED
75 #define STACK_ALIGNMENT_NEEDED 1
76 #endif
77
78 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
79
80 /* Some systems use __main in a way incompatible with its use in gcc; in these
81 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
82 give the same symbol without quotes for an alternative entry point. You
83 must define both, or neither. */
84 #ifndef NAME__MAIN
85 #define NAME__MAIN "__main"
86 #endif
87
88 /* Round a value down to the largest multiple of the required alignment
89 that does not exceed it. Avoid using division in case the value is
90 negative. Assume the alignment is a power of two. */
91 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
92
93 /* Similar, but round up to the smallest multiple of the alignment that is
94 not less than the value. */
95 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
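/* Illustrative values: FLOOR_ROUND (13, 8) == 8 and CEIL_ROUND (13, 8) == 16,
   while both leave an already-aligned value such as 16 unchanged. The mask
   trick also behaves for negative values, e.g. FLOOR_ROUND (-13, 8) == -16,
   which division-based rounding would not guarantee. */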
96
97 /* Nonzero if function being compiled doesn't contain any calls
98 (ignoring the prologue and epilogue). This is set prior to
99 local register allocation and is valid for the remaining
100 compiler passes. */
101 int current_function_is_leaf;
102
103 /* Nonzero if function being compiled doesn't modify the stack pointer
104 (ignoring the prologue and epilogue). This is only valid after
105 pass_stack_ptr_mod has run. */
106 int current_function_sp_is_unchanging;
107
108 /* Nonzero if the function being compiled is a leaf function which only
109 uses leaf registers. This is valid after reload (specifically after
110 sched2) and is useful only if the port defines LEAF_REGISTERS. */
111 int current_function_uses_only_leaf_regs;
112
113 /* Nonzero once virtual register instantiation has been done.
114 assign_stack_local uses frame_pointer_rtx when this is nonzero.
115 calls.c:emit_library_call_value_1 uses it to set up
116 post-instantiation libcalls. */
117 int virtuals_instantiated;
118
119 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
120 static GTY(()) int funcdef_no;
121
122 /* This variable holds a pointer to the function used to create
123 target-specific, per-function data structures. */
124 struct machine_function * (*init_machine_status) (void);
125
126 /* The currently compiled function. */
127 struct function *cfun = 0;
128
129 /* These hashes record the prologue and epilogue insns. */
130 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
131 htab_t prologue_insn_hash;
132 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
133 htab_t epilogue_insn_hash;
134 \f
135
136 htab_t types_used_by_vars_hash = NULL;
137 VEC(tree,gc) *types_used_by_cur_var_decl;
138
139 /* Forward declarations. */
140
141 static struct temp_slot *find_temp_slot_from_address (rtx);
142 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
143 static void pad_below (struct args_size *, enum machine_mode, tree);
144 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
145 static int all_blocks (tree, tree *);
146 static tree *get_block_vector (tree, int *);
147 extern tree debug_find_var_in_block_tree (tree, tree);
148 /* We always define `record_insns' even if it's not used so that we
149 can always export `prologue_epilogue_contains'. */
150 static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
151 static bool contains (const_rtx, htab_t);
152 static void prepare_function_start (void);
153 static void do_clobber_return_reg (rtx, void *);
154 static void do_use_return_reg (rtx, void *);
155 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
156 \f
157 /* Stack of nested functions. This keeps track of the outer cfun
158 values saved by push_function_context. */
159
160 typedef struct function *function_p;
161
162 DEF_VEC_P(function_p);
163 DEF_VEC_ALLOC_P(function_p,heap);
164 static VEC(function_p,heap) *function_context_stack;
165
166 /* Save the current context for compilation of a nested function.
167 This is called from language-specific code. */
168
169 void
170 push_function_context (void)
171 {
172 if (cfun == 0)
173 allocate_struct_function (NULL, false);
174
175 VEC_safe_push (function_p, heap, function_context_stack, cfun);
176 set_cfun (NULL);
177 }
178
179 /* Restore the last saved context, at the end of a nested function.
180 This function is called from language-specific code. */
181
182 void
183 pop_function_context (void)
184 {
185 struct function *p = VEC_pop (function_p, function_context_stack);
186 set_cfun (p);
187 current_function_decl = p->decl;
188
189 /* Reset variables that have known state during rtx generation. */
190 virtuals_instantiated = 0;
191 generating_concat_p = 1;
192 }
193
194 /* Clear out all parts of the state in F that can safely be discarded
195 after the function has been parsed, but not compiled, to let
196 garbage collection reclaim the memory. */
197
198 void
199 free_after_parsing (struct function *f)
200 {
201 f->language = 0;
202 }
203
204 /* Clear out all parts of the state in F that can safely be discarded
205 after the function has been compiled, to let garbage collection
206 reclaim the memory. */
207
208 void
209 free_after_compilation (struct function *f)
210 {
211 prologue_insn_hash = NULL;
212 epilogue_insn_hash = NULL;
213
214 free (crtl->emit.regno_pointer_align);
215
216 memset (crtl, 0, sizeof (struct rtl_data));
217 f->eh = NULL;
218 f->machine = NULL;
219 f->cfg = NULL;
220
221 regno_reg_rtx = NULL;
222 insn_locators_free ();
223 }
224 \f
225 /* Return size needed for stack frame based on slots so far allocated.
226 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
227 the caller may have to do that. */
228
229 HOST_WIDE_INT
230 get_frame_size (void)
231 {
232 if (FRAME_GROWS_DOWNWARD)
233 return -frame_offset;
234 else
235 return frame_offset;
236 }
237
238 /* Issue an error message and return TRUE if frame OFFSET overflows in
239 the signed target pointer arithmetic for function FUNC. Otherwise
240 return FALSE. */
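/* (Illustrative: with a 32-bit Pmode and 4-byte words, the check below
   fires once the locals exceed 2**31 - 256 bytes.) */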
241
242 bool
243 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
244 {
245 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
246
247 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
248 /* Leave room for the fixed part of the frame. */
249 - 64 * UNITS_PER_WORD)
250 {
251 error_at (DECL_SOURCE_LOCATION (func),
252 "total size of local objects too large");
253 return TRUE;
254 }
255
256 return FALSE;
257 }
258
259 /* Return stack slot alignment in bits for TYPE and MODE. */
260
261 static unsigned int
262 get_stack_local_alignment (tree type, enum machine_mode mode)
263 {
264 unsigned int alignment;
265
266 if (mode == BLKmode)
267 alignment = BIGGEST_ALIGNMENT;
268 else
269 alignment = GET_MODE_ALIGNMENT (mode);
270
271 /* Allow the front-end to (possibly) increase the alignment of this
272 stack slot. */
273 if (! type)
274 type = lang_hooks.types.type_for_mode (mode, 0);
275
276 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
277 }
278
279 /* Determine whether it is possible to fit a stack slot of size SIZE and
280 alignment ALIGNMENT into an area in the stack frame that starts at
281 frame offset START and has a length of LENGTH. If so, store the frame
282 offset to be used for the stack slot in *POFFSET and return true;
283 return false otherwise. This function will extend the frame size when
284 given a start/length pair that lies at the end of the frame. */
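/* (Illustrative, assuming STARTING_FRAME_OFFSET leaves FRAME_PHASE at zero:
   with FRAME_GROWS_DOWNWARD, START == -32, LENGTH == 32, SIZE == 12 and
   ALIGNMENT == 8, the slot lands at FLOOR_ROUND (-32 + 32 - 12, 8) == -16,
   which fits because -16 >= -32 and -16 + 12 <= 0.) */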
285
286 static bool
287 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
288 HOST_WIDE_INT size, unsigned int alignment,
289 HOST_WIDE_INT *poffset)
290 {
291 HOST_WIDE_INT this_frame_offset;
292 int frame_off, frame_alignment, frame_phase;
293
294 /* Calculate how many bytes the start of local variables is off from
295 stack alignment. */
296 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
297 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
298 frame_phase = frame_off ? frame_alignment - frame_off : 0;
299
300 /* Round the frame offset to the specified alignment. */
301
302 /* We must be careful here, since FRAME_OFFSET might be negative and
303 division with a negative dividend isn't as well defined as we might
304 like. So we instead assume that ALIGNMENT is a power of two and
305 use logical operations which are unambiguous. */
306 if (FRAME_GROWS_DOWNWARD)
307 this_frame_offset
308 = (FLOOR_ROUND (start + length - size - frame_phase,
309 (unsigned HOST_WIDE_INT) alignment)
310 + frame_phase);
311 else
312 this_frame_offset
313 = (CEIL_ROUND (start - frame_phase,
314 (unsigned HOST_WIDE_INT) alignment)
315 + frame_phase);
316
317 /* See if it fits. If this space is at the edge of the frame,
318 consider extending the frame to make it fit. Our caller relies on
319 this when allocating a new slot. */
320 if (frame_offset == start && this_frame_offset < frame_offset)
321 frame_offset = this_frame_offset;
322 else if (this_frame_offset < start)
323 return false;
324 else if (start + length == frame_offset
325 && this_frame_offset + size > start + length)
326 frame_offset = this_frame_offset + size;
327 else if (this_frame_offset + size > start + length)
328 return false;
329
330 *poffset = this_frame_offset;
331 return true;
332 }
333
334 /* Create a new frame_space structure describing free space in the stack
335 frame beginning at START and ending at END, and chain it into the
336 function's frame_space_list. */
337
338 static void
339 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
340 {
341 struct frame_space *space = ggc_alloc_frame_space ();
342 space->next = crtl->frame_space_list;
343 crtl->frame_space_list = space;
344 space->start = start;
345 space->length = end - start;
346 }
347
348 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
349 with machine mode MODE.
350
351 ALIGN controls the amount of alignment for the address of the slot:
352 0 means according to MODE,
353 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
354 -2 means use BITS_PER_UNIT,
355 positive specifies alignment boundary in bits.
356
357 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
358 alignment and ASLK_RECORD_PAD bit set if we should remember
359 extra space we allocated for alignment purposes. When we are
360 called from assign_stack_temp_for_type, it is not set so we don't
361 track the same stack slot in two independent lists.
362
363 We do not round to stack_boundary here. */
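/* (For instance, assign_stack_local_1 (SImode, 4, 0, ASLK_RECORD_PAD)
   reserves a 4-byte slot aligned as required for SImode, whereas passing
   ALIGN == -1 instead would round the size up to a multiple of
   BIGGEST_ALIGNMENT / BITS_PER_UNIT. Illustrative call only.) */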
364
365 rtx
366 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
367 int align, int kind)
368 {
369 rtx x, addr;
370 int bigend_correction = 0;
371 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
372 unsigned int alignment, alignment_in_bits;
373
374 if (align == 0)
375 {
376 alignment = get_stack_local_alignment (NULL, mode);
377 alignment /= BITS_PER_UNIT;
378 }
379 else if (align == -1)
380 {
381 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
382 size = CEIL_ROUND (size, alignment);
383 }
384 else if (align == -2)
385 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
386 else
387 alignment = align / BITS_PER_UNIT;
388
389 alignment_in_bits = alignment * BITS_PER_UNIT;
390
391 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
392 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
393 {
394 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
395 alignment = alignment_in_bits / BITS_PER_UNIT;
396 }
397
398 if (SUPPORTS_STACK_ALIGNMENT)
399 {
400 if (crtl->stack_alignment_estimated < alignment_in_bits)
401 {
402 if (!crtl->stack_realign_processed)
403 crtl->stack_alignment_estimated = alignment_in_bits;
404 else
405 {
406 /* If stack is realigned and stack alignment value
407 hasn't been finalized, it is OK not to increase
408 stack_alignment_estimated. The bigger alignment
409 requirement is recorded in stack_alignment_needed
410 below. */
411 gcc_assert (!crtl->stack_realign_finalized);
412 if (!crtl->stack_realign_needed)
413 {
414 /* It is OK to reduce the alignment as long as the
415 requested size is 0 or the estimated stack
416 alignment >= mode alignment. */
417 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
418 || size == 0
419 || (crtl->stack_alignment_estimated
420 >= GET_MODE_ALIGNMENT (mode)));
421 alignment_in_bits = crtl->stack_alignment_estimated;
422 alignment = alignment_in_bits / BITS_PER_UNIT;
423 }
424 }
425 }
426 }
427
428 if (crtl->stack_alignment_needed < alignment_in_bits)
429 crtl->stack_alignment_needed = alignment_in_bits;
430 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
431 crtl->max_used_stack_slot_alignment = alignment_in_bits;
432
433 if (mode != BLKmode || size != 0)
434 {
435 if (kind & ASLK_RECORD_PAD)
436 {
437 struct frame_space **psp;
438
439 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
440 {
441 struct frame_space *space = *psp;
442 if (!try_fit_stack_local (space->start, space->length, size,
443 alignment, &slot_offset))
444 continue;
445 *psp = space->next;
446 if (slot_offset > space->start)
447 add_frame_space (space->start, slot_offset);
448 if (slot_offset + size < space->start + space->length)
449 add_frame_space (slot_offset + size,
450 space->start + space->length);
451 goto found_space;
452 }
453 }
454 }
455 else if (!STACK_ALIGNMENT_NEEDED)
456 {
457 slot_offset = frame_offset;
458 goto found_space;
459 }
460
461 old_frame_offset = frame_offset;
462
463 if (FRAME_GROWS_DOWNWARD)
464 {
465 frame_offset -= size;
466 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
467
468 if (kind & ASLK_RECORD_PAD)
469 {
470 if (slot_offset > frame_offset)
471 add_frame_space (frame_offset, slot_offset);
472 if (slot_offset + size < old_frame_offset)
473 add_frame_space (slot_offset + size, old_frame_offset);
474 }
475 }
476 else
477 {
478 frame_offset += size;
479 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
480
481 if (kind & ASLK_RECORD_PAD)
482 {
483 if (slot_offset > old_frame_offset)
484 add_frame_space (old_frame_offset, slot_offset);
485 if (slot_offset + size < frame_offset)
486 add_frame_space (slot_offset + size, frame_offset);
487 }
488 }
489
490 found_space:
491 /* On a big-endian machine, if we are allocating more space than we will use,
492 use the least significant bytes of those that are allocated. */
493 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
494 bigend_correction = size - GET_MODE_SIZE (mode);
495
496 /* If we have already instantiated virtual registers, return the actual
497 address relative to the frame pointer. */
498 if (virtuals_instantiated)
499 addr = plus_constant (frame_pointer_rtx,
500 trunc_int_for_mode
501 (slot_offset + bigend_correction
502 + STARTING_FRAME_OFFSET, Pmode));
503 else
504 addr = plus_constant (virtual_stack_vars_rtx,
505 trunc_int_for_mode
506 (slot_offset + bigend_correction,
507 Pmode));
508
509 x = gen_rtx_MEM (mode, addr);
510 set_mem_align (x, alignment_in_bits);
511 MEM_NOTRAP_P (x) = 1;
512
513 stack_slot_list
514 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
515
516 if (frame_offset_overflow (frame_offset, current_function_decl))
517 frame_offset = 0;
518
519 return x;
520 }
521
522 /* Wrap up assign_stack_local_1 with the last parameter ASLK_RECORD_PAD. */
523
524 rtx
525 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
526 {
527 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
528 }
529 \f
530 \f
531 /* In order to evaluate some expressions, such as function calls returning
532 structures in memory, we need to temporarily allocate stack locations.
533 We record each allocated temporary in the following structure.
534
535 Associated with each temporary slot is a nesting level. When we pop up
536 one level, all temporaries associated with the previous level are freed.
537 Normally, all temporaries are freed after the execution of the statement
538 in which they were created. However, if we are inside a ({...}) grouping,
539 the result may be in a temporary and hence must be preserved. If the
540 result could be in a temporary, we preserve it if we can determine which
541 one it is in. If we cannot determine which temporary may contain the
542 result, all temporaries are preserved. A temporary is preserved by
543 pretending it was allocated at the previous nesting level.
544
545 Automatic variables are also assigned temporary slots, at the nesting
546 level where they are defined. They are marked as "kept" so that
547 free_temp_slots will not free them. */
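/* (Typical usage, illustrative only: call push_temp_slots () before
   expanding a construct, obtain temporaries with assign_stack_temp or
   assign_temp while expanding it, and call pop_temp_slots () afterwards
   to release every slot of that nesting level.) */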
548
549 struct GTY(()) temp_slot {
550 /* Points to next temporary slot. */
551 struct temp_slot *next;
552 /* Points to previous temporary slot. */
553 struct temp_slot *prev;
554 /* The rtx used to reference the slot. */
555 rtx slot;
556 /* The size, in units, of the slot. */
557 HOST_WIDE_INT size;
558 /* The type of the object in the slot, or zero if it doesn't correspond
559 to a type. We use this to determine whether a slot can be reused.
560 It can be reused if objects of the type of the new slot will always
561 conflict with objects of the type of the old slot. */
562 tree type;
563 /* The alignment (in bits) of the slot. */
564 unsigned int align;
565 /* Nonzero if this temporary is currently in use. */
566 char in_use;
567 /* Nonzero if this temporary has its address taken. */
568 char addr_taken;
569 /* Nesting level at which this slot is being used. */
570 int level;
571 /* Nonzero if this should survive a call to free_temp_slots. */
572 int keep;
573 /* The offset of the slot from the frame_pointer, including extra space
574 for alignment. This info is for combine_temp_slots. */
575 HOST_WIDE_INT base_offset;
576 /* The size of the slot, including extra space for alignment. This
577 info is for combine_temp_slots. */
578 HOST_WIDE_INT full_size;
579 };
580
581 /* A table of addresses that represent a stack slot. The table is a mapping
582 from address RTXen to a temp slot. */
583 static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
584
585 /* Entry for the above hash table. */
586 struct GTY(()) temp_slot_address_entry {
587 hashval_t hash;
588 rtx address;
589 struct temp_slot *temp_slot;
590 };
591
592 /* Removes temporary slot TEMP from LIST. */
593
594 static void
595 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
596 {
597 if (temp->next)
598 temp->next->prev = temp->prev;
599 if (temp->prev)
600 temp->prev->next = temp->next;
601 else
602 *list = temp->next;
603
604 temp->prev = temp->next = NULL;
605 }
606
607 /* Inserts temporary slot TEMP to LIST. */
608
609 static void
610 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
611 {
612 temp->next = *list;
613 if (*list)
614 (*list)->prev = temp;
615 temp->prev = NULL;
616 *list = temp;
617 }
618
619 /* Returns the list of used temp slots at LEVEL. */
620
621 static struct temp_slot **
622 temp_slots_at_level (int level)
623 {
624 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
625 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
626
627 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
628 }
629
630 /* Returns the maximal temporary slot level. */
631
632 static int
633 max_slot_level (void)
634 {
635 if (!used_temp_slots)
636 return -1;
637
638 return VEC_length (temp_slot_p, used_temp_slots) - 1;
639 }
640
641 /* Moves temporary slot TEMP to LEVEL. */
642
643 static void
644 move_slot_to_level (struct temp_slot *temp, int level)
645 {
646 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
647 insert_slot_to_list (temp, temp_slots_at_level (level));
648 temp->level = level;
649 }
650
651 /* Make temporary slot TEMP available. */
652
653 static void
654 make_slot_available (struct temp_slot *temp)
655 {
656 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
657 insert_slot_to_list (temp, &avail_temp_slots);
658 temp->in_use = 0;
659 temp->level = -1;
660 }
661
662 /* Compute the hash value for an address -> temp slot mapping.
663 The value is cached on the mapping entry. */
664 static hashval_t
665 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
666 {
667 int do_not_record = 0;
668 return hash_rtx (t->address, GET_MODE (t->address),
669 &do_not_record, NULL, false);
670 }
671
672 /* Return the hash value for an address -> temp slot mapping. */
673 static hashval_t
674 temp_slot_address_hash (const void *p)
675 {
676 const struct temp_slot_address_entry *t;
677 t = (const struct temp_slot_address_entry *) p;
678 return t->hash;
679 }
680
681 /* Compare two address -> temp slot mapping entries. */
682 static int
683 temp_slot_address_eq (const void *p1, const void *p2)
684 {
685 const struct temp_slot_address_entry *t1, *t2;
686 t1 = (const struct temp_slot_address_entry *) p1;
687 t2 = (const struct temp_slot_address_entry *) p2;
688 return exp_equiv_p (t1->address, t2->address, 0, true);
689 }
690
691 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
692 static void
693 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
694 {
695 void **slot;
696 struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
697 t->address = address;
698 t->temp_slot = temp_slot;
699 t->hash = temp_slot_address_compute_hash (t);
700 slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
701 *slot = t;
702 }
703
704 /* Remove an address -> temp slot mapping entry if the temp slot is
705 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
706 static int
707 remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
708 {
709 const struct temp_slot_address_entry *t;
710 t = (const struct temp_slot_address_entry *) *slot;
711 if (! t->temp_slot->in_use)
712 *slot = NULL;
713 return 1;
714 }
715
716 /* Remove all mappings of addresses to unused temp slots. */
717 static void
718 remove_unused_temp_slot_addresses (void)
719 {
720 htab_traverse (temp_slot_address_table,
721 remove_unused_temp_slot_addresses_1,
722 NULL);
723 }
724
725 /* Find the temp slot corresponding to the object at address X. */
726
727 static struct temp_slot *
728 find_temp_slot_from_address (rtx x)
729 {
730 struct temp_slot *p;
731 struct temp_slot_address_entry tmp, *t;
732
733 /* First try the easy way:
734 See if X exists in the address -> temp slot mapping. */
735 tmp.address = x;
736 tmp.temp_slot = NULL;
737 tmp.hash = temp_slot_address_compute_hash (&tmp);
738 t = (struct temp_slot_address_entry *)
739 htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
740 if (t)
741 return t->temp_slot;
742
743 /* If we have a sum involving a register, see if it points to a temp
744 slot. */
745 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
746 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
747 return p;
748 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
749 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
750 return p;
751
752 /* Last resort: Address is a virtual stack var address. */
753 if (GET_CODE (x) == PLUS
754 && XEXP (x, 0) == virtual_stack_vars_rtx
755 && CONST_INT_P (XEXP (x, 1)))
756 {
757 int i;
758 for (i = max_slot_level (); i >= 0; i--)
759 for (p = *temp_slots_at_level (i); p; p = p->next)
760 {
761 if (INTVAL (XEXP (x, 1)) >= p->base_offset
762 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
763 return p;
764 }
765 }
766
767 return NULL;
768 }
769 \f
770 /* Allocate a temporary stack slot and record it for possible later
771 reuse.
772
773 MODE is the machine mode to be given to the returned rtx.
774
775 SIZE is the size in units of the space required. We do no rounding here
776 since assign_stack_local will do any required rounding.
777
778 KEEP is 1 if this slot is to be retained after a call to
779 free_temp_slots. Automatic variables for a block are allocated
780 with this flag. KEEP values of 2 or 3 were needed respectively
781 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
782 or for SAVE_EXPRs, but they are now unused.
783
784 TYPE is the type that will be used for the stack slot. */
785
786 rtx
787 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
788 int keep, tree type)
789 {
790 unsigned int align;
791 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
792 rtx slot;
793
794 /* If SIZE is -1 it means that somebody tried to allocate a temporary
795 of a variable size. */
796 gcc_assert (size != -1);
797
798 /* These are now unused. */
799 gcc_assert (keep <= 1);
800
801 align = get_stack_local_alignment (type, mode);
802
803 /* Try to find an available, already-allocated temporary of the proper
804 mode which meets the size and alignment requirements. Choose the
805 smallest one with the closest alignment.
806
807 If assign_stack_temp is called outside of the tree->rtl expansion,
808 we cannot reuse the stack slots (that may still refer to
809 VIRTUAL_STACK_VARS_REGNUM). */
810 if (!virtuals_instantiated)
811 {
812 for (p = avail_temp_slots; p; p = p->next)
813 {
814 if (p->align >= align && p->size >= size
815 && GET_MODE (p->slot) == mode
816 && objects_must_conflict_p (p->type, type)
817 && (best_p == 0 || best_p->size > p->size
818 || (best_p->size == p->size && best_p->align > p->align)))
819 {
820 if (p->align == align && p->size == size)
821 {
822 selected = p;
823 cut_slot_from_list (selected, &avail_temp_slots);
824 best_p = 0;
825 break;
826 }
827 best_p = p;
828 }
829 }
830 }
831
832 /* Make our best, if any, the one to use. */
833 if (best_p)
834 {
835 selected = best_p;
836 cut_slot_from_list (selected, &avail_temp_slots);
837
838 /* If there are enough aligned bytes left over, make them into a new
839 temp_slot so that the extra bytes don't get wasted. Do this only
840 for BLKmode slots, so that we can be sure of the alignment. */
841 if (GET_MODE (best_p->slot) == BLKmode)
842 {
843 int alignment = best_p->align / BITS_PER_UNIT;
844 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
845
846 if (best_p->size - rounded_size >= alignment)
847 {
848 p = ggc_alloc_temp_slot ();
849 p->in_use = p->addr_taken = 0;
850 p->size = best_p->size - rounded_size;
851 p->base_offset = best_p->base_offset + rounded_size;
852 p->full_size = best_p->full_size - rounded_size;
853 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
854 p->align = best_p->align;
855 p->type = best_p->type;
856 insert_slot_to_list (p, &avail_temp_slots);
857
858 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
859 stack_slot_list);
860
861 best_p->size = rounded_size;
862 best_p->full_size = rounded_size;
863 }
864 }
865 }
866
867 /* If we still didn't find one, make a new temporary. */
868 if (selected == 0)
869 {
870 HOST_WIDE_INT frame_offset_old = frame_offset;
871
872 p = ggc_alloc_temp_slot ();
873
874 /* We are passing an explicit alignment request to assign_stack_local.
875 One side effect of that is assign_stack_local will not round SIZE
876 to ensure the frame offset remains suitably aligned.
877
878 So for requests which depended on the rounding of SIZE, we go ahead
879 and round it now. We also make sure ALIGNMENT is at least
880 BIGGEST_ALIGNMENT. */
881 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
882 p->slot = assign_stack_local_1 (mode,
883 (mode == BLKmode
884 ? CEIL_ROUND (size,
885 (int) align
886 / BITS_PER_UNIT)
887 : size),
888 align, 0);
889
890 p->align = align;
891
892 /* The following slot size computation is necessary because we don't
893 know the actual size of the temporary slot until assign_stack_local
894 has performed all the frame alignment and size rounding for the
895 requested temporary. Note that extra space added for alignment
896 can be either above or below this stack slot depending on which
897 way the frame grows. We include the extra space if and only if it
898 is above this slot. */
899 if (FRAME_GROWS_DOWNWARD)
900 p->size = frame_offset_old - frame_offset;
901 else
902 p->size = size;
903
904 /* Now define the fields used by combine_temp_slots. */
905 if (FRAME_GROWS_DOWNWARD)
906 {
907 p->base_offset = frame_offset;
908 p->full_size = frame_offset_old - frame_offset;
909 }
910 else
911 {
912 p->base_offset = frame_offset_old;
913 p->full_size = frame_offset - frame_offset_old;
914 }
915
916 selected = p;
917 }
918
919 p = selected;
920 p->in_use = 1;
921 p->addr_taken = 0;
922 p->type = type;
923 p->level = temp_slot_level;
924 p->keep = keep;
925
926 pp = temp_slots_at_level (p->level);
927 insert_slot_to_list (p, pp);
928 insert_temp_slot_address (XEXP (p->slot, 0), p);
929
930 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
931 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
932 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
933
934 /* If we know the alias set for the memory that will be used, use
935 it. If there's no TYPE, then we don't know anything about the
936 alias set for the memory. */
937 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
938 set_mem_align (slot, align);
939
940 /* If a type is specified, set the relevant flags. */
941 if (type != 0)
942 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
943 MEM_NOTRAP_P (slot) = 1;
944
945 return slot;
946 }
947
948 /* Allocate a temporary stack slot and record it for possible later
949 reuse. The first three arguments are the same as in the preceding function. */
950
951 rtx
952 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
953 {
954 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
955 }
956 \f
957 /* Assign a temporary.
958 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
959 and so that should be used in error messages. In either case, we
960 allocate an object of the given type.
961 KEEP is as for assign_stack_temp.
962 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
963 it is 0 if a register is OK.
964 DONT_PROMOTE is 1 if we should not promote values in register
965 to wider modes. */
966
967 rtx
968 assign_temp (tree type_or_decl, int keep, int memory_required,
969 int dont_promote ATTRIBUTE_UNUSED)
970 {
971 tree type, decl;
972 enum machine_mode mode;
973 #ifdef PROMOTE_MODE
974 int unsignedp;
975 #endif
976
977 if (DECL_P (type_or_decl))
978 decl = type_or_decl, type = TREE_TYPE (decl);
979 else
980 decl = NULL, type = type_or_decl;
981
982 mode = TYPE_MODE (type);
983 #ifdef PROMOTE_MODE
984 unsignedp = TYPE_UNSIGNED (type);
985 #endif
986
987 if (mode == BLKmode || memory_required)
988 {
989 HOST_WIDE_INT size = int_size_in_bytes (type);
990 rtx tmp;
991
992 /* Zero-sized arrays are a GNU C extension. Set size to 1 to avoid
993 problems with allocating the stack space. */
994 if (size == 0)
995 size = 1;
996
997 /* Unfortunately, we don't yet know how to allocate variable-sized
998 temporaries. However, sometimes we can find a fixed upper limit on
999 the size, so try that instead. */
1000 else if (size == -1)
1001 size = max_int_size_in_bytes (type);
1002
1003 /* The size of the temporary may be too large to fit into an integer. */
1004 /* ??? Not sure this should happen except for user silliness, so limit
1005 this to things that aren't compiler-generated temporaries. The
1006 rest of the time we'll die in assign_stack_temp_for_type. */
1007 if (decl && size == -1
1008 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1009 {
1010 error ("size of variable %q+D is too large", decl);
1011 size = 1;
1012 }
1013
1014 tmp = assign_stack_temp_for_type (mode, size, keep, type);
1015 return tmp;
1016 }
1017
1018 #ifdef PROMOTE_MODE
1019 if (! dont_promote)
1020 mode = promote_mode (type, mode, &unsignedp);
1021 #endif
1022
1023 return gen_reg_rtx (mode);
1024 }
1025 \f
1026 /* Combine temporary stack slots which are adjacent on the stack.
1027
1028 This allows for better use of already allocated stack space. This is only
1029 done for BLKmode slots because we can be sure that we won't have alignment
1030 problems in this case. */
1031
1032 static void
1033 combine_temp_slots (void)
1034 {
1035 struct temp_slot *p, *q, *next, *next_q;
1036 int num_slots;
1037
1038 /* We can't combine slots, because the information about which slot
1039 is in which alias set will be lost. */
1040 if (flag_strict_aliasing)
1041 return;
1042
1043 /* If there are a lot of temp slots, don't do anything unless
1044 high levels of optimization are enabled. */
1045 if (! flag_expensive_optimizations)
1046 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1047 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1048 return;
1049
1050 for (p = avail_temp_slots; p; p = next)
1051 {
1052 int delete_p = 0;
1053
1054 next = p->next;
1055
1056 if (GET_MODE (p->slot) != BLKmode)
1057 continue;
1058
1059 for (q = p->next; q; q = next_q)
1060 {
1061 int delete_q = 0;
1062
1063 next_q = q->next;
1064
1065 if (GET_MODE (q->slot) != BLKmode)
1066 continue;
1067
1068 if (p->base_offset + p->full_size == q->base_offset)
1069 {
1070 /* Q comes after P; combine Q into P. */
1071 p->size += q->size;
1072 p->full_size += q->full_size;
1073 delete_q = 1;
1074 }
1075 else if (q->base_offset + q->full_size == p->base_offset)
1076 {
1077 /* P comes after Q; combine P into Q. */
1078 q->size += p->size;
1079 q->full_size += p->full_size;
1080 delete_p = 1;
1081 break;
1082 }
1083 if (delete_q)
1084 cut_slot_from_list (q, &avail_temp_slots);
1085 }
1086
1087 /* Either delete P or advance past it. */
1088 if (delete_p)
1089 cut_slot_from_list (p, &avail_temp_slots);
1090 }
1091 }
1092 \f
1093 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1094 slot that previously was known by OLD_RTX. */
1095
1096 void
1097 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1098 {
1099 struct temp_slot *p;
1100
1101 if (rtx_equal_p (old_rtx, new_rtx))
1102 return;
1103
1104 p = find_temp_slot_from_address (old_rtx);
1105
1106 /* If we didn't find one, see if OLD_RTX is a PLUS. If so, and
1107 NEW_RTX is a register, see if one operand of the PLUS is a
1108 temporary location. If so, NEW_RTX points into it. Otherwise,
1109 if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
1110 in common between them; if so, try a recursive call on those
1111 values. */
1112 if (p == 0)
1113 {
1114 if (GET_CODE (old_rtx) != PLUS)
1115 return;
1116
1117 if (REG_P (new_rtx))
1118 {
1119 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1120 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1121 return;
1122 }
1123 else if (GET_CODE (new_rtx) != PLUS)
1124 return;
1125
1126 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1127 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1128 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1129 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1130 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1131 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1132 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1133 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1134
1135 return;
1136 }
1137
1138 /* Otherwise add an alias for the temp's address. */
1139 insert_temp_slot_address (new_rtx, p);
1140 }
1141
1142 /* If X could be a reference to a temporary slot, mark the fact that its
1143 address was taken. */
1144
1145 void
1146 mark_temp_addr_taken (rtx x)
1147 {
1148 struct temp_slot *p;
1149
1150 if (x == 0)
1151 return;
1152
1153 /* If X is not in memory or is at a constant address, it cannot be in
1154 a temporary slot. */
1155 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1156 return;
1157
1158 p = find_temp_slot_from_address (XEXP (x, 0));
1159 if (p != 0)
1160 p->addr_taken = 1;
1161 }
1162
1163 /* If X could be a reference to a temporary slot, mark that slot as
1164 belonging to the level one higher than the current level. If X
1165 matched one of our slots, just mark that one. Otherwise, we can't
1166 easily predict which it is, so upgrade all of them. Kept slots
1167 need not be touched.
1168
1169 This is called when an ({...}) construct occurs and a statement
1170 returns a value in memory. */
1171
1172 void
1173 preserve_temp_slots (rtx x)
1174 {
1175 struct temp_slot *p = 0, *next;
1176
1177 /* If there is no result, we still might have some objects whose
1178 addresses were taken, so we need to make sure they stay around. */
1179 if (x == 0)
1180 {
1181 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1182 {
1183 next = p->next;
1184
1185 if (p->addr_taken)
1186 move_slot_to_level (p, temp_slot_level - 1);
1187 }
1188
1189 return;
1190 }
1191
1192 /* If X is a register that is being used as a pointer, see if we have
1193 a temporary slot we know it points to. To be consistent with
1194 the code below, we really should preserve all non-kept slots
1195 if we can't find a match, but that seems to be much too costly. */
1196 if (REG_P (x) && REG_POINTER (x))
1197 p = find_temp_slot_from_address (x);
1198
1199 /* If X is not in memory or is at a constant address, it cannot be in
1200 a temporary slot, but it can contain something whose address was
1201 taken. */
1202 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1203 {
1204 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1205 {
1206 next = p->next;
1207
1208 if (p->addr_taken)
1209 move_slot_to_level (p, temp_slot_level - 1);
1210 }
1211
1212 return;
1213 }
1214
1215 /* First see if we can find a match. */
1216 if (p == 0)
1217 p = find_temp_slot_from_address (XEXP (x, 0));
1218
1219 if (p != 0)
1220 {
1221 /* Move everything at our level whose address was taken to our new
1222 level in case we used its address. */
1223 struct temp_slot *q;
1224
1225 if (p->level == temp_slot_level)
1226 {
1227 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1228 {
1229 next = q->next;
1230
1231 if (p != q && q->addr_taken)
1232 move_slot_to_level (q, temp_slot_level - 1);
1233 }
1234
1235 move_slot_to_level (p, temp_slot_level - 1);
1236 p->addr_taken = 0;
1237 }
1238 return;
1239 }
1240
1241 /* Otherwise, preserve all non-kept slots at this level. */
1242 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1243 {
1244 next = p->next;
1245
1246 if (!p->keep)
1247 move_slot_to_level (p, temp_slot_level - 1);
1248 }
1249 }
1250
1251 /* Free all temporaries used so far. This is normally called at the
1252 end of generating code for a statement. */
1253
1254 void
1255 free_temp_slots (void)
1256 {
1257 struct temp_slot *p, *next;
1258 bool some_available = false;
1259
1260 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1261 {
1262 next = p->next;
1263
1264 if (!p->keep)
1265 {
1266 make_slot_available (p);
1267 some_available = true;
1268 }
1269 }
1270
1271 if (some_available)
1272 {
1273 remove_unused_temp_slot_addresses ();
1274 combine_temp_slots ();
1275 }
1276 }
1277
1278 /* Push deeper into the nesting level for stack temporaries. */
1279
1280 void
1281 push_temp_slots (void)
1282 {
1283 temp_slot_level++;
1284 }
1285
1286 /* Pop a temporary nesting level. All slots in use in the current level
1287 are freed. */
1288
1289 void
1290 pop_temp_slots (void)
1291 {
1292 struct temp_slot *p, *next;
1293 bool some_available = false;
1294
1295 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1296 {
1297 next = p->next;
1298 make_slot_available (p);
1299 some_available = true;
1300 }
1301
1302 if (some_available)
1303 {
1304 remove_unused_temp_slot_addresses ();
1305 combine_temp_slots ();
1306 }
1307
1308 temp_slot_level--;
1309 }
1310
1311 /* Initialize temporary slots. */
1312
1313 void
1314 init_temp_slots (void)
1315 {
1316 /* We have not allocated any temporaries yet. */
1317 avail_temp_slots = 0;
1318 used_temp_slots = 0;
1319 temp_slot_level = 0;
1320
1321 /* Set up the table to map addresses to temp slots. */
1322 if (! temp_slot_address_table)
1323 temp_slot_address_table = htab_create_ggc (32,
1324 temp_slot_address_hash,
1325 temp_slot_address_eq,
1326 NULL);
1327 else
1328 htab_empty (temp_slot_address_table);
1329 }
1330 \f
1331 /* These routines are responsible for converting virtual register references
1332 to the actual hard register references once RTL generation is complete.
1333
1334 The following five variables are used for communication between the
1335 routines. They contain the offsets of the virtual registers from their
1336 respective hard registers. */
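/* (For example, once these offsets are known, a use of
   (plus:SI (reg:SI virtual-stack-vars) (const_int 8)) is rewritten as
   (plus:SI (reg:SI frame-pointer) (const_int <var_offset + 8>)).
   Illustrative; the replacement register depends on the target.) */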
1337
1338 static int in_arg_offset;
1339 static int var_offset;
1340 static int dynamic_offset;
1341 static int out_arg_offset;
1342 static int cfa_offset;
1343
1344 /* On most machines, the stack pointer register is equivalent to the bottom
1345 of the stack. */
1346
1347 #ifndef STACK_POINTER_OFFSET
1348 #define STACK_POINTER_OFFSET 0
1349 #endif
1350
1351 /* If not defined, pick an appropriate default for the offset of dynamically
1352 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1353 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1354
1355 #ifndef STACK_DYNAMIC_OFFSET
1356
1357 /* The bottom of the stack points to the actual arguments. If
1358 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1359 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1360 stack space for register parameters is not pushed by the caller, but
1361 is rather part of the fixed stack areas and hence not included in
1362 `crtl->outgoing_args_size'. Nevertheless, we must allow
1363 for it when allocating stack dynamic objects. */
1364
1365 #if defined(REG_PARM_STACK_SPACE)
1366 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1367 ((ACCUMULATE_OUTGOING_ARGS \
1368 ? (crtl->outgoing_args_size \
1369 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1370 : REG_PARM_STACK_SPACE (FNDECL))) \
1371 : 0) + (STACK_POINTER_OFFSET))
1372 #else
1373 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1374 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1375 + (STACK_POINTER_OFFSET))
1376 #endif
1377 #endif
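/* (Thus on an ACCUMULATE_OUTGOING_ARGS target without REG_PARM_STACK_SPACE,
   STACK_DYNAMIC_OFFSET reduces to crtl->outgoing_args_size
   + STACK_POINTER_OFFSET, and with neither it is just STACK_POINTER_OFFSET.) */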
1378
1379 \f
1380 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1381 is a virtual register, return the equivalent hard register and set the
1382 offset indirectly through the pointer. Otherwise, return 0. */
1383
1384 static rtx
1385 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1386 {
1387 rtx new_rtx;
1388 HOST_WIDE_INT offset;
1389
1390 if (x == virtual_incoming_args_rtx)
1391 {
1392 if (stack_realign_drap)
1393 {
1394 /* Replace virtual_incoming_args_rtx with internal arg
1395 pointer if DRAP is used to realign stack. */
1396 new_rtx = crtl->args.internal_arg_pointer;
1397 offset = 0;
1398 }
1399 else
1400 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1401 }
1402 else if (x == virtual_stack_vars_rtx)
1403 new_rtx = frame_pointer_rtx, offset = var_offset;
1404 else if (x == virtual_stack_dynamic_rtx)
1405 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1406 else if (x == virtual_outgoing_args_rtx)
1407 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1408 else if (x == virtual_cfa_rtx)
1409 {
1410 #ifdef FRAME_POINTER_CFA_OFFSET
1411 new_rtx = frame_pointer_rtx;
1412 #else
1413 new_rtx = arg_pointer_rtx;
1414 #endif
1415 offset = cfa_offset;
1416 }
1417 else if (x == virtual_preferred_stack_boundary_rtx)
1418 {
1419 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1420 offset = 0;
1421 }
1422 else
1423 return NULL_RTX;
1424
1425 *poffset = offset;
1426 return new_rtx;
1427 }
1428
1429 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1430 Instantiate any virtual registers present inside of *LOC. The expression
1431 is simplified, as much as possible, but is not to be considered "valid"
1432 in any sense implied by the target. If any change is made, set CHANGED
1433 to true. */
1434
1435 static int
1436 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1437 {
1438 HOST_WIDE_INT offset;
1439 bool *changed = (bool *) data;
1440 rtx x, new_rtx;
1441
1442 x = *loc;
1443 if (x == 0)
1444 return 0;
1445
1446 switch (GET_CODE (x))
1447 {
1448 case REG:
1449 new_rtx = instantiate_new_reg (x, &offset);
1450 if (new_rtx)
1451 {
1452 *loc = plus_constant (new_rtx, offset);
1453 if (changed)
1454 *changed = true;
1455 }
1456 return -1;
1457
1458 case PLUS:
1459 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1460 if (new_rtx)
1461 {
1462 new_rtx = plus_constant (new_rtx, offset);
1463 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
1464 if (changed)
1465 *changed = true;
1466 return -1;
1467 }
1468
1469 /* FIXME -- from old code */
1470 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1471 we can commute the PLUS and SUBREG because pointers into the
1472 frame are well-behaved. */
1473 break;
1474
1475 default:
1476 break;
1477 }
1478
1479 return 0;
1480 }
1481
1482 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1483 matches the predicate for insn CODE operand OPERAND. */
1484
1485 static int
1486 safe_insn_predicate (int code, int operand, rtx x)
1487 {
1488 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1489 }
1490
1491 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1492 registers present inside of insn. The result will be a valid insn. */
1493
1494 static void
1495 instantiate_virtual_regs_in_insn (rtx insn)
1496 {
1497 HOST_WIDE_INT offset;
1498 int insn_code, i;
1499 bool any_change = false;
1500 rtx set, new_rtx, x, seq;
1501
1502 /* There are some special cases to be handled first. */
1503 set = single_set (insn);
1504 if (set)
1505 {
1506 /* We're allowed to assign to a virtual register. This is interpreted
1507 to mean that the underlying register gets assigned the inverse
1508 transformation. This is used, for example, in the handling of
1509 non-local gotos. */
1510 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1511 if (new_rtx)
1512 {
1513 start_sequence ();
1514
1515 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1516 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1517 GEN_INT (-offset));
1518 x = force_operand (x, new_rtx);
1519 if (x != new_rtx)
1520 emit_move_insn (new_rtx, x);
1521
1522 seq = get_insns ();
1523 end_sequence ();
1524
1525 emit_insn_before (seq, insn);
1526 delete_insn (insn);
1527 return;
1528 }
1529
1530 /* Handle a straight copy from a virtual register by generating a
1531 new add insn. The difference between this and falling through
1532 to the generic case is avoiding a new pseudo and eliminating a
1533 move insn in the initial rtl stream. */
1534 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1535 if (new_rtx && offset != 0
1536 && REG_P (SET_DEST (set))
1537 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1538 {
1539 start_sequence ();
1540
1541 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1542 new_rtx, GEN_INT (offset), SET_DEST (set),
1543 1, OPTAB_LIB_WIDEN);
1544 if (x != SET_DEST (set))
1545 emit_move_insn (SET_DEST (set), x);
1546
1547 seq = get_insns ();
1548 end_sequence ();
1549
1550 emit_insn_before (seq, insn);
1551 delete_insn (insn);
1552 return;
1553 }
1554
1555 extract_insn (insn);
1556 insn_code = INSN_CODE (insn);
1557
1558 /* Handle a plus involving a virtual register by determining if the
1559 operands remain valid if they're modified in place. */
1560 if (GET_CODE (SET_SRC (set)) == PLUS
1561 && recog_data.n_operands >= 3
1562 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1563 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1564 && CONST_INT_P (recog_data.operand[2])
1565 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1566 {
1567 offset += INTVAL (recog_data.operand[2]);
1568
1569 /* If the sum is zero, then replace with a plain move. */
1570 if (offset == 0
1571 && REG_P (SET_DEST (set))
1572 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1573 {
1574 start_sequence ();
1575 emit_move_insn (SET_DEST (set), new_rtx);
1576 seq = get_insns ();
1577 end_sequence ();
1578
1579 emit_insn_before (seq, insn);
1580 delete_insn (insn);
1581 return;
1582 }
1583
1584 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1585
1586 /* Using validate_change and apply_change_group here leaves
1587 recog_data in an invalid state. Since we know exactly what
1588 we want to check, do those two by hand. */
1589 if (safe_insn_predicate (insn_code, 1, new_rtx)
1590 && safe_insn_predicate (insn_code, 2, x))
1591 {
1592 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1593 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1594 any_change = true;
1595
1596 /* Fall through into the regular operand fixup loop in
1597 order to take care of operands other than 1 and 2. */
1598 }
1599 }
1600 }
1601 else
1602 {
1603 extract_insn (insn);
1604 insn_code = INSN_CODE (insn);
1605 }
1606
1607 /* In the general case, we expect virtual registers to appear only in
1608 operands, and then only as either bare registers or inside memories. */
1609 for (i = 0; i < recog_data.n_operands; ++i)
1610 {
1611 x = recog_data.operand[i];
1612 switch (GET_CODE (x))
1613 {
1614 case MEM:
1615 {
1616 rtx addr = XEXP (x, 0);
1617 bool changed = false;
1618
1619 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1620 if (!changed)
1621 continue;
1622
1623 start_sequence ();
1624 x = replace_equiv_address (x, addr);
1625 /* It may happen that the address with the virtual reg
1626 was valid (e.g. based on the virtual stack reg, which might
1627 be acceptable to the predicates with all offsets), whereas
1628 the address now isn't anymore, for instance when the address
1629 is still offset, but the base reg isn't virtual-stack-reg
1630 anymore. Below we would do a force_reg on the whole operand,
1631 but this insn might actually only accept memory. Hence,
1632 before doing that last resort, try to reload the address into
1633 a register, so this operand stays a MEM. */
1634 if (!safe_insn_predicate (insn_code, i, x))
1635 {
1636 addr = force_reg (GET_MODE (addr), addr);
1637 x = replace_equiv_address (x, addr);
1638 }
1639 seq = get_insns ();
1640 end_sequence ();
1641 if (seq)
1642 emit_insn_before (seq, insn);
1643 }
1644 break;
1645
1646 case REG:
1647 new_rtx = instantiate_new_reg (x, &offset);
1648 if (new_rtx == NULL)
1649 continue;
1650 if (offset == 0)
1651 x = new_rtx;
1652 else
1653 {
1654 start_sequence ();
1655
1656 /* Careful, special mode predicates may have stuff in
1657 insn_data[insn_code].operand[i].mode that isn't useful
1658 to us for computing a new value. */
1659 /* ??? Recognize address_operand and/or "p" constraints
1660 to see if (plus new offset) is a valid address before we put
1661 this through expand_simple_binop. */
1662 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1663 GEN_INT (offset), NULL_RTX,
1664 1, OPTAB_LIB_WIDEN);
1665 seq = get_insns ();
1666 end_sequence ();
1667 emit_insn_before (seq, insn);
1668 }
1669 break;
1670
1671 case SUBREG:
1672 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1673 if (new_rtx == NULL)
1674 continue;
1675 if (offset != 0)
1676 {
1677 start_sequence ();
1678 new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
1679 GEN_INT (offset), NULL_RTX,
1680 1, OPTAB_LIB_WIDEN);
1681 seq = get_insns ();
1682 end_sequence ();
1683 emit_insn_before (seq, insn);
1684 }
1685 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1686 GET_MODE (new_rtx), SUBREG_BYTE (x));
1687 gcc_assert (x);
1688 break;
1689
1690 default:
1691 continue;
1692 }
1693
1694 /* At this point, X contains the new value for the operand.
1695 Validate the new value vs the insn predicate. Note that
1696 asm insns will have insn_code -1 here. */
1697 if (!safe_insn_predicate (insn_code, i, x))
1698 {
1699 start_sequence ();
1700 if (REG_P (x))
1701 {
1702 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1703 x = copy_to_reg (x);
1704 }
1705 else
1706 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1707 seq = get_insns ();
1708 end_sequence ();
1709 if (seq)
1710 emit_insn_before (seq, insn);
1711 }
1712
1713 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1714 any_change = true;
1715 }
1716
1717 if (any_change)
1718 {
1719 /* Propagate operand changes into the duplicates. */
1720 for (i = 0; i < recog_data.n_dups; ++i)
1721 *recog_data.dup_loc[i]
1722 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1723
1724 /* Force re-recognition of the instruction for validation. */
1725 INSN_CODE (insn) = -1;
1726 }
1727
1728 if (asm_noperands (PATTERN (insn)) >= 0)
1729 {
1730 if (!check_asm_operands (PATTERN (insn)))
1731 {
1732 error_for_asm (insn, "impossible constraint in %<asm%>");
1733 delete_insn_and_edges (insn);
1734 }
1735 }
1736 else
1737 {
1738 if (recog_memoized (insn) < 0)
1739 fatal_insn_not_found (insn);
1740 }
1741 }
1742
1743 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1744 do any instantiation required. */
1745
1746 void
1747 instantiate_decl_rtl (rtx x)
1748 {
1749 rtx addr;
1750
1751 if (x == 0)
1752 return;
1753
1754 /* If this is a CONCAT, recurse for the pieces. */
1755 if (GET_CODE (x) == CONCAT)
1756 {
1757 instantiate_decl_rtl (XEXP (x, 0));
1758 instantiate_decl_rtl (XEXP (x, 1));
1759 return;
1760 }
1761
1762 /* If this is not a MEM, no need to do anything. Similarly if the
1763 address is a constant or a register that is not a virtual register. */
1764 if (!MEM_P (x))
1765 return;
1766
1767 addr = XEXP (x, 0);
1768 if (CONSTANT_P (addr)
1769 || (REG_P (addr)
1770 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1771 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1772 return;
1773
1774 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1775 }
1776
1777 /* Helper for instantiate_decls called via walk_tree: Process all decls
1778 in the given DECL_VALUE_EXPR. */
1779
1780 static tree
1781 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1782 {
1783 tree t = *tp;
1784 if (! EXPR_P (t))
1785 {
1786 *walk_subtrees = 0;
1787 if (DECL_P (t))
1788 {
1789 if (DECL_RTL_SET_P (t))
1790 instantiate_decl_rtl (DECL_RTL (t));
1791 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1792 && DECL_INCOMING_RTL (t))
1793 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1794 if ((TREE_CODE (t) == VAR_DECL
1795 || TREE_CODE (t) == RESULT_DECL)
1796 && DECL_HAS_VALUE_EXPR_P (t))
1797 {
1798 tree v = DECL_VALUE_EXPR (t);
1799 walk_tree (&v, instantiate_expr, NULL, NULL);
1800 }
1801 }
1802 }
1803 return NULL;
1804 }
1805
1806 /* Subroutine of instantiate_decls: Process all decls in the given
1807 BLOCK node and all its subblocks. */
1808
1809 static void
1810 instantiate_decls_1 (tree let)
1811 {
1812 tree t;
1813
1814 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1815 {
1816 if (DECL_RTL_SET_P (t))
1817 instantiate_decl_rtl (DECL_RTL (t));
1818 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1819 {
1820 tree v = DECL_VALUE_EXPR (t);
1821 walk_tree (&v, instantiate_expr, NULL, NULL);
1822 }
1823 }
1824
1825 /* Process all subblocks. */
1826 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1827 instantiate_decls_1 (t);
1828 }
1829
1830 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1831 all virtual registers in their DECL_RTL's. */
1832
1833 static void
1834 instantiate_decls (tree fndecl)
1835 {
1836 tree decl;
1837 unsigned ix;
1838
1839 /* Process all parameters of the function. */
1840 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1841 {
1842 instantiate_decl_rtl (DECL_RTL (decl));
1843 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1844 if (DECL_HAS_VALUE_EXPR_P (decl))
1845 {
1846 tree v = DECL_VALUE_EXPR (decl);
1847 walk_tree (&v, instantiate_expr, NULL, NULL);
1848 }
1849 }
1850
1851 if ((decl = DECL_RESULT (fndecl))
1852 && TREE_CODE (decl) == RESULT_DECL)
1853 {
1854 if (DECL_RTL_SET_P (decl))
1855 instantiate_decl_rtl (DECL_RTL (decl));
1856 if (DECL_HAS_VALUE_EXPR_P (decl))
1857 {
1858 tree v = DECL_VALUE_EXPR (decl);
1859 walk_tree (&v, instantiate_expr, NULL, NULL);
1860 }
1861 }
1862
1863 /* Now process all variables defined in the function or its subblocks. */
1864 instantiate_decls_1 (DECL_INITIAL (fndecl));
1865
1866 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1867 if (DECL_RTL_SET_P (decl))
1868 instantiate_decl_rtl (DECL_RTL (decl));
1869 VEC_free (tree, gc, cfun->local_decls);
1870 }
1871
1872 /* Pass through the INSNS of function FNDECL and convert virtual register
1873 references to hard register references. */
1874
1875 static unsigned int
1876 instantiate_virtual_regs (void)
1877 {
1878 rtx insn;
1879
1880 /* Compute the offsets to use for this function. */
1881 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1882 var_offset = STARTING_FRAME_OFFSET;
1883 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1884 out_arg_offset = STACK_POINTER_OFFSET;
1885 #ifdef FRAME_POINTER_CFA_OFFSET
1886 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1887 #else
1888 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1889 #endif
1890
1891 /* Initialize recognition, indicating that volatile is OK. */
1892 init_recog ();
1893
1894 /* Scan through all the insns, instantiating every virtual register still
1895 present. */
1896 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1897 if (INSN_P (insn))
1898 {
1899 /* These patterns in the instruction stream can never be recognized.
1900 Fortunately, they shouldn't contain virtual registers either. */
1901 if (GET_CODE (PATTERN (insn)) == USE
1902 || GET_CODE (PATTERN (insn)) == CLOBBER
1903 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1904 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1905 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1906 continue;
1907 else if (DEBUG_INSN_P (insn))
1908 for_each_rtx (&INSN_VAR_LOCATION (insn),
1909 instantiate_virtual_regs_in_rtx, NULL);
1910 else
1911 instantiate_virtual_regs_in_insn (insn);
1912
1913 if (INSN_DELETED_P (insn))
1914 continue;
1915
1916 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1917
1918 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1919 if (CALL_P (insn))
1920 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1921 instantiate_virtual_regs_in_rtx, NULL);
1922 }
1923
1924 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1925 instantiate_decls (current_function_decl);
1926
1927 targetm.instantiate_decls ();
1928
1929 /* Indicate that, from now on, assign_stack_local should use
1930 frame_pointer_rtx. */
1931 virtuals_instantiated = 1;
1932
1933 return 0;
1934 }
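/* For illustration only (a rough sketch; the exact offsets are target
   dependent): after this pass, an address such as

       (plus (reg virtual-stack-vars) (const_int 8))

   has been rewritten in terms of the hard frame pointer with the virtual
   register's offset folded into the constant, roughly

       (plus (reg frame-pointer) (const_int STARTING_FRAME_OFFSET + 8))

   using the var_offset computed above.  The incoming-args, outgoing-args
   and CFA virtual registers are replaced analogously using in_arg_offset,
   out_arg_offset and cfa_offset.  */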
1935
1936 struct rtl_opt_pass pass_instantiate_virtual_regs =
1937 {
1938 {
1939 RTL_PASS,
1940 "vregs", /* name */
1941 NULL, /* gate */
1942 instantiate_virtual_regs, /* execute */
1943 NULL, /* sub */
1944 NULL, /* next */
1945 0, /* static_pass_number */
1946 TV_NONE, /* tv_id */
1947 0, /* properties_required */
1948 0, /* properties_provided */
1949 0, /* properties_destroyed */
1950 0, /* todo_flags_start */
1951 0 /* todo_flags_finish */
1952 }
1953 };
1954
1955 \f
1956 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1957 This means a type for which function calls must pass an address to the
1958 function or get an address back from the function.
1959 EXP may be a type node or an expression (whose type is tested). */
1960
1961 int
1962 aggregate_value_p (const_tree exp, const_tree fntype)
1963 {
1964 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1965 int i, regno, nregs;
1966 rtx reg;
1967
1968 if (fntype)
1969 switch (TREE_CODE (fntype))
1970 {
1971 case CALL_EXPR:
1972 {
1973 tree fndecl = get_callee_fndecl (fntype);
1974 fntype = (fndecl
1975 ? TREE_TYPE (fndecl)
1976 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
1977 }
1978 break;
1979 case FUNCTION_DECL:
1980 fntype = TREE_TYPE (fntype);
1981 break;
1982 case FUNCTION_TYPE:
1983 case METHOD_TYPE:
1984 break;
1985 case IDENTIFIER_NODE:
1986 fntype = NULL_TREE;
1987 break;
1988 default:
1989 /* We don't expect other tree types here. */
1990 gcc_unreachable ();
1991 }
1992
1993 if (VOID_TYPE_P (type))
1994 return 0;
1995
1996 /* If a record should be passed the same as its first (and only) member,
1997 don't pass it as an aggregate. */
1998 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
1999 return aggregate_value_p (first_field (type), fntype);
2000
2001 /* If the front end has decided that this needs to be passed by
2002 reference, do so. */
2003 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2004 && DECL_BY_REFERENCE (exp))
2005 return 1;
2006
2007 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2008 if (fntype && TREE_ADDRESSABLE (fntype))
2009 return 1;
2010
2011 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2012 and thus can't be returned in registers. */
2013 if (TREE_ADDRESSABLE (type))
2014 return 1;
2015
2016 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2017 return 1;
2018
2019 if (targetm.calls.return_in_memory (type, fntype))
2020 return 1;
2021
2022 /* Make sure we have suitable call-clobbered regs to return
2023 the value in; if not, we must return it in memory. */
2024 reg = hard_function_value (type, 0, fntype, 0);
2025
2026 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2027 it is OK. */
2028 if (!REG_P (reg))
2029 return 0;
2030
2031 regno = REGNO (reg);
2032 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2033 for (i = 0; i < nregs; i++)
2034 if (! call_used_regs[regno + i])
2035 return 1;
2036
2037 return 0;
2038 }
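/* Illustrative cases only (the answer is ultimately target dependent):
   on a typical 64-bit target a small plain struct such as
   `struct { int x; }' can be returned in a register, so this function
   yields 0 for it, while a type the front end marked TREE_ADDRESSABLE
   (e.g. a C++ class with a nontrivial copy constructor) must be
   constructed in memory and yields 1.  The final decision is made by
   targetm.calls.return_in_memory above.  */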
2039 \f
2040 /* Return true if we should assign DECL a pseudo register; false if it
2041 should live on the local stack. */
2042
2043 bool
2044 use_register_for_decl (const_tree decl)
2045 {
2046 if (!targetm.calls.allocate_stack_slots_for_args())
2047 return true;
2048
2049 /* Honor volatile. */
2050 if (TREE_SIDE_EFFECTS (decl))
2051 return false;
2052
2053 /* Honor addressability. */
2054 if (TREE_ADDRESSABLE (decl))
2055 return false;
2056
2057 /* Only register-like things go in registers. */
2058 if (DECL_MODE (decl) == BLKmode)
2059 return false;
2060
2061 /* If -ffloat-store is specified, don't put explicit float variables
2062 into registers. */
2063 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2064 propagates values across these stores, and it probably shouldn't. */
2065 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2066 return false;
2067
2068 /* If we're not interested in tracking debugging information for
2069 this decl, then we can certainly put it in a register. */
2070 if (DECL_IGNORED_P (decl))
2071 return true;
2072
2073 if (optimize)
2074 return true;
2075
2076 if (!DECL_REGISTER (decl))
2077 return false;
2078
2079 switch (TREE_CODE (TREE_TYPE (decl)))
2080 {
2081 case RECORD_TYPE:
2082 case UNION_TYPE:
2083 case QUAL_UNION_TYPE:
2084 /* When not optimizing, disregard the register keyword for variables
2085 whose types contain methods; otherwise the methods won't be callable
2086 from the debugger. */
2087 if (TYPE_METHODS (TREE_TYPE (decl)))
2088 return false;
2089 break;
2090 default:
2091 break;
2092 }
2093
2094 return true;
2095 }
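/* A rough summary of the policy above: at -O0 an ordinary user variable
   stays on the stack so it remains visible to the debugger unless it was
   declared `register'; compiler temporaries (DECL_IGNORED_P) and all
   eligible decls under optimization may be given a pseudo, while
   volatile, addressable or BLKmode decls never are.  */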
2096
2097 /* Return true if TYPE should be passed by invisible reference. */
2098
2099 bool
2100 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2101 tree type, bool named_arg)
2102 {
2103 if (type)
2104 {
2105 /* If this type contains non-trivial constructors, then it is
2106 forbidden for the middle-end to create any new copies. */
2107 if (TREE_ADDRESSABLE (type))
2108 return true;
2109
2110 /* GCC post 3.4 passes *all* variable sized types by reference. */
2111 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2112 return true;
2113
2114 /* If a record type should be passed the same as its first (and only)
2115 member, use the type and mode of that member. */
2116 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2117 {
2118 type = TREE_TYPE (first_field (type));
2119 mode = TYPE_MODE (type);
2120 }
2121 }
2122
2123 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2124 type, named_arg);
2125 }
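/* For example (subject to the target's pass_by_reference hook): a
   parameter whose type has a non-constant size (TYPE_SIZE is not an
   INTEGER_CST), or whose type is a TREE_ADDRESSABLE C++ class, is passed
   by invisible reference, so the callee receives a Pmode pointer rather
   than the object itself.  */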
2126
2127 /* Return true if TYPE, which is passed by reference, should be callee
2128 copied instead of caller copied. */
2129
2130 bool
2131 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2132 tree type, bool named_arg)
2133 {
2134 if (type && TREE_ADDRESSABLE (type))
2135 return false;
2136 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2137 named_arg);
2138 }
2139
2140 /* Structures to communicate between the subroutines of assign_parms.
2141 The first holds data persistent across all parameters, the second
2142 is cleared out for each parameter. */
2143
2144 struct assign_parm_data_all
2145 {
2146 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2147 should become a job of the target or be otherwise encapsulated. */
2148 CUMULATIVE_ARGS args_so_far_v;
2149 cumulative_args_t args_so_far;
2150 struct args_size stack_args_size;
2151 tree function_result_decl;
2152 tree orig_fnargs;
2153 rtx first_conversion_insn;
2154 rtx last_conversion_insn;
2155 HOST_WIDE_INT pretend_args_size;
2156 HOST_WIDE_INT extra_pretend_bytes;
2157 int reg_parm_stack_space;
2158 };
2159
2160 struct assign_parm_data_one
2161 {
2162 tree nominal_type;
2163 tree passed_type;
2164 rtx entry_parm;
2165 rtx stack_parm;
2166 enum machine_mode nominal_mode;
2167 enum machine_mode passed_mode;
2168 enum machine_mode promoted_mode;
2169 struct locate_and_pad_arg_data locate;
2170 int partial;
2171 BOOL_BITFIELD named_arg : 1;
2172 BOOL_BITFIELD passed_pointer : 1;
2173 BOOL_BITFIELD on_stack : 1;
2174 BOOL_BITFIELD loaded_in_reg : 1;
2175 };
2176
2177 /* A subroutine of assign_parms. Initialize ALL. */
2178
2179 static void
2180 assign_parms_initialize_all (struct assign_parm_data_all *all)
2181 {
2182 tree fntype ATTRIBUTE_UNUSED;
2183
2184 memset (all, 0, sizeof (*all));
2185
2186 fntype = TREE_TYPE (current_function_decl);
2187
2188 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2189 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2190 #else
2191 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2192 current_function_decl, -1);
2193 #endif
2194 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2195
2196 #ifdef REG_PARM_STACK_SPACE
2197 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2198 #endif
2199 }
2200
2201 /* If ARGS contains entries with complex types, split each entry into two
2202 entries of the component type, updating the vector of parameters in
2203 place. */
2204
2205 static void
2206 split_complex_args (VEC(tree, heap) **args)
2207 {
2208 unsigned i;
2209 tree p;
2210
2211 FOR_EACH_VEC_ELT (tree, *args, i, p)
2212 {
2213 tree type = TREE_TYPE (p);
2214 if (TREE_CODE (type) == COMPLEX_TYPE
2215 && targetm.calls.split_complex_arg (type))
2216 {
2217 tree decl;
2218 tree subtype = TREE_TYPE (type);
2219 bool addressable = TREE_ADDRESSABLE (p);
2220
2221 /* Rewrite the PARM_DECL's type with its component. */
2222 p = copy_node (p);
2223 TREE_TYPE (p) = subtype;
2224 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2225 DECL_MODE (p) = VOIDmode;
2226 DECL_SIZE (p) = NULL;
2227 DECL_SIZE_UNIT (p) = NULL;
2228 /* If this arg would otherwise have to live in memory, force it into
2229 a pseudo here instead: it can't go in memory as per normal parms,
2230 because the usual place might not have the imag part
2231 adjacent to the real part. */
2232 DECL_ARTIFICIAL (p) = addressable;
2233 DECL_IGNORED_P (p) = addressable;
2234 TREE_ADDRESSABLE (p) = 0;
2235 layout_decl (p, 0);
2236 VEC_replace (tree, *args, i, p);
2237
2238 /* Build a second synthetic decl. */
2239 decl = build_decl (EXPR_LOCATION (p),
2240 PARM_DECL, NULL_TREE, subtype);
2241 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2242 DECL_ARTIFICIAL (decl) = addressable;
2243 DECL_IGNORED_P (decl) = addressable;
2244 layout_decl (decl, 0);
2245 VEC_safe_insert (tree, heap, *args, ++i, decl);
2246 }
2247 }
2248 }
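/* For illustration, assuming a target whose split_complex_arg hook
   accepts the type: a `_Complex double' PARM_DECL is replaced here by a
   `double' PARM_DECL for the real part followed by a synthetic `double'
   PARM_DECL for the imaginary part; assign_parms_unsplit_complex later
   recombines the two pieces into a CONCAT for the original decl.  */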
2249
2250 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2251 the hidden struct return argument, and (abi willing) complex args.
2252 Return the new parameter list. */
2253
2254 static VEC(tree, heap) *
2255 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2256 {
2257 tree fndecl = current_function_decl;
2258 tree fntype = TREE_TYPE (fndecl);
2259 VEC(tree, heap) *fnargs = NULL;
2260 tree arg;
2261
2262 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2263 VEC_safe_push (tree, heap, fnargs, arg);
2264
2265 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2266
2267 /* If struct value address is treated as the first argument, make it so. */
2268 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2269 && ! cfun->returns_pcc_struct
2270 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2271 {
2272 tree type = build_pointer_type (TREE_TYPE (fntype));
2273 tree decl;
2274
2275 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2276 PARM_DECL, get_identifier (".result_ptr"), type);
2277 DECL_ARG_TYPE (decl) = type;
2278 DECL_ARTIFICIAL (decl) = 1;
2279 DECL_NAMELESS (decl) = 1;
2280 TREE_CONSTANT (decl) = 1;
2281
2282 DECL_CHAIN (decl) = all->orig_fnargs;
2283 all->orig_fnargs = decl;
2284 VEC_safe_insert (tree, heap, fnargs, 0, decl);
2285
2286 all->function_result_decl = decl;
2287 }
2288
2289 /* If the target wants to split complex arguments into scalars, do so. */
2290 if (targetm.calls.split_complex_arg)
2291 split_complex_args (&fnargs);
2292
2293 return fnargs;
2294 }
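/* For illustration: for a function that returns a large aggregate on a
   target whose struct_value_rtx hook returns 0, the vector built here
   gains an artificial leading ".result_ptr" PARM_DECL carrying the
   address where the result must be stored, and complex arguments may
   then be split into their component parts as described above.  */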
2295
2296 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2297 data for the parameter. Incorporate ABI specifics such as pass-by-
2298 reference and type promotion. */
2299
2300 static void
2301 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2302 struct assign_parm_data_one *data)
2303 {
2304 tree nominal_type, passed_type;
2305 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2306 int unsignedp;
2307
2308 memset (data, 0, sizeof (*data));
2309
2310 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2311 if (!cfun->stdarg)
2312 data->named_arg = 1; /* No variadic parms. */
2313 else if (DECL_CHAIN (parm))
2314 data->named_arg = 1; /* Not the last non-variadic parm. */
2315 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2316 data->named_arg = 1; /* Only variadic ones are unnamed. */
2317 else
2318 data->named_arg = 0; /* Treat as variadic. */
2319
2320 nominal_type = TREE_TYPE (parm);
2321 passed_type = DECL_ARG_TYPE (parm);
2322
2323 /* Look out for errors propagating this far. Also, if the parameter's
2324 type is void then its value doesn't matter. */
2325 if (TREE_TYPE (parm) == error_mark_node
2326 /* This can happen after weird syntax errors
2327 or if an enum type is defined among the parms. */
2328 || TREE_CODE (parm) != PARM_DECL
2329 || passed_type == NULL
2330 || VOID_TYPE_P (nominal_type))
2331 {
2332 nominal_type = passed_type = void_type_node;
2333 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2334 goto egress;
2335 }
2336
2337 /* Find mode of arg as it is passed, and mode of arg as it should be
2338 during execution of this function. */
2339 passed_mode = TYPE_MODE (passed_type);
2340 nominal_mode = TYPE_MODE (nominal_type);
2341
2342 /* If the parm is to be passed as a transparent union or record, use the
2343 type of the first field for the tests below. We have already verified
2344 that the modes are the same. */
2345 if ((TREE_CODE (passed_type) == UNION_TYPE
2346 || TREE_CODE (passed_type) == RECORD_TYPE)
2347 && TYPE_TRANSPARENT_AGGR (passed_type))
2348 passed_type = TREE_TYPE (first_field (passed_type));
2349
2350 /* See if this arg was passed by invisible reference. */
2351 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2352 passed_type, data->named_arg))
2353 {
2354 passed_type = nominal_type = build_pointer_type (passed_type);
2355 data->passed_pointer = true;
2356 passed_mode = nominal_mode = Pmode;
2357 }
2358
2359 /* Find mode as it is passed by the ABI. */
2360 unsignedp = TYPE_UNSIGNED (passed_type);
2361 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2362 TREE_TYPE (current_function_decl), 0);
2363
2364 egress:
2365 data->nominal_type = nominal_type;
2366 data->passed_type = passed_type;
2367 data->nominal_mode = nominal_mode;
2368 data->passed_mode = passed_mode;
2369 data->promoted_mode = promoted_mode;
2370 }
2371
2372 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2373
2374 static void
2375 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2376 struct assign_parm_data_one *data, bool no_rtl)
2377 {
2378 int varargs_pretend_bytes = 0;
2379
2380 targetm.calls.setup_incoming_varargs (all->args_so_far,
2381 data->promoted_mode,
2382 data->passed_type,
2383 &varargs_pretend_bytes, no_rtl);
2384
2385 /* If the back-end has requested extra stack space, record how much is
2386 needed. Do not change pretend_args_size otherwise since it may be
2387 nonzero from an earlier partial argument. */
2388 if (varargs_pretend_bytes > 0)
2389 all->pretend_args_size = varargs_pretend_bytes;
2390 }
2391
2392 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2393 the incoming location of the current parameter. */
2394
2395 static void
2396 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2397 struct assign_parm_data_one *data)
2398 {
2399 HOST_WIDE_INT pretend_bytes = 0;
2400 rtx entry_parm;
2401 bool in_regs;
2402
2403 if (data->promoted_mode == VOIDmode)
2404 {
2405 data->entry_parm = data->stack_parm = const0_rtx;
2406 return;
2407 }
2408
2409 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2410 data->promoted_mode,
2411 data->passed_type,
2412 data->named_arg);
2413
2414 if (entry_parm == 0)
2415 data->promoted_mode = data->passed_mode;
2416
2417 /* Determine parm's home in the stack, in case it arrives in the stack
2418 or we should pretend it did. Compute the stack position and rtx where
2419 the argument arrives and its size.
2420
2421 There is one complexity here: If this was a parameter that would
2422 have been passed in registers, but wasn't only because it is
2423 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2424 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2425 In this case, we call the function_incoming_arg hook with NAMED set to
2426 true instead of false, as it was on the earlier call. */
2427 in_regs = entry_parm != 0;
2428 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2429 in_regs = true;
2430 #endif
2431 if (!in_regs && !data->named_arg)
2432 {
2433 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2434 {
2435 rtx tem;
2436 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2437 data->promoted_mode,
2438 data->passed_type, true);
2439 in_regs = tem != NULL;
2440 }
2441 }
2442
2443 /* If this parameter was passed both in registers and in the stack, use
2444 the copy on the stack. */
2445 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2446 data->passed_type))
2447 entry_parm = 0;
2448
2449 if (entry_parm)
2450 {
2451 int partial;
2452
2453 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2454 data->promoted_mode,
2455 data->passed_type,
2456 data->named_arg);
2457 data->partial = partial;
2458
2459 /* The caller might already have allocated stack space for the
2460 register parameters. */
2461 if (partial != 0 && all->reg_parm_stack_space == 0)
2462 {
2463 /* Part of this argument is passed in registers and part
2464 is passed on the stack. Ask the prologue code to extend
2465 the stack part so that we can recreate the full value.
2466
2467 PRETEND_BYTES is the size of the registers we need to store.
2468 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2469 stack space that the prologue should allocate.
2470
2471 Internally, gcc assumes that the argument pointer is aligned
2472 to STACK_BOUNDARY bits. This is used both for alignment
2473 optimizations (see init_emit) and to locate arguments that are
2474 aligned to more than PARM_BOUNDARY bits. We must preserve this
2475 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2476 a stack boundary. */
2477
2478 /* We assume at most one partial arg, and it must be the first
2479 argument on the stack. */
2480 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2481
2482 pretend_bytes = partial;
2483 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2484
2485 /* We want to align relative to the actual stack pointer, so
2486 don't include this in the stack size until later. */
2487 all->extra_pretend_bytes = all->pretend_args_size;
2488 }
2489 }
2490
2491 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2492 entry_parm ? data->partial : 0, current_function_decl,
2493 &all->stack_args_size, &data->locate);
2494
2495 /* Update parm_stack_boundary if this parameter is passed in the
2496 stack. */
2497 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2498 crtl->parm_stack_boundary = data->locate.boundary;
2499
2500 /* Adjust offsets to include the pretend args. */
2501 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2502 data->locate.slot_offset.constant += pretend_bytes;
2503 data->locate.offset.constant += pretend_bytes;
2504
2505 data->entry_parm = entry_parm;
2506 }
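/* A worked example of the pretend-args rounding above, assuming
   STACK_BYTES is 16: if the first 8 bytes of a partially-passed argument
   arrive in registers (partial == 8), pretend_args_size becomes
   CEIL_ROUND (8, 16) == 16, which keeps the incoming argument area
   aligned to STACK_BOUNDARY.  */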
2507
2508 /* A subroutine of assign_parms. If there is actually space on the stack
2509 for this parm, count it in stack_args_size and return true. */
2510
2511 static bool
2512 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2513 struct assign_parm_data_one *data)
2514 {
2515 /* Trivially true if we've no incoming register. */
2516 if (data->entry_parm == NULL)
2517 ;
2518 /* Also true if we're partially in registers and partially not,
2519 since we've arranged to drop the entire argument on the stack. */
2520 else if (data->partial != 0)
2521 ;
2522 /* Also true if the target says that it's passed in both registers
2523 and on the stack. */
2524 else if (GET_CODE (data->entry_parm) == PARALLEL
2525 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2526 ;
2527 /* Also true if the target says that there's stack allocated for
2528 all register parameters. */
2529 else if (all->reg_parm_stack_space > 0)
2530 ;
2531 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2532 else
2533 return false;
2534
2535 all->stack_args_size.constant += data->locate.size.constant;
2536 if (data->locate.size.var)
2537 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2538
2539 return true;
2540 }
2541
2542 /* A subroutine of assign_parms. Given that this parameter is allocated
2543 stack space by the ABI, find it. */
2544
2545 static void
2546 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2547 {
2548 rtx offset_rtx, stack_parm;
2549 unsigned int align, boundary;
2550
2551 /* If we're passing this arg using a reg, make its stack home the
2552 aligned stack slot. */
2553 if (data->entry_parm)
2554 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2555 else
2556 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2557
2558 stack_parm = crtl->args.internal_arg_pointer;
2559 if (offset_rtx != const0_rtx)
2560 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2561 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2562
2563 if (!data->passed_pointer)
2564 {
2565 set_mem_attributes (stack_parm, parm, 1);
2566 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2567 while promoted mode's size is needed. */
2568 if (data->promoted_mode != BLKmode
2569 && data->promoted_mode != DECL_MODE (parm))
2570 {
2571 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2572 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2573 {
2574 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2575 data->promoted_mode);
2576 if (offset)
2577 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2578 }
2579 }
2580 }
2581
2582 boundary = data->locate.boundary;
2583 align = BITS_PER_UNIT;
2584
2585 /* If we're padding upward, we know that the alignment of the slot
2586 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2587 intentionally forcing upward padding. Otherwise we have to come
2588 up with a guess at the alignment based on OFFSET_RTX. */
2589 if (data->locate.where_pad != downward || data->entry_parm)
2590 align = boundary;
2591 else if (CONST_INT_P (offset_rtx))
2592 {
2593 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2594 align = align & -align;
2595 }
2596 set_mem_align (stack_parm, align);
2597
2598 if (data->entry_parm)
2599 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2600
2601 data->stack_parm = stack_parm;
2602 }
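/* Example of the alignment guess above: with a 64-bit slot boundary and
   an offset of 4 bytes, align starts as (4 * BITS_PER_UNIT) | 64 == 96,
   and `align & -align' reduces that to 32, the largest power-of-two
   alignment the offset is known to preserve.  */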
2603
2604 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2605 always valid and contiguous. */
2606
2607 static void
2608 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2609 {
2610 rtx entry_parm = data->entry_parm;
2611 rtx stack_parm = data->stack_parm;
2612
2613 /* If this parm was passed part in regs and part in memory, pretend it
2614 arrived entirely in memory by pushing the register-part onto the stack.
2615 In the special case of a DImode or DFmode that is split, we could put
2616 it together in a pseudoreg directly, but for now that's not worth
2617 bothering with. */
2618 if (data->partial != 0)
2619 {
2620 /* Handle calls that pass values in multiple non-contiguous
2621 locations. The Irix 6 ABI has examples of this. */
2622 if (GET_CODE (entry_parm) == PARALLEL)
2623 emit_group_store (validize_mem (stack_parm), entry_parm,
2624 data->passed_type,
2625 int_size_in_bytes (data->passed_type));
2626 else
2627 {
2628 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2629 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2630 data->partial / UNITS_PER_WORD);
2631 }
2632
2633 entry_parm = stack_parm;
2634 }
2635
2636 /* If we didn't decide this parm came in a register, by default it came
2637 on the stack. */
2638 else if (entry_parm == NULL)
2639 entry_parm = stack_parm;
2640
2641 /* When an argument is passed in multiple locations, we can't make use
2642 of this information, but we can save some copying if the whole argument
2643 is passed in a single register. */
2644 else if (GET_CODE (entry_parm) == PARALLEL
2645 && data->nominal_mode != BLKmode
2646 && data->passed_mode != BLKmode)
2647 {
2648 size_t i, len = XVECLEN (entry_parm, 0);
2649
2650 for (i = 0; i < len; i++)
2651 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2652 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2653 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2654 == data->passed_mode)
2655 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2656 {
2657 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2658 break;
2659 }
2660 }
2661
2662 data->entry_parm = entry_parm;
2663 }
2664
2665 /* A subroutine of assign_parms. Reconstitute any values which were
2666 passed in multiple registers and would fit in a single register. */
2667
2668 static void
2669 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2670 {
2671 rtx entry_parm = data->entry_parm;
2672
2673 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2674 This can be done with register operations rather than on the
2675 stack, even if we will store the reconstituted parameter on the
2676 stack later. */
2677 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2678 {
2679 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2680 emit_group_store (parmreg, entry_parm, data->passed_type,
2681 GET_MODE_SIZE (GET_MODE (entry_parm)));
2682 entry_parm = parmreg;
2683 }
2684
2685 data->entry_parm = entry_parm;
2686 }
2687
2688 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2689 always valid and properly aligned. */
2690
2691 static void
2692 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2693 {
2694 rtx stack_parm = data->stack_parm;
2695
2696 /* If we can't trust the parm stack slot to be aligned enough for its
2697 ultimate type, don't use that slot after entry. We'll make another
2698 stack slot, if we need one. */
2699 if (stack_parm
2700 && ((STRICT_ALIGNMENT
2701 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2702 || (data->nominal_type
2703 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2704 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2705 stack_parm = NULL;
2706
2707 /* If parm was passed in memory, and we need to convert it on entry,
2708 don't store it back in that same slot. */
2709 else if (data->entry_parm == stack_parm
2710 && data->nominal_mode != BLKmode
2711 && data->nominal_mode != data->passed_mode)
2712 stack_parm = NULL;
2713
2714 /* If stack protection is in effect for this function, don't leave any
2715 pointers in their passed stack slots. */
2716 else if (crtl->stack_protect_guard
2717 && (flag_stack_protect == 2
2718 || data->passed_pointer
2719 || POINTER_TYPE_P (data->nominal_type)))
2720 stack_parm = NULL;
2721
2722 data->stack_parm = stack_parm;
2723 }
2724
2725 /* A subroutine of assign_parms. Return true if the current parameter
2726 should be stored as a BLKmode in the current frame. */
2727
2728 static bool
2729 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2730 {
2731 if (data->nominal_mode == BLKmode)
2732 return true;
2733 if (GET_MODE (data->entry_parm) == BLKmode)
2734 return true;
2735
2736 #ifdef BLOCK_REG_PADDING
2737 /* Only assign_parm_setup_block knows how to deal with register arguments
2738 that are padded at the least significant end. */
2739 if (REG_P (data->entry_parm)
2740 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2741 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2742 == (BYTES_BIG_ENDIAN ? upward : downward)))
2743 return true;
2744 #endif
2745
2746 return false;
2747 }
2748
2749 /* A subroutine of assign_parms. Arrange for the parameter to be
2750 present and valid in DATA->STACK_PARM. */
2751
2752 static void
2753 assign_parm_setup_block (struct assign_parm_data_all *all,
2754 tree parm, struct assign_parm_data_one *data)
2755 {
2756 rtx entry_parm = data->entry_parm;
2757 rtx stack_parm = data->stack_parm;
2758 HOST_WIDE_INT size;
2759 HOST_WIDE_INT size_stored;
2760
2761 if (GET_CODE (entry_parm) == PARALLEL)
2762 entry_parm = emit_group_move_into_temps (entry_parm);
2763
2764 size = int_size_in_bytes (data->passed_type);
2765 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2766 if (stack_parm == 0)
2767 {
2768 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2769 stack_parm = assign_stack_local (BLKmode, size_stored,
2770 DECL_ALIGN (parm));
2771 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2772 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2773 set_mem_attributes (stack_parm, parm, 1);
2774 }
2775
2776 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2777 calls that pass values in multiple non-contiguous locations. */
2778 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2779 {
2780 rtx mem;
2781
2782 /* Note that we will be storing an integral number of words.
2783 So we have to be careful to ensure that we allocate an
2784 integral number of words. We do this above when we call
2785 assign_stack_local if space was not allocated in the argument
2786 list. If it was, this will not work if PARM_BOUNDARY is not
2787 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2788 if it becomes a problem. Exception is when BLKmode arrives
2789 with arguments not conforming to word_mode. */
2790
2791 if (data->stack_parm == 0)
2792 ;
2793 else if (GET_CODE (entry_parm) == PARALLEL)
2794 ;
2795 else
2796 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2797
2798 mem = validize_mem (stack_parm);
2799
2800 /* Handle values in multiple non-contiguous locations. */
2801 if (GET_CODE (entry_parm) == PARALLEL)
2802 {
2803 push_to_sequence2 (all->first_conversion_insn,
2804 all->last_conversion_insn);
2805 emit_group_store (mem, entry_parm, data->passed_type, size);
2806 all->first_conversion_insn = get_insns ();
2807 all->last_conversion_insn = get_last_insn ();
2808 end_sequence ();
2809 }
2810
2811 else if (size == 0)
2812 ;
2813
2814 /* If SIZE is that of a mode no bigger than a word, just use
2815 that mode's store operation. */
2816 else if (size <= UNITS_PER_WORD)
2817 {
2818 enum machine_mode mode
2819 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2820
2821 if (mode != BLKmode
2822 #ifdef BLOCK_REG_PADDING
2823 && (size == UNITS_PER_WORD
2824 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2825 != (BYTES_BIG_ENDIAN ? upward : downward)))
2826 #endif
2827 )
2828 {
2829 rtx reg;
2830
2831 /* We are really truncating a word_mode value containing
2832 SIZE bytes into a value of mode MODE. If such an
2833 operation requires no actual instructions, we can refer
2834 to the value directly in mode MODE, otherwise we must
2835 start with the register in word_mode and explicitly
2836 convert it. */
2837 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2838 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2839 else
2840 {
2841 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2842 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2843 }
2844 emit_move_insn (change_address (mem, mode, 0), reg);
2845 }
2846
2847 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2848 machine must be aligned to the left before storing
2849 to memory. Note that the previous test doesn't
2850 handle all cases (e.g. SIZE == 3). */
2851 else if (size != UNITS_PER_WORD
2852 #ifdef BLOCK_REG_PADDING
2853 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2854 == downward)
2855 #else
2856 && BYTES_BIG_ENDIAN
2857 #endif
2858 )
2859 {
2860 rtx tem, x;
2861 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2862 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2863
2864 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2865 tem = change_address (mem, word_mode, 0);
2866 emit_move_insn (tem, x);
2867 }
2868 else
2869 move_block_from_reg (REGNO (entry_parm), mem,
2870 size_stored / UNITS_PER_WORD);
2871 }
2872 else
2873 move_block_from_reg (REGNO (entry_parm), mem,
2874 size_stored / UNITS_PER_WORD);
2875 }
2876 else if (data->stack_parm == 0)
2877 {
2878 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2879 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2880 BLOCK_OP_NORMAL);
2881 all->first_conversion_insn = get_insns ();
2882 all->last_conversion_insn = get_last_insn ();
2883 end_sequence ();
2884 }
2885
2886 data->stack_parm = stack_parm;
2887 SET_DECL_RTL (parm, stack_parm);
2888 }
2889
2890 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2891 parameter. Get it there. Perform all ABI specified conversions. */
2892
2893 static void
2894 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2895 struct assign_parm_data_one *data)
2896 {
2897 rtx parmreg, validated_mem;
2898 rtx equiv_stack_parm;
2899 enum machine_mode promoted_nominal_mode;
2900 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2901 bool did_conversion = false;
2902 bool need_conversion, moved;
2903
2904 /* Store the parm in a pseudoregister during the function, but we may
2905 need to do it in a wider mode. Using 2 here makes the result
2906 consistent with promote_decl_mode and thus expand_expr_real_1. */
2907 promoted_nominal_mode
2908 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2909 TREE_TYPE (current_function_decl), 2);
2910
2911 parmreg = gen_reg_rtx (promoted_nominal_mode);
2912
2913 if (!DECL_ARTIFICIAL (parm))
2914 mark_user_reg (parmreg);
2915
2916 /* If this was an item that we received a pointer to,
2917 set DECL_RTL appropriately. */
2918 if (data->passed_pointer)
2919 {
2920 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2921 set_mem_attributes (x, parm, 1);
2922 SET_DECL_RTL (parm, x);
2923 }
2924 else
2925 SET_DECL_RTL (parm, parmreg);
2926
2927 assign_parm_remove_parallels (data);
2928
2929 /* Copy the value into the register, thus bridging between
2930 assign_parm_find_data_types and expand_expr_real_1. */
2931
2932 equiv_stack_parm = data->stack_parm;
2933 validated_mem = validize_mem (data->entry_parm);
2934
2935 need_conversion = (data->nominal_mode != data->passed_mode
2936 || promoted_nominal_mode != data->promoted_mode);
2937 moved = false;
2938
2939 if (need_conversion
2940 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2941 && data->nominal_mode == data->passed_mode
2942 && data->nominal_mode == GET_MODE (data->entry_parm))
2943 {
2944 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2945 mode, by the caller. We now have to convert it to
2946 NOMINAL_MODE, if different. However, PARMREG may be in
2947 a different mode than NOMINAL_MODE if it is being stored
2948 promoted.
2949
2950 If ENTRY_PARM is a hard register, it might be in a register
2951 not valid for operating in its mode (e.g., an odd-numbered
2952 register for a DFmode). In that case, moves are the only
2953 thing valid, so we can't do a convert from there. This
2954 occurs when the calling sequence allow such misaligned
2955 usages.
2956
2957 In addition, the conversion may involve a call, which could
2958 clobber parameters which haven't been copied to pseudo
2959 registers yet.
2960
2961 First, we try to emit an insn which performs the necessary
2962 conversion. We verify that this insn does not clobber any
2963 hard registers. */
2964
2965 enum insn_code icode;
2966 rtx op0, op1;
2967
2968 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
2969 unsignedp);
2970
2971 op0 = parmreg;
2972 op1 = validated_mem;
2973 if (icode != CODE_FOR_nothing
2974 && insn_operand_matches (icode, 0, op0)
2975 && insn_operand_matches (icode, 1, op1))
2976 {
2977 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
2978 rtx insn, insns;
2979 HARD_REG_SET hardregs;
2980
2981 start_sequence ();
2982 insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
2983 data->passed_mode, unsignedp);
2984 emit_insn (insn);
2985 insns = get_insns ();
2986
2987 moved = true;
2988 CLEAR_HARD_REG_SET (hardregs);
2989 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
2990 {
2991 if (INSN_P (insn))
2992 note_stores (PATTERN (insn), record_hard_reg_sets,
2993 &hardregs);
2994 if (!hard_reg_set_empty_p (hardregs))
2995 moved = false;
2996 }
2997
2998 end_sequence ();
2999
3000 if (moved)
3001 {
3002 emit_insn (insns);
3003 if (equiv_stack_parm != NULL_RTX)
3004 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3005 equiv_stack_parm);
3006 }
3007 }
3008 }
3009
3010 if (moved)
3011 /* Nothing to do. */
3012 ;
3013 else if (need_conversion)
3014 {
3015 /* We did not have an insn to convert directly, or the sequence
3016 generated appeared unsafe. We must first copy the parm to a
3017 pseudo reg, and save the conversion until after all
3018 parameters have been moved. */
3019
3020 int save_tree_used;
3021 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3022
3023 emit_move_insn (tempreg, validated_mem);
3024
3025 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3026 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3027
3028 if (GET_CODE (tempreg) == SUBREG
3029 && GET_MODE (tempreg) == data->nominal_mode
3030 && REG_P (SUBREG_REG (tempreg))
3031 && data->nominal_mode == data->passed_mode
3032 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3033 && GET_MODE_SIZE (GET_MODE (tempreg))
3034 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3035 {
3036 /* The argument is already sign/zero extended, so note it
3037 into the subreg. */
3038 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3039 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3040 }
3041
3042 /* TREE_USED gets set erroneously during expand_assignment. */
3043 save_tree_used = TREE_USED (parm);
3044 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3045 TREE_USED (parm) = save_tree_used;
3046 all->first_conversion_insn = get_insns ();
3047 all->last_conversion_insn = get_last_insn ();
3048 end_sequence ();
3049
3050 did_conversion = true;
3051 }
3052 else
3053 emit_move_insn (parmreg, validated_mem);
3054
3055 /* If we were passed a pointer but the actual value can safely live
3056 in a register, put it in one. */
3057 if (data->passed_pointer
3058 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3059 /* If by-reference argument was promoted, demote it. */
3060 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3061 || use_register_for_decl (parm)))
3062 {
3063 /* We can't use nominal_mode, because it will have been set to
3064 Pmode above. We must use the actual mode of the parm. */
3065 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3066 mark_user_reg (parmreg);
3067
3068 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3069 {
3070 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3071 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3072
3073 push_to_sequence2 (all->first_conversion_insn,
3074 all->last_conversion_insn);
3075 emit_move_insn (tempreg, DECL_RTL (parm));
3076 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3077 emit_move_insn (parmreg, tempreg);
3078 all->first_conversion_insn = get_insns ();
3079 all->last_conversion_insn = get_last_insn ();
3080 end_sequence ();
3081
3082 did_conversion = true;
3083 }
3084 else
3085 emit_move_insn (parmreg, DECL_RTL (parm));
3086
3087 SET_DECL_RTL (parm, parmreg);
3088
3089 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3090 now the parm. */
3091 data->stack_parm = NULL;
3092 }
3093
3094 /* Mark the register as eliminable if we did no conversion and it was
3095 copied from memory at a fixed offset, and the arg pointer was not
3096 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3097 offset formed an invalid address, such memory-equivalences as we
3098 make here would screw up life analysis for it. */
3099 if (data->nominal_mode == data->passed_mode
3100 && !did_conversion
3101 && data->stack_parm != 0
3102 && MEM_P (data->stack_parm)
3103 && data->locate.offset.var == 0
3104 && reg_mentioned_p (virtual_incoming_args_rtx,
3105 XEXP (data->stack_parm, 0)))
3106 {
3107 rtx linsn = get_last_insn ();
3108 rtx sinsn, set;
3109
3110 /* Mark complex types separately. */
3111 if (GET_CODE (parmreg) == CONCAT)
3112 {
3113 enum machine_mode submode
3114 = GET_MODE_INNER (GET_MODE (parmreg));
3115 int regnor = REGNO (XEXP (parmreg, 0));
3116 int regnoi = REGNO (XEXP (parmreg, 1));
3117 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3118 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3119 GET_MODE_SIZE (submode));
3120
3121 /* Scan backwards for the set of the real and
3122 imaginary parts. */
3123 for (sinsn = linsn; sinsn != 0;
3124 sinsn = prev_nonnote_insn (sinsn))
3125 {
3126 set = single_set (sinsn);
3127 if (set == 0)
3128 continue;
3129
3130 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3131 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3132 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3133 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3134 }
3135 }
3136 else
3137 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3138 }
3139
3140 /* For pointer data type, suggest pointer register. */
3141 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3142 mark_reg_pointer (parmreg,
3143 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3144 }
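/* For illustration, on a target that promotes small integer modes: a
   `short' parameter can have nominal_mode HImode but a
   promoted_nominal_mode of SImode, so the pseudo allocated above is
   SImode and either the extend insn or the deferred conversion sequence
   bridges the two; when no conversion was needed and the value came from
   a fixed stack slot, the REG_EQUIV note lets reload eliminate the copy
   entirely.  */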
3145
3146 /* A subroutine of assign_parms. Allocate stack space to hold the current
3147 parameter. Get it there. Perform all ABI specified conversions. */
3148
3149 static void
3150 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3151 struct assign_parm_data_one *data)
3152 {
3153 /* Value must be stored in the stack slot STACK_PARM during function
3154 execution. */
3155 bool to_conversion = false;
3156
3157 assign_parm_remove_parallels (data);
3158
3159 if (data->promoted_mode != data->nominal_mode)
3160 {
3161 /* Conversion is required. */
3162 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3163
3164 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3165
3166 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3167 to_conversion = true;
3168
3169 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3170 TYPE_UNSIGNED (TREE_TYPE (parm)));
3171
3172 if (data->stack_parm)
3173 {
3174 int offset = subreg_lowpart_offset (data->nominal_mode,
3175 GET_MODE (data->stack_parm));
3176 /* ??? This may need a big-endian conversion on sparc64. */
3177 data->stack_parm
3178 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3179 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3180 set_mem_offset (data->stack_parm,
3181 MEM_OFFSET (data->stack_parm) + offset);
3182 }
3183 }
3184
3185 if (data->entry_parm != data->stack_parm)
3186 {
3187 rtx src, dest;
3188
3189 if (data->stack_parm == 0)
3190 {
3191 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3192 GET_MODE (data->entry_parm),
3193 TYPE_ALIGN (data->passed_type));
3194 data->stack_parm
3195 = assign_stack_local (GET_MODE (data->entry_parm),
3196 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3197 align);
3198 set_mem_attributes (data->stack_parm, parm, 1);
3199 }
3200
3201 dest = validize_mem (data->stack_parm);
3202 src = validize_mem (data->entry_parm);
3203
3204 if (MEM_P (src))
3205 {
3206 /* Use a block move to handle potentially misaligned entry_parm. */
3207 if (!to_conversion)
3208 push_to_sequence2 (all->first_conversion_insn,
3209 all->last_conversion_insn);
3210 to_conversion = true;
3211
3212 emit_block_move (dest, src,
3213 GEN_INT (int_size_in_bytes (data->passed_type)),
3214 BLOCK_OP_NORMAL);
3215 }
3216 else
3217 emit_move_insn (dest, src);
3218 }
3219
3220 if (to_conversion)
3221 {
3222 all->first_conversion_insn = get_insns ();
3223 all->last_conversion_insn = get_last_insn ();
3224 end_sequence ();
3225 }
3226
3227 SET_DECL_RTL (parm, data->stack_parm);
3228 }
3229
3230 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3231 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3232
3233 static void
3234 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3235 VEC(tree, heap) *fnargs)
3236 {
3237 tree parm;
3238 tree orig_fnargs = all->orig_fnargs;
3239 unsigned i = 0;
3240
3241 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3242 {
3243 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3244 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3245 {
3246 rtx tmp, real, imag;
3247 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3248
3249 real = DECL_RTL (VEC_index (tree, fnargs, i));
3250 imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
3251 if (inner != GET_MODE (real))
3252 {
3253 real = gen_lowpart_SUBREG (inner, real);
3254 imag = gen_lowpart_SUBREG (inner, imag);
3255 }
3256
3257 if (TREE_ADDRESSABLE (parm))
3258 {
3259 rtx rmem, imem;
3260 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3261 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3262 DECL_MODE (parm),
3263 TYPE_ALIGN (TREE_TYPE (parm)));
3264
3265 /* split_complex_arg put the real and imag parts in
3266 pseudos. Move them to memory. */
3267 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3268 set_mem_attributes (tmp, parm, 1);
3269 rmem = adjust_address_nv (tmp, inner, 0);
3270 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3271 push_to_sequence2 (all->first_conversion_insn,
3272 all->last_conversion_insn);
3273 emit_move_insn (rmem, real);
3274 emit_move_insn (imem, imag);
3275 all->first_conversion_insn = get_insns ();
3276 all->last_conversion_insn = get_last_insn ();
3277 end_sequence ();
3278 }
3279 else
3280 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3281 SET_DECL_RTL (parm, tmp);
3282
3283 real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
3284 imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
3285 if (inner != GET_MODE (real))
3286 {
3287 real = gen_lowpart_SUBREG (inner, real);
3288 imag = gen_lowpart_SUBREG (inner, imag);
3289 }
3290 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3291 set_decl_incoming_rtl (parm, tmp, false);
3292 i++;
3293 }
3294 }
3295 }
3296
3297 /* Assign RTL expressions to the function's parameters. This may involve
3298 copying them into registers and using those registers as the DECL_RTL. */
3299
3300 static void
3301 assign_parms (tree fndecl)
3302 {
3303 struct assign_parm_data_all all;
3304 tree parm;
3305 VEC(tree, heap) *fnargs;
3306 unsigned i;
3307
3308 crtl->args.internal_arg_pointer
3309 = targetm.calls.internal_arg_pointer ();
3310
3311 assign_parms_initialize_all (&all);
3312 fnargs = assign_parms_augmented_arg_list (&all);
3313
3314 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3315 {
3316 struct assign_parm_data_one data;
3317
3318 /* Extract the type of PARM; adjust it according to ABI. */
3319 assign_parm_find_data_types (&all, parm, &data);
3320
3321 /* Early out for errors and void parameters. */
3322 if (data.passed_mode == VOIDmode)
3323 {
3324 SET_DECL_RTL (parm, const0_rtx);
3325 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3326 continue;
3327 }
3328
3329 /* Estimate stack alignment from parameter alignment. */
3330 if (SUPPORTS_STACK_ALIGNMENT)
3331 {
3332 unsigned int align
3333 = targetm.calls.function_arg_boundary (data.promoted_mode,
3334 data.passed_type);
3335 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3336 align);
3337 if (TYPE_ALIGN (data.nominal_type) > align)
3338 align = MINIMUM_ALIGNMENT (data.nominal_type,
3339 TYPE_MODE (data.nominal_type),
3340 TYPE_ALIGN (data.nominal_type));
3341 if (crtl->stack_alignment_estimated < align)
3342 {
3343 gcc_assert (!crtl->stack_realign_processed);
3344 crtl->stack_alignment_estimated = align;
3345 }
3346 }
3347
3348 if (cfun->stdarg && !DECL_CHAIN (parm))
3349 assign_parms_setup_varargs (&all, &data, false);
3350
3351 /* Find out where the parameter arrives in this function. */
3352 assign_parm_find_entry_rtl (&all, &data);
3353
3354 /* Find out where stack space for this parameter might be. */
3355 if (assign_parm_is_stack_parm (&all, &data))
3356 {
3357 assign_parm_find_stack_rtl (parm, &data);
3358 assign_parm_adjust_entry_rtl (&data);
3359 }
3360
3361 /* Record permanently how this parm was passed. */
3362 if (data.passed_pointer)
3363 {
3364 rtx incoming_rtl
3365 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3366 data.entry_parm);
3367 set_decl_incoming_rtl (parm, incoming_rtl, true);
3368 }
3369 else
3370 set_decl_incoming_rtl (parm, data.entry_parm, false);
3371
3372 /* Update info on where next arg arrives in registers. */
3373 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3374 data.passed_type, data.named_arg);
3375
3376 assign_parm_adjust_stack_rtl (&data);
3377
3378 if (assign_parm_setup_block_p (&data))
3379 assign_parm_setup_block (&all, parm, &data);
3380 else if (data.passed_pointer || use_register_for_decl (parm))
3381 assign_parm_setup_reg (&all, parm, &data);
3382 else
3383 assign_parm_setup_stack (&all, parm, &data);
3384 }
3385
3386 if (targetm.calls.split_complex_arg)
3387 assign_parms_unsplit_complex (&all, fnargs);
3388
3389 VEC_free (tree, heap, fnargs);
3390
3391 /* Output all parameter conversion instructions (possibly including calls)
3392 now that all parameters have been copied out of hard registers. */
3393 emit_insn (all.first_conversion_insn);
3394
3395 /* Estimate reload stack alignment from scalar return mode. */
3396 if (SUPPORTS_STACK_ALIGNMENT)
3397 {
3398 if (DECL_RESULT (fndecl))
3399 {
3400 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3401 enum machine_mode mode = TYPE_MODE (type);
3402
3403 if (mode != BLKmode
3404 && mode != VOIDmode
3405 && !AGGREGATE_TYPE_P (type))
3406 {
3407 unsigned int align = GET_MODE_ALIGNMENT (mode);
3408 if (crtl->stack_alignment_estimated < align)
3409 {
3410 gcc_assert (!crtl->stack_realign_processed);
3411 crtl->stack_alignment_estimated = align;
3412 }
3413 }
3414 }
3415 }
3416
3417 /* If we are receiving a struct value address as the first argument, set up
3418 the RTL for the function result. As this might require code to convert
3419 the transmitted address to Pmode, we do this here to ensure that possible
3420 preliminary conversions of the address have been emitted already. */
3421 if (all.function_result_decl)
3422 {
3423 tree result = DECL_RESULT (current_function_decl);
3424 rtx addr = DECL_RTL (all.function_result_decl);
3425 rtx x;
3426
3427 if (DECL_BY_REFERENCE (result))
3428 {
3429 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3430 x = addr;
3431 }
3432 else
3433 {
3434 SET_DECL_VALUE_EXPR (result,
3435 build1 (INDIRECT_REF, TREE_TYPE (result),
3436 all.function_result_decl));
3437 addr = convert_memory_address (Pmode, addr);
3438 x = gen_rtx_MEM (DECL_MODE (result), addr);
3439 set_mem_attributes (x, result, 1);
3440 }
3441
3442 DECL_HAS_VALUE_EXPR_P (result) = 1;
3443
3444 SET_DECL_RTL (result, x);
3445 }
3446
3447 /* We have aligned all the args, so add space for the pretend args. */
3448 crtl->args.pretend_args_size = all.pretend_args_size;
3449 all.stack_args_size.constant += all.extra_pretend_bytes;
3450 crtl->args.size = all.stack_args_size.constant;
3451
3452 /* Adjust function incoming argument size for alignment and
3453 minimum length. */
3454
3455 #ifdef REG_PARM_STACK_SPACE
3456 crtl->args.size = MAX (crtl->args.size,
3457 REG_PARM_STACK_SPACE (fndecl));
3458 #endif
3459
3460 crtl->args.size = CEIL_ROUND (crtl->args.size,
3461 PARM_BOUNDARY / BITS_PER_UNIT);
3462
3463 #ifdef ARGS_GROW_DOWNWARD
3464 crtl->args.arg_offset_rtx
3465 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3466 : expand_expr (size_diffop (all.stack_args_size.var,
3467 size_int (-all.stack_args_size.constant)),
3468 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3469 #else
3470 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3471 #endif
3472
3473 /* See how many bytes, if any, of its args a function should try to pop
3474 on return. */
3475
3476 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3477 TREE_TYPE (fndecl),
3478 crtl->args.size);
3479
3480 /* For a stdarg.h function, save info about
3481 regs and stack space used by the named args. */
3482
3483 crtl->args.info = all.args_so_far_v;
3484
3485 /* Set the rtx used for the function return value. Put this in its
3486 own variable so any optimizers that need this information don't have
3487 to include tree.h. Do this here so it gets done when an inlined
3488 function gets output. */
3489
3490 crtl->return_rtx
3491 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3492 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3493
3494 /* If scalar return value was computed in a pseudo-reg, or was a named
3495 return value that got dumped to the stack, copy that to the hard
3496 return register. */
3497 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3498 {
3499 tree decl_result = DECL_RESULT (fndecl);
3500 rtx decl_rtl = DECL_RTL (decl_result);
3501
3502 if (REG_P (decl_rtl)
3503 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3504 : DECL_REGISTER (decl_result))
3505 {
3506 rtx real_decl_rtl;
3507
3508 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3509 fndecl, true);
3510 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3511 /* The delay slot scheduler assumes that crtl->return_rtx
3512 holds the hard register containing the return value, not a
3513 temporary pseudo. */
3514 crtl->return_rtx = real_decl_rtl;
3515 }
3516 }
3517 }
3518
3519 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3520 For all seen types, gimplify their sizes. */
3521
3522 static tree
3523 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3524 {
3525 tree t = *tp;
3526
3527 *walk_subtrees = 0;
3528 if (TYPE_P (t))
3529 {
3530 if (POINTER_TYPE_P (t))
3531 *walk_subtrees = 1;
3532 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3533 && !TYPE_SIZES_GIMPLIFIED (t))
3534 {
3535 gimplify_type_sizes (t, (gimple_seq *) data);
3536 *walk_subtrees = 1;
3537 }
3538 }
3539
3540 return NULL;
3541 }
3542
3543 /* Gimplify the parameter list for current_function_decl. This involves
3544 evaluating SAVE_EXPRs of variable-sized parameters and generating code
3545 to implement callee-copied reference parameters. Returns a sequence of
3546 statements to add to the beginning of the function. */
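/* For instance, on a hypothetical ABI where large aggregates are passed
   by reference but copied by the callee, a parameter declared
   `struct big s' arrives as a pointer; the loop below builds a local
   copy (a plain temporary, or an alloca for variable-sized types) and
   makes the parm's DECL_VALUE_EXPR refer to that copy. */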
3547
3548 gimple_seq
3549 gimplify_parameters (void)
3550 {
3551 struct assign_parm_data_all all;
3552 tree parm;
3553 gimple_seq stmts = NULL;
3554 VEC(tree, heap) *fnargs;
3555 unsigned i;
3556
3557 assign_parms_initialize_all (&all);
3558 fnargs = assign_parms_augmented_arg_list (&all);
3559
3560 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3561 {
3562 struct assign_parm_data_one data;
3563
3564 /* Extract the type of PARM; adjust it according to ABI. */
3565 assign_parm_find_data_types (&all, parm, &data);
3566
3567 /* Early out for errors and void parameters. */
3568 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3569 continue;
3570
3571 /* Update info on where next arg arrives in registers. */
3572 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3573 data.passed_type, data.named_arg);
3574
3575 /* ??? Once upon a time variable_size stuffed parameter list
3576 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3577 turned out to be less than manageable in the gimple world.
3578 Now we have to hunt them down ourselves. */
3579 walk_tree_without_duplicates (&data.passed_type,
3580 gimplify_parm_type, &stmts);
3581
3582 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3583 {
3584 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3585 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3586 }
3587
3588 if (data.passed_pointer)
3589 {
3590 tree type = TREE_TYPE (data.passed_type);
3591 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3592 type, data.named_arg))
3593 {
3594 tree local, t;
3595
3596 /* For constant-sized objects, this is trivial; for
3597 variable-sized objects, we have to play games. */
3598 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3599 && !(flag_stack_check == GENERIC_STACK_CHECK
3600 && compare_tree_int (DECL_SIZE_UNIT (parm),
3601 STACK_CHECK_MAX_VAR_SIZE) > 0))
3602 {
3603 local = create_tmp_var (type, get_name (parm));
3604 DECL_IGNORED_P (local) = 0;
3605 /* If PARM was addressable, move that flag over
3606 to the local copy, as its address will be taken,
3607 not the PARM's. Keep the parm's address-taken flag set,
3608 as we'll query that flag during gimplification. */
3609 if (TREE_ADDRESSABLE (parm))
3610 TREE_ADDRESSABLE (local) = 1;
3611 else if (TREE_CODE (type) == COMPLEX_TYPE
3612 || TREE_CODE (type) == VECTOR_TYPE)
3613 DECL_GIMPLE_REG_P (local) = 1;
3614 }
3615 else
3616 {
3617 tree ptr_type, addr;
3618
3619 ptr_type = build_pointer_type (type);
3620 addr = create_tmp_reg (ptr_type, get_name (parm));
3621 DECL_IGNORED_P (addr) = 0;
3622 local = build_fold_indirect_ref (addr);
3623
3624 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3625 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3626 size_int (DECL_ALIGN (parm)));
3627
3628 /* The call has been built for a variable-sized object. */
3629 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3630 t = fold_convert (ptr_type, t);
3631 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3632 gimplify_and_add (t, &stmts);
3633 }
3634
3635 gimplify_assign (local, parm, &stmts);
3636
3637 SET_DECL_VALUE_EXPR (parm, local);
3638 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3639 }
3640 }
3641 }
3642
3643 VEC_free (tree, heap, fnargs);
3644
3645 return stmts;
3646 }
3647 \f
3648 /* Compute the size and offset from the start of the stacked arguments for a
3649 parm passed in mode PASSED_MODE and with type TYPE.
3650
3651 INITIAL_OFFSET_PTR points to the current offset into the stacked
3652 arguments.
3653
3654 The starting offset and size for this parm are returned in
3655 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3656 nonzero, the offset is that of the stack slot, which is returned in
3657 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3658 padding required from the initial offset ptr to the stack slot.
3659
3660 IN_REGS is nonzero if the argument will be passed in registers. It will
3661 never be set if REG_PARM_STACK_SPACE is not defined.
3662
3663 FNDECL is the function in which the argument was defined.
3664
3665 There are two types of rounding that are done. The first, controlled by
3666 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3667 argument list to be aligned to the specific boundary (in bits). This
3668 rounding affects the initial and starting offsets, but not the argument
3669 size.
3670
3671 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3672 optionally rounds the size of the parm to PARM_BOUNDARY. The
3673 initial offset is not affected by this rounding, while the size always
3674 is and the starting offset may be. */
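/* A worked example with purely hypothetical target values: for a 6-byte
   argument with a 64-bit TARGET_FUNCTION_ARG_BOUNDARY and a 32-bit
   PARM_BOUNDARY, the first rounding moves the starting offset up to the
   next 8-byte boundary, while the second (if padding is requested)
   grows LOCATE->SIZE from 6 to 8 bytes without touching the initial
   offset. */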
3675
3676 /* LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
3677 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3678 callers pass in the total size of args so far as
3679 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3680
3681 void
3682 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3683 int partial, tree fndecl ATTRIBUTE_UNUSED,
3684 struct args_size *initial_offset_ptr,
3685 struct locate_and_pad_arg_data *locate)
3686 {
3687 tree sizetree;
3688 enum direction where_pad;
3689 unsigned int boundary, round_boundary;
3690 int reg_parm_stack_space = 0;
3691 int part_size_in_regs;
3692
3693 #ifdef REG_PARM_STACK_SPACE
3694 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3695
3696 /* If we have found a stack parm before we reach the end of the
3697 area reserved for registers, skip that area. */
3698 if (! in_regs)
3699 {
3700 if (reg_parm_stack_space > 0)
3701 {
3702 if (initial_offset_ptr->var)
3703 {
3704 initial_offset_ptr->var
3705 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3706 ssize_int (reg_parm_stack_space));
3707 initial_offset_ptr->constant = 0;
3708 }
3709 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3710 initial_offset_ptr->constant = reg_parm_stack_space;
3711 }
3712 }
3713 #endif /* REG_PARM_STACK_SPACE */
3714
3715 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3716
3717 sizetree
3718 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3719 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3720 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3721 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3722 type);
3723 locate->where_pad = where_pad;
3724
3725 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3726 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3727 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3728
3729 locate->boundary = boundary;
3730
3731 if (SUPPORTS_STACK_ALIGNMENT)
3732 {
3733 /* stack_alignment_estimated can't change after stack has been
3734 realigned. */
3735 if (crtl->stack_alignment_estimated < boundary)
3736 {
3737 if (!crtl->stack_realign_processed)
3738 crtl->stack_alignment_estimated = boundary;
3739 else
3740 {
3741 /* If stack is realigned and stack alignment value
3742 hasn't been finalized, it is OK not to increase
3743 stack_alignment_estimated. The bigger alignment
3744 requirement is recorded in stack_alignment_needed
3745 below. */
3746 gcc_assert (!crtl->stack_realign_finalized
3747 && crtl->stack_realign_needed);
3748 }
3749 }
3750 }
3751
3752 /* Remember if the outgoing parameter requires extra alignment on the
3753 calling function side. */
3754 if (crtl->stack_alignment_needed < boundary)
3755 crtl->stack_alignment_needed = boundary;
3756 if (crtl->preferred_stack_boundary < boundary)
3757 crtl->preferred_stack_boundary = boundary;
3758
3759 #ifdef ARGS_GROW_DOWNWARD
3760 locate->slot_offset.constant = -initial_offset_ptr->constant;
3761 if (initial_offset_ptr->var)
3762 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3763 initial_offset_ptr->var);
3764
3765 {
3766 tree s2 = sizetree;
3767 if (where_pad != none
3768 && (!host_integerp (sizetree, 1)
3769 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3770 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
3771 SUB_PARM_SIZE (locate->slot_offset, s2);
3772 }
3773
3774 locate->slot_offset.constant += part_size_in_regs;
3775
3776 if (!in_regs
3777 #ifdef REG_PARM_STACK_SPACE
3778 || REG_PARM_STACK_SPACE (fndecl) > 0
3779 #endif
3780 )
3781 pad_to_arg_alignment (&locate->slot_offset, boundary,
3782 &locate->alignment_pad);
3783
3784 locate->size.constant = (-initial_offset_ptr->constant
3785 - locate->slot_offset.constant);
3786 if (initial_offset_ptr->var)
3787 locate->size.var = size_binop (MINUS_EXPR,
3788 size_binop (MINUS_EXPR,
3789 ssize_int (0),
3790 initial_offset_ptr->var),
3791 locate->slot_offset.var);
3792
3793 /* Pad_below needs the pre-rounded size to know how much to pad
3794 below. */
3795 locate->offset = locate->slot_offset;
3796 if (where_pad == downward)
3797 pad_below (&locate->offset, passed_mode, sizetree);
3798
3799 #else /* !ARGS_GROW_DOWNWARD */
3800 if (!in_regs
3801 #ifdef REG_PARM_STACK_SPACE
3802 || REG_PARM_STACK_SPACE (fndecl) > 0
3803 #endif
3804 )
3805 pad_to_arg_alignment (initial_offset_ptr, boundary,
3806 &locate->alignment_pad);
3807 locate->slot_offset = *initial_offset_ptr;
3808
3809 #ifdef PUSH_ROUNDING
3810 if (passed_mode != BLKmode)
3811 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3812 #endif
3813
3814 /* Pad_below needs the pre-rounded size to know how much to pad below
3815 so this must be done before rounding up. */
3816 locate->offset = locate->slot_offset;
3817 if (where_pad == downward)
3818 pad_below (&locate->offset, passed_mode, sizetree);
3819
3820 if (where_pad != none
3821 && (!host_integerp (sizetree, 1)
3822 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
3823 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
3824
3825 ADD_PARM_SIZE (locate->size, sizetree);
3826
3827 locate->size.constant -= part_size_in_regs;
3828 #endif /* ARGS_GROW_DOWNWARD */
3829
3830 #ifdef FUNCTION_ARG_OFFSET
3831 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3832 #endif
3833 }
3834
3835 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3836 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
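/* E.g. (hypothetical figures, ignoring STACK_POINTER_OFFSET): with a
   64-bit BOUNDARY a constant offset of 20 is rounded up to 24 (down,
   when args grow downward), and ALIGNMENT_PAD records the 4 bytes of
   padding when BOUNDARY exceeds PARM_BOUNDARY. */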
3837
3838 static void
3839 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3840 struct args_size *alignment_pad)
3841 {
3842 tree save_var = NULL_TREE;
3843 HOST_WIDE_INT save_constant = 0;
3844 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3845 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3846
3847 #ifdef SPARC_STACK_BOUNDARY_HACK
3848 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3849 the real alignment of %sp. However, when it does this, the
3850 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3851 if (SPARC_STACK_BOUNDARY_HACK)
3852 sp_offset = 0;
3853 #endif
3854
3855 if (boundary > PARM_BOUNDARY)
3856 {
3857 save_var = offset_ptr->var;
3858 save_constant = offset_ptr->constant;
3859 }
3860
3861 alignment_pad->var = NULL_TREE;
3862 alignment_pad->constant = 0;
3863
3864 if (boundary > BITS_PER_UNIT)
3865 {
3866 if (offset_ptr->var)
3867 {
3868 tree sp_offset_tree = ssize_int (sp_offset);
3869 tree offset = size_binop (PLUS_EXPR,
3870 ARGS_SIZE_TREE (*offset_ptr),
3871 sp_offset_tree);
3872 #ifdef ARGS_GROW_DOWNWARD
3873 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3874 #else
3875 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3876 #endif
3877
3878 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3879 /* ARGS_SIZE_TREE includes constant term. */
3880 offset_ptr->constant = 0;
3881 if (boundary > PARM_BOUNDARY)
3882 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3883 save_var);
3884 }
3885 else
3886 {
3887 offset_ptr->constant = -sp_offset +
3888 #ifdef ARGS_GROW_DOWNWARD
3889 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3890 #else
3891 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3892 #endif
3893 if (boundary > PARM_BOUNDARY)
3894 alignment_pad->constant = offset_ptr->constant - save_constant;
3895 }
3896 }
3897 }
3898
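/* Advance *OFFSET_PTR past any padding that goes below the argument
   value itself. As a purely illustrative example: a 2-byte HImode
   value with a 32-bit PARM_BOUNDARY occupies a 4-byte slot, so the
   offset is advanced by the 2 bytes of padding placed below it. */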
3899 static void
3900 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3901 {
3902 if (passed_mode != BLKmode)
3903 {
3904 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3905 offset_ptr->constant
3906 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3907 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3908 - GET_MODE_SIZE (passed_mode));
3909 }
3910 else
3911 {
3912 if (TREE_CODE (sizetree) != INTEGER_CST
3913 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3914 {
3915 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
3916 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3917 /* Add it in. */
3918 ADD_PARM_SIZE (*offset_ptr, s2);
3919 SUB_PARM_SIZE (*offset_ptr, sizetree);
3920 }
3921 }
3922 }
3923 \f
3924
3925 /* True if register REGNO was alive at a place where `setjmp' was
3926 called and was set more than once or is an argument. Such regs may
3927 be clobbered by `longjmp'. */
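/* For example, a local counter kept in a register and incremented after
   the setjmp call may not hold its expected value when a longjmp
   returns to the setjmp point; such registers are what we look for. */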
3928
3929 static bool
3930 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3931 {
3932 /* There appear to be cases where some local vars never reach the
3933 backend but have bogus regnos. */
3934 if (regno >= max_reg_num ())
3935 return false;
3936
3937 return ((REG_N_SETS (regno) > 1
3938 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3939 && REGNO_REG_SET_P (setjmp_crosses, regno));
3940 }
3941
3942 /* Walk the tree of blocks describing the binding levels within a
3943 function and warn about variables that might be killed by setjmp or
3944 vfork. This is done after flow analysis and before register
3945 allocation, since register allocation will map the pseudo-regs to hard
3946 regs. */
3947
3948 static void
3949 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3950 {
3951 tree decl, sub;
3952
3953 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
3954 {
3955 if (TREE_CODE (decl) == VAR_DECL
3956 && DECL_RTL_SET_P (decl)
3957 && REG_P (DECL_RTL (decl))
3958 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3959 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3960 " %<longjmp%> or %<vfork%>", decl);
3961 }
3962
3963 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3964 setjmp_vars_warning (setjmp_crosses, sub);
3965 }
3966
3967 /* Do the appropriate part of setjmp_vars_warning
3968 but for arguments instead of local variables. */
3969
3970 static void
3971 setjmp_args_warning (bitmap setjmp_crosses)
3972 {
3973 tree decl;
3974 for (decl = DECL_ARGUMENTS (current_function_decl);
3975 decl; decl = DECL_CHAIN (decl))
3976 if (DECL_RTL (decl) != 0
3977 && REG_P (DECL_RTL (decl))
3978 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3979 warning (OPT_Wclobbered,
3980 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3981 decl);
3982 }
3983
3984 /* Generate warning messages for variables live across setjmp. */
3985
3986 void
3987 generate_setjmp_warnings (void)
3988 {
3989 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3990
3991 if (n_basic_blocks == NUM_FIXED_BLOCKS
3992 || bitmap_empty_p (setjmp_crosses))
3993 return;
3994
3995 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3996 setjmp_args_warning (setjmp_crosses);
3997 }
3998
3999 \f
4000 /* Reverse the order of elements in the fragment chain T of blocks,
4001 and return the new head of the chain (old last element).
4002 In addition, clear BLOCK_SAME_RANGE flags when needed
4003 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4004 its super fragment origin. */
4005
4006 static tree
4007 block_fragments_nreverse (tree t)
4008 {
4009 tree prev = 0, block, next, prev_super = 0;
4010 tree super = BLOCK_SUPERCONTEXT (t);
4011 if (BLOCK_FRAGMENT_ORIGIN (super))
4012 super = BLOCK_FRAGMENT_ORIGIN (super);
4013 for (block = t; block; block = next)
4014 {
4015 next = BLOCK_FRAGMENT_CHAIN (block);
4016 BLOCK_FRAGMENT_CHAIN (block) = prev;
4017 if ((prev && !BLOCK_SAME_RANGE (prev))
4018 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4019 != prev_super))
4020 BLOCK_SAME_RANGE (block) = 0;
4021 prev_super = BLOCK_SUPERCONTEXT (block);
4022 BLOCK_SUPERCONTEXT (block) = super;
4023 prev = block;
4024 }
4025 t = BLOCK_FRAGMENT_ORIGIN (t);
4026 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4027 != prev_super)
4028 BLOCK_SAME_RANGE (t) = 0;
4029 BLOCK_SUPERCONTEXT (t) = super;
4030 return prev;
4031 }
4032
4033 /* Reverse the order of elements in the chain T of blocks,
4034 and return the new head of the chain (old last element).
4035 Also do the same on subblocks and reverse the order of elements
4036 in BLOCK_FRAGMENT_CHAIN as well. */
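/* For example, a chain A -> B -> C comes back as C -> B -> A, with the
   same reversal applied recursively to each block's subblocks and
   fragment chain. */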
4037
4038 static tree
4039 blocks_nreverse_all (tree t)
4040 {
4041 tree prev = 0, block, next;
4042 for (block = t; block; block = next)
4043 {
4044 next = BLOCK_CHAIN (block);
4045 BLOCK_CHAIN (block) = prev;
4046 if (BLOCK_FRAGMENT_CHAIN (block)
4047 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4048 {
4049 BLOCK_FRAGMENT_CHAIN (block)
4050 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4051 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4052 BLOCK_SAME_RANGE (block) = 0;
4053 }
4054 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4055 prev = block;
4056 }
4057 return prev;
4058 }
4059
4060
4061 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4062 and create duplicate blocks. */
4063 /* ??? Need an option to either create block fragments or to create
4064 abstract origin duplicates of a source block. It really depends
4065 on what optimization has been performed. */
4066
4067 void
4068 reorder_blocks (void)
4069 {
4070 tree block = DECL_INITIAL (current_function_decl);
4071 VEC(tree,heap) *block_stack;
4072
4073 if (block == NULL_TREE)
4074 return;
4075
4076 block_stack = VEC_alloc (tree, heap, 10);
4077
4078 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4079 clear_block_marks (block);
4080
4081 /* Prune the old trees away, so that they don't get in the way. */
4082 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4083 BLOCK_CHAIN (block) = NULL_TREE;
4084
4085 /* Recreate the block tree from the note nesting. */
4086 reorder_blocks_1 (get_insns (), block, &block_stack);
4087 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4088
4089 VEC_free (tree, heap, block_stack);
4090 }
4091
4092 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4093
4094 void
4095 clear_block_marks (tree block)
4096 {
4097 while (block)
4098 {
4099 TREE_ASM_WRITTEN (block) = 0;
4100 clear_block_marks (BLOCK_SUBBLOCKS (block));
4101 block = BLOCK_CHAIN (block);
4102 }
4103 }
4104
4105 static void
4106 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
4107 {
4108 rtx insn;
4109 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4110
4111 for (insn = insns; insn; insn = NEXT_INSN (insn))
4112 {
4113 if (NOTE_P (insn))
4114 {
4115 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4116 {
4117 tree block = NOTE_BLOCK (insn);
4118 tree origin;
4119
4120 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4121 origin = block;
4122
4123 if (prev_end)
4124 BLOCK_SAME_RANGE (prev_end) = 0;
4125 prev_end = NULL_TREE;
4126
4127 /* If we have seen this block before, that means it now
4128 spans multiple address regions. Create a new fragment. */
4129 if (TREE_ASM_WRITTEN (block))
4130 {
4131 tree new_block = copy_node (block);
4132
4133 BLOCK_SAME_RANGE (new_block) = 0;
4134 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4135 BLOCK_FRAGMENT_CHAIN (new_block)
4136 = BLOCK_FRAGMENT_CHAIN (origin);
4137 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4138
4139 NOTE_BLOCK (insn) = new_block;
4140 block = new_block;
4141 }
4142
4143 if (prev_beg == current_block && prev_beg)
4144 BLOCK_SAME_RANGE (block) = 1;
4145
4146 prev_beg = origin;
4147
4148 BLOCK_SUBBLOCKS (block) = 0;
4149 TREE_ASM_WRITTEN (block) = 1;
4150 /* When there's only one block for the entire function,
4151 current_block == block and we mustn't do this, as it
4152 would cause infinite recursion. */
4153 if (block != current_block)
4154 {
4155 tree super;
4156 if (block != origin)
4157 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4158 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4159 (origin))
4160 == current_block);
4161 if (VEC_empty (tree, *p_block_stack))
4162 super = current_block;
4163 else
4164 {
4165 super = VEC_last (tree, *p_block_stack);
4166 gcc_assert (super == current_block
4167 || BLOCK_FRAGMENT_ORIGIN (super)
4168 == current_block);
4169 }
4170 BLOCK_SUPERCONTEXT (block) = super;
4171 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4172 BLOCK_SUBBLOCKS (current_block) = block;
4173 current_block = origin;
4174 }
4175 VEC_safe_push (tree, heap, *p_block_stack, block);
4176 }
4177 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4178 {
4179 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
4180 current_block = BLOCK_SUPERCONTEXT (current_block);
4181 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4182 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4183 prev_beg = NULL_TREE;
4184 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4185 ? NOTE_BLOCK (insn) : NULL_TREE;
4186 }
4187 }
4188 else
4189 {
4190 prev_beg = NULL_TREE;
4191 if (prev_end)
4192 BLOCK_SAME_RANGE (prev_end) = 0;
4193 prev_end = NULL_TREE;
4194 }
4195 }
4196 }
4197
4198 /* Reverse the order of elements in the chain T of blocks,
4199 and return the new head of the chain (old last element). */
4200
4201 tree
4202 blocks_nreverse (tree t)
4203 {
4204 tree prev = 0, block, next;
4205 for (block = t; block; block = next)
4206 {
4207 next = BLOCK_CHAIN (block);
4208 BLOCK_CHAIN (block) = prev;
4209 prev = block;
4210 }
4211 return prev;
4212 }
4213
4214 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4215 by modifying the last node in chain 1 to point to chain 2. */
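/* E.g. chaining the list A -> B onto C -> D yields A -> B -> C -> D,
   returning A; OP1's last node is modified in place. */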
4216
4217 tree
4218 block_chainon (tree op1, tree op2)
4219 {
4220 tree t1;
4221
4222 if (!op1)
4223 return op2;
4224 if (!op2)
4225 return op1;
4226
4227 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4228 continue;
4229 BLOCK_CHAIN (t1) = op2;
4230
4231 #ifdef ENABLE_TREE_CHECKING
4232 {
4233 tree t2;
4234 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4235 gcc_assert (t2 != t1);
4236 }
4237 #endif
4238
4239 return op1;
4240 }
4241
4242 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4243 non-NULL, list them all into VECTOR, in a depth-first preorder
4244 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4245 blocks. */
4246
4247 static int
4248 all_blocks (tree block, tree *vector)
4249 {
4250 int n_blocks = 0;
4251
4252 while (block)
4253 {
4254 TREE_ASM_WRITTEN (block) = 0;
4255
4256 /* Record this block. */
4257 if (vector)
4258 vector[n_blocks] = block;
4259
4260 ++n_blocks;
4261
4262 /* Record the subblocks, and their subblocks... */
4263 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4264 vector ? vector + n_blocks : 0);
4265 block = BLOCK_CHAIN (block);
4266 }
4267
4268 return n_blocks;
4269 }
4270
4271 /* Return a vector containing all the blocks rooted at BLOCK. The
4272 number of elements in the vector is stored in N_BLOCKS_P. The
4273 vector is dynamically allocated; it is the caller's responsibility
4274 to call `free' on the pointer returned. */
4275
4276 static tree *
4277 get_block_vector (tree block, int *n_blocks_p)
4278 {
4279 tree *block_vector;
4280
4281 *n_blocks_p = all_blocks (block, NULL);
4282 block_vector = XNEWVEC (tree, *n_blocks_p);
4283 all_blocks (block, block_vector);
4284
4285 return block_vector;
4286 }
4287
4288 static GTY(()) int next_block_index = 2;
4289
4290 /* Set BLOCK_NUMBER for all the blocks in FN. */
4291
4292 void
4293 number_blocks (tree fn)
4294 {
4295 int i;
4296 int n_blocks;
4297 tree *block_vector;
4298
4299 /* For SDB and XCOFF debugging output, we start numbering the blocks
4300 from 1 within each function, rather than keeping a running
4301 count. */
4302 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4303 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4304 next_block_index = 1;
4305 #endif
4306
4307 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4308
4309 /* The top-level BLOCK isn't numbered at all. */
4310 for (i = 1; i < n_blocks; ++i)
4311 /* We number the blocks from two. */
4312 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4313
4314 free (block_vector);
4315
4316 return;
4317 }
4318
4319 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4320
4321 DEBUG_FUNCTION tree
4322 debug_find_var_in_block_tree (tree var, tree block)
4323 {
4324 tree t;
4325
4326 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4327 if (t == var)
4328 return block;
4329
4330 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4331 {
4332 tree ret = debug_find_var_in_block_tree (var, t);
4333 if (ret)
4334 return ret;
4335 }
4336
4337 return NULL_TREE;
4338 }
4339 \f
4340 /* Keep track of whether we're in a dummy function context. If we are,
4341 we don't want to invoke the set_current_function hook, because we'll
4342 get into trouble if the hook calls target_reinit () recursively or
4343 when the initial initialization is not yet complete. */
4344
4345 static bool in_dummy_function;
4346
4347 /* Invoke the target hook when setting cfun. Update the optimization options
4348 if the function uses different options than the default. */
4349
4350 static void
4351 invoke_set_current_function_hook (tree fndecl)
4352 {
4353 if (!in_dummy_function)
4354 {
4355 tree opts = ((fndecl)
4356 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4357 : optimization_default_node);
4358
4359 if (!opts)
4360 opts = optimization_default_node;
4361
4362 /* Change optimization options if needed. */
4363 if (optimization_current_node != opts)
4364 {
4365 optimization_current_node = opts;
4366 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4367 }
4368
4369 targetm.set_current_function (fndecl);
4370 }
4371 }
4372
4373 /* cfun should never be set directly; use this function. */
4374
4375 void
4376 set_cfun (struct function *new_cfun)
4377 {
4378 if (cfun != new_cfun)
4379 {
4380 cfun = new_cfun;
4381 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4382 }
4383 }
4384
4385 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4386
4387 static VEC(function_p,heap) *cfun_stack;
4388
4389 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
4390
4391 void
4392 push_cfun (struct function *new_cfun)
4393 {
4394 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4395 set_cfun (new_cfun);
4396 }
4397
4398 /* Pop cfun from the stack. */
4399
4400 void
4401 pop_cfun (void)
4402 {
4403 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
4404 set_cfun (new_cfun);
4405 }
4406
4407 /* Return the current value of funcdef_no and increment it. */
4408 int
4409 get_next_funcdef_no (void)
4410 {
4411 return funcdef_no++;
4412 }
4413
4414 /* Return the current value of funcdef_no. */
4415 int
4416 get_last_funcdef_no (void)
4417 {
4418 return funcdef_no;
4419 }
4420
4421 /* Allocate a function structure for FNDECL and set its contents
4422 to the defaults. Set cfun to the newly-allocated object.
4423 Some of the helper functions invoked during initialization assume
4424 that cfun has already been set. Therefore, assign the new object
4425 directly into cfun and invoke the back end hook explicitly at the
4426 very end, rather than initializing a temporary and calling set_cfun
4427 on it.
4428
4429 ABSTRACT_P is true if this is a function that will never be seen by
4430 the middle-end. Such functions are front-end concepts (like C++
4431 function templates) that do not correspond directly to functions
4432 placed in object files. */
4433
4434 void
4435 allocate_struct_function (tree fndecl, bool abstract_p)
4436 {
4437 tree result;
4438 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4439
4440 cfun = ggc_alloc_cleared_function ();
4441
4442 init_eh_for_function ();
4443
4444 if (init_machine_status)
4445 cfun->machine = (*init_machine_status) ();
4446
4447 #ifdef OVERRIDE_ABI_FORMAT
4448 OVERRIDE_ABI_FORMAT (fndecl);
4449 #endif
4450
4451 invoke_set_current_function_hook (fndecl);
4452
4453 if (fndecl != NULL_TREE)
4454 {
4455 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4456 cfun->decl = fndecl;
4457 current_function_funcdef_no = get_next_funcdef_no ();
4458
4459 result = DECL_RESULT (fndecl);
4460 if (!abstract_p && aggregate_value_p (result, fndecl))
4461 {
4462 #ifdef PCC_STATIC_STRUCT_RETURN
4463 cfun->returns_pcc_struct = 1;
4464 #endif
4465 cfun->returns_struct = 1;
4466 }
4467
4468 cfun->stdarg = stdarg_p (fntype);
4469
4470 /* Assume all registers in stdarg functions need to be saved. */
4471 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4472 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4473
4474 /* ??? This could be set on a per-function basis by the front-end
4475 but is this worth the hassle? */
4476 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4477 }
4478 }
4479
4480 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4481 instead of just setting it. */
4482
4483 void
4484 push_struct_function (tree fndecl)
4485 {
4486 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4487 allocate_struct_function (fndecl, false);
4488 }
4489
4490 /* Reset crtl and other non-struct-function variables to defaults as
4491 appropriate for emitting rtl at the start of a function. */
4492
4493 static void
4494 prepare_function_start (void)
4495 {
4496 gcc_assert (!crtl->emit.x_last_insn);
4497 init_temp_slots ();
4498 init_emit ();
4499 init_varasm_status ();
4500 init_expr ();
4501 default_rtl_profile ();
4502
4503 if (flag_stack_usage_info)
4504 {
4505 cfun->su = ggc_alloc_cleared_stack_usage ();
4506 cfun->su->static_stack_size = -1;
4507 }
4508
4509 cse_not_expected = ! optimize;
4510
4511 /* Caller save not needed yet. */
4512 caller_save_needed = 0;
4513
4514 /* We haven't done register allocation yet. */
4515 reg_renumber = 0;
4516
4517 /* Indicate that we have not instantiated virtual registers yet. */
4518 virtuals_instantiated = 0;
4519
4520 /* Indicate that we want CONCATs now. */
4521 generating_concat_p = 1;
4522
4523 /* Indicate we have no need of a frame pointer yet. */
4524 frame_pointer_needed = 0;
4525 }
4526
4527 /* Initialize the rtl expansion mechanism so that we can do simple things
4528 like generate sequences. This is used to provide a context during global
4529 initialization of some passes. You must call expand_dummy_function_end
4530 to exit this context. */
4531
4532 void
4533 init_dummy_function_start (void)
4534 {
4535 gcc_assert (!in_dummy_function);
4536 in_dummy_function = true;
4537 push_struct_function (NULL_TREE);
4538 prepare_function_start ();
4539 }
4540
4541 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4542 and initialize static variables for generating RTL for the statements
4543 of the function. */
4544
4545 void
4546 init_function_start (tree subr)
4547 {
4548 if (subr && DECL_STRUCT_FUNCTION (subr))
4549 set_cfun (DECL_STRUCT_FUNCTION (subr));
4550 else
4551 allocate_struct_function (subr, false);
4552 prepare_function_start ();
4553 decide_function_section (subr);
4554
4555 /* Warn if this value is an aggregate type,
4556 regardless of which calling convention we are using for it. */
4557 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4558 warning (OPT_Waggregate_return, "function returns an aggregate");
4559 }
4560
4561
4562 void
4563 expand_main_function (void)
4564 {
4565 #if (defined(INVOKE__main) \
4566 || (!defined(HAS_INIT_SECTION) \
4567 && !defined(INIT_SECTION_ASM_OP) \
4568 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4569 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4570 #endif
4571 }
4572 \f
4573 /* Expand code to initialize the stack_protect_guard. This is invoked at
4574 the beginning of a function to be protected. */
4575
4576 #ifndef HAVE_stack_protect_set
4577 # define HAVE_stack_protect_set 0
4578 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4579 #endif
4580
4581 void
4582 stack_protect_prologue (void)
4583 {
4584 tree guard_decl = targetm.stack_protect_guard ();
4585 rtx x, y;
4586
4587 x = expand_normal (crtl->stack_protect_guard);
4588 y = expand_normal (guard_decl);
4589
4590 /* Allow the target to copy from Y to X without leaking Y into a
4591 register. */
4592 if (HAVE_stack_protect_set)
4593 {
4594 rtx insn = gen_stack_protect_set (x, y);
4595 if (insn)
4596 {
4597 emit_insn (insn);
4598 return;
4599 }
4600 }
4601
4602 /* Otherwise do a straight move. */
4603 emit_move_insn (x, y);
4604 }
4605
4606 /* Expand code to verify the stack_protect_guard. This is invoked at
4607 the end of a function to be protected. */
4608
4609 #ifndef HAVE_stack_protect_test
4610 # define HAVE_stack_protect_test 0
4611 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4612 #endif
4613
4614 void
4615 stack_protect_epilogue (void)
4616 {
4617 tree guard_decl = targetm.stack_protect_guard ();
4618 rtx label = gen_label_rtx ();
4619 rtx x, y, tmp;
4620
4621 x = expand_normal (crtl->stack_protect_guard);
4622 y = expand_normal (guard_decl);
4623
4624 /* Allow the target to compare Y with X without leaking either into
4625 a register. */
4626 switch (HAVE_stack_protect_test != 0)
4627 {
4628 case 1:
4629 tmp = gen_stack_protect_test (x, y, label);
4630 if (tmp)
4631 {
4632 emit_insn (tmp);
4633 break;
4634 }
4635 /* FALLTHRU */
4636
4637 default:
4638 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4639 break;
4640 }
4641
4642 /* The noreturn predictor has been moved to the tree level. The rtl-level
4643 predictors estimate this branch about 20%, which isn't enough to get
4644 things moved out of line. Since this is the only extant case of adding
4645 a noreturn function at the rtl level, it doesn't seem worth doing anything
4646 except adding the prediction by hand. */
4647 tmp = get_last_insn ();
4648 if (JUMP_P (tmp))
4649 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4650
4651 expand_expr_stmt (targetm.stack_protect_fail ());
4652 emit_label (label);
4653 }
4654 \f
4655 /* Start the RTL for a new function, and set variables used for
4656 emitting RTL.
4657 SUBR is the FUNCTION_DECL node. */
4660
4661 void
4662 expand_function_start (tree subr)
4663 {
4664 /* Make sure volatile mem refs aren't considered
4665 valid operands of arithmetic insns. */
4666 init_recog_no_volatile ();
4667
4668 crtl->profile
4669 = (profile_flag
4670 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4671
4672 crtl->limit_stack
4673 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4674
4675 /* Make the label for return statements to jump to. Do not special
4676 case machines with special return instructions -- they will be
4677 handled later during jump, ifcvt, or epilogue creation. */
4678 return_label = gen_label_rtx ();
4679
4680 /* Initialize rtx used to return the value. */
4681 /* Do this before assign_parms so that we copy the struct value address
4682 before any library calls that assign parms might generate. */
4683
4684 /* Decide whether to return the value in memory or in a register. */
4685 if (aggregate_value_p (DECL_RESULT (subr), subr))
4686 {
4687 /* Returning something that won't go in a register. */
4688 rtx value_address = 0;
4689
4690 #ifdef PCC_STATIC_STRUCT_RETURN
4691 if (cfun->returns_pcc_struct)
4692 {
4693 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4694 value_address = assemble_static_space (size);
4695 }
4696 else
4697 #endif
4698 {
4699 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4700 /* Expect to be passed the address of a place to store the value.
4701 If it is passed as an argument, assign_parms will take care of
4702 it. */
4703 if (sv)
4704 {
4705 value_address = gen_reg_rtx (Pmode);
4706 emit_move_insn (value_address, sv);
4707 }
4708 }
4709 if (value_address)
4710 {
4711 rtx x = value_address;
4712 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4713 {
4714 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4715 set_mem_attributes (x, DECL_RESULT (subr), 1);
4716 }
4717 SET_DECL_RTL (DECL_RESULT (subr), x);
4718 }
4719 }
4720 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4721 /* If return mode is void, this decl rtl should not be used. */
4722 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4723 else
4724 {
4725 /* Compute the return values into a pseudo reg, which we will copy
4726 into the true return register after the cleanups are done. */
4727 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4728 if (TYPE_MODE (return_type) != BLKmode
4729 && targetm.calls.return_in_msb (return_type))
4730 /* expand_function_end will insert the appropriate padding in
4731 this case. Use the return value's natural (unpadded) mode
4732 within the function proper. */
4733 SET_DECL_RTL (DECL_RESULT (subr),
4734 gen_reg_rtx (TYPE_MODE (return_type)));
4735 else
4736 {
4737 /* In order to figure out what mode to use for the pseudo, we
4738 figure out what the mode of the eventual return register will
4739 actually be, and use that. */
4740 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4741
4742 /* Structures that are returned in registers are not
4743 aggregate_value_p, so we may see a PARALLEL or a REG. */
4744 if (REG_P (hard_reg))
4745 SET_DECL_RTL (DECL_RESULT (subr),
4746 gen_reg_rtx (GET_MODE (hard_reg)));
4747 else
4748 {
4749 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4750 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4751 }
4752 }
4753
4754 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4755 result to the real return register(s). */
4756 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4757 }
4758
4759 /* Initialize rtx for parameters and local variables.
4760 In some cases this requires emitting insns. */
4761 assign_parms (subr);
4762
4763 /* If function gets a static chain arg, store it. */
4764 if (cfun->static_chain_decl)
4765 {
4766 tree parm = cfun->static_chain_decl;
4767 rtx local, chain, insn;
4768
4769 local = gen_reg_rtx (Pmode);
4770 chain = targetm.calls.static_chain (current_function_decl, true);
4771
4772 set_decl_incoming_rtl (parm, chain, false);
4773 SET_DECL_RTL (parm, local);
4774 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4775
4776 insn = emit_move_insn (local, chain);
4777
4778 /* Mark the register as eliminable, similar to parameters. */
4779 if (MEM_P (chain)
4780 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4781 set_dst_reg_note (insn, REG_EQUIV, chain, local);
4782 }
4783
4784 /* If the function receives a non-local goto, then store the
4785 bits we need to restore the frame pointer. */
4786 if (cfun->nonlocal_goto_save_area)
4787 {
4788 tree t_save;
4789 rtx r_save;
4790
4791 /* ??? We need to do this save early. Unfortunately, this point is
4792 before the frame variable gets declared. Help out... */
4793 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4794 if (!DECL_RTL_SET_P (var))
4795 expand_decl (var);
4796
4797 t_save = build4 (ARRAY_REF,
4798 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
4799 cfun->nonlocal_goto_save_area,
4800 integer_zero_node, NULL_TREE, NULL_TREE);
4801 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4802 gcc_assert (GET_MODE (r_save) == Pmode);
4803
4804 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4805 update_nonlocal_goto_save_area ();
4806 }
4807
4808 /* The following was moved from init_function_start.
4809 The move is supposed to make sdb output more accurate. */
4810 /* Indicate the beginning of the function body,
4811 as opposed to parm setup. */
4812 emit_note (NOTE_INSN_FUNCTION_BEG);
4813
4814 gcc_assert (NOTE_P (get_last_insn ()));
4815
4816 parm_birth_insn = get_last_insn ();
4817
4818 if (crtl->profile)
4819 {
4820 #ifdef PROFILE_HOOK
4821 PROFILE_HOOK (current_function_funcdef_no);
4822 #endif
4823 }
4824
4825 /* If we are doing generic stack checking, the probe should go here. */
4826 if (flag_stack_check == GENERIC_STACK_CHECK)
4827 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4828
4829 /* Make sure there is a line number after the function entry setup code. */
4830 force_next_line_note ();
4831 }
4832 \f
4833 /* Undo the effects of init_dummy_function_start. */
4834 void
4835 expand_dummy_function_end (void)
4836 {
4837 gcc_assert (in_dummy_function);
4838
4839 /* End any sequences that failed to be closed due to syntax errors. */
4840 while (in_sequence_p ())
4841 end_sequence ();
4842
4843 /* Outside function body, can't compute type's actual size
4844 until next function's body starts. */
4845
4846 free_after_parsing (cfun);
4847 free_after_compilation (cfun);
4848 pop_cfun ();
4849 in_dummy_function = false;
4850 }
4851
4852 /* Call DOIT for each hard register used as a return value from
4853 the current function. */
4854
4855 void
4856 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4857 {
4858 rtx outgoing = crtl->return_rtx;
4859
4860 if (! outgoing)
4861 return;
4862
4863 if (REG_P (outgoing))
4864 (*doit) (outgoing, arg);
4865 else if (GET_CODE (outgoing) == PARALLEL)
4866 {
4867 int i;
4868
4869 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4870 {
4871 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4872
4873 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4874 (*doit) (x, arg);
4875 }
4876 }
4877 }
4878
4879 static void
4880 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4881 {
4882 emit_clobber (reg);
4883 }
4884
4885 void
4886 clobber_return_register (void)
4887 {
4888 diddle_return_value (do_clobber_return_reg, NULL);
4889
4890 /* In case we do use a pseudo for the return value, clobber it too. */
4891 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4892 {
4893 tree decl_result = DECL_RESULT (current_function_decl);
4894 rtx decl_rtl = DECL_RTL (decl_result);
4895 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4896 {
4897 do_clobber_return_reg (decl_rtl, NULL);
4898 }
4899 }
4900 }
4901
4902 static void
4903 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4904 {
4905 emit_use (reg);
4906 }
4907
4908 static void
4909 use_return_register (void)
4910 {
4911 diddle_return_value (do_use_return_reg, NULL);
4912 }
4913
4914 /* Possibly warn about unused parameters. */
4915 void
4916 do_warn_unused_parameter (tree fn)
4917 {
4918 tree decl;
4919
4920 for (decl = DECL_ARGUMENTS (fn);
4921 decl; decl = DECL_CHAIN (decl))
4922 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4923 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4924 && !TREE_NO_WARNING (decl))
4925 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4926 }
4927
4928 static GTY(()) rtx initial_trampoline;
4929
4930 /* Generate RTL for the end of the current function. */
4931
4932 void
4933 expand_function_end (void)
4934 {
4935 rtx clobber_after;
4936
4937 /* If arg_pointer_save_area was referenced only from a nested
4938 function, we will not have initialized it yet. Do that now. */
4939 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4940 get_arg_pointer_save_area ();
4941
4942 /* If we are doing generic stack checking and this function makes calls,
4943 do a stack probe at the start of the function to ensure we have enough
4944 space for another stack frame. */
4945 if (flag_stack_check == GENERIC_STACK_CHECK)
4946 {
4947 rtx insn, seq;
4948
4949 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4950 if (CALL_P (insn))
4951 {
4952 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
4953 start_sequence ();
4954 if (STACK_CHECK_MOVING_SP)
4955 anti_adjust_stack_and_probe (max_frame_size, true);
4956 else
4957 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
4958 seq = get_insns ();
4959 end_sequence ();
4960 set_insn_locators (seq, prologue_locator);
4961 emit_insn_before (seq, stack_check_probe_note);
4962 break;
4963 }
4964 }
4965
4966 /* End any sequences that failed to be closed due to syntax errors. */
4967 while (in_sequence_p ())
4968 end_sequence ();
4969
4970 clear_pending_stack_adjust ();
4971 do_pending_stack_adjust ();
4972
4973 /* Output a line number for the end of the function.
4974 SDB depends on this. */
4975 force_next_line_note ();
4976 set_curr_insn_source_location (input_location);
4977
4978 /* Before the return label (if any), clobber the return
4979 registers so that they are not propagated live to the rest of
4980 the function. This can only happen with functions that drop
4981 through; if there had been a return statement, there would
4982 have either been a return rtx, or a jump to the return label.
4983
4984 We delay actual code generation until after the current_function_value_rtx
4985 is computed. */
4986 clobber_after = get_last_insn ();
4987
4988 /* Output the label for the actual return from the function. */
4989 emit_label (return_label);
4990
4991 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
4992 {
4993 /* Let except.c know where it should emit the call to unregister
4994 the function context for sjlj exceptions. */
4995 if (flag_exceptions)
4996 sjlj_emit_function_exit_after (get_last_insn ());
4997 }
4998 else
4999 {
5000 /* We want to ensure that instructions that may trap are not
5001 moved into the epilogue by scheduling, because we don't
5002 always emit unwind information for the epilogue. */
5003 if (cfun->can_throw_non_call_exceptions)
5004 emit_insn (gen_blockage ());
5005 }
5006
5007 /* If this is an implementation of throw, do what's necessary to
5008 communicate between __builtin_eh_return and the epilogue. */
5009 expand_eh_return ();
5010
5011 /* If scalar return value was computed in a pseudo-reg, or was a named
5012 return value that got dumped to the stack, copy that to the hard
5013 return register. */
5014 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5015 {
5016 tree decl_result = DECL_RESULT (current_function_decl);
5017 rtx decl_rtl = DECL_RTL (decl_result);
5018
5019 if (REG_P (decl_rtl)
5020 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5021 : DECL_REGISTER (decl_result))
5022 {
5023 rtx real_decl_rtl = crtl->return_rtx;
5024
5025 /* This should be set in assign_parms. */
5026 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5027
5028 /* If this is a BLKmode structure being returned in registers,
5029 then use the mode computed in expand_return. Note that if
5030 decl_rtl is memory, then its mode may have been changed,
5031 but that crtl->return_rtx has not. */
5032 if (GET_MODE (real_decl_rtl) == BLKmode)
5033 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5034
5035 /* If a non-BLKmode return value should be padded at the least
5036 significant end of the register, shift it left by the appropriate
5037 amount. BLKmode results are handled using the group load/store
5038 machinery. */
5039 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5040 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5041 {
5042 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5043 REGNO (real_decl_rtl)),
5044 decl_rtl);
5045 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5046 }
5047 /* If a named return value was dumped to memory, then
5048 we may need to re-do the PROMOTE_MODE signed/unsigned
5049 extension. */
5050 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5051 {
5052 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5053 promote_function_mode (TREE_TYPE (decl_result),
5054 GET_MODE (decl_rtl), &unsignedp,
5055 TREE_TYPE (current_function_decl), 1);
5056
5057 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5058 }
5059 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5060 {
5061 /* If expand_function_start has created a PARALLEL for decl_rtl,
5062 move the result to the real return registers. Otherwise, do
5063 a group load from decl_rtl for a named return. */
5064 if (GET_CODE (decl_rtl) == PARALLEL)
5065 emit_group_move (real_decl_rtl, decl_rtl);
5066 else
5067 emit_group_load (real_decl_rtl, decl_rtl,
5068 TREE_TYPE (decl_result),
5069 int_size_in_bytes (TREE_TYPE (decl_result)));
5070 }
5071 /* In the case of complex integer modes smaller than a word, we'll
5072 need to generate some non-trivial bitfield insertions. Do that
5073 on a pseudo and not the hard register. */
5074 else if (GET_CODE (decl_rtl) == CONCAT
5075 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5076 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5077 {
5078 int old_generating_concat_p;
5079 rtx tmp;
5080
5081 old_generating_concat_p = generating_concat_p;
5082 generating_concat_p = 0;
5083 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5084 generating_concat_p = old_generating_concat_p;
5085
5086 emit_move_insn (tmp, decl_rtl);
5087 emit_move_insn (real_decl_rtl, tmp);
5088 }
5089 else
5090 emit_move_insn (real_decl_rtl, decl_rtl);
5091 }
5092 }
5093
5094 /* If returning a structure, arrange to return the address of the value
5095 in a place where debuggers expect to find it.
5096
5097 If returning a structure PCC style,
5098 the caller also depends on this value.
5099 And cfun->returns_pcc_struct is not necessarily set. */
5100 if (cfun->returns_struct
5101 || cfun->returns_pcc_struct)
5102 {
5103 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5104 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5105 rtx outgoing;
5106
5107 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5108 type = TREE_TYPE (type);
5109 else
5110 value_address = XEXP (value_address, 0);
5111
5112 outgoing = targetm.calls.function_value (build_pointer_type (type),
5113 current_function_decl, true);
5114
5115 /* Mark this as a function return value so integrate will delete the
5116 assignment and USE below when inlining this function. */
5117 REG_FUNCTION_VALUE_P (outgoing) = 1;
5118
5119 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5120 value_address = convert_memory_address (GET_MODE (outgoing),
5121 value_address);
5122
5123 emit_move_insn (outgoing, value_address);
5124
5125 /* Show the return register used to hold the result (in this case the address
5126 of the result). */
5127 crtl->return_rtx = outgoing;
5128 }
5129
5130 /* Emit the actual code to clobber return register. */
5131 {
5132 rtx seq;
5133
5134 start_sequence ();
5135 clobber_return_register ();
5136 seq = get_insns ();
5137 end_sequence ();
5138
5139 emit_insn_after (seq, clobber_after);
5140 }
5141
5142 /* Output the label for the naked return from the function. */
5143 if (naked_return_label)
5144 emit_label (naked_return_label);
5145
5146 /* @@@ This is a kludge. We want to ensure that instructions that
5147 may trap are not moved into the epilogue by scheduling, because
5148 we don't always emit unwind information for the epilogue. */
5149 if (cfun->can_throw_non_call_exceptions
5150 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5151 emit_insn (gen_blockage ());
5152
5153 /* If stack protection is enabled for this function, check the guard. */
5154 if (crtl->stack_protect_guard)
5155 stack_protect_epilogue ();
5156
5157 /* If we had calls to alloca, and this machine needs
5158 an accurate stack pointer to exit the function,
5159 insert some code to save and restore the stack pointer. */
5160 if (! EXIT_IGNORE_STACK
5161 && cfun->calls_alloca)
5162 {
5163 rtx tem = 0, seq;
5164
5165 start_sequence ();
5166 emit_stack_save (SAVE_FUNCTION, &tem);
5167 seq = get_insns ();
5168 end_sequence ();
5169 emit_insn_before (seq, parm_birth_insn);
5170
5171 emit_stack_restore (SAVE_FUNCTION, tem);
5172 }
5173
5174 /* ??? This should no longer be necessary since stupid is no longer with
5175 us, but there are some parts of the compiler (e.g. reload_combine, and
5176 sh mach_dep_reorg) that still try to compute their own lifetime info
5177 instead of using the general framework. */
5178 use_return_register ();
5179 }
5180
5181 rtx
5182 get_arg_pointer_save_area (void)
5183 {
5184 rtx ret = arg_pointer_save_area;
5185
5186 if (! ret)
5187 {
5188 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5189 arg_pointer_save_area = ret;
5190 }
5191
5192 if (! crtl->arg_pointer_save_area_init)
5193 {
5194 rtx seq;
5195
5196 /* Save the arg pointer at the beginning of the function. The
5197 generated stack slot may not be a valid memory address, so we
5198 have to check it and fix it if necessary. */
5199 start_sequence ();
5200 emit_move_insn (validize_mem (ret),
5201 crtl->args.internal_arg_pointer);
5202 seq = get_insns ();
5203 end_sequence ();
5204
5205 push_topmost_sequence ();
5206 emit_insn_after (seq, entry_of_function ());
5207 pop_topmost_sequence ();
5208
5209 crtl->arg_pointer_save_area_init = true;
5210 }
5211
5212 return ret;
5213 }
5214 \f
5215 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5216 for the first time. */
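/* For instance, the prologue and epilogue emitters record their insns in
   prologue_insn_hash and epilogue_insn_hash, which is what lets
   prologue_epilogue_contains below recognize them later. */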
5217
5218 static void
5219 record_insns (rtx insns, rtx end, htab_t *hashp)
5220 {
5221 rtx tmp;
5222 htab_t hash = *hashp;
5223
5224 if (hash == NULL)
5225 *hashp = hash
5226 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5227
5228 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5229 {
5230 void **slot = htab_find_slot (hash, tmp, INSERT);
5231 gcc_assert (*slot == NULL);
5232 *slot = tmp;
5233 }
5234 }
5235
5236 /* INSN has been duplicated or replaced by COPY, perhaps while duplicating a
5237 basic block, splitting, or running peepholes. If INSN is a prologue or epilogue
5238 insn, then record COPY as well. */
5239
5240 void
5241 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5242 {
5243 htab_t hash;
5244 void **slot;
5245
5246 hash = epilogue_insn_hash;
5247 if (!hash || !htab_find (hash, insn))
5248 {
5249 hash = prologue_insn_hash;
5250 if (!hash || !htab_find (hash, insn))
5251 return;
5252 }
5253
5254 slot = htab_find_slot (hash, copy, INSERT);
5255 gcc_assert (*slot == NULL);
5256 *slot = copy;
5257 }
5258
5259 /* Set the locator of the insn chain starting at INSN to LOC. */
5260 static void
5261 set_insn_locators (rtx insn, int loc)
5262 {
5263 while (insn != NULL_RTX)
5264 {
5265 if (INSN_P (insn))
5266 INSN_LOCATOR (insn) = loc;
5267 insn = NEXT_INSN (insn);
5268 }
5269 }
5270
5271 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5272 we can be running after reorg, SEQUENCE rtl is possible. */
5273
5274 static bool
5275 contains (const_rtx insn, htab_t hash)
5276 {
5277 if (hash == NULL)
5278 return false;
5279
5280 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5281 {
5282 int i;
5283 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5284 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5285 return true;
5286 return false;
5287 }
5288
5289 return htab_find (hash, insn) != NULL;
5290 }
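
/* For illustration only (hypothetical delay-slot target): after reorg a
   prologue insn can end up inside a SEQUENCE together with the branch
   whose delay slot it fills, roughly

     (insn (sequence [(jump_insn ... branch ...)
                      (insn ... prologue insn ...)]))

   so contains () must look at every element of the SEQUENCE rather than
   only at the outer insn.  */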
5291
5292 int
5293 prologue_epilogue_contains (const_rtx insn)
5294 {
5295 if (contains (insn, prologue_insn_hash))
5296 return 1;
5297 if (contains (insn, epilogue_insn_hash))
5298 return 1;
5299 return 0;
5300 }
5301
5302 #ifdef HAVE_simple_return
5303
5304 /* Return true if INSN requires the stack frame to be set up.
5305 PROLOGUE_USED contains the hard registers used in the function
5306 prologue. SET_UP_BY_PROLOGUE is the set of registers we expect the
5307 prologue to set up for the function. */
5308 bool
5309 requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
5310 HARD_REG_SET set_up_by_prologue)
5311 {
5312 df_ref *df_rec;
5313 HARD_REG_SET hardregs;
5314 unsigned regno;
5315
5316 if (CALL_P (insn))
5317 return !SIBLING_CALL_P (insn);
5318
5319 CLEAR_HARD_REG_SET (hardregs);
5320 for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
5321 {
5322 rtx dreg = DF_REF_REG (*df_rec);
5323
5324 if (!REG_P (dreg))
5325 continue;
5326
5327 add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
5328 REGNO (dreg));
5329 }
5330 if (hard_reg_set_intersect_p (hardregs, prologue_used))
5331 return true;
5332 AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
5333 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5334 if (TEST_HARD_REG_BIT (hardregs, regno)
5335 && df_regs_ever_live_p (regno))
5336 return true;
5337
5338 for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
5339 {
5340 rtx reg = DF_REF_REG (*df_rec);
5341
5342 if (!REG_P (reg))
5343 continue;
5344
5345 add_to_hard_reg_set (&hardregs, GET_MODE (reg),
5346 REGNO (reg));
5347 }
5348 if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
5349 return true;
5350
5351 return false;
5352 }
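
/* A purely illustrative example (register names hypothetical): on a target
   whose prologue sets up the frame pointer, an insn such as

     (set (mem:SI (plus:SI (reg:SI fp) (const_int -4))) (reg:SI 0))

   uses a register in SET_UP_BY_PROLOGUE and is therefore reported as
   requiring the stack frame, whereas a sibling call never is and any
   other call always is.  */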
5353
5354 /* See whether BB has a single successor that uses [REGNO, END_REGNO),
5355 and if BB is its only predecessor. Return that block if so,
5356 otherwise return null. */
5357
5358 static basic_block
5359 next_block_for_reg (basic_block bb, int regno, int end_regno)
5360 {
5361 edge e, live_edge;
5362 edge_iterator ei;
5363 bitmap live;
5364 int i;
5365
5366 live_edge = NULL;
5367 FOR_EACH_EDGE (e, ei, bb->succs)
5368 {
5369 live = df_get_live_in (e->dest);
5370 for (i = regno; i < end_regno; i++)
5371 if (REGNO_REG_SET_P (live, i))
5372 {
5373 if (live_edge && live_edge != e)
5374 return NULL;
5375 live_edge = e;
5376 }
5377 }
5378
5379 /* We can sometimes encounter dead code. Don't try to move it
5380 into the exit block. */
5381 if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR)
5382 return NULL;
5383
5384 /* Reject targets of abnormal edges. This is needed for correctness
5385 on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
5386 exception edges even though it is generally treated as call-saved
5387 for the majority of the compilation. Moving across abnormal edges
5388 isn't going to be interesting for shrink-wrap usage anyway. */
5389 if (live_edge->flags & EDGE_ABNORMAL)
5390 return NULL;
5391
5392 if (EDGE_COUNT (live_edge->dest->preds) > 1)
5393 return NULL;
5394
5395 return live_edge->dest;
5396 }
5397
5398 /* Try to move INSN from BB to a successor. Return true on success.
5399 USES and DEFS are the set of registers that are used and defined
5400 after INSN in BB. */
5401
5402 static bool
5403 move_insn_for_shrink_wrap (basic_block bb, rtx insn,
5404 const HARD_REG_SET uses,
5405 const HARD_REG_SET defs)
5406 {
5407 rtx set, src, dest;
5408 bitmap live_out, live_in, bb_uses, bb_defs;
5409 unsigned int i, dregno, end_dregno, sregno, end_sregno;
5410 basic_block next_block;
5411
5412 /* Look for a simple register copy. */
5413 set = single_set (insn);
5414 if (!set)
5415 return false;
5416 src = SET_SRC (set);
5417 dest = SET_DEST (set);
5418 if (!REG_P (dest) || !REG_P (src))
5419 return false;
5420
5421 /* Make sure that the source register isn't defined later in BB. */
5422 sregno = REGNO (src);
5423 end_sregno = END_REGNO (src);
5424 if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno))
5425 return false;
5426
5427 /* Make sure that the destination register isn't referenced later in BB. */
5428 dregno = REGNO (dest);
5429 end_dregno = END_REGNO (dest);
5430 if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno)
5431 || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno))
5432 return false;
5433
5434 /* See whether there is a successor block to which we could move INSN. */
5435 next_block = next_block_for_reg (bb, dregno, end_dregno);
5436 if (!next_block)
5437 return false;
5438
5439 /* At this point we are committed to moving INSN, but let's try to
5440 move it as far as we can. */
5441 do
5442 {
5443 live_out = df_get_live_out (bb);
5444 live_in = df_get_live_in (next_block);
5445 bb = next_block;
5446
5447 /* Check whether BB uses DEST or clobbers DEST. We need to add
5448 INSN to BB if so. Either way, DEST is no longer live on entry,
5449 except for any part that overlaps SRC (next loop). */
5450 bb_uses = &DF_LR_BB_INFO (bb)->use;
5451 bb_defs = &DF_LR_BB_INFO (bb)->def;
5452 for (i = dregno; i < end_dregno; i++)
5453 {
5454 if (REGNO_REG_SET_P (bb_uses, i) || REGNO_REG_SET_P (bb_defs, i))
5455 next_block = NULL;
5456 CLEAR_REGNO_REG_SET (live_out, i);
5457 CLEAR_REGNO_REG_SET (live_in, i);
5458 }
5459
5460 /* Check whether BB clobbers SRC. We need to add INSN to BB if so.
5461 Either way, SRC is now live on entry. */
5462 for (i = sregno; i < end_sregno; i++)
5463 {
5464 if (REGNO_REG_SET_P (bb_defs, i))
5465 next_block = NULL;
5466 SET_REGNO_REG_SET (live_out, i);
5467 SET_REGNO_REG_SET (live_in, i);
5468 }
5469
5470 /* If we don't need to add the move to BB, look for a single
5471 successor block. */
5472 if (next_block)
5473 next_block = next_block_for_reg (next_block, dregno, end_dregno);
5474 }
5475 while (next_block);
5476
5477 /* BB now defines DEST. It only uses the parts of DEST that overlap SRC
5478 (next loop). */
5479 for (i = dregno; i < end_dregno; i++)
5480 {
5481 CLEAR_REGNO_REG_SET (bb_uses, i);
5482 SET_REGNO_REG_SET (bb_defs, i);
5483 }
5484
5485 /* BB now uses SRC. */
5486 for (i = sregno; i < end_sregno; i++)
5487 SET_REGNO_REG_SET (bb_uses, i);
5488
5489 emit_insn_after (PATTERN (insn), bb_note (bb));
5490 delete_insn (insn);
5491 return true;
5492 }
5493
5494 /* Look for register copies in the first block of the function, and move
5495 them down into successor blocks if the register is used only on one
5496 path. This exposes more opportunities for shrink-wrapping. These
5497 kinds of sets often occur when incoming argument registers are moved
5498 to call-saved registers because their values are live across one or
5499 more calls during the function. */
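
/* As an illustrative sketch (register numbers hypothetical): a first block
   containing

     (set (reg:SI 8) (reg:SI 0))

   where the incoming argument in reg 0 is copied to call-saved reg 8, and
   reg 8 is live only on the path that makes calls, can have that copy sunk
   into the successor block on that path, so the call-free path no longer
   forces a prologue.  */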
5500
5501 static void
5502 prepare_shrink_wrap (basic_block entry_block)
5503 {
5504 rtx insn, curr, x;
5505 HARD_REG_SET uses, defs;
5506 df_ref *ref;
5507
5508 CLEAR_HARD_REG_SET (uses);
5509 CLEAR_HARD_REG_SET (defs);
5510 FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
5511 if (NONDEBUG_INSN_P (insn)
5512 && !move_insn_for_shrink_wrap (entry_block, insn, uses, defs))
5513 {
5514 /* Add all defined registers to DEFS. */
5515 for (ref = DF_INSN_DEFS (insn); *ref; ref++)
5516 {
5517 x = DF_REF_REG (*ref);
5518 if (REG_P (x) && HARD_REGISTER_P (x))
5519 SET_HARD_REG_BIT (defs, REGNO (x));
5520 }
5521
5522 /* Add all used registers to USES. */
5523 for (ref = DF_INSN_USES (insn); *ref; ref++)
5524 {
5525 x = DF_REF_REG (*ref);
5526 if (REG_P (x) && HARD_REGISTER_P (x))
5527 SET_HARD_REG_BIT (uses, REGNO (x));
5528 }
5529 }
5530 }
5531
5532 #endif
5533
5534 #ifdef HAVE_return
5535 /* Insert use of return register before the end of BB. */
5536
5537 static void
5538 emit_use_return_register_into_block (basic_block bb)
5539 {
5540 rtx seq;
5541 start_sequence ();
5542 use_return_register ();
5543 seq = get_insns ();
5544 end_sequence ();
5545 emit_insn_before (seq, BB_END (bb));
5546 }
5547
5548
5549 /* Create a return pattern, either simple_return or return, depending on
5550 simple_p. */
5551
5552 static rtx
5553 gen_return_pattern (bool simple_p)
5554 {
5555 #ifdef HAVE_simple_return
5556 return simple_p ? gen_simple_return () : gen_return ();
5557 #else
5558 gcc_assert (!simple_p);
5559 return gen_return ();
5560 #endif
5561 }
5562
5563 /* Insert an appropriate return pattern at the end of block BB. This
5564 also means updating block_for_insn appropriately. SIMPLE_P is
5565 the same as in gen_return_pattern and passed to it. */
5566
5567 static void
5568 emit_return_into_block (bool simple_p, basic_block bb)
5569 {
5570 rtx jump, pat;
5571 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5572 pat = PATTERN (jump);
5573 if (GET_CODE (pat) == PARALLEL)
5574 pat = XVECEXP (pat, 0, 0);
5575 gcc_assert (ANY_RETURN_P (pat));
5576 JUMP_LABEL (jump) = pat;
5577 }
5578 #endif
5579
5580 /* Set JUMP_LABEL for a return insn. */
5581
5582 void
5583 set_return_jump_label (rtx returnjump)
5584 {
5585 rtx pat = PATTERN (returnjump);
5586 if (GET_CODE (pat) == PARALLEL)
5587 pat = XVECEXP (pat, 0, 0);
5588 if (ANY_RETURN_P (pat))
5589 JUMP_LABEL (returnjump) = pat;
5590 else
5591 JUMP_LABEL (returnjump) = ret_rtx;
5592 }
5593
5594 #ifdef HAVE_simple_return
5595 /* Create a copy of BB instructions and insert at BEFORE. Redirect
5596 preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE. */
5597 static void
5598 dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
5599 bitmap_head *need_prologue)
5600 {
5601 edge_iterator ei;
5602 edge e;
5603 rtx insn = BB_END (bb);
5604
5605 /* We know BB has a single successor, so there is no need to copy a
5606 simple jump at the end of BB. */
5607 if (simplejump_p (insn))
5608 insn = PREV_INSN (insn);
5609
5610 start_sequence ();
5611 duplicate_insn_chain (BB_HEAD (bb), insn);
5612 if (dump_file)
5613 {
5614 unsigned count = 0;
5615 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5616 if (active_insn_p (insn))
5617 ++count;
5618 fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
5619 bb->index, copy_bb->index, count);
5620 }
5621 insn = get_insns ();
5622 end_sequence ();
5623 emit_insn_before (insn, before);
5624
5625 /* Redirect all the paths that need no prologue into copy_bb. */
5626 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
5627 if (!bitmap_bit_p (need_prologue, e->src->index))
5628 {
5629 redirect_edge_and_branch_force (e, copy_bb);
5630 continue;
5631 }
5632 else
5633 ei_next (&ei);
5634 }
5635 #endif
5636
5637 #if defined (HAVE_return) || defined (HAVE_simple_return)
5638 /* Return true if there are any active insns between HEAD and TAIL. */
5639 static bool
5640 active_insn_between (rtx head, rtx tail)
5641 {
5642 while (tail)
5643 {
5644 if (active_insn_p (tail))
5645 return true;
5646 if (tail == head)
5647 return false;
5648 tail = PREV_INSN (tail);
5649 }
5650 return false;
5651 }
5652
5653 /* LAST_BB is a block that exits, and is empty of active instructions.
5654 Examine its predecessors for jumps that can be converted to
5655 (conditional) returns. */
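
/* For instance (hypothetical condition-code register): a conditional
   branch to the empty last block,

     (set (pc) (if_then_else (eq (reg:CC cc) (const_int 0))
                             (label_ref L) (pc)))

   can be redirected to (return) or (simple_return), turning it into a
   conditional return whose edge goes straight to the exit block.  */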
5656 static VEC (edge, heap) *
5657 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5658 VEC (edge, heap) *unconverted ATTRIBUTE_UNUSED)
5659 {
5660 int i;
5661 basic_block bb;
5662 rtx label;
5663 edge_iterator ei;
5664 edge e;
5665 VEC(basic_block,heap) *src_bbs;
5666
5667 src_bbs = VEC_alloc (basic_block, heap, EDGE_COUNT (last_bb->preds));
5668 FOR_EACH_EDGE (e, ei, last_bb->preds)
5669 if (e->src != ENTRY_BLOCK_PTR)
5670 VEC_quick_push (basic_block, src_bbs, e->src);
5671
5672 label = BB_HEAD (last_bb);
5673
5674 FOR_EACH_VEC_ELT (basic_block, src_bbs, i, bb)
5675 {
5676 rtx jump = BB_END (bb);
5677
5678 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5679 continue;
5680
5681 e = find_edge (bb, last_bb);
5682
5683 /* If we have an unconditional jump, we can replace that
5684 with a simple return instruction. */
5685 if (simplejump_p (jump))
5686 {
5687 /* The use of the return register might be present in the exit
5688 fallthru block. Either:
5689 - removing the use is safe, and we should remove the use in
5690 the exit fallthru block, or
5691 - removing the use is not safe, and we should add it here.
5692 For now, we conservatively choose the latter. Either of the
5693 two helps in crossjumping. */
5694 emit_use_return_register_into_block (bb);
5695
5696 emit_return_into_block (simple_p, bb);
5697 delete_insn (jump);
5698 }
5699
5700 /* If we have a conditional jump branching to the last
5701 block, we can try to replace that with a conditional
5702 return instruction. */
5703 else if (condjump_p (jump))
5704 {
5705 rtx dest;
5706
5707 if (simple_p)
5708 dest = simple_return_rtx;
5709 else
5710 dest = ret_rtx;
5711 if (!redirect_jump (jump, dest, 0))
5712 {
5713 #ifdef HAVE_simple_return
5714 if (simple_p)
5715 {
5716 if (dump_file)
5717 fprintf (dump_file,
5718 "Failed to redirect bb %d branch.\n", bb->index);
5719 VEC_safe_push (edge, heap, unconverted, e);
5720 }
5721 #endif
5722 continue;
5723 }
5724
5725 /* See comment in simplejump_p case above. */
5726 emit_use_return_register_into_block (bb);
5727
5728 /* If this block has only one successor, it both jumps
5729 and falls through to the fallthru block, so we can't
5730 delete the edge. */
5731 if (single_succ_p (bb))
5732 continue;
5733 }
5734 else
5735 {
5736 #ifdef HAVE_simple_return
5737 if (simple_p)
5738 {
5739 if (dump_file)
5740 fprintf (dump_file,
5741 "Failed to redirect bb %d branch.\n", bb->index);
5742 VEC_safe_push (edge, heap, unconverted, e);
5743 }
5744 #endif
5745 continue;
5746 }
5747
5748 /* Fix up the CFG for the successful change we just made. */
5749 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5750 e->flags &= ~EDGE_CROSSING;
5751 }
5752 VEC_free (basic_block, heap, src_bbs);
5753 return unconverted;
5754 }
5755
5756 /* Emit a return insn for the exit fallthru block. */
5757 static basic_block
5758 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5759 {
5760 basic_block last_bb = exit_fallthru_edge->src;
5761
5762 if (JUMP_P (BB_END (last_bb)))
5763 {
5764 last_bb = split_edge (exit_fallthru_edge);
5765 exit_fallthru_edge = single_succ_edge (last_bb);
5766 }
5767 emit_barrier_after (BB_END (last_bb));
5768 emit_return_into_block (simple_p, last_bb);
5769 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5770 return last_bb;
5771 }
5772 #endif
5773
5774
5775 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5776 this into place with notes indicating where the prologue ends and where
5777 the epilogue begins. Update the basic block information when possible.
5778
5779 Notes on epilogue placement:
5780 There are several kinds of edges to the exit block:
5781 * a single fallthru edge from LAST_BB
5782 * possibly, edges from blocks containing sibcalls
5783 * possibly, fake edges from infinite loops
5784
5785 The epilogue is always emitted on the fallthru edge from the last basic
5786 block in the function, LAST_BB, into the exit block.
5787
5788 If LAST_BB is empty except for a label, it is the target of every
5789 other basic block in the function that ends in a return. If a
5790 target has a return or simple_return pattern (possibly with
5791 conditional variants), these basic blocks can be changed so that a
5792 return insn is emitted into them, and their target is adjusted to
5793 the real exit block.
5794
5795 Notes on shrink wrapping: We implement a fairly conservative
5796 version of shrink-wrapping rather than the textbook one. We only
5797 generate a single prologue and a single epilogue. This is
5798 sufficient to catch a number of interesting cases involving early
5799 exits.
5800
5801 First, we identify the blocks that require the prologue to occur before
5802 them. These are the ones that modify a call-saved register, or reference
5803 any of the stack or frame pointer registers. To simplify things, we then
5804 mark everything reachable from these blocks as also requiring a prologue.
5805 This takes care of loops automatically, and avoids the need to examine
5806 whether MEMs reference the frame, since it is sufficient to check for
5807 occurrences of the stack or frame pointer.
5808
5809 We then compute the set of blocks for which the need for a prologue
5810 is anticipatable (borrowing terminology from the shrink-wrapping
5811 description in Muchnick's book). These are the blocks which either
5812 require a prologue themselves, or those that have only successors
5813 where the prologue is anticipatable. The prologue needs to be
5814 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5815 is not. For the moment, we ensure that only one such edge exists.
5816
5817 The epilogue is placed as described above, but we make a
5818 distinction between inserting return and simple_return patterns
5819 when modifying other blocks that end in a return. Blocks that end
5820 in a sibcall omit the sibcall_epilogue if the block is not in
5821 ANTIC. */
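
/* A small, purely illustrative CFG (block numbers hypothetical):

       BB2  (test, touches no call-saved register or frame register)
       /  \
     BB3    BB4  (clobbers a call-saved register)
   (return)  |
            BB5  (return)

   BB4 requires the prologue, so BB4 and BB5 are marked in ANTIC; BB2 and
   BB3 are not.  The single edge BB2->BB4 from a non-ANTIC block into ANTIC
   becomes the candidate edge on which the prologue is inserted, and BB3
   can exit through a simple_return without ever setting up the frame.  */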
5822
5823 static void
5824 thread_prologue_and_epilogue_insns (void)
5825 {
5826 bool inserted;
5827 #ifdef HAVE_simple_return
5828 VEC (edge, heap) *unconverted_simple_returns = NULL;
5829 bool nonempty_prologue;
5830 bitmap_head bb_flags;
5831 unsigned max_grow_size;
5832 #endif
5833 rtx returnjump;
5834 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
5835 rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
5836 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5837 edge_iterator ei;
5838
5839 df_analyze ();
5840
5841 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5842
5843 inserted = false;
5844 seq = NULL_RTX;
5845 epilogue_end = NULL_RTX;
5846 returnjump = NULL_RTX;
5847
5848 /* Can't deal with multiple successors of the entry block at the
5849 moment. Function should always have at least one entry
5850 point. */
5851 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5852 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
5853 orig_entry_edge = entry_edge;
5854
5855 split_prologue_seq = NULL_RTX;
5856 if (flag_split_stack
5857 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5858 == NULL))
5859 {
5860 #ifndef HAVE_split_stack_prologue
5861 gcc_unreachable ();
5862 #else
5863 gcc_assert (HAVE_split_stack_prologue);
5864
5865 start_sequence ();
5866 emit_insn (gen_split_stack_prologue ());
5867 split_prologue_seq = get_insns ();
5868 end_sequence ();
5869
5870 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5871 set_insn_locators (split_prologue_seq, prologue_locator);
5872 #endif
5873 }
5874
5875 prologue_seq = NULL_RTX;
5876 #ifdef HAVE_prologue
5877 if (HAVE_prologue)
5878 {
5879 start_sequence ();
5880 seq = gen_prologue ();
5881 emit_insn (seq);
5882
5883 /* Insert an explicit USE for the frame pointer
5884 if the profiling is on and the frame pointer is required. */
5885 if (crtl->profile && frame_pointer_needed)
5886 emit_use (hard_frame_pointer_rtx);
5887
5888 /* Retain a map of the prologue insns. */
5889 record_insns (seq, NULL, &prologue_insn_hash);
5890 emit_note (NOTE_INSN_PROLOGUE_END);
5891
5892 /* Ensure that instructions are not moved into the prologue when
5893 profiling is on. The call to the profiling routine can be
5894 emitted within the live range of a call-clobbered register. */
5895 if (!targetm.profile_before_prologue () && crtl->profile)
5896 emit_insn (gen_blockage ());
5897
5898 prologue_seq = get_insns ();
5899 end_sequence ();
5900 set_insn_locators (prologue_seq, prologue_locator);
5901 }
5902 #endif
5903
5904 #ifdef HAVE_simple_return
5905 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5906
5907 /* Try to perform a kind of shrink-wrapping, making sure the
5908 prologue/epilogue is emitted only around those parts of the
5909 function that require it. */
5910
5911 nonempty_prologue = false;
5912 for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
5913 if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
5914 {
5915 nonempty_prologue = true;
5916 break;
5917 }
5918
5919 if (flag_shrink_wrap && HAVE_simple_return
5920 && (targetm.profile_before_prologue () || !crtl->profile)
5921 && nonempty_prologue && !crtl->calls_eh_return)
5922 {
5923 HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
5924 struct hard_reg_set_container set_up_by_prologue;
5925 rtx p_insn;
5926 VEC(basic_block, heap) *vec;
5927 basic_block bb;
5928 bitmap_head bb_antic_flags;
5929 bitmap_head bb_on_list;
5930 bitmap_head bb_tail;
5931
5932 if (dump_file)
5933 fprintf (dump_file, "Attempting shrink-wrapping optimization.\n");
5934
5935 /* Compute the registers set and used in the prologue. */
5936 CLEAR_HARD_REG_SET (prologue_clobbered);
5937 CLEAR_HARD_REG_SET (prologue_used);
5938 for (p_insn = prologue_seq; p_insn; p_insn = NEXT_INSN (p_insn))
5939 {
5940 HARD_REG_SET this_used;
5941 if (!NONDEBUG_INSN_P (p_insn))
5942 continue;
5943
5944 CLEAR_HARD_REG_SET (this_used);
5945 note_uses (&PATTERN (p_insn), record_hard_reg_uses,
5946 &this_used);
5947 AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
5948 IOR_HARD_REG_SET (prologue_used, this_used);
5949 note_stores (PATTERN (p_insn), record_hard_reg_sets,
5950 &prologue_clobbered);
5951 }
5952
5953 prepare_shrink_wrap (entry_edge->dest);
5954
5955 bitmap_initialize (&bb_antic_flags, &bitmap_default_obstack);
5956 bitmap_initialize (&bb_on_list, &bitmap_default_obstack);
5957 bitmap_initialize (&bb_tail, &bitmap_default_obstack);
5958
5959 /* Find the set of basic blocks that require a stack frame,
5960 and blocks that are too big to be duplicated. */
5961
5962 vec = VEC_alloc (basic_block, heap, n_basic_blocks);
5963
5964 CLEAR_HARD_REG_SET (set_up_by_prologue.set);
5965 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
5966 STACK_POINTER_REGNUM);
5967 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode, ARG_POINTER_REGNUM);
5968 if (frame_pointer_needed)
5969 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
5970 HARD_FRAME_POINTER_REGNUM);
5971 if (pic_offset_table_rtx)
5972 add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
5973 PIC_OFFSET_TABLE_REGNUM);
5974 if (stack_realign_drap && crtl->drap_reg)
5975 add_to_hard_reg_set (&set_up_by_prologue.set,
5976 GET_MODE (crtl->drap_reg),
5977 REGNO (crtl->drap_reg));
5978 if (targetm.set_up_by_prologue)
5979 targetm.set_up_by_prologue (&set_up_by_prologue);
5980
5981 /* We don't use a different max size depending on
5982 optimize_bb_for_speed_p because increasing shrink-wrapping
5983 opportunities by duplicating tail blocks can actually result
5984 in an overall decrease in code size. */
5985 max_grow_size = get_uncond_jump_length ();
5986 max_grow_size *= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS);
5987
5988 FOR_EACH_BB (bb)
5989 {
5990 rtx insn;
5991 unsigned size = 0;
5992
5993 FOR_BB_INSNS (bb, insn)
5994 if (NONDEBUG_INSN_P (insn))
5995 {
5996 if (requires_stack_frame_p (insn, prologue_used,
5997 set_up_by_prologue.set))
5998 {
5999 if (bb == entry_edge->dest)
6000 goto fail_shrinkwrap;
6001 bitmap_set_bit (&bb_flags, bb->index);
6002 VEC_quick_push (basic_block, vec, bb);
6003 break;
6004 }
6005 else if (size <= max_grow_size)
6006 {
6007 size += get_attr_min_length (insn);
6008 if (size > max_grow_size)
6009 bitmap_set_bit (&bb_on_list, bb->index);
6010 }
6011 }
6012 }
6013
6014 /* Blocks that really need a prologue, or are too big for tails. */
6015 bitmap_ior_into (&bb_on_list, &bb_flags);
6016
6017 /* For every basic block that needs a prologue, mark all blocks
6018 reachable from it, so as to ensure they are also seen as
6019 requiring a prologue. */
6020 while (!VEC_empty (basic_block, vec))
6021 {
6022 basic_block tmp_bb = VEC_pop (basic_block, vec);
6023
6024 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
6025 if (e->dest != EXIT_BLOCK_PTR
6026 && bitmap_set_bit (&bb_flags, e->dest->index))
6027 VEC_quick_push (basic_block, vec, e->dest);
6028 }
6029
6030 /* Find the set of basic blocks that need no prologue, have a
6031 single successor, can be duplicated, meet a max size
6032 requirement, and go to the exit via like blocks. */
6033 VEC_quick_push (basic_block, vec, EXIT_BLOCK_PTR);
6034 while (!VEC_empty (basic_block, vec))
6035 {
6036 basic_block tmp_bb = VEC_pop (basic_block, vec);
6037
6038 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
6039 if (single_succ_p (e->src)
6040 && !bitmap_bit_p (&bb_on_list, e->src->index)
6041 && can_duplicate_block_p (e->src))
6042 {
6043 edge pe;
6044 edge_iterator pei;
6045
6046 /* If there is a predecessor of e->src which doesn't
6047 need a prologue and the edge is complex,
6048 we might not be able to redirect the branch
6049 to a copy of e->src. */
6050 FOR_EACH_EDGE (pe, pei, e->src->preds)
6051 if ((pe->flags & EDGE_COMPLEX) != 0
6052 && !bitmap_bit_p (&bb_flags, pe->src->index))
6053 break;
6054 if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
6055 VEC_quick_push (basic_block, vec, e->src);
6056 }
6057 }
6058
6059 /* Now walk backwards from every block that is marked as needing
6060 a prologue to compute the bb_antic_flags bitmap. Exclude
6061 tail blocks; they can be duplicated to be used on paths not
6062 needing a prologue. */
6063 bitmap_clear (&bb_on_list);
6064 bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
6065 FOR_EACH_BB (bb)
6066 {
6067 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
6068 continue;
6069 FOR_EACH_EDGE (e, ei, bb->preds)
6070 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
6071 && bitmap_set_bit (&bb_on_list, e->src->index))
6072 VEC_quick_push (basic_block, vec, e->src);
6073 }
6074 while (!VEC_empty (basic_block, vec))
6075 {
6076 basic_block tmp_bb = VEC_pop (basic_block, vec);
6077 bool all_set = true;
6078
6079 bitmap_clear_bit (&bb_on_list, tmp_bb->index);
6080 FOR_EACH_EDGE (e, ei, tmp_bb->succs)
6081 if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
6082 {
6083 all_set = false;
6084 break;
6085 }
6086
6087 if (all_set)
6088 {
6089 bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
6090 FOR_EACH_EDGE (e, ei, tmp_bb->preds)
6091 if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
6092 && bitmap_set_bit (&bb_on_list, e->src->index))
6093 VEC_quick_push (basic_block, vec, e->src);
6094 }
6095 }
6096 /* Find exactly one edge that leads to a block in ANTIC from
6097 a block that isn't. */
6098 if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
6099 FOR_EACH_BB (bb)
6100 {
6101 if (!bitmap_bit_p (&bb_antic_flags, bb->index))
6102 continue;
6103 FOR_EACH_EDGE (e, ei, bb->preds)
6104 if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
6105 {
6106 if (entry_edge != orig_entry_edge)
6107 {
6108 entry_edge = orig_entry_edge;
6109 if (dump_file)
6110 fprintf (dump_file, "More than one candidate edge.\n");
6111 goto fail_shrinkwrap;
6112 }
6113 if (dump_file)
6114 fprintf (dump_file, "Found candidate edge for "
6115 "shrink-wrapping, %d->%d.\n", e->src->index,
6116 e->dest->index);
6117 entry_edge = e;
6118 }
6119 }
6120
6121 if (entry_edge != orig_entry_edge)
6122 {
6123 /* Test whether the prologue is known to clobber any register
6124 (other than FP or SP) which is live on the edge. */
6125 CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
6126 if (frame_pointer_needed)
6127 CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
6128 CLEAR_HARD_REG_SET (live_on_edge);
6129 reg_set_to_hard_reg_set (&live_on_edge,
6130 df_get_live_in (entry_edge->dest));
6131 if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
6132 {
6133 entry_edge = orig_entry_edge;
6134 if (dump_file)
6135 fprintf (dump_file,
6136 "Shrink-wrapping aborted due to clobber.\n");
6137 }
6138 }
6139 if (entry_edge != orig_entry_edge)
6140 {
6141 crtl->shrink_wrapped = true;
6142 if (dump_file)
6143 fprintf (dump_file, "Performing shrink-wrapping.\n");
6144
6145 /* Find tail blocks reachable from both blocks needing a
6146 prologue and blocks not needing a prologue. */
6147 if (!bitmap_empty_p (&bb_tail))
6148 FOR_EACH_BB (bb)
6149 {
6150 bool some_pro, some_no_pro;
6151 if (!bitmap_bit_p (&bb_tail, bb->index))
6152 continue;
6153 some_pro = some_no_pro = false;
6154 FOR_EACH_EDGE (e, ei, bb->preds)
6155 {
6156 if (bitmap_bit_p (&bb_flags, e->src->index))
6157 some_pro = true;
6158 else
6159 some_no_pro = true;
6160 }
6161 if (some_pro && some_no_pro)
6162 VEC_quick_push (basic_block, vec, bb);
6163 else
6164 bitmap_clear_bit (&bb_tail, bb->index);
6165 }
6166 /* Find the head of each tail. */
6167 while (!VEC_empty (basic_block, vec))
6168 {
6169 basic_block tbb = VEC_pop (basic_block, vec);
6170
6171 if (!bitmap_bit_p (&bb_tail, tbb->index))
6172 continue;
6173
6174 while (single_succ_p (tbb))
6175 {
6176 tbb = single_succ (tbb);
6177 bitmap_clear_bit (&bb_tail, tbb->index);
6178 }
6179 }
6180 /* Now duplicate the tails. */
6181 if (!bitmap_empty_p (&bb_tail))
6182 FOR_EACH_BB_REVERSE (bb)
6183 {
6184 basic_block copy_bb, tbb;
6185 rtx insert_point;
6186 int eflags;
6187
6188 if (!bitmap_clear_bit (&bb_tail, bb->index))
6189 continue;
6190
6191 /* Create a copy of BB, instructions and all, for
6192 use on paths that don't need a prologue.
6193 Ideal placement of the copy is on a fall-thru edge
6194 or after a block that would jump to the copy. */
6195 FOR_EACH_EDGE (e, ei, bb->preds)
6196 if (!bitmap_bit_p (&bb_flags, e->src->index)
6197 && single_succ_p (e->src))
6198 break;
6199 if (e)
6200 {
6201 copy_bb = create_basic_block (NEXT_INSN (BB_END (e->src)),
6202 NULL_RTX, e->src);
6203 BB_COPY_PARTITION (copy_bb, e->src);
6204 }
6205 else
6206 {
6207 /* Otherwise put the copy at the end of the function. */
6208 copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
6209 EXIT_BLOCK_PTR->prev_bb);
6210 BB_COPY_PARTITION (copy_bb, bb);
6211 }
6212
6213 insert_point = emit_note_after (NOTE_INSN_DELETED,
6214 BB_END (copy_bb));
6215 emit_barrier_after (BB_END (copy_bb));
6216
6217 tbb = bb;
6218 while (1)
6219 {
6220 dup_block_and_redirect (tbb, copy_bb, insert_point,
6221 &bb_flags);
6222 tbb = single_succ (tbb);
6223 if (tbb == EXIT_BLOCK_PTR)
6224 break;
6225 e = split_block (copy_bb, PREV_INSN (insert_point));
6226 copy_bb = e->dest;
6227 }
6228
6229 /* Quiet verify_flow_info by (ab)using EDGE_FAKE.
6230 We have yet to add a simple_return to the tails,
6231 as we'd like to first convert_jumps_to_returns in
6232 case the block is no longer used after that. */
6233 eflags = EDGE_FAKE;
6234 if (CALL_P (PREV_INSN (insert_point))
6235 && SIBLING_CALL_P (PREV_INSN (insert_point)))
6236 eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
6237 make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);
6238
6239 /* verify_flow_info doesn't like a note after a
6240 sibling call. */
6241 delete_insn (insert_point);
6242 if (bitmap_empty_p (&bb_tail))
6243 break;
6244 }
6245 }
6246
6247 fail_shrinkwrap:
6248 bitmap_clear (&bb_tail);
6249 bitmap_clear (&bb_antic_flags);
6250 bitmap_clear (&bb_on_list);
6251 VEC_free (basic_block, heap, vec);
6252 }
6253 #endif
6254
6255 if (split_prologue_seq != NULL_RTX)
6256 {
6257 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6258 inserted = true;
6259 }
6260 if (prologue_seq != NULL_RTX)
6261 {
6262 insert_insn_on_edge (prologue_seq, entry_edge);
6263 inserted = true;
6264 }
6265
6266 /* If the exit block has no non-fake predecessors, we don't need
6267 an epilogue. */
6268 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6269 if ((e->flags & EDGE_FAKE) == 0)
6270 break;
6271 if (e == NULL)
6272 goto epilogue_done;
6273
6274 rtl_profile_for_bb (EXIT_BLOCK_PTR);
6275
6276 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
6277
6278 /* If we're allowed to generate a simple return instruction, then by
6279 definition we don't need a full epilogue. If the last basic
6280 block before the exit block does not contain active instructions,
6281 examine its predecessors and try to emit (conditional) return
6282 instructions. */
6283 #ifdef HAVE_simple_return
6284 if (entry_edge != orig_entry_edge)
6285 {
6286 if (optimize)
6287 {
6288 unsigned i, last;
6289
6290 /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
6291 (but won't remove). Stop at end of current preds. */
6292 last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
6293 for (i = 0; i < last; i++)
6294 {
6295 e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
6296 if (LABEL_P (BB_HEAD (e->src))
6297 && !bitmap_bit_p (&bb_flags, e->src->index)
6298 && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
6299 unconverted_simple_returns
6300 = convert_jumps_to_returns (e->src, true,
6301 unconverted_simple_returns);
6302 }
6303 }
6304
6305 if (exit_fallthru_edge != NULL
6306 && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
6307 && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
6308 {
6309 basic_block last_bb;
6310
6311 last_bb = emit_return_for_exit (exit_fallthru_edge, true);
6312 returnjump = BB_END (last_bb);
6313 exit_fallthru_edge = NULL;
6314 }
6315 }
6316 #endif
6317 #ifdef HAVE_return
6318 if (HAVE_return)
6319 {
6320 if (exit_fallthru_edge == NULL)
6321 goto epilogue_done;
6322
6323 if (optimize)
6324 {
6325 basic_block last_bb = exit_fallthru_edge->src;
6326
6327 if (LABEL_P (BB_HEAD (last_bb))
6328 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6329 convert_jumps_to_returns (last_bb, false, NULL);
6330
6331 if (EDGE_COUNT (last_bb->preds) != 0
6332 && single_succ_p (last_bb))
6333 {
6334 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6335 epilogue_end = returnjump = BB_END (last_bb);
6336 #ifdef HAVE_simple_return
6337 /* Emitting the return may add a basic block.
6338 Fix bb_flags for the added block. */
6339 if (last_bb != exit_fallthru_edge->src)
6340 bitmap_set_bit (&bb_flags, last_bb->index);
6341 #endif
6342 goto epilogue_done;
6343 }
6344 }
6345 }
6346 #endif
6347
6348 /* A small fib -- epilogue is not yet completed, but we wish to re-use
6349 this marker for the splits of EH_RETURN patterns, and nothing else
6350 uses the flag in the meantime. */
6351 epilogue_completed = 1;
6352
6353 #ifdef HAVE_eh_return
6354 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6355 some targets, these get split to a special version of the epilogue
6356 code. In order to be able to properly annotate these with unwind
6357 info, try to split them now. If we get a valid split, drop an
6358 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6359 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6360 {
6361 rtx prev, last, trial;
6362
6363 if (e->flags & EDGE_FALLTHRU)
6364 continue;
6365 last = BB_END (e->src);
6366 if (!eh_returnjump_p (last))
6367 continue;
6368
6369 prev = PREV_INSN (last);
6370 trial = try_split (PATTERN (last), last, 1);
6371 if (trial == last)
6372 continue;
6373
6374 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6375 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6376 }
6377 #endif
6378
6379 /* If nothing falls through into the exit block, we don't need an
6380 epilogue. */
6381
6382 if (exit_fallthru_edge == NULL)
6383 goto epilogue_done;
6384
6385 #ifdef HAVE_epilogue
6386 if (HAVE_epilogue)
6387 {
6388 start_sequence ();
6389 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6390 seq = gen_epilogue ();
6391 if (seq)
6392 emit_jump_insn (seq);
6393
6394 /* Retain a map of the epilogue insns. */
6395 record_insns (seq, NULL, &epilogue_insn_hash);
6396 set_insn_locators (seq, epilogue_locator);
6397
6398 seq = get_insns ();
6399 returnjump = get_last_insn ();
6400 end_sequence ();
6401
6402 insert_insn_on_edge (seq, exit_fallthru_edge);
6403 inserted = true;
6404
6405 if (JUMP_P (returnjump))
6406 set_return_jump_label (returnjump);
6407 }
6408 else
6409 #endif
6410 {
6411 basic_block cur_bb;
6412
6413 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6414 goto epilogue_done;
6415 /* We have a fall-through edge to the exit block, the source is not
6416 at the end of the function, and there will be an assembler epilogue
6417 at the end of the function.
6418 We can't use force_nonfallthru here, because that would try to
6419 use return. Inserting a jump 'by hand' is extremely messy, so
6420 we take advantage of cfg_layout_finalize using
6421 fixup_fallthru_exit_predecessor. */
6422 cfg_layout_initialize (0);
6423 FOR_EACH_BB (cur_bb)
6424 if (cur_bb->index >= NUM_FIXED_BLOCKS
6425 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6426 cur_bb->aux = cur_bb->next_bb;
6427 cfg_layout_finalize ();
6428 }
6429
6430 epilogue_done:
6431
6432 default_rtl_profile ();
6433
6434 if (inserted)
6435 {
6436 sbitmap blocks;
6437
6438 commit_edge_insertions ();
6439
6440 /* Look for basic blocks within the prologue insns. */
6441 blocks = sbitmap_alloc (last_basic_block);
6442 sbitmap_zero (blocks);
6443 SET_BIT (blocks, entry_edge->dest->index);
6444 SET_BIT (blocks, orig_entry_edge->dest->index);
6445 find_many_sub_basic_blocks (blocks);
6446 sbitmap_free (blocks);
6447
6448 /* The epilogue insns we inserted may cause the exit edge to no longer
6449 be fallthru. */
6450 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6451 {
6452 if (((e->flags & EDGE_FALLTHRU) != 0)
6453 && returnjump_p (BB_END (e->src)))
6454 e->flags &= ~EDGE_FALLTHRU;
6455 }
6456 }
6457
6458 #ifdef HAVE_simple_return
6459 /* If there were branches to an empty LAST_BB which we tried to
6460 convert to conditional simple_returns, but couldn't for some
6461 reason, create a block to hold a simple_return insn and redirect
6462 those remaining edges. */
6463 if (!VEC_empty (edge, unconverted_simple_returns))
6464 {
6465 basic_block simple_return_block_hot = NULL;
6466 basic_block simple_return_block_cold = NULL;
6467 edge pending_edge_hot = NULL;
6468 edge pending_edge_cold = NULL;
6469 basic_block exit_pred = EXIT_BLOCK_PTR->prev_bb;
6470 int i;
6471
6472 gcc_assert (entry_edge != orig_entry_edge);
6473
6474 /* See if we can reuse the last insn that was emitted for the
6475 epilogue. */
6476 if (returnjump != NULL_RTX
6477 && JUMP_LABEL (returnjump) == simple_return_rtx)
6478 {
6479 e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
6480 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6481 simple_return_block_hot = e->dest;
6482 else
6483 simple_return_block_cold = e->dest;
6484 }
6485
6486 /* Also check returns we might need to add to tail blocks. */
6487 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6488 if (EDGE_COUNT (e->src->preds) != 0
6489 && (e->flags & EDGE_FAKE) != 0
6490 && !bitmap_bit_p (&bb_flags, e->src->index))
6491 {
6492 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6493 pending_edge_hot = e;
6494 else
6495 pending_edge_cold = e;
6496 }
6497
6498 FOR_EACH_VEC_ELT (edge, unconverted_simple_returns, i, e)
6499 {
6500 basic_block *pdest_bb;
6501 edge pending;
6502
6503 if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
6504 {
6505 pdest_bb = &simple_return_block_hot;
6506 pending = pending_edge_hot;
6507 }
6508 else
6509 {
6510 pdest_bb = &simple_return_block_cold;
6511 pending = pending_edge_cold;
6512 }
6513
6514 if (*pdest_bb == NULL && pending != NULL)
6515 {
6516 emit_return_into_block (true, pending->src);
6517 pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
6518 *pdest_bb = pending->src;
6519 }
6520 else if (*pdest_bb == NULL)
6521 {
6522 basic_block bb;
6523 rtx start;
6524
6525 bb = create_basic_block (NULL, NULL, exit_pred);
6526 BB_COPY_PARTITION (bb, e->src);
6527 start = emit_jump_insn_after (gen_simple_return (),
6528 BB_END (bb));
6529 JUMP_LABEL (start) = simple_return_rtx;
6530 emit_barrier_after (start);
6531
6532 *pdest_bb = bb;
6533 make_edge (bb, EXIT_BLOCK_PTR, 0);
6534 }
6535 redirect_edge_and_branch_force (e, *pdest_bb);
6536 }
6537 VEC_free (edge, heap, unconverted_simple_returns);
6538 }
6539
6540 if (entry_edge != orig_entry_edge)
6541 {
6542 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6543 if (EDGE_COUNT (e->src->preds) != 0
6544 && (e->flags & EDGE_FAKE) != 0
6545 && !bitmap_bit_p (&bb_flags, e->src->index))
6546 {
6547 emit_return_into_block (true, e->src);
6548 e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
6549 }
6550 }
6551 #endif
6552
6553 #ifdef HAVE_sibcall_epilogue
6554 /* Emit sibling epilogues before any sibling call sites. */
6555 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
6556 {
6557 basic_block bb = e->src;
6558 rtx insn = BB_END (bb);
6559 rtx ep_seq;
6560
6561 if (!CALL_P (insn)
6562 || ! SIBLING_CALL_P (insn)
6563 #ifdef HAVE_simple_return
6564 || (entry_edge != orig_entry_edge
6565 && !bitmap_bit_p (&bb_flags, bb->index))
6566 #endif
6567 )
6568 {
6569 ei_next (&ei);
6570 continue;
6571 }
6572
6573 ep_seq = gen_sibcall_epilogue ();
6574 if (ep_seq)
6575 {
6576 start_sequence ();
6577 emit_note (NOTE_INSN_EPILOGUE_BEG);
6578 emit_insn (ep_seq);
6579 seq = get_insns ();
6580 end_sequence ();
6581
6582 /* Retain a map of the epilogue insns. Used in life analysis to
6583 avoid getting rid of sibcall epilogue insns. Do this before we
6584 actually emit the sequence. */
6585 record_insns (seq, NULL, &epilogue_insn_hash);
6586 set_insn_locators (seq, epilogue_locator);
6587
6588 emit_insn_before (seq, insn);
6589 }
6590 ei_next (&ei);
6591 }
6592 #endif
6593
6594 #ifdef HAVE_epilogue
6595 if (epilogue_end)
6596 {
6597 rtx insn, next;
6598
6599 /* Similarly, move any line notes that appear after the epilogue.
6600 There is no need, however, to be quite so anal about the existence
6601 of such a note. Also possibly move
6602 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6603 info generation. */
6604 for (insn = epilogue_end; insn; insn = next)
6605 {
6606 next = NEXT_INSN (insn);
6607 if (NOTE_P (insn)
6608 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6609 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6610 }
6611 }
6612 #endif
6613
6614 #ifdef HAVE_simple_return
6615 bitmap_clear (&bb_flags);
6616 #endif
6617
6618 /* Threading the prologue and epilogue changes the artificial refs
6619 in the entry and exit blocks. */
6620 epilogue_completed = 1;
6621 df_update_entry_exit_and_calls ();
6622 }
6623
6624 /* Reposition the prologue-end and epilogue-begin notes after
6625 instruction scheduling. */
6626
6627 void
6628 reposition_prologue_and_epilogue_notes (void)
6629 {
6630 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \
6631 || defined (HAVE_sibcall_epilogue)
6632 /* Since the hash table is created on demand, the fact that it is
6633 non-null is a signal that it is non-empty. */
6634 if (prologue_insn_hash != NULL)
6635 {
6636 size_t len = htab_elements (prologue_insn_hash);
6637 rtx insn, last = NULL, note = NULL;
6638
6639 /* Scan from the beginning until we reach the last prologue insn. */
6640 /* ??? While we do have the CFG intact, there are two problems:
6641 (1) The prologue can contain loops (typically probing the stack),
6642 which means that the end of the prologue isn't in the first bb.
6643 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6644 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6645 {
6646 if (NOTE_P (insn))
6647 {
6648 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6649 note = insn;
6650 }
6651 else if (contains (insn, prologue_insn_hash))
6652 {
6653 last = insn;
6654 if (--len == 0)
6655 break;
6656 }
6657 }
6658
6659 if (last)
6660 {
6661 if (note == NULL)
6662 {
6663 /* Scan forward looking for the PROLOGUE_END note. It should
6664 be right at the beginning of the block, possibly with other
6665 insn notes that got moved there. */
6666 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6667 {
6668 if (NOTE_P (note)
6669 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6670 break;
6671 }
6672 }
6673
6674 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6675 if (LABEL_P (last))
6676 last = NEXT_INSN (last);
6677 reorder_insns (note, note, last);
6678 }
6679 }
6680
6681 if (epilogue_insn_hash != NULL)
6682 {
6683 edge_iterator ei;
6684 edge e;
6685
6686 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
6687 {
6688 rtx insn, first = NULL, note = NULL;
6689 basic_block bb = e->src;
6690
6691 /* Scan from the beginning until we reach the first epilogue insn. */
6692 FOR_BB_INSNS (bb, insn)
6693 {
6694 if (NOTE_P (insn))
6695 {
6696 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6697 {
6698 note = insn;
6699 if (first != NULL)
6700 break;
6701 }
6702 }
6703 else if (first == NULL && contains (insn, epilogue_insn_hash))
6704 {
6705 first = insn;
6706 if (note != NULL)
6707 break;
6708 }
6709 }
6710
6711 if (note)
6712 {
6713 /* If the function has a single basic block, and no real
6714 epilogue insns (e.g. sibcall with no cleanup), the
6715 epilogue note can get scheduled before the prologue
6716 note. If we have frame related prologue insns, having
6717 them scanned during the epilogue will result in a crash.
6718 In this case re-order the epilogue note to just before
6719 the last insn in the block. */
6720 if (first == NULL)
6721 first = BB_END (bb);
6722
6723 if (PREV_INSN (first) != note)
6724 reorder_insns (note, note, PREV_INSN (first));
6725 }
6726 }
6727 }
6728 #endif /* HAVE_prologue or HAVE_epilogue */
6729 }
6730
6731 /* Returns the name of the current function. */
6732 const char *
6733 current_function_name (void)
6734 {
6735 if (cfun == NULL)
6736 return "<none>";
6737 return lang_hooks.decl_printable_name (cfun->decl, 2);
6738 }
6739 \f
6740
6741 static unsigned int
6742 rest_of_handle_check_leaf_regs (void)
6743 {
6744 #ifdef LEAF_REGISTERS
6745 current_function_uses_only_leaf_regs
6746 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6747 #endif
6748 return 0;
6749 }
6750
6751 /* Insert a TYPE into the used types hash table of CFUN. */
6752
6753 static void
6754 used_types_insert_helper (tree type, struct function *func)
6755 {
6756 if (type != NULL && func != NULL)
6757 {
6758 void **slot;
6759
6760 if (func->used_types_hash == NULL)
6761 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
6762 htab_eq_pointer, NULL);
6763 slot = htab_find_slot (func->used_types_hash, type, INSERT);
6764 if (*slot == NULL)
6765 *slot = type;
6766 }
6767 }
6768
6769 /* Given a type, insert it into the used hash table in cfun. */
6770 void
6771 used_types_insert (tree t)
6772 {
6773 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6774 if (TYPE_NAME (t))
6775 break;
6776 else
6777 t = TREE_TYPE (t);
6778 if (TREE_CODE (t) == ERROR_MARK)
6779 return;
6780 if (TYPE_NAME (t) == NULL_TREE
6781 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6782 t = TYPE_MAIN_VARIANT (t);
6783 if (debug_info_level > DINFO_LEVEL_NONE)
6784 {
6785 if (cfun)
6786 used_types_insert_helper (t, cfun);
6787 else
6788 /* So this might be a type referenced by a global variable.
6789 Record that type so that we can later decide to emit its debug
6790 information. */
6791 VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
6792 }
6793 }
6794
6795 /* Helper to hash a struct types_used_by_vars_entry. */
6796
6797 static hashval_t
6798 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6799 {
6800 gcc_assert (entry && entry->var_decl && entry->type);
6801
6802 return iterative_hash_object (entry->type,
6803 iterative_hash_object (entry->var_decl, 0));
6804 }
6805
6806 /* Hash function of the types_used_by_vars_entry hash table. */
6807
6808 hashval_t
6809 types_used_by_vars_do_hash (const void *x)
6810 {
6811 const struct types_used_by_vars_entry *entry =
6812 (const struct types_used_by_vars_entry *) x;
6813
6814 return hash_types_used_by_vars_entry (entry);
6815 }
6816
6817 /* Equality function of the types_used_by_vars_entry hash table. */
6818
6819 int
6820 types_used_by_vars_eq (const void *x1, const void *x2)
6821 {
6822 const struct types_used_by_vars_entry *e1 =
6823 (const struct types_used_by_vars_entry *) x1;
6824 const struct types_used_by_vars_entry *e2 =
6825 (const struct types_used_by_vars_entry *)x2;
6826
6827 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6828 }
6829
6830 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6831
6832 void
6833 types_used_by_var_decl_insert (tree type, tree var_decl)
6834 {
6835 if (type != NULL && var_decl != NULL)
6836 {
6837 void **slot;
6838 struct types_used_by_vars_entry e;
6839 e.var_decl = var_decl;
6840 e.type = type;
6841 if (types_used_by_vars_hash == NULL)
6842 types_used_by_vars_hash =
6843 htab_create_ggc (37, types_used_by_vars_do_hash,
6844 types_used_by_vars_eq, NULL);
6845 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
6846 hash_types_used_by_vars_entry (&e), INSERT);
6847 if (*slot == NULL)
6848 {
6849 struct types_used_by_vars_entry *entry;
6850 entry = ggc_alloc_types_used_by_vars_entry ();
6851 entry->type = type;
6852 entry->var_decl = var_decl;
6853 *slot = entry;
6854 }
6855 }
6856 }
6857
6858 struct rtl_opt_pass pass_leaf_regs =
6859 {
6860 {
6861 RTL_PASS,
6862 "*leaf_regs", /* name */
6863 NULL, /* gate */
6864 rest_of_handle_check_leaf_regs, /* execute */
6865 NULL, /* sub */
6866 NULL, /* next */
6867 0, /* static_pass_number */
6868 TV_NONE, /* tv_id */
6869 0, /* properties_required */
6870 0, /* properties_provided */
6871 0, /* properties_destroyed */
6872 0, /* todo_flags_start */
6873 0 /* todo_flags_finish */
6874 }
6875 };
6876
6877 static unsigned int
6878 rest_of_handle_thread_prologue_and_epilogue (void)
6879 {
6880 if (optimize)
6881 cleanup_cfg (CLEANUP_EXPENSIVE);
6882
6883 /* On some machines, the prologue and epilogue code, or parts thereof,
6884 can be represented as RTL. Doing so lets us schedule insns between
6885 it and the rest of the code and also allows delayed branch
6886 scheduling to operate in the epilogue. */
6887 thread_prologue_and_epilogue_insns ();
6888
6889 /* The stack usage info is finalized during prologue expansion. */
6890 if (flag_stack_usage_info)
6891 output_stack_usage ();
6892
6893 return 0;
6894 }
6895
6896 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
6897 {
6898 {
6899 RTL_PASS,
6900 "pro_and_epilogue", /* name */
6901 NULL, /* gate */
6902 rest_of_handle_thread_prologue_and_epilogue, /* execute */
6903 NULL, /* sub */
6904 NULL, /* next */
6905 0, /* static_pass_number */
6906 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6907 0, /* properties_required */
6908 0, /* properties_provided */
6909 0, /* properties_destroyed */
6910 TODO_verify_flow, /* todo_flags_start */
6911 TODO_df_verify |
6912 TODO_df_finish | TODO_verify_rtl_sharing |
6913 TODO_ggc_collect /* todo_flags_finish */
6914 }
6915 };
6916 \f
6917
6918 /* This mini-pass fixes fall-out from SSA in asm statements that have
6919 in-out constraints. Say you start with
6920
6921 orig = inout;
6922 asm ("": "+mr" (inout));
6923 use (orig);
6924
6925 which is transformed very early to use explicit output and match operands:
6926
6927 orig = inout;
6928 asm ("": "=mr" (inout) : "0" (inout));
6929 use (orig);
6930
6931 Or, after SSA and copyprop,
6932
6933 asm ("": "=mr" (inout_2) : "0" (inout_1));
6934 use (inout_1);
6935
6936 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6937 they represent two separate values, so they will get different pseudo
6938 registers during expansion. Then, since the two operands need to match
6939 per the constraints, but use different pseudo registers, reload can
6940 only register a reload for these operands. But reloads can only be
6941 satisfied by hardregs, not by memory, so we need a register for this
6942 reload, just because we are presented with non-matching operands.
6943 So, even though we allow memory for this operand, no memory can be
6944 used for it, just because the two operands don't match. This can
6945 cause reload failures on register-starved targets.
6946
6947 So it's a symptom of reload not being able to use memory for reloads
6948 or, alternatively it's also a symptom of both operands not coming into
6949 reload as matching (in which case the pseudo could go to memory just
6950 fine, as the alternative allows it, and no reload would be necessary).
6951 We fix the latter problem here, by transforming
6952
6953 asm ("": "=mr" (inout_2) : "0" (inout_1));
6954
6955 back to
6956
6957 inout_2 = inout_1;
6958 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
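
/* Restated as a sketch of the emitted RTL (pseudo numbers hypothetical):
   the pass emits a plain move

     (set (reg inout_2) (reg inout_1))

   immediately before the asm and rewrites every occurrence of inout_1 in
   the asm's operands to inout_2, so the matched operands once again share
   a single pseudo.  */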
6959
6960 static void
6961 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
6962 {
6963 int i;
6964 bool changed = false;
6965 rtx op = SET_SRC (p_sets[0]);
6966 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6967 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6968 bool *output_matched = XALLOCAVEC (bool, noutputs);
6969
6970 memset (output_matched, 0, noutputs * sizeof (bool));
6971 for (i = 0; i < ninputs; i++)
6972 {
6973 rtx input, output, insns;
6974 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6975 char *end;
6976 int match, j;
6977
6978 if (*constraint == '%')
6979 constraint++;
6980
6981 match = strtoul (constraint, &end, 10);
6982 if (end == constraint)
6983 continue;
6984
6985 gcc_assert (match < noutputs);
6986 output = SET_DEST (p_sets[match]);
6987 input = RTVEC_ELT (inputs, i);
6988 /* Only do the transformation for pseudos. */
6989 if (! REG_P (output)
6990 || rtx_equal_p (output, input)
6991 || (GET_MODE (input) != VOIDmode
6992 && GET_MODE (input) != GET_MODE (output)))
6993 continue;
6994
6995 /* We can't do anything if the output is also used as input,
6996 as we're going to overwrite it. */
6997 for (j = 0; j < ninputs; j++)
6998 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6999 break;
7000 if (j != ninputs)
7001 continue;
7002
7003 /* Avoid changing the same input several times. For
7004 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
7005 only change in once (to out1), rather than changing it
7006 first to out1 and afterwards to out2. */
7007 if (i > 0)
7008 {
7009 for (j = 0; j < noutputs; j++)
7010 if (output_matched[j] && input == SET_DEST (p_sets[j]))
7011 break;
7012 if (j != noutputs)
7013 continue;
7014 }
7015 output_matched[match] = true;
7016
7017 start_sequence ();
7018 emit_move_insn (output, input);
7019 insns = get_insns ();
7020 end_sequence ();
7021 emit_insn_before (insns, insn);
7022
7023 /* Now replace all mentions of the input with output. We can't
7024 just replace the occurrence in inputs[i], as the register might
7025 also be used in some other input (or even in an address of an
7026 output), which would mean possibly increasing the number of
7027 inputs by one (namely 'output' in addition), which might pose
7028 too complicated a problem for reload to solve. E.g. this situation:
7029
7030 asm ("" : "=r" (output), "=m" (input) : "0" (input))
7031
7032 Here 'input' is used in two occurrences as input (once for the
7033 input operand, once for the address in the second output operand).
7034 If we would replace only the occurrence of the input operand (to
7035 make the matching) we would be left with this:
7036
7037 output = input
7038 asm ("" : "=r" (output), "=m" (input) : "0" (output))
7039
7040 Now we suddenly have two different input values (containing the same
7041 value, but different pseudos) where we formerly had only one.
7042 With more complicated asms this might lead to reload failures
7043 which wouldn't have happened without this pass. So, iterate over
7044 all operands and replace all occurrences of the register used. */
7045 for (j = 0; j < noutputs; j++)
7046 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
7047 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
7048 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
7049 input, output);
7050 for (j = 0; j < ninputs; j++)
7051 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
7052 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
7053 input, output);
7054
7055 changed = true;
7056 }
7057
7058 if (changed)
7059 df_insn_rescan (insn);
7060 }
7061
7062 static unsigned
7063 rest_of_match_asm_constraints (void)
7064 {
7065 basic_block bb;
7066 rtx insn, pat, *p_sets;
7067 int noutputs;
7068
7069 if (!crtl->has_asm_statement)
7070 return 0;
7071
7072 df_set_flags (DF_DEFER_INSN_RESCAN);
7073 FOR_EACH_BB (bb)
7074 {
7075 FOR_BB_INSNS (bb, insn)
7076 {
7077 if (!INSN_P (insn))
7078 continue;
7079
7080 pat = PATTERN (insn);
7081 if (GET_CODE (pat) == PARALLEL)
7082 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
7083 else if (GET_CODE (pat) == SET)
7084 p_sets = &PATTERN (insn), noutputs = 1;
7085 else
7086 continue;
7087
7088 if (GET_CODE (*p_sets) == SET
7089 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
7090 match_asm_constraints_1 (insn, p_sets, noutputs);
7091 }
7092 }
7093
7094 return TODO_df_finish;
7095 }
7096
7097 struct rtl_opt_pass pass_match_asm_constraints =
7098 {
7099 {
7100 RTL_PASS,
7101 "asmcons", /* name */
7102 NULL, /* gate */
7103 rest_of_match_asm_constraints, /* execute */
7104 NULL, /* sub */
7105 NULL, /* next */
7106 0, /* static_pass_number */
7107 TV_NONE, /* tv_id */
7108 0, /* properties_required */
7109 0, /* properties_provided */
7110 0, /* properties_destroyed */
7111 0, /* todo_flags_start */
7112 0 /* todo_flags_finish */
7113 }
7114 };
7115
7116
7117 #include "gt-function.h"