function.c: Use rtx_sequence
1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "rtl-error.h"
39 #include "tree.h"
40 #include "stor-layout.h"
41 #include "varasm.h"
42 #include "stringpool.h"
43 #include "flags.h"
44 #include "except.h"
45 #include "function.h"
46 #include "expr.h"
47 #include "optabs.h"
48 #include "libfuncs.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "output.h"
54 #include "hashtab.h"
55 #include "tm_p.h"
56 #include "langhooks.h"
57 #include "target.h"
58 #include "common/common-target.h"
59 #include "gimple-expr.h"
60 #include "gimplify.h"
61 #include "tree-pass.h"
62 #include "predict.h"
63 #include "df.h"
64 #include "params.h"
65 #include "bb-reorder.h"
66 #include "shrink-wrap.h"
67 #include "toplev.h"
68
69 /* So we can assign to cfun in this file. */
70 #undef cfun
71
72 #ifndef STACK_ALIGNMENT_NEEDED
73 #define STACK_ALIGNMENT_NEEDED 1
74 #endif
75
76 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
77
78 /* Round a value down to the largest multiple of the required alignment
79    that does not exceed it.  Avoid using division in case the value is
80    negative.  Assume the alignment is a power of two. */
81 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
82
83 /* Similar, but round up to the next multiple of the required
84    alignment. */
85 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
86
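/* Worked example (editorial illustration, not part of the original source):
   for a power-of-two ALIGN of 16,
     FLOOR_ROUND (37, 16)  == 32    -- 37 & ~15
     CEIL_ROUND  (37, 16)  == 48    -- (37 + 15) & ~15
     FLOOR_ROUND (-20, 16) == -32   -- rounds toward minus infinity
   The bit-masking form stays well defined for the negative offsets that
   occur when FRAME_GROWS_DOWNWARD.  */
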
87 /* Nonzero once virtual register instantiation has been done.
88 assign_stack_local uses frame_pointer_rtx when this is nonzero.
89 calls.c:emit_library_call_value_1 uses it to set up
90 post-instantiation libcalls. */
91 int virtuals_instantiated;
92
93 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
94 static GTY(()) int funcdef_no;
95
96 /* These variables hold pointers to functions to create and destroy
97 target specific, per-function data structures. */
98 struct machine_function * (*init_machine_status) (void);
99
100 /* The currently compiled function. */
101 struct function *cfun = 0;
102
103 /* These hashes record the prologue and epilogue insns. */
104 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
105 htab_t prologue_insn_hash;
106 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
107 htab_t epilogue_insn_hash;
108 \f
109
110 htab_t types_used_by_vars_hash = NULL;
111 vec<tree, va_gc> *types_used_by_cur_var_decl;
112
113 /* Forward declarations. */
114
115 static struct temp_slot *find_temp_slot_from_address (rtx);
116 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
117 static void pad_below (struct args_size *, enum machine_mode, tree);
118 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
119 static int all_blocks (tree, tree *);
120 static tree *get_block_vector (tree, int *);
121 extern tree debug_find_var_in_block_tree (tree, tree);
122 /* We always define `record_insns' even if it's not used so that we
123 can always export `prologue_epilogue_contains'. */
124 static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
125 static bool contains (const_rtx, htab_t);
126 static void prepare_function_start (void);
127 static void do_clobber_return_reg (rtx, void *);
128 static void do_use_return_reg (rtx, void *);
129 \f
130 /* Stack of nested functions. */
131 /* Keep track of the cfun stack. */
132
133 typedef struct function *function_p;
134
135 static vec<function_p> function_context_stack;
136
137 /* Save the current context for compilation of a nested function.
138 This is called from language-specific code. */
139
140 void
141 push_function_context (void)
142 {
143 if (cfun == 0)
144 allocate_struct_function (NULL, false);
145
146 function_context_stack.safe_push (cfun);
147 set_cfun (NULL);
148 }
149
150 /* Restore the last saved context, at the end of a nested function.
151 This function is called from language-specific code. */
152
153 void
154 pop_function_context (void)
155 {
156 struct function *p = function_context_stack.pop ();
157 set_cfun (p);
158 current_function_decl = p->decl;
159
160 /* Reset variables that have known state during rtx generation. */
161 virtuals_instantiated = 0;
162 generating_concat_p = 1;
163 }
164
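/* Usage sketch (editorial illustration): a front end compiling a nested
   function typically brackets the work with

     push_function_context ();
     ... expand the nested function ...
     pop_function_context ();

   so that the outer function's cfun and related per-function state are
   restored afterwards.  */
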
165 /* Clear out all parts of the state in F that can safely be discarded
166 after the function has been parsed, but not compiled, to let
167 garbage collection reclaim the memory. */
168
169 void
170 free_after_parsing (struct function *f)
171 {
172 f->language = 0;
173 }
174
175 /* Clear out all parts of the state in F that can safely be discarded
176 after the function has been compiled, to let garbage collection
177 reclaim the memory. */
178
179 void
180 free_after_compilation (struct function *f)
181 {
182 prologue_insn_hash = NULL;
183 epilogue_insn_hash = NULL;
184
185 free (crtl->emit.regno_pointer_align);
186
187 memset (crtl, 0, sizeof (struct rtl_data));
188 f->eh = NULL;
189 f->machine = NULL;
190 f->cfg = NULL;
191
192 regno_reg_rtx = NULL;
193 }
194 \f
195 /* Return size needed for stack frame based on slots so far allocated.
196 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
197 the caller may have to do that. */
198
199 HOST_WIDE_INT
200 get_frame_size (void)
201 {
202 if (FRAME_GROWS_DOWNWARD)
203 return -frame_offset;
204 else
205 return frame_offset;
206 }
207
208 /* Issue an error message and return TRUE if frame OFFSET overflows in
209    the signed target pointer arithmetic for function FUNC.  Otherwise
210 return FALSE. */
211
212 bool
213 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
214 {
215 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
216
217 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
218 /* Leave room for the fixed part of the frame. */
219 - 64 * UNITS_PER_WORD)
220 {
221 error_at (DECL_SOURCE_LOCATION (func),
222 "total size of local objects too large");
223 return TRUE;
224 }
225
226 return FALSE;
227 }
228
229 /* Return stack slot alignment in bits for TYPE and MODE. */
230
231 static unsigned int
232 get_stack_local_alignment (tree type, enum machine_mode mode)
233 {
234 unsigned int alignment;
235
236 if (mode == BLKmode)
237 alignment = BIGGEST_ALIGNMENT;
238 else
239 alignment = GET_MODE_ALIGNMENT (mode);
240
241   /* Allow the front end to (possibly) increase the alignment of this
242 stack slot. */
243 if (! type)
244 type = lang_hooks.types.type_for_mode (mode, 0);
245
246 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
247 }
248
249 /* Determine whether it is possible to fit a stack slot of size SIZE and
250 alignment ALIGNMENT into an area in the stack frame that starts at
251 frame offset START and has a length of LENGTH. If so, store the frame
252 offset to be used for the stack slot in *POFFSET and return true;
253 return false otherwise. This function will extend the frame size when
254 given a start/length pair that lies at the end of the frame. */
255
256 static bool
257 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
258 HOST_WIDE_INT size, unsigned int alignment,
259 HOST_WIDE_INT *poffset)
260 {
261 HOST_WIDE_INT this_frame_offset;
262 int frame_off, frame_alignment, frame_phase;
263
264 /* Calculate how many bytes the start of local variables is off from
265 stack alignment. */
266 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
267 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
268 frame_phase = frame_off ? frame_alignment - frame_off : 0;
269
270 /* Round the frame offset to the specified alignment. */
271
272 /* We must be careful here, since FRAME_OFFSET might be negative and
273 division with a negative dividend isn't as well defined as we might
274 like. So we instead assume that ALIGNMENT is a power of two and
275 use logical operations which are unambiguous. */
276 if (FRAME_GROWS_DOWNWARD)
277 this_frame_offset
278 = (FLOOR_ROUND (start + length - size - frame_phase,
279 (unsigned HOST_WIDE_INT) alignment)
280 + frame_phase);
281 else
282 this_frame_offset
283 = (CEIL_ROUND (start - frame_phase,
284 (unsigned HOST_WIDE_INT) alignment)
285 + frame_phase);
286
287 /* See if it fits. If this space is at the edge of the frame,
288 consider extending the frame to make it fit. Our caller relies on
289 this when allocating a new slot. */
290 if (frame_offset == start && this_frame_offset < frame_offset)
291 frame_offset = this_frame_offset;
292 else if (this_frame_offset < start)
293 return false;
294 else if (start + length == frame_offset
295 && this_frame_offset + size > start + length)
296 frame_offset = this_frame_offset + size;
297 else if (this_frame_offset + size > start + length)
298 return false;
299
300 *poffset = this_frame_offset;
301 return true;
302 }
303
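/* Worked example (editorial illustration, assuming FRAME_GROWS_DOWNWARD and
   STARTING_FRAME_OFFSET == 0, so frame_phase is 0): given a free area with
   START == -32 and LENGTH == 16, a request for SIZE == 8 with ALIGNMENT == 8
   computes
     this_frame_offset = FLOOR_ROUND (-32 + 16 - 8, 8) = -24,
   which lies inside [-32, -16), so *POFFSET becomes -24 and the caller keeps
   the leftover bytes [-32, -24) as a new frame_space entry.  */
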
304 /* Create a new frame_space structure describing free space in the stack
305 frame beginning at START and ending at END, and chain it into the
306 function's frame_space_list. */
307
308 static void
309 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
310 {
311 struct frame_space *space = ggc_alloc<frame_space> ();
312 space->next = crtl->frame_space_list;
313 crtl->frame_space_list = space;
314 space->start = start;
315 space->length = end - start;
316 }
317
318 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
319 with machine mode MODE.
320
321 ALIGN controls the amount of alignment for the address of the slot:
322 0 means according to MODE,
323 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
324 -2 means use BITS_PER_UNIT,
325 positive specifies alignment boundary in bits.
326
327 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
328 alignment and ASLK_RECORD_PAD bit set if we should remember
329 extra space we allocated for alignment purposes. When we are
330 called from assign_stack_temp_for_type, it is not set so we don't
331 track the same stack slot in two independent lists.
332
333 We do not round to stack_boundary here. */
334
335 rtx
336 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
337 int align, int kind)
338 {
339 rtx x, addr;
340 int bigend_correction = 0;
341 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
342 unsigned int alignment, alignment_in_bits;
343
344 if (align == 0)
345 {
346 alignment = get_stack_local_alignment (NULL, mode);
347 alignment /= BITS_PER_UNIT;
348 }
349 else if (align == -1)
350 {
351 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
352 size = CEIL_ROUND (size, alignment);
353 }
354 else if (align == -2)
355 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
356 else
357 alignment = align / BITS_PER_UNIT;
358
359 alignment_in_bits = alignment * BITS_PER_UNIT;
360
361 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
362 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
363 {
364 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
365 alignment = alignment_in_bits / BITS_PER_UNIT;
366 }
367
368 if (SUPPORTS_STACK_ALIGNMENT)
369 {
370 if (crtl->stack_alignment_estimated < alignment_in_bits)
371 {
372 if (!crtl->stack_realign_processed)
373 crtl->stack_alignment_estimated = alignment_in_bits;
374 else
375 {
376 /* If stack is realigned and stack alignment value
377 hasn't been finalized, it is OK not to increase
378 stack_alignment_estimated. The bigger alignment
379 requirement is recorded in stack_alignment_needed
380 below. */
381 gcc_assert (!crtl->stack_realign_finalized);
382 if (!crtl->stack_realign_needed)
383 {
384 /* It is OK to reduce the alignment as long as the
385 requested size is 0 or the estimated stack
386 alignment >= mode alignment. */
387 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
388 || size == 0
389 || (crtl->stack_alignment_estimated
390 >= GET_MODE_ALIGNMENT (mode)));
391 alignment_in_bits = crtl->stack_alignment_estimated;
392 alignment = alignment_in_bits / BITS_PER_UNIT;
393 }
394 }
395 }
396 }
397
398 if (crtl->stack_alignment_needed < alignment_in_bits)
399 crtl->stack_alignment_needed = alignment_in_bits;
400 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
401 crtl->max_used_stack_slot_alignment = alignment_in_bits;
402
403 if (mode != BLKmode || size != 0)
404 {
405 if (kind & ASLK_RECORD_PAD)
406 {
407 struct frame_space **psp;
408
409 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
410 {
411 struct frame_space *space = *psp;
412 if (!try_fit_stack_local (space->start, space->length, size,
413 alignment, &slot_offset))
414 continue;
415 *psp = space->next;
416 if (slot_offset > space->start)
417 add_frame_space (space->start, slot_offset);
418 if (slot_offset + size < space->start + space->length)
419 add_frame_space (slot_offset + size,
420 space->start + space->length);
421 goto found_space;
422 }
423 }
424 }
425 else if (!STACK_ALIGNMENT_NEEDED)
426 {
427 slot_offset = frame_offset;
428 goto found_space;
429 }
430
431 old_frame_offset = frame_offset;
432
433 if (FRAME_GROWS_DOWNWARD)
434 {
435 frame_offset -= size;
436 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
437
438 if (kind & ASLK_RECORD_PAD)
439 {
440 if (slot_offset > frame_offset)
441 add_frame_space (frame_offset, slot_offset);
442 if (slot_offset + size < old_frame_offset)
443 add_frame_space (slot_offset + size, old_frame_offset);
444 }
445 }
446 else
447 {
448 frame_offset += size;
449 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
450
451 if (kind & ASLK_RECORD_PAD)
452 {
453 if (slot_offset > old_frame_offset)
454 add_frame_space (old_frame_offset, slot_offset);
455 if (slot_offset + size < frame_offset)
456 add_frame_space (slot_offset + size, frame_offset);
457 }
458 }
459
460 found_space:
461 /* On a big-endian machine, if we are allocating more space than we will use,
462 use the least significant bytes of those that are allocated. */
463 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
464 bigend_correction = size - GET_MODE_SIZE (mode);
465
466 /* If we have already instantiated virtual registers, return the actual
467 address relative to the frame pointer. */
468 if (virtuals_instantiated)
469 addr = plus_constant (Pmode, frame_pointer_rtx,
470 trunc_int_for_mode
471 (slot_offset + bigend_correction
472 + STARTING_FRAME_OFFSET, Pmode));
473 else
474 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
475 trunc_int_for_mode
476 (slot_offset + bigend_correction,
477 Pmode));
478
479 x = gen_rtx_MEM (mode, addr);
480 set_mem_align (x, alignment_in_bits);
481 MEM_NOTRAP_P (x) = 1;
482
483 stack_slot_list
484 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
485
486 if (frame_offset_overflow (frame_offset, current_function_decl))
487 frame_offset = 0;
488
489 return x;
490 }
491
492 /* Wrapper around assign_stack_local_1; the last argument is ASLK_RECORD_PAD. */
493
494 rtx
495 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
496 {
497 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
498 }
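
/* Usage sketch (editorial illustration): a caller that needs a word-sized
   spill slot with the natural alignment of its mode can write

     rtx slot = assign_stack_local (word_mode, UNITS_PER_WORD, 0);

   ALIGN == 0 requests GET_MODE_ALIGNMENT (word_mode), and ASLK_RECORD_PAD
   records any leftover padding so later allocations can reuse it.  */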
499 \f
500 /* In order to evaluate some expressions, such as function calls returning
501 structures in memory, we need to temporarily allocate stack locations.
502 We record each allocated temporary in the following structure.
503
504 Associated with each temporary slot is a nesting level. When we pop up
505 one level, all temporaries associated with the previous level are freed.
506 Normally, all temporaries are freed after the execution of the statement
507 in which they were created. However, if we are inside a ({...}) grouping,
508 the result may be in a temporary and hence must be preserved. If the
509 result could be in a temporary, we preserve it if we can determine which
510 one it is in. If we cannot determine which temporary may contain the
511 result, all temporaries are preserved. A temporary is preserved by
512 pretending it was allocated at the previous nesting level. */
513
514 struct GTY(()) temp_slot {
515 /* Points to next temporary slot. */
516 struct temp_slot *next;
517 /* Points to previous temporary slot. */
518 struct temp_slot *prev;
519 /* The rtx to used to reference the slot. */
520 rtx slot;
521 /* The size, in units, of the slot. */
522 HOST_WIDE_INT size;
523 /* The type of the object in the slot, or zero if it doesn't correspond
524 to a type. We use this to determine whether a slot can be reused.
525 It can be reused if objects of the type of the new slot will always
526 conflict with objects of the type of the old slot. */
527 tree type;
528 /* The alignment (in bits) of the slot. */
529 unsigned int align;
530 /* Nonzero if this temporary is currently in use. */
531 char in_use;
532 /* Nesting level at which this slot is being used. */
533 int level;
534 /* The offset of the slot from the frame_pointer, including extra space
535 for alignment. This info is for combine_temp_slots. */
536 HOST_WIDE_INT base_offset;
537 /* The size of the slot, including extra space for alignment. This
538 info is for combine_temp_slots. */
539 HOST_WIDE_INT full_size;
540 };
541
542 /* A table of addresses that represent a stack slot. The table is a mapping
543 from address RTXen to a temp slot. */
544 static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
545 static size_t n_temp_slots_in_use;
546
547 /* Entry for the above hash table. */
548 struct GTY(()) temp_slot_address_entry {
549 hashval_t hash;
550 rtx address;
551 struct temp_slot *temp_slot;
552 };
553
554 /* Removes temporary slot TEMP from LIST. */
555
556 static void
557 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
558 {
559 if (temp->next)
560 temp->next->prev = temp->prev;
561 if (temp->prev)
562 temp->prev->next = temp->next;
563 else
564 *list = temp->next;
565
566 temp->prev = temp->next = NULL;
567 }
568
569 /* Inserts temporary slot TEMP to LIST. */
570
571 static void
572 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
573 {
574 temp->next = *list;
575 if (*list)
576 (*list)->prev = temp;
577 temp->prev = NULL;
578 *list = temp;
579 }
580
581 /* Returns the list of used temp slots at LEVEL. */
582
583 static struct temp_slot **
584 temp_slots_at_level (int level)
585 {
586 if (level >= (int) vec_safe_length (used_temp_slots))
587 vec_safe_grow_cleared (used_temp_slots, level + 1);
588
589 return &(*used_temp_slots)[level];
590 }
591
592 /* Returns the maximal temporary slot level. */
593
594 static int
595 max_slot_level (void)
596 {
597 if (!used_temp_slots)
598 return -1;
599
600 return used_temp_slots->length () - 1;
601 }
602
603 /* Moves temporary slot TEMP to LEVEL. */
604
605 static void
606 move_slot_to_level (struct temp_slot *temp, int level)
607 {
608 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
609 insert_slot_to_list (temp, temp_slots_at_level (level));
610 temp->level = level;
611 }
612
613 /* Make temporary slot TEMP available. */
614
615 static void
616 make_slot_available (struct temp_slot *temp)
617 {
618 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
619 insert_slot_to_list (temp, &avail_temp_slots);
620 temp->in_use = 0;
621 temp->level = -1;
622 n_temp_slots_in_use--;
623 }
624
625 /* Compute the hash value for an address -> temp slot mapping.
626 The value is cached on the mapping entry. */
627 static hashval_t
628 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
629 {
630 int do_not_record = 0;
631 return hash_rtx (t->address, GET_MODE (t->address),
632 &do_not_record, NULL, false);
633 }
634
635 /* Return the hash value for an address -> temp slot mapping. */
636 static hashval_t
637 temp_slot_address_hash (const void *p)
638 {
639 const struct temp_slot_address_entry *t;
640 t = (const struct temp_slot_address_entry *) p;
641 return t->hash;
642 }
643
644 /* Compare two address -> temp slot mapping entries. */
645 static int
646 temp_slot_address_eq (const void *p1, const void *p2)
647 {
648 const struct temp_slot_address_entry *t1, *t2;
649 t1 = (const struct temp_slot_address_entry *) p1;
650 t2 = (const struct temp_slot_address_entry *) p2;
651 return exp_equiv_p (t1->address, t2->address, 0, true);
652 }
653
654 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
655 static void
656 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
657 {
658 void **slot;
659 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
660 t->address = address;
661 t->temp_slot = temp_slot;
662 t->hash = temp_slot_address_compute_hash (t);
663 slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
664 *slot = t;
665 }
666
667 /* Remove an address -> temp slot mapping entry if the temp slot is
668 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
669 static int
670 remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
671 {
672 const struct temp_slot_address_entry *t;
673 t = (const struct temp_slot_address_entry *) *slot;
674 if (! t->temp_slot->in_use)
675 htab_clear_slot (temp_slot_address_table, slot);
676 return 1;
677 }
678
679 /* Remove all mappings of addresses to unused temp slots. */
680 static void
681 remove_unused_temp_slot_addresses (void)
682 {
683 /* Use quicker clearing if there aren't any active temp slots. */
684 if (n_temp_slots_in_use)
685 htab_traverse (temp_slot_address_table,
686 remove_unused_temp_slot_addresses_1,
687 NULL);
688 else
689 htab_empty (temp_slot_address_table);
690 }
691
692 /* Find the temp slot corresponding to the object at address X. */
693
694 static struct temp_slot *
695 find_temp_slot_from_address (rtx x)
696 {
697 struct temp_slot *p;
698 struct temp_slot_address_entry tmp, *t;
699
700 /* First try the easy way:
701 See if X exists in the address -> temp slot mapping. */
702 tmp.address = x;
703 tmp.temp_slot = NULL;
704 tmp.hash = temp_slot_address_compute_hash (&tmp);
705 t = (struct temp_slot_address_entry *)
706 htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
707 if (t)
708 return t->temp_slot;
709
710 /* If we have a sum involving a register, see if it points to a temp
711 slot. */
712 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
713 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
714 return p;
715 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
716 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
717 return p;
718
719 /* Last resort: Address is a virtual stack var address. */
720 if (GET_CODE (x) == PLUS
721 && XEXP (x, 0) == virtual_stack_vars_rtx
722 && CONST_INT_P (XEXP (x, 1)))
723 {
724 int i;
725 for (i = max_slot_level (); i >= 0; i--)
726 for (p = *temp_slots_at_level (i); p; p = p->next)
727 {
728 if (INTVAL (XEXP (x, 1)) >= p->base_offset
729 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
730 return p;
731 }
732 }
733
734 return NULL;
735 }
736 \f
737 /* Allocate a temporary stack slot and record it for possible later
738 reuse.
739
740 MODE is the machine mode to be given to the returned rtx.
741
742 SIZE is the size in units of the space required. We do no rounding here
743 since assign_stack_local will do any required rounding.
744
745 TYPE is the type that will be used for the stack slot. */
746
747 rtx
748 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
749 tree type)
750 {
751 unsigned int align;
752 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
753 rtx slot;
754
755 /* If SIZE is -1 it means that somebody tried to allocate a temporary
756 of a variable size. */
757 gcc_assert (size != -1);
758
759 align = get_stack_local_alignment (type, mode);
760
761 /* Try to find an available, already-allocated temporary of the proper
762 mode which meets the size and alignment requirements. Choose the
763 smallest one with the closest alignment.
764
765 If assign_stack_temp is called outside of the tree->rtl expansion,
766 we cannot reuse the stack slots (that may still refer to
767 VIRTUAL_STACK_VARS_REGNUM). */
768 if (!virtuals_instantiated)
769 {
770 for (p = avail_temp_slots; p; p = p->next)
771 {
772 if (p->align >= align && p->size >= size
773 && GET_MODE (p->slot) == mode
774 && objects_must_conflict_p (p->type, type)
775 && (best_p == 0 || best_p->size > p->size
776 || (best_p->size == p->size && best_p->align > p->align)))
777 {
778 if (p->align == align && p->size == size)
779 {
780 selected = p;
781 cut_slot_from_list (selected, &avail_temp_slots);
782 best_p = 0;
783 break;
784 }
785 best_p = p;
786 }
787 }
788 }
789
790 /* Make our best, if any, the one to use. */
791 if (best_p)
792 {
793 selected = best_p;
794 cut_slot_from_list (selected, &avail_temp_slots);
795
796 /* If there are enough aligned bytes left over, make them into a new
797 temp_slot so that the extra bytes don't get wasted. Do this only
798 for BLKmode slots, so that we can be sure of the alignment. */
799 if (GET_MODE (best_p->slot) == BLKmode)
800 {
801 int alignment = best_p->align / BITS_PER_UNIT;
802 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
803
804 if (best_p->size - rounded_size >= alignment)
805 {
806 p = ggc_alloc<temp_slot> ();
807 p->in_use = 0;
808 p->size = best_p->size - rounded_size;
809 p->base_offset = best_p->base_offset + rounded_size;
810 p->full_size = best_p->full_size - rounded_size;
811 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
812 p->align = best_p->align;
813 p->type = best_p->type;
814 insert_slot_to_list (p, &avail_temp_slots);
815
816 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
817 stack_slot_list);
818
819 best_p->size = rounded_size;
820 best_p->full_size = rounded_size;
821 }
822 }
823 }
824
825 /* If we still didn't find one, make a new temporary. */
826 if (selected == 0)
827 {
828 HOST_WIDE_INT frame_offset_old = frame_offset;
829
830 p = ggc_alloc<temp_slot> ();
831
832 /* We are passing an explicit alignment request to assign_stack_local.
833 One side effect of that is assign_stack_local will not round SIZE
834 to ensure the frame offset remains suitably aligned.
835
836 So for requests which depended on the rounding of SIZE, we go ahead
837 and round it now. We also make sure ALIGNMENT is at least
838 BIGGEST_ALIGNMENT. */
839 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
840 p->slot = assign_stack_local_1 (mode,
841 (mode == BLKmode
842 ? CEIL_ROUND (size,
843 (int) align
844 / BITS_PER_UNIT)
845 : size),
846 align, 0);
847
848 p->align = align;
849
850 /* The following slot size computation is necessary because we don't
851 know the actual size of the temporary slot until assign_stack_local
852 has performed all the frame alignment and size rounding for the
853 requested temporary. Note that extra space added for alignment
854 can be either above or below this stack slot depending on which
855 way the frame grows. We include the extra space if and only if it
856 is above this slot. */
857 if (FRAME_GROWS_DOWNWARD)
858 p->size = frame_offset_old - frame_offset;
859 else
860 p->size = size;
861
862 /* Now define the fields used by combine_temp_slots. */
863 if (FRAME_GROWS_DOWNWARD)
864 {
865 p->base_offset = frame_offset;
866 p->full_size = frame_offset_old - frame_offset;
867 }
868 else
869 {
870 p->base_offset = frame_offset_old;
871 p->full_size = frame_offset - frame_offset_old;
872 }
873
874 selected = p;
875 }
876
877 p = selected;
878 p->in_use = 1;
879 p->type = type;
880 p->level = temp_slot_level;
881 n_temp_slots_in_use++;
882
883 pp = temp_slots_at_level (p->level);
884 insert_slot_to_list (p, pp);
885 insert_temp_slot_address (XEXP (p->slot, 0), p);
886
887 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
888 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
889 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
890
891 /* If we know the alias set for the memory that will be used, use
892 it. If there's no TYPE, then we don't know anything about the
893 alias set for the memory. */
894 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
895 set_mem_align (slot, align);
896
897 /* If a type is specified, set the relevant flags. */
898 if (type != 0)
899 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
900 MEM_NOTRAP_P (slot) = 1;
901
902 return slot;
903 }
904
905 /* Allocate a temporary stack slot and record it for possible later
906    reuse.  The first two arguments are the same as in the preceding function. */
907
908 rtx
909 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size)
910 {
911 return assign_stack_temp_for_type (mode, size, NULL_TREE);
912 }
913 \f
914 /* Assign a temporary.
915 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
916 and so that should be used in error messages. In either case, we
917    allocate a temporary of the given type.
918 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
919 it is 0 if a register is OK.
920 DONT_PROMOTE is 1 if we should not promote values in register
921 to wider modes. */
922
923 rtx
924 assign_temp (tree type_or_decl, int memory_required,
925 int dont_promote ATTRIBUTE_UNUSED)
926 {
927 tree type, decl;
928 enum machine_mode mode;
929 #ifdef PROMOTE_MODE
930 int unsignedp;
931 #endif
932
933 if (DECL_P (type_or_decl))
934 decl = type_or_decl, type = TREE_TYPE (decl);
935 else
936 decl = NULL, type = type_or_decl;
937
938 mode = TYPE_MODE (type);
939 #ifdef PROMOTE_MODE
940 unsignedp = TYPE_UNSIGNED (type);
941 #endif
942
943 if (mode == BLKmode || memory_required)
944 {
945 HOST_WIDE_INT size = int_size_in_bytes (type);
946 rtx tmp;
947
948      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
949 problems with allocating the stack space. */
950 if (size == 0)
951 size = 1;
952
953 /* Unfortunately, we don't yet know how to allocate variable-sized
954 temporaries. However, sometimes we can find a fixed upper limit on
955 the size, so try that instead. */
956 else if (size == -1)
957 size = max_int_size_in_bytes (type);
958
959 /* The size of the temporary may be too large to fit into an integer. */
960 /* ??? Not sure this should happen except for user silliness, so limit
961 this to things that aren't compiler-generated temporaries. The
962 rest of the time we'll die in assign_stack_temp_for_type. */
963 if (decl && size == -1
964 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
965 {
966 error ("size of variable %q+D is too large", decl);
967 size = 1;
968 }
969
970 tmp = assign_stack_temp_for_type (mode, size, type);
971 return tmp;
972 }
973
974 #ifdef PROMOTE_MODE
975 if (! dont_promote)
976 mode = promote_mode (type, mode, &unsignedp);
977 #endif
978
979 return gen_reg_rtx (mode);
980 }
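
/* Usage sketch (editorial illustration): to obtain an addressable temporary
   for a tree type TYPE (for instance when a call returns an aggregate in
   memory), expansion code can do

     rtx mem = assign_temp (type, 1, 1);

   MEMORY_REQUIRED == 1 forces an addressable stack slot; with 0, a
   register-sized type would instead get a fresh pseudo from gen_reg_rtx,
   promoted per PROMOTE_MODE unless DONT_PROMOTE is set.  */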
981 \f
982 /* Combine temporary stack slots which are adjacent on the stack.
983
984 This allows for better use of already allocated stack space. This is only
985 done for BLKmode slots because we can be sure that we won't have alignment
986 problems in this case. */
987
988 static void
989 combine_temp_slots (void)
990 {
991 struct temp_slot *p, *q, *next, *next_q;
992 int num_slots;
993
994 /* We can't combine slots, because the information about which slot
995 is in which alias set will be lost. */
996 if (flag_strict_aliasing)
997 return;
998
999 /* If there are a lot of temp slots, don't do anything unless
1000      high levels of optimization are enabled. */
1001 if (! flag_expensive_optimizations)
1002 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1003 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1004 return;
1005
1006 for (p = avail_temp_slots; p; p = next)
1007 {
1008 int delete_p = 0;
1009
1010 next = p->next;
1011
1012 if (GET_MODE (p->slot) != BLKmode)
1013 continue;
1014
1015 for (q = p->next; q; q = next_q)
1016 {
1017 int delete_q = 0;
1018
1019 next_q = q->next;
1020
1021 if (GET_MODE (q->slot) != BLKmode)
1022 continue;
1023
1024 if (p->base_offset + p->full_size == q->base_offset)
1025 {
1026 /* Q comes after P; combine Q into P. */
1027 p->size += q->size;
1028 p->full_size += q->full_size;
1029 delete_q = 1;
1030 }
1031 else if (q->base_offset + q->full_size == p->base_offset)
1032 {
1033 /* P comes after Q; combine P into Q. */
1034 q->size += p->size;
1035 q->full_size += p->full_size;
1036 delete_p = 1;
1037 break;
1038 }
1039 if (delete_q)
1040 cut_slot_from_list (q, &avail_temp_slots);
1041 }
1042
1043 /* Either delete P or advance past it. */
1044 if (delete_p)
1045 cut_slot_from_list (p, &avail_temp_slots);
1046 }
1047 }
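
/* Worked example (editorial illustration): with two free BLKmode slots P and
   Q where P->base_offset == 0, P->full_size == 16 and Q->base_offset == 16,
   the test P->base_offset + P->full_size == Q->base_offset holds, so Q is
   folded into P, leaving a single 32-byte slot (assuming Q->full_size == 16)
   that a later, larger request can reuse.  */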
1048 \f
1049 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1050 slot that previously was known by OLD_RTX. */
1051
1052 void
1053 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1054 {
1055 struct temp_slot *p;
1056
1057 if (rtx_equal_p (old_rtx, new_rtx))
1058 return;
1059
1060 p = find_temp_slot_from_address (old_rtx);
1061
1062   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1063 NEW_RTX is a register, see if one operand of the PLUS is a
1064 temporary location. If so, NEW_RTX points into it. Otherwise,
1065 if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
1066 in common between them. If so, try a recursive call on those
1067 values. */
1068 if (p == 0)
1069 {
1070 if (GET_CODE (old_rtx) != PLUS)
1071 return;
1072
1073 if (REG_P (new_rtx))
1074 {
1075 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1076 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1077 return;
1078 }
1079 else if (GET_CODE (new_rtx) != PLUS)
1080 return;
1081
1082 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1083 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1084 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1085 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1086 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1087 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1088 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1089 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1090
1091 return;
1092 }
1093
1094 /* Otherwise add an alias for the temp's address. */
1095 insert_temp_slot_address (new_rtx, p);
1096 }
1097
1098 /* If X could be a reference to a temporary slot, mark that slot as
1099    belonging to the level one higher than the current level.  If X
1100 matched one of our slots, just mark that one. Otherwise, we can't
1101 easily predict which it is, so upgrade all of them.
1102
1103 This is called when an ({...}) construct occurs and a statement
1104 returns a value in memory. */
1105
1106 void
1107 preserve_temp_slots (rtx x)
1108 {
1109 struct temp_slot *p = 0, *next;
1110
1111 if (x == 0)
1112 return;
1113
1114 /* If X is a register that is being used as a pointer, see if we have
1115 a temporary slot we know it points to. */
1116 if (REG_P (x) && REG_POINTER (x))
1117 p = find_temp_slot_from_address (x);
1118
1119 /* If X is not in memory or is at a constant address, it cannot be in
1120 a temporary slot. */
1121 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1122 return;
1123
1124 /* First see if we can find a match. */
1125 if (p == 0)
1126 p = find_temp_slot_from_address (XEXP (x, 0));
1127
1128 if (p != 0)
1129 {
1130 if (p->level == temp_slot_level)
1131 move_slot_to_level (p, temp_slot_level - 1);
1132 return;
1133 }
1134
1135 /* Otherwise, preserve all non-kept slots at this level. */
1136 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1137 {
1138 next = p->next;
1139 move_slot_to_level (p, temp_slot_level - 1);
1140 }
1141 }
1142
1143 /* Free all temporaries used so far. This is normally called at the
1144 end of generating code for a statement. */
1145
1146 void
1147 free_temp_slots (void)
1148 {
1149 struct temp_slot *p, *next;
1150 bool some_available = false;
1151
1152 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1153 {
1154 next = p->next;
1155 make_slot_available (p);
1156 some_available = true;
1157 }
1158
1159 if (some_available)
1160 {
1161 remove_unused_temp_slot_addresses ();
1162 combine_temp_slots ();
1163 }
1164 }
1165
1166 /* Push deeper into the nesting level for stack temporaries. */
1167
1168 void
1169 push_temp_slots (void)
1170 {
1171 temp_slot_level++;
1172 }
1173
1174 /* Pop a temporary nesting level. All slots in use in the current level
1175 are freed. */
1176
1177 void
1178 pop_temp_slots (void)
1179 {
1180 free_temp_slots ();
1181 temp_slot_level--;
1182 }
1183
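/* Usage sketch (editorial illustration): a typical expansion-time pattern is

     push_temp_slots ();
     rtx tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode));
     ... emit stores into and loads from TMP ...
     preserve_temp_slots (tmp);   -- only if TMP must outlive this level
     pop_temp_slots ();

   pop_temp_slots frees every slot still at the popped level, so a result that
   must survive has to be preserved (moved up one level) first.  */
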
1184 /* Initialize temporary slots. */
1185
1186 void
1187 init_temp_slots (void)
1188 {
1189 /* We have not allocated any temporaries yet. */
1190 avail_temp_slots = 0;
1191 vec_alloc (used_temp_slots, 0);
1192 temp_slot_level = 0;
1193 n_temp_slots_in_use = 0;
1194
1195 /* Set up the table to map addresses to temp slots. */
1196 if (! temp_slot_address_table)
1197 temp_slot_address_table = htab_create_ggc (32,
1198 temp_slot_address_hash,
1199 temp_slot_address_eq,
1200 NULL);
1201 else
1202 htab_empty (temp_slot_address_table);
1203 }
1204 \f
1205 /* Functions and data structures to keep track of the values hard regs
1206 had at the start of the function. */
1207
1208 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1209    and has_hard_reg_initial_val. */
1210 typedef struct GTY(()) initial_value_pair {
1211 rtx hard_reg;
1212 rtx pseudo;
1213 } initial_value_pair;
1214 /* ??? This could be a VEC but there is currently no way to define an
1215 opaque VEC type. This could be worked around by defining struct
1216 initial_value_pair in function.h. */
1217 typedef struct GTY(()) initial_value_struct {
1218 int num_entries;
1219 int max_entries;
1220 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1221 } initial_value_struct;
1222
1223 /* If a pseudo represents an initial hard reg (or expression), return
1224 it, else return NULL_RTX. */
1225
1226 rtx
1227 get_hard_reg_initial_reg (rtx reg)
1228 {
1229 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1230 int i;
1231
1232 if (ivs == 0)
1233 return NULL_RTX;
1234
1235 for (i = 0; i < ivs->num_entries; i++)
1236 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1237 return ivs->entries[i].hard_reg;
1238
1239 return NULL_RTX;
1240 }
1241
1242 /* Make sure that there's a pseudo register of mode MODE that stores the
1243 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1244
1245 rtx
1246 get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
1247 {
1248 struct initial_value_struct *ivs;
1249 rtx rv;
1250
1251 rv = has_hard_reg_initial_val (mode, regno);
1252 if (rv)
1253 return rv;
1254
1255 ivs = crtl->hard_reg_initial_vals;
1256 if (ivs == 0)
1257 {
1258 ivs = ggc_alloc<initial_value_struct> ();
1259 ivs->num_entries = 0;
1260 ivs->max_entries = 5;
1261 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1262 crtl->hard_reg_initial_vals = ivs;
1263 }
1264
1265 if (ivs->num_entries >= ivs->max_entries)
1266 {
1267 ivs->max_entries += 5;
1268 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1269 ivs->max_entries);
1270 }
1271
1272 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1273 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1274
1275 return ivs->entries[ivs->num_entries++].pseudo;
1276 }
1277
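/* Usage sketch (editorial illustration; LR_REGNUM is a hypothetical hard
   register number): a back end that wants the value the link register had on
   entry to the function can request

     rtx entry_lr = get_hard_reg_initial_val (Pmode, LR_REGNUM);

   The copy from the hard register into the returned pseudo is emitted at the
   function entry later, by emit_initial_value_sets.  */
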
1278 /* See if get_hard_reg_initial_val has been used to create a pseudo
1279 for the initial value of hard register REGNO in mode MODE. Return
1280 the associated pseudo if so, otherwise return NULL. */
1281
1282 rtx
1283 has_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
1284 {
1285 struct initial_value_struct *ivs;
1286 int i;
1287
1288 ivs = crtl->hard_reg_initial_vals;
1289 if (ivs != 0)
1290 for (i = 0; i < ivs->num_entries; i++)
1291 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1292 && REGNO (ivs->entries[i].hard_reg) == regno)
1293 return ivs->entries[i].pseudo;
1294
1295 return NULL_RTX;
1296 }
1297
1298 unsigned int
1299 emit_initial_value_sets (void)
1300 {
1301 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1302 int i;
1303 rtx_insn *seq;
1304
1305 if (ivs == 0)
1306 return 0;
1307
1308 start_sequence ();
1309 for (i = 0; i < ivs->num_entries; i++)
1310 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1311 seq = get_insns ();
1312 end_sequence ();
1313
1314 emit_insn_at_entry (seq);
1315 return 0;
1316 }
1317
1318 /* Store in *HREG and *PREG the hard-reg/pseudo-reg initial values pair at
1319    entry I, and return TRUE if I is a valid entry, FALSE otherwise. */
1320 bool
1321 initial_value_entry (int i, rtx *hreg, rtx *preg)
1322 {
1323 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1324 if (!ivs || i >= ivs->num_entries)
1325 return false;
1326
1327 *hreg = ivs->entries[i].hard_reg;
1328 *preg = ivs->entries[i].pseudo;
1329 return true;
1330 }
1331 \f
1332 /* These routines are responsible for converting virtual register references
1333 to the actual hard register references once RTL generation is complete.
1334
1335 The following four variables are used for communication between the
1336 routines. They contain the offsets of the virtual registers from their
1337 respective hard registers. */
1338
1339 static int in_arg_offset;
1340 static int var_offset;
1341 static int dynamic_offset;
1342 static int out_arg_offset;
1343 static int cfa_offset;
1344
1345 /* On most machines, the stack pointer register is equivalent to the bottom
1346 of the stack. */
1347
1348 #ifndef STACK_POINTER_OFFSET
1349 #define STACK_POINTER_OFFSET 0
1350 #endif
1351
1352 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1353 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1354 #endif
1355
1356 /* If not defined, pick an appropriate default for the offset of dynamically
1357 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1358 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1359
1360 #ifndef STACK_DYNAMIC_OFFSET
1361
1362 /* The bottom of the stack points to the actual arguments. If
1363 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1364    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1365 stack space for register parameters is not pushed by the caller, but
1366 rather part of the fixed stack areas and hence not included in
1367 `crtl->outgoing_args_size'. Nevertheless, we must allow
1368 for it when allocating stack dynamic objects. */
1369
1370 #ifdef INCOMING_REG_PARM_STACK_SPACE
1371 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1372 ((ACCUMULATE_OUTGOING_ARGS \
1373 ? (crtl->outgoing_args_size \
1374 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1375 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1376 : 0) + (STACK_POINTER_OFFSET))
1377 #else
1378 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1379 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1380 + (STACK_POINTER_OFFSET))
1381 #endif
1382 #endif
1383
1384 \f
1385 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1386 is a virtual register, return the equivalent hard register and set the
1387 offset indirectly through the pointer. Otherwise, return 0. */
1388
1389 static rtx
1390 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1391 {
1392 rtx new_rtx;
1393 HOST_WIDE_INT offset;
1394
1395 if (x == virtual_incoming_args_rtx)
1396 {
1397 if (stack_realign_drap)
1398 {
1399 /* Replace virtual_incoming_args_rtx with internal arg
1400 pointer if DRAP is used to realign stack. */
1401 new_rtx = crtl->args.internal_arg_pointer;
1402 offset = 0;
1403 }
1404 else
1405 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1406 }
1407 else if (x == virtual_stack_vars_rtx)
1408 new_rtx = frame_pointer_rtx, offset = var_offset;
1409 else if (x == virtual_stack_dynamic_rtx)
1410 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1411 else if (x == virtual_outgoing_args_rtx)
1412 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1413 else if (x == virtual_cfa_rtx)
1414 {
1415 #ifdef FRAME_POINTER_CFA_OFFSET
1416 new_rtx = frame_pointer_rtx;
1417 #else
1418 new_rtx = arg_pointer_rtx;
1419 #endif
1420 offset = cfa_offset;
1421 }
1422 else if (x == virtual_preferred_stack_boundary_rtx)
1423 {
1424 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1425 offset = 0;
1426 }
1427 else
1428 return NULL_RTX;
1429
1430 *poffset = offset;
1431 return new_rtx;
1432 }
1433
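/* Editorial illustration: once the offsets below are filled in by
   instantiate_virtual_regs, a use such as

     (plus (virtual_stack_vars_rtx) (const_int 8))

   is rewritten into

     (plus (frame_pointer_rtx) (const_int (8 + var_offset)))

   where var_offset is STARTING_FRAME_OFFSET for the target.  */
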
1434 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1435 Instantiate any virtual registers present inside of *LOC. The expression
1436 is simplified, as much as possible, but is not to be considered "valid"
1437 in any sense implied by the target. If any change is made, set CHANGED
1438 to true. */
1439
1440 static int
1441 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1442 {
1443 HOST_WIDE_INT offset;
1444 bool *changed = (bool *) data;
1445 rtx x, new_rtx;
1446
1447 x = *loc;
1448 if (x == 0)
1449 return 0;
1450
1451 switch (GET_CODE (x))
1452 {
1453 case REG:
1454 new_rtx = instantiate_new_reg (x, &offset);
1455 if (new_rtx)
1456 {
1457 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1458 if (changed)
1459 *changed = true;
1460 }
1461 return -1;
1462
1463 case PLUS:
1464 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1465 if (new_rtx)
1466 {
1467 XEXP (x, 0) = new_rtx;
1468 *loc = plus_constant (GET_MODE (x), x, offset, true);
1469 if (changed)
1470 *changed = true;
1471 return -1;
1472 }
1473
1474 /* FIXME -- from old code */
1475 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1476 we can commute the PLUS and SUBREG because pointers into the
1477 frame are well-behaved. */
1478 break;
1479
1480 default:
1481 break;
1482 }
1483
1484 return 0;
1485 }
1486
1487 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1488 matches the predicate for insn CODE operand OPERAND. */
1489
1490 static int
1491 safe_insn_predicate (int code, int operand, rtx x)
1492 {
1493 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1494 }
1495
1496 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1497    registers present inside of INSN.  The result will be a valid insn. */
1498
1499 static void
1500 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1501 {
1502 HOST_WIDE_INT offset;
1503 int insn_code, i;
1504 bool any_change = false;
1505 rtx set, new_rtx, x;
1506 rtx_insn *seq;
1507
1508 /* There are some special cases to be handled first. */
1509 set = single_set (insn);
1510 if (set)
1511 {
1512 /* We're allowed to assign to a virtual register. This is interpreted
1513 to mean that the underlying register gets assigned the inverse
1514 transformation. This is used, for example, in the handling of
1515 non-local gotos. */
1516 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1517 if (new_rtx)
1518 {
1519 start_sequence ();
1520
1521 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1522 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1523 gen_int_mode (-offset, GET_MODE (new_rtx)));
1524 x = force_operand (x, new_rtx);
1525 if (x != new_rtx)
1526 emit_move_insn (new_rtx, x);
1527
1528 seq = get_insns ();
1529 end_sequence ();
1530
1531 emit_insn_before (seq, insn);
1532 delete_insn (insn);
1533 return;
1534 }
1535
1536 /* Handle a straight copy from a virtual register by generating a
1537 new add insn. The difference between this and falling through
1538 to the generic case is avoiding a new pseudo and eliminating a
1539 move insn in the initial rtl stream. */
1540 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1541 if (new_rtx && offset != 0
1542 && REG_P (SET_DEST (set))
1543 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1544 {
1545 start_sequence ();
1546
1547 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1548 gen_int_mode (offset,
1549 GET_MODE (SET_DEST (set))),
1550 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1551 if (x != SET_DEST (set))
1552 emit_move_insn (SET_DEST (set), x);
1553
1554 seq = get_insns ();
1555 end_sequence ();
1556
1557 emit_insn_before (seq, insn);
1558 delete_insn (insn);
1559 return;
1560 }
1561
1562 extract_insn (insn);
1563 insn_code = INSN_CODE (insn);
1564
1565 /* Handle a plus involving a virtual register by determining if the
1566 operands remain valid if they're modified in place. */
1567 if (GET_CODE (SET_SRC (set)) == PLUS
1568 && recog_data.n_operands >= 3
1569 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1570 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1571 && CONST_INT_P (recog_data.operand[2])
1572 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1573 {
1574 offset += INTVAL (recog_data.operand[2]);
1575
1576 /* If the sum is zero, then replace with a plain move. */
1577 if (offset == 0
1578 && REG_P (SET_DEST (set))
1579 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1580 {
1581 start_sequence ();
1582 emit_move_insn (SET_DEST (set), new_rtx);
1583 seq = get_insns ();
1584 end_sequence ();
1585
1586 emit_insn_before (seq, insn);
1587 delete_insn (insn);
1588 return;
1589 }
1590
1591 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1592
1593 /* Using validate_change and apply_change_group here leaves
1594 recog_data in an invalid state. Since we know exactly what
1595 we want to check, do those two by hand. */
1596 if (safe_insn_predicate (insn_code, 1, new_rtx)
1597 && safe_insn_predicate (insn_code, 2, x))
1598 {
1599 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1600 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1601 any_change = true;
1602
1603 /* Fall through into the regular operand fixup loop in
1604 order to take care of operands other than 1 and 2. */
1605 }
1606 }
1607 }
1608 else
1609 {
1610 extract_insn (insn);
1611 insn_code = INSN_CODE (insn);
1612 }
1613
1614 /* In the general case, we expect virtual registers to appear only in
1615 operands, and then only as either bare registers or inside memories. */
1616 for (i = 0; i < recog_data.n_operands; ++i)
1617 {
1618 x = recog_data.operand[i];
1619 switch (GET_CODE (x))
1620 {
1621 case MEM:
1622 {
1623 rtx addr = XEXP (x, 0);
1624 bool changed = false;
1625
1626 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1627 if (!changed)
1628 continue;
1629
1630 start_sequence ();
1631 x = replace_equiv_address (x, addr, true);
1632 /* It may happen that the address with the virtual reg
1633 was valid (e.g. based on the virtual stack reg, which might
1634 be acceptable to the predicates with all offsets), whereas
1635 the address now isn't anymore, for instance when the address
1636 is still offsetted, but the base reg isn't virtual-stack-reg
1637 anymore. Below we would do a force_reg on the whole operand,
1638 but this insn might actually only accept memory. Hence,
1639 before doing that last resort, try to reload the address into
1640 a register, so this operand stays a MEM. */
1641 if (!safe_insn_predicate (insn_code, i, x))
1642 {
1643 addr = force_reg (GET_MODE (addr), addr);
1644 x = replace_equiv_address (x, addr, true);
1645 }
1646 seq = get_insns ();
1647 end_sequence ();
1648 if (seq)
1649 emit_insn_before (seq, insn);
1650 }
1651 break;
1652
1653 case REG:
1654 new_rtx = instantiate_new_reg (x, &offset);
1655 if (new_rtx == NULL)
1656 continue;
1657 if (offset == 0)
1658 x = new_rtx;
1659 else
1660 {
1661 start_sequence ();
1662
1663 /* Careful, special mode predicates may have stuff in
1664 insn_data[insn_code].operand[i].mode that isn't useful
1665 to us for computing a new value. */
1666 /* ??? Recognize address_operand and/or "p" constraints
1667 	       to see if (plus new offset) is valid before we put
1668 this through expand_simple_binop. */
1669 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1670 gen_int_mode (offset, GET_MODE (x)),
1671 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1672 seq = get_insns ();
1673 end_sequence ();
1674 emit_insn_before (seq, insn);
1675 }
1676 break;
1677
1678 case SUBREG:
1679 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1680 if (new_rtx == NULL)
1681 continue;
1682 if (offset != 0)
1683 {
1684 start_sequence ();
1685 new_rtx = expand_simple_binop
1686 (GET_MODE (new_rtx), PLUS, new_rtx,
1687 gen_int_mode (offset, GET_MODE (new_rtx)),
1688 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1689 seq = get_insns ();
1690 end_sequence ();
1691 emit_insn_before (seq, insn);
1692 }
1693 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1694 GET_MODE (new_rtx), SUBREG_BYTE (x));
1695 gcc_assert (x);
1696 break;
1697
1698 default:
1699 continue;
1700 }
1701
1702 /* At this point, X contains the new value for the operand.
1703 Validate the new value vs the insn predicate. Note that
1704 asm insns will have insn_code -1 here. */
1705 if (!safe_insn_predicate (insn_code, i, x))
1706 {
1707 start_sequence ();
1708 if (REG_P (x))
1709 {
1710 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1711 x = copy_to_reg (x);
1712 }
1713 else
1714 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1715 seq = get_insns ();
1716 end_sequence ();
1717 if (seq)
1718 emit_insn_before (seq, insn);
1719 }
1720
1721 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1722 any_change = true;
1723 }
1724
1725 if (any_change)
1726 {
1727 /* Propagate operand changes into the duplicates. */
1728 for (i = 0; i < recog_data.n_dups; ++i)
1729 *recog_data.dup_loc[i]
1730 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1731
1732 /* Force re-recognition of the instruction for validation. */
1733 INSN_CODE (insn) = -1;
1734 }
1735
1736 if (asm_noperands (PATTERN (insn)) >= 0)
1737 {
1738 if (!check_asm_operands (PATTERN (insn)))
1739 {
1740 error_for_asm (insn, "impossible constraint in %<asm%>");
1741 /* For asm goto, instead of fixing up all the edges
1742 just clear the template and clear input operands
1743 (asm goto doesn't have any output operands). */
1744 if (JUMP_P (insn))
1745 {
1746 rtx asm_op = extract_asm_operands (PATTERN (insn));
1747 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1748 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1749 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1750 }
1751 else
1752 delete_insn (insn);
1753 }
1754 }
1755 else
1756 {
1757 if (recog_memoized (insn) < 0)
1758 fatal_insn_not_found (insn);
1759 }
1760 }
1761
1762 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1763 do any instantiation required. */
1764
1765 void
1766 instantiate_decl_rtl (rtx x)
1767 {
1768 rtx addr;
1769
1770 if (x == 0)
1771 return;
1772
1773 /* If this is a CONCAT, recurse for the pieces. */
1774 if (GET_CODE (x) == CONCAT)
1775 {
1776 instantiate_decl_rtl (XEXP (x, 0));
1777 instantiate_decl_rtl (XEXP (x, 1));
1778 return;
1779 }
1780
1781 /* If this is not a MEM, no need to do anything. Similarly if the
1782 address is a constant or a register that is not a virtual register. */
1783 if (!MEM_P (x))
1784 return;
1785
1786 addr = XEXP (x, 0);
1787 if (CONSTANT_P (addr)
1788 || (REG_P (addr)
1789 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1790 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1791 return;
1792
1793 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1794 }
1795
1796 /* Helper for instantiate_decls called via walk_tree: Process all decls
1797 in the given DECL_VALUE_EXPR. */
1798
1799 static tree
1800 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1801 {
1802 tree t = *tp;
1803 if (! EXPR_P (t))
1804 {
1805 *walk_subtrees = 0;
1806 if (DECL_P (t))
1807 {
1808 if (DECL_RTL_SET_P (t))
1809 instantiate_decl_rtl (DECL_RTL (t));
1810 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1811 && DECL_INCOMING_RTL (t))
1812 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1813 if ((TREE_CODE (t) == VAR_DECL
1814 || TREE_CODE (t) == RESULT_DECL)
1815 && DECL_HAS_VALUE_EXPR_P (t))
1816 {
1817 tree v = DECL_VALUE_EXPR (t);
1818 walk_tree (&v, instantiate_expr, NULL, NULL);
1819 }
1820 }
1821 }
1822 return NULL;
1823 }
1824
1825 /* Subroutine of instantiate_decls: Process all decls in the given
1826 BLOCK node and all its subblocks. */
1827
1828 static void
1829 instantiate_decls_1 (tree let)
1830 {
1831 tree t;
1832
1833 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1834 {
1835 if (DECL_RTL_SET_P (t))
1836 instantiate_decl_rtl (DECL_RTL (t));
1837 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1838 {
1839 tree v = DECL_VALUE_EXPR (t);
1840 walk_tree (&v, instantiate_expr, NULL, NULL);
1841 }
1842 }
1843
1844 /* Process all subblocks. */
1845 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1846 instantiate_decls_1 (t);
1847 }
1848
1849 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1850 all virtual registers in their DECL_RTL's. */
1851
1852 static void
1853 instantiate_decls (tree fndecl)
1854 {
1855 tree decl;
1856 unsigned ix;
1857
1858 /* Process all parameters of the function. */
1859 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1860 {
1861 instantiate_decl_rtl (DECL_RTL (decl));
1862 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1863 if (DECL_HAS_VALUE_EXPR_P (decl))
1864 {
1865 tree v = DECL_VALUE_EXPR (decl);
1866 walk_tree (&v, instantiate_expr, NULL, NULL);
1867 }
1868 }
1869
1870 if ((decl = DECL_RESULT (fndecl))
1871 && TREE_CODE (decl) == RESULT_DECL)
1872 {
1873 if (DECL_RTL_SET_P (decl))
1874 instantiate_decl_rtl (DECL_RTL (decl));
1875 if (DECL_HAS_VALUE_EXPR_P (decl))
1876 {
1877 tree v = DECL_VALUE_EXPR (decl);
1878 walk_tree (&v, instantiate_expr, NULL, NULL);
1879 }
1880 }
1881
1882 /* Process the saved static chain if it exists. */
1883 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1884 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1885 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1886
1887 /* Now process all variables defined in the function or its subblocks. */
1888 instantiate_decls_1 (DECL_INITIAL (fndecl));
1889
1890 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1891 if (DECL_RTL_SET_P (decl))
1892 instantiate_decl_rtl (DECL_RTL (decl));
1893 vec_free (cfun->local_decls);
1894 }
1895
1896 /* Pass through the insns of the current function and convert virtual
1897 register references to hard register references. */
1898
1899 static unsigned int
1900 instantiate_virtual_regs (void)
1901 {
1902 rtx_insn *insn;
1903
1904 /* Compute the offsets to use for this function. */
1905 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1906 var_offset = STARTING_FRAME_OFFSET;
1907 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1908 out_arg_offset = STACK_POINTER_OFFSET;
1909 #ifdef FRAME_POINTER_CFA_OFFSET
1910 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1911 #else
1912 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1913 #endif
1914
1915 /* Initialize recognition, indicating that volatile is OK. */
1916 init_recog ();
1917
1918 /* Scan through all the insns, instantiating every virtual register still
1919 present. */
1920 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1921 if (INSN_P (insn))
1922 {
1923 /* These patterns in the instruction stream can never be recognized.
1924 Fortunately, they shouldn't contain virtual registers either. */
1925 if (GET_CODE (PATTERN (insn)) == USE
1926 || GET_CODE (PATTERN (insn)) == CLOBBER
1927 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1928 continue;
1929 else if (DEBUG_INSN_P (insn))
1930 for_each_rtx (&INSN_VAR_LOCATION (insn),
1931 instantiate_virtual_regs_in_rtx, NULL);
1932 else
1933 instantiate_virtual_regs_in_insn (insn);
1934
1935 if (INSN_DELETED_P (insn))
1936 continue;
1937
1938 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1939
1940 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1941 if (CALL_P (insn))
1942 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1943 instantiate_virtual_regs_in_rtx, NULL);
1944 }
1945
1946 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1947 instantiate_decls (current_function_decl);
1948
1949 targetm.instantiate_decls ();
1950
1951 /* Indicate that, from now on, assign_stack_local should use
1952 frame_pointer_rtx. */
1953 virtuals_instantiated = 1;
1954
1955 return 0;
1956 }
1957
1958 namespace {
1959
1960 const pass_data pass_data_instantiate_virtual_regs =
1961 {
1962 RTL_PASS, /* type */
1963 "vregs", /* name */
1964 OPTGROUP_NONE, /* optinfo_flags */
1965 TV_NONE, /* tv_id */
1966 0, /* properties_required */
1967 0, /* properties_provided */
1968 0, /* properties_destroyed */
1969 0, /* todo_flags_start */
1970 0, /* todo_flags_finish */
1971 };
1972
1973 class pass_instantiate_virtual_regs : public rtl_opt_pass
1974 {
1975 public:
1976 pass_instantiate_virtual_regs (gcc::context *ctxt)
1977 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
1978 {}
1979
1980 /* opt_pass methods: */
1981 virtual unsigned int execute (function *)
1982 {
1983 return instantiate_virtual_regs ();
1984 }
1985
1986 }; // class pass_instantiate_virtual_regs
1987
1988 } // anon namespace
1989
1990 rtl_opt_pass *
1991 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
1992 {
1993 return new pass_instantiate_virtual_regs (ctxt);
1994 }
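/* The factory above is how the pass manager obtains an instance of this
   pass; the pass is normally placed in the pipeline by an entry in
   passes.def (e.g. NEXT_PASS (pass_instantiate_virtual_regs)).  */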
1995
1996 \f
1997 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1998 This means a type for which function calls must pass an address to the
1999 function or get an address back from the function.
2000 EXP may be a type node or an expression (whose type is tested). */
2001
2002 int
2003 aggregate_value_p (const_tree exp, const_tree fntype)
2004 {
2005 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2006 int i, regno, nregs;
2007 rtx reg;
2008
2009 if (fntype)
2010 switch (TREE_CODE (fntype))
2011 {
2012 case CALL_EXPR:
2013 {
2014 tree fndecl = get_callee_fndecl (fntype);
2015 fntype = (fndecl
2016 ? TREE_TYPE (fndecl)
2017 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
2018 }
2019 break;
2020 case FUNCTION_DECL:
2021 fntype = TREE_TYPE (fntype);
2022 break;
2023 case FUNCTION_TYPE:
2024 case METHOD_TYPE:
2025 break;
2026 case IDENTIFIER_NODE:
2027 fntype = NULL_TREE;
2028 break;
2029 default:
2030 /* We don't expect other tree types here. */
2031 gcc_unreachable ();
2032 }
2033
2034 if (VOID_TYPE_P (type))
2035 return 0;
2036
2037 /* If a record should be passed the same as its first (and only) member,
2038 don't pass it as an aggregate. */
2039 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2040 return aggregate_value_p (first_field (type), fntype);
2041
2042 /* If the front end has decided that this needs to be passed by
2043 reference, do so. */
2044 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2045 && DECL_BY_REFERENCE (exp))
2046 return 1;
2047
2048 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2049 if (fntype && TREE_ADDRESSABLE (fntype))
2050 return 1;
2051
2052 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2053 and thus can't be returned in registers. */
2054 if (TREE_ADDRESSABLE (type))
2055 return 1;
2056
2057 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2058 return 1;
2059
2060 if (targetm.calls.return_in_memory (type, fntype))
2061 return 1;
2062
2063 /* Make sure we have suitable call-clobbered regs to return
2064 the value in; if not, we must return it in memory. */
2065 reg = hard_function_value (type, 0, fntype, 0);
2066
2067 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2068 it is OK. */
2069 if (!REG_P (reg))
2070 return 0;
2071
2072 regno = REGNO (reg);
2073 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2074 for (i = 0; i < nregs; i++)
2075 if (! call_used_regs[regno + i])
2076 return 1;
2077
2078 return 0;
2079 }
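/* A minimal usage sketch, assuming FNDECL is a FUNCTION_DECL whose result
   type has already been laid out; returns_in_memory_p is a hypothetical
   helper name, not part of GCC.  */
#if 0
static bool
returns_in_memory_p (tree fndecl)
{
  /* Ask whether the result must be returned via a hidden pointer, the
     same query assign_parms_augmented_arg_list makes below.  */
  return aggregate_value_p (DECL_RESULT (fndecl), fndecl) != 0;
}
#endif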
2080 \f
2081 /* Return true if we should assign DECL a pseudo register; false if it
2082 should live on the local stack. */
2083
2084 bool
2085 use_register_for_decl (const_tree decl)
2086 {
2087 if (!targetm.calls.allocate_stack_slots_for_args ())
2088 return true;
2089
2090 /* Honor volatile. */
2091 if (TREE_SIDE_EFFECTS (decl))
2092 return false;
2093
2094 /* Honor addressability. */
2095 if (TREE_ADDRESSABLE (decl))
2096 return false;
2097
2098 /* Only register-like things go in registers. */
2099 if (DECL_MODE (decl) == BLKmode)
2100 return false;
2101
2102 /* If -ffloat-store is specified, don't put explicit float variables
2103 into registers. */
2104 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2105 propagates values across these stores, and it probably shouldn't. */
2106 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2107 return false;
2108
2109 /* If we're not interested in tracking debugging information for
2110 this decl, then we can certainly put it in a register. */
2111 if (DECL_IGNORED_P (decl))
2112 return true;
2113
2114 if (optimize)
2115 return true;
2116
2117 if (!DECL_REGISTER (decl))
2118 return false;
2119
2120 switch (TREE_CODE (TREE_TYPE (decl)))
2121 {
2122 case RECORD_TYPE:
2123 case UNION_TYPE:
2124 case QUAL_UNION_TYPE:
2125 /* When not optimizing, disregard register keyword for variables with
2126 types containing methods, otherwise the methods won't be callable
2127 from the debugger. */
2128 if (TYPE_METHODS (TREE_TYPE (decl)))
2129 return false;
2130 break;
2131 default:
2132 break;
2133 }
2134
2135 return true;
2136 }
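/* For example, at -O0 a variable declared "register int i;" still receives
   a pseudo register here (DECL_REGISTER is honored), whereas any variable
   whose address is taken is forced to the stack by the TREE_ADDRESSABLE
   check above, regardless of optimization level.  */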
2137
2138 /* Return true if TYPE should be passed by invisible reference. */
2139
2140 bool
2141 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2142 tree type, bool named_arg)
2143 {
2144 if (type)
2145 {
2146 /* If this type contains non-trivial constructors, then it is
2147 forbidden for the middle-end to create any new copies. */
2148 if (TREE_ADDRESSABLE (type))
2149 return true;
2150
2151 /* GCC post 3.4 passes *all* variable sized types by reference. */
2152 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2153 return true;
2154
2155 /* If a record type should be passed the same as its first (and only)
2156 member, use the type and mode of that member. */
2157 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2158 {
2159 type = TREE_TYPE (first_field (type));
2160 mode = TYPE_MODE (type);
2161 }
2162 }
2163
2164 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2165 type, named_arg);
2166 }
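/* For example, a C++ class type with a non-trivial copy constructor or
   destructor is marked TREE_ADDRESSABLE by the front end, so it is always
   passed by invisible reference here, independently of the target hook.  */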
2167
2168 /* Return true if TYPE, which is passed by reference, should be callee
2169 copied instead of caller copied. */
2170
2171 bool
2172 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2173 tree type, bool named_arg)
2174 {
2175 if (type && TREE_ADDRESSABLE (type))
2176 return false;
2177 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2178 named_arg);
2179 }
2180
2181 /* Structures to communicate between the subroutines of assign_parms.
2182 The first holds data persistent across all parameters, the second
2183 is cleared out for each parameter. */
2184
2185 struct assign_parm_data_all
2186 {
2187 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2188 should become a job of the target or otherwise encapsulated. */
2189 CUMULATIVE_ARGS args_so_far_v;
2190 cumulative_args_t args_so_far;
2191 struct args_size stack_args_size;
2192 tree function_result_decl;
2193 tree orig_fnargs;
2194 rtx_insn *first_conversion_insn;
2195 rtx_insn *last_conversion_insn;
2196 HOST_WIDE_INT pretend_args_size;
2197 HOST_WIDE_INT extra_pretend_bytes;
2198 int reg_parm_stack_space;
2199 };
2200
2201 struct assign_parm_data_one
2202 {
2203 tree nominal_type;
2204 tree passed_type;
2205 rtx entry_parm;
2206 rtx stack_parm;
2207 enum machine_mode nominal_mode;
2208 enum machine_mode passed_mode;
2209 enum machine_mode promoted_mode;
2210 struct locate_and_pad_arg_data locate;
2211 int partial;
2212 BOOL_BITFIELD named_arg : 1;
2213 BOOL_BITFIELD passed_pointer : 1;
2214 BOOL_BITFIELD on_stack : 1;
2215 BOOL_BITFIELD loaded_in_reg : 1;
2216 };
2217
2218 /* A subroutine of assign_parms. Initialize ALL. */
2219
2220 static void
2221 assign_parms_initialize_all (struct assign_parm_data_all *all)
2222 {
2223 tree fntype ATTRIBUTE_UNUSED;
2224
2225 memset (all, 0, sizeof (*all));
2226
2227 fntype = TREE_TYPE (current_function_decl);
2228
2229 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2230 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2231 #else
2232 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2233 current_function_decl, -1);
2234 #endif
2235 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2236
2237 #ifdef INCOMING_REG_PARM_STACK_SPACE
2238 all->reg_parm_stack_space
2239 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2240 #endif
2241 }
2242
2243 /* If ARGS contains entries with complex types, split each such entry into
2244 two entries of the component type. The vector is modified in place;
2245 no new list is returned. */
2246
2247 static void
2248 split_complex_args (vec<tree> *args)
2249 {
2250 unsigned i;
2251 tree p;
2252
2253 FOR_EACH_VEC_ELT (*args, i, p)
2254 {
2255 tree type = TREE_TYPE (p);
2256 if (TREE_CODE (type) == COMPLEX_TYPE
2257 && targetm.calls.split_complex_arg (type))
2258 {
2259 tree decl;
2260 tree subtype = TREE_TYPE (type);
2261 bool addressable = TREE_ADDRESSABLE (p);
2262
2263 /* Rewrite the PARM_DECL's type with its component. */
2264 p = copy_node (p);
2265 TREE_TYPE (p) = subtype;
2266 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2267 DECL_MODE (p) = VOIDmode;
2268 DECL_SIZE (p) = NULL;
2269 DECL_SIZE_UNIT (p) = NULL;
2270 /* If this arg must go in memory, put it in a pseudo here.
2271 We can't allow it to go in memory as per normal parms,
2272 because the usual place might not have the imag part
2273 adjacent to the real part. */
2274 DECL_ARTIFICIAL (p) = addressable;
2275 DECL_IGNORED_P (p) = addressable;
2276 TREE_ADDRESSABLE (p) = 0;
2277 layout_decl (p, 0);
2278 (*args)[i] = p;
2279
2280 /* Build a second synthetic decl. */
2281 decl = build_decl (EXPR_LOCATION (p),
2282 PARM_DECL, NULL_TREE, subtype);
2283 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2284 DECL_ARTIFICIAL (decl) = addressable;
2285 DECL_IGNORED_P (decl) = addressable;
2286 layout_decl (decl, 0);
2287 args->safe_insert (++i, decl);
2288 }
2289 }
2290 }
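/* For example, on a target whose split_complex_arg hook accepts it, a
   parameter of type _Complex double is rewritten into two consecutive
   PARM_DECLs of type double: the original entry is replaced by a copy
   holding the real part, and a synthetic, nameless decl for the imaginary
   part is inserted immediately after it.  */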
2291
2292 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2293 the hidden struct return argument, and (abi willing) complex args.
2294 Return the new parameter list. */
2295
2296 static vec<tree>
2297 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2298 {
2299 tree fndecl = current_function_decl;
2300 tree fntype = TREE_TYPE (fndecl);
2301 vec<tree> fnargs = vNULL;
2302 tree arg;
2303
2304 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2305 fnargs.safe_push (arg);
2306
2307 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2308
2309 /* If struct value address is treated as the first argument, make it so. */
2310 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2311 && ! cfun->returns_pcc_struct
2312 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2313 {
2314 tree type = build_pointer_type (TREE_TYPE (fntype));
2315 tree decl;
2316
2317 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2318 PARM_DECL, get_identifier (".result_ptr"), type);
2319 DECL_ARG_TYPE (decl) = type;
2320 DECL_ARTIFICIAL (decl) = 1;
2321 DECL_NAMELESS (decl) = 1;
2322 TREE_CONSTANT (decl) = 1;
2323
2324 DECL_CHAIN (decl) = all->orig_fnargs;
2325 all->orig_fnargs = decl;
2326 fnargs.safe_insert (0, decl);
2327
2328 all->function_result_decl = decl;
2329 }
2330
2331 /* If the target wants to split complex arguments into scalars, do so. */
2332 if (targetm.calls.split_complex_arg)
2333 split_complex_args (&fnargs);
2334
2335 return fnargs;
2336 }
2337
2338 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2339 data for the parameter. Incorporate ABI specifics such as pass-by-
2340 reference and type promotion. */
2341
2342 static void
2343 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2344 struct assign_parm_data_one *data)
2345 {
2346 tree nominal_type, passed_type;
2347 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2348 int unsignedp;
2349
2350 memset (data, 0, sizeof (*data));
2351
2352 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2353 if (!cfun->stdarg)
2354 data->named_arg = 1; /* No variadic parms. */
2355 else if (DECL_CHAIN (parm))
2356 data->named_arg = 1; /* Not the last non-variadic parm. */
2357 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2358 data->named_arg = 1; /* Only variadic ones are unnamed. */
2359 else
2360 data->named_arg = 0; /* Treat as variadic. */
2361
2362 nominal_type = TREE_TYPE (parm);
2363 passed_type = DECL_ARG_TYPE (parm);
2364
2365 /* Look out for errors propagating this far. Also, if the parameter's
2366 type is void then its value doesn't matter. */
2367 if (TREE_TYPE (parm) == error_mark_node
2368 /* This can happen after weird syntax errors
2369 or if an enum type is defined among the parms. */
2370 || TREE_CODE (parm) != PARM_DECL
2371 || passed_type == NULL
2372 || VOID_TYPE_P (nominal_type))
2373 {
2374 nominal_type = passed_type = void_type_node;
2375 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2376 goto egress;
2377 }
2378
2379 /* Find mode of arg as it is passed, and mode of arg as it should be
2380 during execution of this function. */
2381 passed_mode = TYPE_MODE (passed_type);
2382 nominal_mode = TYPE_MODE (nominal_type);
2383
2384 /* If the parm is to be passed as a transparent union or record, use the
2385 type of the first field for the tests below. We have already verified
2386 that the modes are the same. */
2387 if ((TREE_CODE (passed_type) == UNION_TYPE
2388 || TREE_CODE (passed_type) == RECORD_TYPE)
2389 && TYPE_TRANSPARENT_AGGR (passed_type))
2390 passed_type = TREE_TYPE (first_field (passed_type));
2391
2392 /* See if this arg was passed by invisible reference. */
2393 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2394 passed_type, data->named_arg))
2395 {
2396 passed_type = nominal_type = build_pointer_type (passed_type);
2397 data->passed_pointer = true;
2398 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2399 }
2400
2401 /* Find mode as it is passed by the ABI. */
2402 unsignedp = TYPE_UNSIGNED (passed_type);
2403 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2404 TREE_TYPE (current_function_decl), 0);
2405
2406 egress:
2407 data->nominal_type = nominal_type;
2408 data->passed_type = passed_type;
2409 data->nominal_mode = nominal_mode;
2410 data->passed_mode = passed_mode;
2411 data->promoted_mode = promoted_mode;
2412 }
2413
2414 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2415
2416 static void
2417 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2418 struct assign_parm_data_one *data, bool no_rtl)
2419 {
2420 int varargs_pretend_bytes = 0;
2421
2422 targetm.calls.setup_incoming_varargs (all->args_so_far,
2423 data->promoted_mode,
2424 data->passed_type,
2425 &varargs_pretend_bytes, no_rtl);
2426
2427 /* If the back-end has requested extra stack space, record how much is
2428 needed. Do not change pretend_args_size otherwise since it may be
2429 nonzero from an earlier partial argument. */
2430 if (varargs_pretend_bytes > 0)
2431 all->pretend_args_size = varargs_pretend_bytes;
2432 }
2433
2434 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2435 the incoming location of the current parameter. */
2436
2437 static void
2438 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2439 struct assign_parm_data_one *data)
2440 {
2441 HOST_WIDE_INT pretend_bytes = 0;
2442 rtx entry_parm;
2443 bool in_regs;
2444
2445 if (data->promoted_mode == VOIDmode)
2446 {
2447 data->entry_parm = data->stack_parm = const0_rtx;
2448 return;
2449 }
2450
2451 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2452 data->promoted_mode,
2453 data->passed_type,
2454 data->named_arg);
2455
2456 if (entry_parm == 0)
2457 data->promoted_mode = data->passed_mode;
2458
2459 /* Determine parm's home in the stack, in case it arrives in the stack
2460 or we should pretend it did. Compute the stack position and rtx where
2461 the argument arrives and its size.
2462
2463 There is one complexity here: If this was a parameter that would
2464 have been passed in registers, but wasn't only because it is
2465 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2466 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2467 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2468 as it was the previous time. */
2469 in_regs = entry_parm != 0;
2470 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2471 in_regs = true;
2472 #endif
2473 if (!in_regs && !data->named_arg)
2474 {
2475 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2476 {
2477 rtx tem;
2478 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2479 data->promoted_mode,
2480 data->passed_type, true);
2481 in_regs = tem != NULL;
2482 }
2483 }
2484
2485 /* If this parameter was passed both in registers and in the stack, use
2486 the copy on the stack. */
2487 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2488 data->passed_type))
2489 entry_parm = 0;
2490
2491 if (entry_parm)
2492 {
2493 int partial;
2494
2495 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2496 data->promoted_mode,
2497 data->passed_type,
2498 data->named_arg);
2499 data->partial = partial;
2500
2501 /* The caller might already have allocated stack space for the
2502 register parameters. */
2503 if (partial != 0 && all->reg_parm_stack_space == 0)
2504 {
2505 /* Part of this argument is passed in registers and part
2506 is passed on the stack. Ask the prologue code to extend
2507 the stack part so that we can recreate the full value.
2508
2509 PRETEND_BYTES is the size of the registers we need to store.
2510 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2511 stack space that the prologue should allocate.
2512
2513 Internally, gcc assumes that the argument pointer is aligned
2514 to STACK_BOUNDARY bits. This is used both for alignment
2515 optimizations (see init_emit) and to locate arguments that are
2516 aligned to more than PARM_BOUNDARY bits. We must preserve this
2517 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2518 a stack boundary. */
2519
2520 /* We assume at most one partial arg, and it must be the first
2521 argument on the stack. */
2522 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2523
2524 pretend_bytes = partial;
2525 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
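/* For example, with a 64-bit STACK_BOUNDARY (STACK_BYTES == 8), an
   argument that passed 4 of its bytes in registers gives
   pretend_args_size = CEIL_ROUND (4, 8) = 8.  */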
2526
2527 /* We want to align relative to the actual stack pointer, so
2528 don't include this in the stack size until later. */
2529 all->extra_pretend_bytes = all->pretend_args_size;
2530 }
2531 }
2532
2533 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2534 all->reg_parm_stack_space,
2535 entry_parm ? data->partial : 0, current_function_decl,
2536 &all->stack_args_size, &data->locate);
2537
2538 /* Update parm_stack_boundary if this parameter is passed in the
2539 stack. */
2540 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2541 crtl->parm_stack_boundary = data->locate.boundary;
2542
2543 /* Adjust offsets to include the pretend args. */
2544 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2545 data->locate.slot_offset.constant += pretend_bytes;
2546 data->locate.offset.constant += pretend_bytes;
2547
2548 data->entry_parm = entry_parm;
2549 }
2550
2551 /* A subroutine of assign_parms. If there is actually space on the stack
2552 for this parm, count it in stack_args_size and return true. */
2553
2554 static bool
2555 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2556 struct assign_parm_data_one *data)
2557 {
2558 /* Trivially true if we've no incoming register. */
2559 if (data->entry_parm == NULL)
2560 ;
2561 /* Also true if we're partially in registers and partially not,
2562 since we've arranged to drop the entire argument on the stack. */
2563 else if (data->partial != 0)
2564 ;
2565 /* Also true if the target says that it's passed in both registers
2566 and on the stack. */
2567 else if (GET_CODE (data->entry_parm) == PARALLEL
2568 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2569 ;
2570 /* Also true if the target says that there's stack allocated for
2571 all register parameters. */
2572 else if (all->reg_parm_stack_space > 0)
2573 ;
2574 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2575 else
2576 return false;
2577
2578 all->stack_args_size.constant += data->locate.size.constant;
2579 if (data->locate.size.var)
2580 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2581
2582 return true;
2583 }
2584
2585 /* A subroutine of assign_parms. Given that this parameter is allocated
2586 stack space by the ABI, find it. */
2587
2588 static void
2589 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2590 {
2591 rtx offset_rtx, stack_parm;
2592 unsigned int align, boundary;
2593
2594 /* If we're passing this arg using a reg, make its stack home the
2595 aligned stack slot. */
2596 if (data->entry_parm)
2597 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2598 else
2599 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2600
2601 stack_parm = crtl->args.internal_arg_pointer;
2602 if (offset_rtx != const0_rtx)
2603 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2604 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2605
2606 if (!data->passed_pointer)
2607 {
2608 set_mem_attributes (stack_parm, parm, 1);
2609 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2610 while promoted mode's size is needed. */
2611 if (data->promoted_mode != BLKmode
2612 && data->promoted_mode != DECL_MODE (parm))
2613 {
2614 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2615 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2616 {
2617 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2618 data->promoted_mode);
2619 if (offset)
2620 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2621 }
2622 }
2623 }
2624
2625 boundary = data->locate.boundary;
2626 align = BITS_PER_UNIT;
2627
2628 /* If we're padding upward, we know that the alignment of the slot
2629 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2630 intentionally forcing upward padding. Otherwise we have to come
2631 up with a guess at the alignment based on OFFSET_RTX. */
2632 if (data->locate.where_pad != downward || data->entry_parm)
2633 align = boundary;
2634 else if (CONST_INT_P (offset_rtx))
2635 {
2636 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2637 align = align & -align;
2638 }
2639 set_mem_align (stack_parm, align);
2640
2641 if (data->entry_parm)
2642 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2643
2644 data->stack_parm = stack_parm;
2645 }
2646
2647 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2648 always valid and contiguous. */
2649
2650 static void
2651 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2652 {
2653 rtx entry_parm = data->entry_parm;
2654 rtx stack_parm = data->stack_parm;
2655
2656 /* If this parm was passed part in regs and part in memory, pretend it
2657 arrived entirely in memory by pushing the register-part onto the stack.
2658 In the special case of a DImode or DFmode that is split, we could put
2659 it together in a pseudoreg directly, but for now that's not worth
2660 bothering with. */
2661 if (data->partial != 0)
2662 {
2663 /* Handle calls that pass values in multiple non-contiguous
2664 locations. The Irix 6 ABI has examples of this. */
2665 if (GET_CODE (entry_parm) == PARALLEL)
2666 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2667 data->passed_type,
2668 int_size_in_bytes (data->passed_type));
2669 else
2670 {
2671 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2672 move_block_from_reg (REGNO (entry_parm),
2673 validize_mem (copy_rtx (stack_parm)),
2674 data->partial / UNITS_PER_WORD);
2675 }
2676
2677 entry_parm = stack_parm;
2678 }
2679
2680 /* If we didn't decide this parm came in a register, by default it came
2681 on the stack. */
2682 else if (entry_parm == NULL)
2683 entry_parm = stack_parm;
2684
2685 /* When an argument is passed in multiple locations, we can't make use
2686 of this information, but we can save some copying if the whole argument
2687 is passed in a single register. */
2688 else if (GET_CODE (entry_parm) == PARALLEL
2689 && data->nominal_mode != BLKmode
2690 && data->passed_mode != BLKmode)
2691 {
2692 size_t i, len = XVECLEN (entry_parm, 0);
2693
2694 for (i = 0; i < len; i++)
2695 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2696 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2697 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2698 == data->passed_mode)
2699 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2700 {
2701 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2702 break;
2703 }
2704 }
2705
2706 data->entry_parm = entry_parm;
2707 }
2708
2709 /* A subroutine of assign_parms. Reconstitute any values which were
2710 passed in multiple registers and would fit in a single register. */
2711
2712 static void
2713 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2714 {
2715 rtx entry_parm = data->entry_parm;
2716
2717 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2718 This can be done with register operations rather than on the
2719 stack, even if we will store the reconstituted parameter on the
2720 stack later. */
2721 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2722 {
2723 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2724 emit_group_store (parmreg, entry_parm, data->passed_type,
2725 GET_MODE_SIZE (GET_MODE (entry_parm)));
2726 entry_parm = parmreg;
2727 }
2728
2729 data->entry_parm = entry_parm;
2730 }
2731
2732 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2733 always valid and properly aligned. */
2734
2735 static void
2736 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2737 {
2738 rtx stack_parm = data->stack_parm;
2739
2740 /* If we can't trust the parm stack slot to be aligned enough for its
2741 ultimate type, don't use that slot after entry. We'll make another
2742 stack slot, if we need one. */
2743 if (stack_parm
2744 && ((STRICT_ALIGNMENT
2745 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2746 || (data->nominal_type
2747 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2748 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2749 stack_parm = NULL;
2750
2751 /* If parm was passed in memory, and we need to convert it on entry,
2752 don't store it back in that same slot. */
2753 else if (data->entry_parm == stack_parm
2754 && data->nominal_mode != BLKmode
2755 && data->nominal_mode != data->passed_mode)
2756 stack_parm = NULL;
2757
2758 /* If stack protection is in effect for this function, don't leave any
2759 pointers in their passed stack slots. */
2760 else if (crtl->stack_protect_guard
2761 && (flag_stack_protect == 2
2762 || data->passed_pointer
2763 || POINTER_TYPE_P (data->nominal_type)))
2764 stack_parm = NULL;
2765
2766 data->stack_parm = stack_parm;
2767 }
2768
2769 /* A subroutine of assign_parms. Return true if the current parameter
2770 should be stored as a BLKmode in the current frame. */
2771
2772 static bool
2773 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2774 {
2775 if (data->nominal_mode == BLKmode)
2776 return true;
2777 if (GET_MODE (data->entry_parm) == BLKmode)
2778 return true;
2779
2780 #ifdef BLOCK_REG_PADDING
2781 /* Only assign_parm_setup_block knows how to deal with register arguments
2782 that are padded at the least significant end. */
2783 if (REG_P (data->entry_parm)
2784 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2785 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2786 == (BYTES_BIG_ENDIAN ? upward : downward)))
2787 return true;
2788 #endif
2789
2790 return false;
2791 }
2792
2793 /* A subroutine of assign_parms. Arrange for the parameter to be
2794 present and valid in DATA->STACK_PARM. */
2795
2796 static void
2797 assign_parm_setup_block (struct assign_parm_data_all *all,
2798 tree parm, struct assign_parm_data_one *data)
2799 {
2800 rtx entry_parm = data->entry_parm;
2801 rtx stack_parm = data->stack_parm;
2802 HOST_WIDE_INT size;
2803 HOST_WIDE_INT size_stored;
2804
2805 if (GET_CODE (entry_parm) == PARALLEL)
2806 entry_parm = emit_group_move_into_temps (entry_parm);
2807
2808 size = int_size_in_bytes (data->passed_type);
2809 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2810 if (stack_parm == 0)
2811 {
2812 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2813 stack_parm = assign_stack_local (BLKmode, size_stored,
2814 DECL_ALIGN (parm));
2815 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2816 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2817 set_mem_attributes (stack_parm, parm, 1);
2818 }
2819
2820 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2821 calls that pass values in multiple non-contiguous locations. */
2822 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2823 {
2824 rtx mem;
2825
2826 /* Note that we will be storing an integral number of words.
2827 So we have to be careful to ensure that we allocate an
2828 integral number of words. We do this above when we call
2829 assign_stack_local if space was not allocated in the argument
2830 list. If it was, this will not work if PARM_BOUNDARY is not
2831 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2832 if it becomes a problem. Exception is when BLKmode arrives
2833 with arguments not conforming to word_mode. */
2834
2835 if (data->stack_parm == 0)
2836 ;
2837 else if (GET_CODE (entry_parm) == PARALLEL)
2838 ;
2839 else
2840 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2841
2842 mem = validize_mem (copy_rtx (stack_parm));
2843
2844 /* Handle values in multiple non-contiguous locations. */
2845 if (GET_CODE (entry_parm) == PARALLEL)
2846 {
2847 push_to_sequence2 (all->first_conversion_insn,
2848 all->last_conversion_insn);
2849 emit_group_store (mem, entry_parm, data->passed_type, size);
2850 all->first_conversion_insn = get_insns ();
2851 all->last_conversion_insn = get_last_insn ();
2852 end_sequence ();
2853 }
2854
2855 else if (size == 0)
2856 ;
2857
2858 /* If SIZE is that of a mode no bigger than a word, just use
2859 that mode's store operation. */
2860 else if (size <= UNITS_PER_WORD)
2861 {
2862 enum machine_mode mode
2863 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2864
2865 if (mode != BLKmode
2866 #ifdef BLOCK_REG_PADDING
2867 && (size == UNITS_PER_WORD
2868 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2869 != (BYTES_BIG_ENDIAN ? upward : downward)))
2870 #endif
2871 )
2872 {
2873 rtx reg;
2874
2875 /* We are really truncating a word_mode value containing
2876 SIZE bytes into a value of mode MODE. If such an
2877 operation requires no actual instructions, we can refer
2878 to the value directly in mode MODE, otherwise we must
2879 start with the register in word_mode and explicitly
2880 convert it. */
2881 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2882 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2883 else
2884 {
2885 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2886 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2887 }
2888 emit_move_insn (change_address (mem, mode, 0), reg);
2889 }
2890
2891 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2892 machine must be aligned to the left before storing
2893 to memory. Note that the previous test doesn't
2894 handle all cases (e.g. SIZE == 3). */
2895 else if (size != UNITS_PER_WORD
2896 #ifdef BLOCK_REG_PADDING
2897 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2898 == downward)
2899 #else
2900 && BYTES_BIG_ENDIAN
2901 #endif
2902 )
2903 {
2904 rtx tem, x;
2905 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2906 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2907
2908 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2909 tem = change_address (mem, word_mode, 0);
2910 emit_move_insn (tem, x);
2911 }
2912 else
2913 move_block_from_reg (REGNO (entry_parm), mem,
2914 size_stored / UNITS_PER_WORD);
2915 }
2916 else
2917 move_block_from_reg (REGNO (entry_parm), mem,
2918 size_stored / UNITS_PER_WORD);
2919 }
2920 else if (data->stack_parm == 0)
2921 {
2922 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2923 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2924 BLOCK_OP_NORMAL);
2925 all->first_conversion_insn = get_insns ();
2926 all->last_conversion_insn = get_last_insn ();
2927 end_sequence ();
2928 }
2929
2930 data->stack_parm = stack_parm;
2931 SET_DECL_RTL (parm, stack_parm);
2932 }
2933
2934 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2935 parameter. Get it there. Perform all ABI specified conversions. */
2936
2937 static void
2938 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2939 struct assign_parm_data_one *data)
2940 {
2941 rtx parmreg, validated_mem;
2942 rtx equiv_stack_parm;
2943 enum machine_mode promoted_nominal_mode;
2944 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2945 bool did_conversion = false;
2946 bool need_conversion, moved;
2947
2948 /* Store the parm in a pseudoregister during the function, but we may
2949 need to do it in a wider mode. Using 2 here makes the result
2950 consistent with promote_decl_mode and thus expand_expr_real_1. */
2951 promoted_nominal_mode
2952 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2953 TREE_TYPE (current_function_decl), 2);
2954
2955 parmreg = gen_reg_rtx (promoted_nominal_mode);
2956
2957 if (!DECL_ARTIFICIAL (parm))
2958 mark_user_reg (parmreg);
2959
2960 /* If this was an item that we received a pointer to,
2961 set DECL_RTL appropriately. */
2962 if (data->passed_pointer)
2963 {
2964 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2965 set_mem_attributes (x, parm, 1);
2966 SET_DECL_RTL (parm, x);
2967 }
2968 else
2969 SET_DECL_RTL (parm, parmreg);
2970
2971 assign_parm_remove_parallels (data);
2972
2973 /* Copy the value into the register, thus bridging between
2974 assign_parm_find_data_types and expand_expr_real_1. */
2975
2976 equiv_stack_parm = data->stack_parm;
2977 validated_mem = validize_mem (copy_rtx (data->entry_parm));
2978
2979 need_conversion = (data->nominal_mode != data->passed_mode
2980 || promoted_nominal_mode != data->promoted_mode);
2981 moved = false;
2982
2983 if (need_conversion
2984 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2985 && data->nominal_mode == data->passed_mode
2986 && data->nominal_mode == GET_MODE (data->entry_parm))
2987 {
2988 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2989 mode, by the caller. We now have to convert it to
2990 NOMINAL_MODE, if different. However, PARMREG may be in
2991 a different mode than NOMINAL_MODE if it is being stored
2992 promoted.
2993
2994 If ENTRY_PARM is a hard register, it might be in a register
2995 not valid for operating in its mode (e.g., an odd-numbered
2996 register for a DFmode). In that case, moves are the only
2997 thing valid, so we can't do a convert from there. This
2998 occurs when the calling sequence allows such misaligned
2999 usages.
3000
3001 In addition, the conversion may involve a call, which could
3002 clobber parameters which haven't been copied to pseudo
3003 registers yet.
3004
3005 First, we try to emit an insn which performs the necessary
3006 conversion. We verify that this insn does not clobber any
3007 hard registers. */
3008
3009 enum insn_code icode;
3010 rtx op0, op1;
3011
3012 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3013 unsignedp);
3014
3015 op0 = parmreg;
3016 op1 = validated_mem;
3017 if (icode != CODE_FOR_nothing
3018 && insn_operand_matches (icode, 0, op0)
3019 && insn_operand_matches (icode, 1, op1))
3020 {
3021 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3022 rtx insn, insns, t = op1;
3023 HARD_REG_SET hardregs;
3024
3025 start_sequence ();
3026 /* If op1 is a hard register that is likely spilled, first
3027 force it into a pseudo, otherwise combiner might extend
3028 its lifetime too much. */
3029 if (GET_CODE (t) == SUBREG)
3030 t = SUBREG_REG (t);
3031 if (REG_P (t)
3032 && HARD_REGISTER_P (t)
3033 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3034 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3035 {
3036 t = gen_reg_rtx (GET_MODE (op1));
3037 emit_move_insn (t, op1);
3038 }
3039 else
3040 t = op1;
3041 insn = gen_extend_insn (op0, t, promoted_nominal_mode,
3042 data->passed_mode, unsignedp);
3043 emit_insn (insn);
3044 insns = get_insns ();
3045
3046 moved = true;
3047 CLEAR_HARD_REG_SET (hardregs);
3048 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3049 {
3050 if (INSN_P (insn))
3051 note_stores (PATTERN (insn), record_hard_reg_sets,
3052 &hardregs);
3053 if (!hard_reg_set_empty_p (hardregs))
3054 moved = false;
3055 }
3056
3057 end_sequence ();
3058
3059 if (moved)
3060 {
3061 emit_insn (insns);
3062 if (equiv_stack_parm != NULL_RTX)
3063 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3064 equiv_stack_parm);
3065 }
3066 }
3067 }
3068
3069 if (moved)
3070 /* Nothing to do. */
3071 ;
3072 else if (need_conversion)
3073 {
3074 /* We did not have an insn to convert directly, or the sequence
3075 generated appeared unsafe. We must first copy the parm to a
3076 pseudo reg, and save the conversion until after all
3077 parameters have been moved. */
3078
3079 int save_tree_used;
3080 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3081
3082 emit_move_insn (tempreg, validated_mem);
3083
3084 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3085 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3086
3087 if (GET_CODE (tempreg) == SUBREG
3088 && GET_MODE (tempreg) == data->nominal_mode
3089 && REG_P (SUBREG_REG (tempreg))
3090 && data->nominal_mode == data->passed_mode
3091 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3092 && GET_MODE_SIZE (GET_MODE (tempreg))
3093 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3094 {
3095 /* The argument is already sign/zero extended, so note it
3096 into the subreg. */
3097 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3098 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3099 }
3100
3101 /* TREE_USED gets set erroneously during expand_assignment. */
3102 save_tree_used = TREE_USED (parm);
3103 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3104 TREE_USED (parm) = save_tree_used;
3105 all->first_conversion_insn = get_insns ();
3106 all->last_conversion_insn = get_last_insn ();
3107 end_sequence ();
3108
3109 did_conversion = true;
3110 }
3111 else
3112 emit_move_insn (parmreg, validated_mem);
3113
3114 /* If we were passed a pointer but the actual value can safely live
3115 in a register, retrieve it and use it directly. */
3116 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3117 {
3118 /* We can't use nominal_mode, because it will have been set to
3119 Pmode above. We must use the actual mode of the parm. */
3120 if (use_register_for_decl (parm))
3121 {
3122 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3123 mark_user_reg (parmreg);
3124 }
3125 else
3126 {
3127 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3128 TYPE_MODE (TREE_TYPE (parm)),
3129 TYPE_ALIGN (TREE_TYPE (parm)));
3130 parmreg
3131 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3132 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3133 align);
3134 set_mem_attributes (parmreg, parm, 1);
3135 }
3136
3137 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3138 {
3139 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3140 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3141
3142 push_to_sequence2 (all->first_conversion_insn,
3143 all->last_conversion_insn);
3144 emit_move_insn (tempreg, DECL_RTL (parm));
3145 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3146 emit_move_insn (parmreg, tempreg);
3147 all->first_conversion_insn = get_insns ();
3148 all->last_conversion_insn = get_last_insn ();
3149 end_sequence ();
3150
3151 did_conversion = true;
3152 }
3153 else
3154 emit_move_insn (parmreg, DECL_RTL (parm));
3155
3156 SET_DECL_RTL (parm, parmreg);
3157
3158 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3159 now the parm. */
3160 data->stack_parm = NULL;
3161 }
3162
3163 /* Mark the register as eliminable if we did no conversion and it was
3164 copied from memory at a fixed offset, and the arg pointer was not
3165 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3166 offset formed an invalid address, such memory-equivalences as we
3167 make here would screw up life analysis for it. */
3168 if (data->nominal_mode == data->passed_mode
3169 && !did_conversion
3170 && data->stack_parm != 0
3171 && MEM_P (data->stack_parm)
3172 && data->locate.offset.var == 0
3173 && reg_mentioned_p (virtual_incoming_args_rtx,
3174 XEXP (data->stack_parm, 0)))
3175 {
3176 rtx_insn *linsn = get_last_insn ();
3177 rtx_insn *sinsn;
3178 rtx set;
3179
3180 /* Mark complex types separately. */
3181 if (GET_CODE (parmreg) == CONCAT)
3182 {
3183 enum machine_mode submode
3184 = GET_MODE_INNER (GET_MODE (parmreg));
3185 int regnor = REGNO (XEXP (parmreg, 0));
3186 int regnoi = REGNO (XEXP (parmreg, 1));
3187 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3188 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3189 GET_MODE_SIZE (submode));
3190
3191 /* Scan backwards for the set of the real and
3192 imaginary parts. */
3193 for (sinsn = linsn; sinsn != 0;
3194 sinsn = prev_nonnote_insn (sinsn))
3195 {
3196 set = single_set (sinsn);
3197 if (set == 0)
3198 continue;
3199
3200 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3201 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3202 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3203 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3204 }
3205 }
3206 else
3207 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3208 }
3209
3210 /* For pointer data type, suggest pointer register. */
3211 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3212 mark_reg_pointer (parmreg,
3213 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3214 }
3215
3216 /* A subroutine of assign_parms. Allocate stack space to hold the current
3217 parameter. Get it there. Perform all ABI specified conversions. */
3218
3219 static void
3220 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3221 struct assign_parm_data_one *data)
3222 {
3223 /* Value must be stored in the stack slot STACK_PARM during function
3224 execution. */
3225 bool to_conversion = false;
3226
3227 assign_parm_remove_parallels (data);
3228
3229 if (data->promoted_mode != data->nominal_mode)
3230 {
3231 /* Conversion is required. */
3232 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3233
3234 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3235
3236 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3237 to_conversion = true;
3238
3239 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3240 TYPE_UNSIGNED (TREE_TYPE (parm)));
3241
3242 if (data->stack_parm)
3243 {
3244 int offset = subreg_lowpart_offset (data->nominal_mode,
3245 GET_MODE (data->stack_parm));
3246 /* ??? This may need a big-endian conversion on sparc64. */
3247 data->stack_parm
3248 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3249 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3250 set_mem_offset (data->stack_parm,
3251 MEM_OFFSET (data->stack_parm) + offset);
3252 }
3253 }
3254
3255 if (data->entry_parm != data->stack_parm)
3256 {
3257 rtx src, dest;
3258
3259 if (data->stack_parm == 0)
3260 {
3261 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3262 GET_MODE (data->entry_parm),
3263 TYPE_ALIGN (data->passed_type));
3264 data->stack_parm
3265 = assign_stack_local (GET_MODE (data->entry_parm),
3266 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3267 align);
3268 set_mem_attributes (data->stack_parm, parm, 1);
3269 }
3270
3271 dest = validize_mem (copy_rtx (data->stack_parm));
3272 src = validize_mem (copy_rtx (data->entry_parm));
3273
3274 if (MEM_P (src))
3275 {
3276 /* Use a block move to handle potentially misaligned entry_parm. */
3277 if (!to_conversion)
3278 push_to_sequence2 (all->first_conversion_insn,
3279 all->last_conversion_insn);
3280 to_conversion = true;
3281
3282 emit_block_move (dest, src,
3283 GEN_INT (int_size_in_bytes (data->passed_type)),
3284 BLOCK_OP_NORMAL);
3285 }
3286 else
3287 emit_move_insn (dest, src);
3288 }
3289
3290 if (to_conversion)
3291 {
3292 all->first_conversion_insn = get_insns ();
3293 all->last_conversion_insn = get_last_insn ();
3294 end_sequence ();
3295 }
3296
3297 SET_DECL_RTL (parm, data->stack_parm);
3298 }
3299
3300 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3301 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3302
3303 static void
3304 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3305 vec<tree> fnargs)
3306 {
3307 tree parm;
3308 tree orig_fnargs = all->orig_fnargs;
3309 unsigned i = 0;
3310
3311 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3312 {
3313 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3314 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3315 {
3316 rtx tmp, real, imag;
3317 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3318
3319 real = DECL_RTL (fnargs[i]);
3320 imag = DECL_RTL (fnargs[i + 1]);
3321 if (inner != GET_MODE (real))
3322 {
3323 real = gen_lowpart_SUBREG (inner, real);
3324 imag = gen_lowpart_SUBREG (inner, imag);
3325 }
3326
3327 if (TREE_ADDRESSABLE (parm))
3328 {
3329 rtx rmem, imem;
3330 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3331 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3332 DECL_MODE (parm),
3333 TYPE_ALIGN (TREE_TYPE (parm)));
3334
3335 /* split_complex_arg put the real and imag parts in
3336 pseudos. Move them to memory. */
3337 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3338 set_mem_attributes (tmp, parm, 1);
3339 rmem = adjust_address_nv (tmp, inner, 0);
3340 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3341 push_to_sequence2 (all->first_conversion_insn,
3342 all->last_conversion_insn);
3343 emit_move_insn (rmem, real);
3344 emit_move_insn (imem, imag);
3345 all->first_conversion_insn = get_insns ();
3346 all->last_conversion_insn = get_last_insn ();
3347 end_sequence ();
3348 }
3349 else
3350 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3351 SET_DECL_RTL (parm, tmp);
3352
3353 real = DECL_INCOMING_RTL (fnargs[i]);
3354 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3355 if (inner != GET_MODE (real))
3356 {
3357 real = gen_lowpart_SUBREG (inner, real);
3358 imag = gen_lowpart_SUBREG (inner, imag);
3359 }
3360 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3361 set_decl_incoming_rtl (parm, tmp, false);
3362 i++;
3363 }
3364 }
3365 }
3366
3367 /* Assign RTL expressions to the function's parameters. This may involve
3368 copying them into registers and using those registers as the DECL_RTL. */
3369
3370 static void
3371 assign_parms (tree fndecl)
3372 {
3373 struct assign_parm_data_all all;
3374 tree parm;
3375 vec<tree> fnargs;
3376 unsigned i;
3377
3378 crtl->args.internal_arg_pointer
3379 = targetm.calls.internal_arg_pointer ();
3380
3381 assign_parms_initialize_all (&all);
3382 fnargs = assign_parms_augmented_arg_list (&all);
3383
3384 FOR_EACH_VEC_ELT (fnargs, i, parm)
3385 {
3386 struct assign_parm_data_one data;
3387
3388 /* Extract the type of PARM; adjust it according to ABI. */
3389 assign_parm_find_data_types (&all, parm, &data);
3390
3391 /* Early out for errors and void parameters. */
3392 if (data.passed_mode == VOIDmode)
3393 {
3394 SET_DECL_RTL (parm, const0_rtx);
3395 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3396 continue;
3397 }
3398
3399 /* Estimate stack alignment from parameter alignment. */
3400 if (SUPPORTS_STACK_ALIGNMENT)
3401 {
3402 unsigned int align
3403 = targetm.calls.function_arg_boundary (data.promoted_mode,
3404 data.passed_type);
3405 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3406 align);
3407 if (TYPE_ALIGN (data.nominal_type) > align)
3408 align = MINIMUM_ALIGNMENT (data.nominal_type,
3409 TYPE_MODE (data.nominal_type),
3410 TYPE_ALIGN (data.nominal_type));
3411 if (crtl->stack_alignment_estimated < align)
3412 {
3413 gcc_assert (!crtl->stack_realign_processed);
3414 crtl->stack_alignment_estimated = align;
3415 }
3416 }
3417
3418 if (cfun->stdarg && !DECL_CHAIN (parm))
3419 assign_parms_setup_varargs (&all, &data, false);
3420
3421 /* Find out where the parameter arrives in this function. */
3422 assign_parm_find_entry_rtl (&all, &data);
3423
3424 /* Find out where stack space for this parameter might be. */
3425 if (assign_parm_is_stack_parm (&all, &data))
3426 {
3427 assign_parm_find_stack_rtl (parm, &data);
3428 assign_parm_adjust_entry_rtl (&data);
3429 }
3430
3431 /* Record permanently how this parm was passed. */
3432 if (data.passed_pointer)
3433 {
3434 rtx incoming_rtl
3435 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3436 data.entry_parm);
3437 set_decl_incoming_rtl (parm, incoming_rtl, true);
3438 }
3439 else
3440 set_decl_incoming_rtl (parm, data.entry_parm, false);
3441
3442 /* Update info on where next arg arrives in registers. */
3443 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3444 data.passed_type, data.named_arg);
3445
3446 assign_parm_adjust_stack_rtl (&data);
3447
3448 if (assign_parm_setup_block_p (&data))
3449 assign_parm_setup_block (&all, parm, &data);
3450 else if (data.passed_pointer || use_register_for_decl (parm))
3451 assign_parm_setup_reg (&all, parm, &data);
3452 else
3453 assign_parm_setup_stack (&all, parm, &data);
3454 }
3455
3456 if (targetm.calls.split_complex_arg)
3457 assign_parms_unsplit_complex (&all, fnargs);
3458
3459 fnargs.release ();
3460
3461 /* Output all parameter conversion instructions (possibly including calls)
3462 now that all parameters have been copied out of hard registers. */
3463 emit_insn (all.first_conversion_insn);
3464
3465 /* Estimate reload stack alignment from scalar return mode. */
3466 if (SUPPORTS_STACK_ALIGNMENT)
3467 {
3468 if (DECL_RESULT (fndecl))
3469 {
3470 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3471 enum machine_mode mode = TYPE_MODE (type);
3472
3473 if (mode != BLKmode
3474 && mode != VOIDmode
3475 && !AGGREGATE_TYPE_P (type))
3476 {
3477 unsigned int align = GET_MODE_ALIGNMENT (mode);
3478 if (crtl->stack_alignment_estimated < align)
3479 {
3480 gcc_assert (!crtl->stack_realign_processed);
3481 crtl->stack_alignment_estimated = align;
3482 }
3483 }
3484 }
3485 }
3486
3487 /* If we are receiving a struct value address as the first argument, set up
3488 the RTL for the function result. As this might require code to convert
3489 the transmitted address to Pmode, we do this here to ensure that possible
3490 preliminary conversions of the address have been emitted already. */
3491 if (all.function_result_decl)
3492 {
3493 tree result = DECL_RESULT (current_function_decl);
3494 rtx addr = DECL_RTL (all.function_result_decl);
3495 rtx x;
3496
3497 if (DECL_BY_REFERENCE (result))
3498 {
3499 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3500 x = addr;
3501 }
3502 else
3503 {
3504 SET_DECL_VALUE_EXPR (result,
3505 build1 (INDIRECT_REF, TREE_TYPE (result),
3506 all.function_result_decl));
3507 addr = convert_memory_address (Pmode, addr);
3508 x = gen_rtx_MEM (DECL_MODE (result), addr);
3509 set_mem_attributes (x, result, 1);
3510 }
3511
3512 DECL_HAS_VALUE_EXPR_P (result) = 1;
3513
3514 SET_DECL_RTL (result, x);
3515 }
3516
3517 /* We have aligned all the args, so add space for the pretend args. */
3518 crtl->args.pretend_args_size = all.pretend_args_size;
3519 all.stack_args_size.constant += all.extra_pretend_bytes;
3520 crtl->args.size = all.stack_args_size.constant;
3521
3522 /* Adjust function incoming argument size for alignment and
3523 minimum length. */
3524
3525 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3526 crtl->args.size = CEIL_ROUND (crtl->args.size,
3527 PARM_BOUNDARY / BITS_PER_UNIT);
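  /* As a worked example with hypothetical numbers: if the named arguments
     occupy 13 bytes of stack, reg_parm_stack_space is 0 and PARM_BOUNDARY
     is 64 bits, the CEIL_ROUND above leaves crtl->args.size == 16.  */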
3528
3529 #ifdef ARGS_GROW_DOWNWARD
3530 crtl->args.arg_offset_rtx
3531 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3532 : expand_expr (size_diffop (all.stack_args_size.var,
3533 size_int (-all.stack_args_size.constant)),
3534 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3535 #else
3536 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3537 #endif
3538
3539 /* See how many bytes, if any, of its args a function should try to pop
3540 on return. */
3541
3542 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3543 TREE_TYPE (fndecl),
3544 crtl->args.size);
3545
3546 /* For a stdarg.h function, save info about the
3547 registers and stack space used by the named args. */
3548
3549 crtl->args.info = all.args_so_far_v;
3550
3551 /* Set the rtx used for the function return value. Put this in its
3552 own variable so any optimizers that need this information don't have
3553 to include tree.h. Do this here so it gets done when an inlined
3554 function gets output. */
3555
3556 crtl->return_rtx
3557 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3558 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3559
3560 /* If scalar return value was computed in a pseudo-reg, or was a named
3561 return value that got dumped to the stack, copy that to the hard
3562 return register. */
3563 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3564 {
3565 tree decl_result = DECL_RESULT (fndecl);
3566 rtx decl_rtl = DECL_RTL (decl_result);
3567
3568 if (REG_P (decl_rtl)
3569 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3570 : DECL_REGISTER (decl_result))
3571 {
3572 rtx real_decl_rtl;
3573
3574 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3575 fndecl, true);
3576 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3577 /* The delay slot scheduler assumes that crtl->return_rtx
3578 holds the hard register containing the return value, not a
3579 temporary pseudo. */
3580 crtl->return_rtx = real_decl_rtl;
3581 }
3582 }
3583 }
3584
3585 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3586 For all seen types, gimplify their sizes. */
3587
3588 static tree
3589 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3590 {
3591 tree t = *tp;
3592
3593 *walk_subtrees = 0;
3594 if (TYPE_P (t))
3595 {
3596 if (POINTER_TYPE_P (t))
3597 *walk_subtrees = 1;
3598 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3599 && !TYPE_SIZES_GIMPLIFIED (t))
3600 {
3601 gimplify_type_sizes (t, (gimple_seq *) data);
3602 *walk_subtrees = 1;
3603 }
3604 }
3605
3606 return NULL;
3607 }
3608
3609 /* Gimplify the parameter list for current_function_decl. This involves
3610 evaluating SAVE_EXPRs of variable sized parameters and generating code
3611 to implement callee-copies reference parameters. Returns a sequence of
3612 statements to add to the beginning of the function. */
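/* Roughly speaking, for a callee-copied reference parameter PARM this
   produces (the name TMP below is only illustrative):

     tmp = PARM;                -- bitwise copy of the passed object

   with DECL_VALUE_EXPR (PARM) set to TMP so that later uses see the copy;
   for variable-sized parameters the copy instead lives in storage obtained
   via __builtin_alloca_with_align, as the body below shows.  */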
3613
3614 gimple_seq
3615 gimplify_parameters (void)
3616 {
3617 struct assign_parm_data_all all;
3618 tree parm;
3619 gimple_seq stmts = NULL;
3620 vec<tree> fnargs;
3621 unsigned i;
3622
3623 assign_parms_initialize_all (&all);
3624 fnargs = assign_parms_augmented_arg_list (&all);
3625
3626 FOR_EACH_VEC_ELT (fnargs, i, parm)
3627 {
3628 struct assign_parm_data_one data;
3629
3630 /* Extract the type of PARM; adjust it according to ABI. */
3631 assign_parm_find_data_types (&all, parm, &data);
3632
3633 /* Early out for errors and void parameters. */
3634 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3635 continue;
3636
3637 /* Update info on where next arg arrives in registers. */
3638 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3639 data.passed_type, data.named_arg);
3640
3641 /* ??? Once upon a time variable_size stuffed parameter list
3642 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3643 turned out to be less than manageable in the gimple world.
3644 Now we have to hunt them down ourselves. */
3645 walk_tree_without_duplicates (&data.passed_type,
3646 gimplify_parm_type, &stmts);
3647
3648 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3649 {
3650 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3651 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3652 }
3653
3654 if (data.passed_pointer)
3655 {
3656 tree type = TREE_TYPE (data.passed_type);
3657 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3658 type, data.named_arg))
3659 {
3660 tree local, t;
3661
3662 /* For constant-sized objects, this is trivial; for
3663 variable-sized objects, we have to play games. */
3664 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3665 && !(flag_stack_check == GENERIC_STACK_CHECK
3666 && compare_tree_int (DECL_SIZE_UNIT (parm),
3667 STACK_CHECK_MAX_VAR_SIZE) > 0))
3668 {
3669 local = create_tmp_var (type, get_name (parm));
3670 DECL_IGNORED_P (local) = 0;
3671 /* If PARM was addressable, move that flag over
3672 to the local copy, as its address will be taken,
3673 not the PARM's. Keep the PARM's address-taken flag
3674 set as well, since we'll query it during gimplification. */
3675 if (TREE_ADDRESSABLE (parm))
3676 TREE_ADDRESSABLE (local) = 1;
3677 else if (TREE_CODE (type) == COMPLEX_TYPE
3678 || TREE_CODE (type) == VECTOR_TYPE)
3679 DECL_GIMPLE_REG_P (local) = 1;
3680 }
3681 else
3682 {
3683 tree ptr_type, addr;
3684
3685 ptr_type = build_pointer_type (type);
3686 addr = create_tmp_reg (ptr_type, get_name (parm));
3687 DECL_IGNORED_P (addr) = 0;
3688 local = build_fold_indirect_ref (addr);
3689
3690 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3691 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3692 size_int (DECL_ALIGN (parm)));
3693
3694 /* The call has been built for a variable-sized object. */
3695 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3696 t = fold_convert (ptr_type, t);
3697 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3698 gimplify_and_add (t, &stmts);
3699 }
3700
3701 gimplify_assign (local, parm, &stmts);
3702
3703 SET_DECL_VALUE_EXPR (parm, local);
3704 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3705 }
3706 }
3707 }
3708
3709 fnargs.release ();
3710
3711 return stmts;
3712 }
3713 \f
3714 /* Compute the size and offset from the start of the stacked arguments for a
3715 parm passed in mode PASSED_MODE and with type TYPE.
3716
3717 INITIAL_OFFSET_PTR points to the current offset into the stacked
3718 arguments.
3719
3720 The starting offset and size for this parm are returned in
3721 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3722 nonzero, the offset is that of the stack slot, which is returned in
3723 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3724 padding required from the initial offset ptr to the stack slot.
3725
3726 IN_REGS is nonzero if the argument will be passed in registers. It will
3727 never be set if REG_PARM_STACK_SPACE is not defined.
3728
3729 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3730 for arguments which are passed in registers.
3731
3732 FNDECL is the function in which the argument was defined.
3733
3734 There are two types of rounding that are done. The first, controlled by
3735 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3736 argument list to be aligned to the specific boundary (in bits). This
3737 rounding affects the initial and starting offsets, but not the argument
3738 size.
3739
3740 The second, controlled by FUNCTION_ARG_PADDING and the
3741 TARGET_FUNCTION_ARG_ROUND_BOUNDARY hook (PARM_BOUNDARY by default),
3742 optionally rounds the size of the parm up to that boundary. The initial
3743 offset is not affected by this rounding, while the size always is and the starting offset may be. */
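/* For instance, with hypothetical target values -- a 6-byte argument whose
   TARGET_FUNCTION_ARG_BOUNDARY is 64 bits and whose round boundary is
   32 bits -- the argument starts at the next 8-byte-aligned offset and is
   recorded as occupying 8 bytes (6 rounded up to a multiple of 4).  */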
3744
3745 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3746 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3747 callers pass in the total size of args so far as
3748 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3749
3750 void
3751 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3752 int reg_parm_stack_space, int partial,
3753 tree fndecl ATTRIBUTE_UNUSED,
3754 struct args_size *initial_offset_ptr,
3755 struct locate_and_pad_arg_data *locate)
3756 {
3757 tree sizetree;
3758 enum direction where_pad;
3759 unsigned int boundary, round_boundary;
3760 int part_size_in_regs;
3761
3762 /* If we have found a stack parm before we reach the end of the
3763 area reserved for registers, skip that area. */
3764 if (! in_regs)
3765 {
3766 if (reg_parm_stack_space > 0)
3767 {
3768 if (initial_offset_ptr->var)
3769 {
3770 initial_offset_ptr->var
3771 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3772 ssize_int (reg_parm_stack_space));
3773 initial_offset_ptr->constant = 0;
3774 }
3775 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3776 initial_offset_ptr->constant = reg_parm_stack_space;
3777 }
3778 }
3779
3780 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3781
3782 sizetree
3783 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3784 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3785 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3786 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
3787 type);
3788 locate->where_pad = where_pad;
3789
3790 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3791 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3792 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3793
3794 locate->boundary = boundary;
3795
3796 if (SUPPORTS_STACK_ALIGNMENT)
3797 {
3798 /* stack_alignment_estimated can't change after stack has been
3799 realigned. */
3800 if (crtl->stack_alignment_estimated < boundary)
3801 {
3802 if (!crtl->stack_realign_processed)
3803 crtl->stack_alignment_estimated = boundary;
3804 else
3805 {
3806 /* If stack is realigned and stack alignment value
3807 hasn't been finalized, it is OK not to increase
3808 stack_alignment_estimated. The bigger alignment
3809 requirement is recorded in stack_alignment_needed
3810 below. */
3811 gcc_assert (!crtl->stack_realign_finalized
3812 && crtl->stack_realign_needed);
3813 }
3814 }
3815 }
3816
3817 /* Remember if the outgoing parameter requires extra alignment on the
3818 calling function side. */
3819 if (crtl->stack_alignment_needed < boundary)
3820 crtl->stack_alignment_needed = boundary;
3821 if (crtl->preferred_stack_boundary < boundary)
3822 crtl->preferred_stack_boundary = boundary;
3823
3824 #ifdef ARGS_GROW_DOWNWARD
3825 locate->slot_offset.constant = -initial_offset_ptr->constant;
3826 if (initial_offset_ptr->var)
3827 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3828 initial_offset_ptr->var);
3829
3830 {
3831 tree s2 = sizetree;
3832 if (where_pad != none
3833 && (!tree_fits_uhwi_p (sizetree)
3834 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
3835 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
3836 SUB_PARM_SIZE (locate->slot_offset, s2);
3837 }
3838
3839 locate->slot_offset.constant += part_size_in_regs;
3840
3841 if (!in_regs || reg_parm_stack_space > 0)
3842 pad_to_arg_alignment (&locate->slot_offset, boundary,
3843 &locate->alignment_pad);
3844
3845 locate->size.constant = (-initial_offset_ptr->constant
3846 - locate->slot_offset.constant);
3847 if (initial_offset_ptr->var)
3848 locate->size.var = size_binop (MINUS_EXPR,
3849 size_binop (MINUS_EXPR,
3850 ssize_int (0),
3851 initial_offset_ptr->var),
3852 locate->slot_offset.var);
3853
3854 /* Pad_below needs the pre-rounded size to know how much to pad
3855 below. */
3856 locate->offset = locate->slot_offset;
3857 if (where_pad == downward)
3858 pad_below (&locate->offset, passed_mode, sizetree);
3859
3860 #else /* !ARGS_GROW_DOWNWARD */
3861 if (!in_regs || reg_parm_stack_space > 0)
3862 pad_to_arg_alignment (initial_offset_ptr, boundary,
3863 &locate->alignment_pad);
3864 locate->slot_offset = *initial_offset_ptr;
3865
3866 #ifdef PUSH_ROUNDING
3867 if (passed_mode != BLKmode)
3868 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3869 #endif
3870
3871 /* Pad_below needs the pre-rounded size to know how much to pad below
3872 so this must be done before rounding up. */
3873 locate->offset = locate->slot_offset;
3874 if (where_pad == downward)
3875 pad_below (&locate->offset, passed_mode, sizetree);
3876
3877 if (where_pad != none
3878 && (!tree_fits_uhwi_p (sizetree)
3879 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
3880 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
3881
3882 ADD_PARM_SIZE (locate->size, sizetree);
3883
3884 locate->size.constant -= part_size_in_regs;
3885 #endif /* ARGS_GROW_DOWNWARD */
3886
3887 #ifdef FUNCTION_ARG_OFFSET
3888 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3889 #endif
3890 }
3891
3892 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3893 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
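/* For example, with hypothetical values of a 64-bit BOUNDARY, a zero
   STACK_POINTER_OFFSET and a constant offset of 9 bytes, the offset is
   bumped to CEIL_ROUND (9, 8) == 16 (FLOOR_ROUND in the ARGS_GROW_DOWNWARD
   case), and the 7 bytes of padding are recorded in *ALIGNMENT_PAD when
   BOUNDARY exceeds PARM_BOUNDARY.  */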
3894
3895 static void
3896 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3897 struct args_size *alignment_pad)
3898 {
3899 tree save_var = NULL_TREE;
3900 HOST_WIDE_INT save_constant = 0;
3901 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3902 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3903
3904 #ifdef SPARC_STACK_BOUNDARY_HACK
3905 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3906 the real alignment of %sp. However, when it does this, the
3907 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3908 if (SPARC_STACK_BOUNDARY_HACK)
3909 sp_offset = 0;
3910 #endif
3911
3912 if (boundary > PARM_BOUNDARY)
3913 {
3914 save_var = offset_ptr->var;
3915 save_constant = offset_ptr->constant;
3916 }
3917
3918 alignment_pad->var = NULL_TREE;
3919 alignment_pad->constant = 0;
3920
3921 if (boundary > BITS_PER_UNIT)
3922 {
3923 if (offset_ptr->var)
3924 {
3925 tree sp_offset_tree = ssize_int (sp_offset);
3926 tree offset = size_binop (PLUS_EXPR,
3927 ARGS_SIZE_TREE (*offset_ptr),
3928 sp_offset_tree);
3929 #ifdef ARGS_GROW_DOWNWARD
3930 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3931 #else
3932 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3933 #endif
3934
3935 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3936 /* ARGS_SIZE_TREE includes constant term. */
3937 offset_ptr->constant = 0;
3938 if (boundary > PARM_BOUNDARY)
3939 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3940 save_var);
3941 }
3942 else
3943 {
3944 offset_ptr->constant = -sp_offset +
3945 #ifdef ARGS_GROW_DOWNWARD
3946 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3947 #else
3948 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3949 #endif
3950 if (boundary > PARM_BOUNDARY)
3951 alignment_pad->constant = offset_ptr->constant - save_constant;
3952 }
3953 }
3954 }
3955
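/* Advance *OFFSET_PTR past the padding that a downward-padded argument of
   mode PASSED_MODE (or, for BLKmode, of size SIZETREE) requires below it,
   i.e. by the difference between its size rounded up to PARM_BOUNDARY and
   its actual size.  For instance, a 2-byte HImode argument with a 32-bit
   PARM_BOUNDARY would have the offset advanced by 2 bytes.  */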
3956 static void
3957 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3958 {
3959 if (passed_mode != BLKmode)
3960 {
3961 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3962 offset_ptr->constant
3963 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3964 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3965 - GET_MODE_SIZE (passed_mode));
3966 }
3967 else
3968 {
3969 if (TREE_CODE (sizetree) != INTEGER_CST
3970 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3971 {
3972 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3973 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3974 /* Add it in. */
3975 ADD_PARM_SIZE (*offset_ptr, s2);
3976 SUB_PARM_SIZE (*offset_ptr, sizetree);
3977 }
3978 }
3979 }
3980 \f
3981
3982 /* True if register REGNO was alive at a place where `setjmp' was
3983 called and was set more than once or is an argument. Such regs may
3984 be clobbered by `longjmp'. */
3985
3986 static bool
3987 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3988 {
3989 /* There appear to be cases where some local vars never reach the
3990 backend but have bogus regnos. */
3991 if (regno >= max_reg_num ())
3992 return false;
3993
3994 return ((REG_N_SETS (regno) > 1
3995 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
3996 regno))
3997 && REGNO_REG_SET_P (setjmp_crosses, regno));
3998 }
3999
4000 /* Walk the tree of blocks describing the binding levels within a
4001 function and warn about variables that might be killed by setjmp or
4002 vfork. This is done after flow analysis and before register
4003 allocation, since register allocation will map the pseudo-regs to hard
4004 regs. */
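/* A rough illustration (BUF and G are hypothetical):

     int f (void)
     {
       int i = 0;
       if (setjmp (buf) == 0)
         {
           i = 1;
           g ();               -- may longjmp back to the setjmp
         }
       return i;
     }

   If I is kept in a call-saved register, the longjmp may restore that
   register to its value at the time of the setjmp, so the warning below
   flags I as possibly clobbered.  */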
4005
4006 static void
4007 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4008 {
4009 tree decl, sub;
4010
4011 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4012 {
4013 if (TREE_CODE (decl) == VAR_DECL
4014 && DECL_RTL_SET_P (decl)
4015 && REG_P (DECL_RTL (decl))
4016 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4017 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4018 " %<longjmp%> or %<vfork%>", decl);
4019 }
4020
4021 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4022 setjmp_vars_warning (setjmp_crosses, sub);
4023 }
4024
4025 /* Do the appropriate part of setjmp_vars_warning
4026 but for arguments instead of local variables. */
4027
4028 static void
4029 setjmp_args_warning (bitmap setjmp_crosses)
4030 {
4031 tree decl;
4032 for (decl = DECL_ARGUMENTS (current_function_decl);
4033 decl; decl = DECL_CHAIN (decl))
4034 if (DECL_RTL (decl) != 0
4035 && REG_P (DECL_RTL (decl))
4036 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4037 warning (OPT_Wclobbered,
4038 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4039 decl);
4040 }
4041
4042 /* Generate warning messages for variables live across setjmp. */
4043
4044 void
4045 generate_setjmp_warnings (void)
4046 {
4047 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4048
4049 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4050 || bitmap_empty_p (setjmp_crosses))
4051 return;
4052
4053 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4054 setjmp_args_warning (setjmp_crosses);
4055 }
4056
4057 \f
4058 /* Reverse the order of elements in the fragment chain T of blocks,
4059 and return the new head of the chain (old last element).
4060 In addition to that clear BLOCK_SAME_RANGE flags when needed
4061 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4062 its super fragment origin. */
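/* For instance, a fragment chain F1 -> F2 -> F3 (reached through
   BLOCK_FRAGMENT_CHAIN) becomes F3 -> F2 -> F1, with F3 returned as the
   new head; the BLOCK_SAME_RANGE and BLOCK_SUPERCONTEXT adjustments
   described above are applied to each fragment along the way.  */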
4063
4064 static tree
4065 block_fragments_nreverse (tree t)
4066 {
4067 tree prev = 0, block, next, prev_super = 0;
4068 tree super = BLOCK_SUPERCONTEXT (t);
4069 if (BLOCK_FRAGMENT_ORIGIN (super))
4070 super = BLOCK_FRAGMENT_ORIGIN (super);
4071 for (block = t; block; block = next)
4072 {
4073 next = BLOCK_FRAGMENT_CHAIN (block);
4074 BLOCK_FRAGMENT_CHAIN (block) = prev;
4075 if ((prev && !BLOCK_SAME_RANGE (prev))
4076 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4077 != prev_super))
4078 BLOCK_SAME_RANGE (block) = 0;
4079 prev_super = BLOCK_SUPERCONTEXT (block);
4080 BLOCK_SUPERCONTEXT (block) = super;
4081 prev = block;
4082 }
4083 t = BLOCK_FRAGMENT_ORIGIN (t);
4084 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4085 != prev_super)
4086 BLOCK_SAME_RANGE (t) = 0;
4087 BLOCK_SUPERCONTEXT (t) = super;
4088 return prev;
4089 }
4090
4091 /* Reverse the order of elements in the chain T of blocks,
4092 and return the new head of the chain (old last element).
4093 Also do the same on subblocks and reverse the order of elements
4094 in BLOCK_FRAGMENT_CHAIN as well. */
4095
4096 static tree
4097 blocks_nreverse_all (tree t)
4098 {
4099 tree prev = 0, block, next;
4100 for (block = t; block; block = next)
4101 {
4102 next = BLOCK_CHAIN (block);
4103 BLOCK_CHAIN (block) = prev;
4104 if (BLOCK_FRAGMENT_CHAIN (block)
4105 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4106 {
4107 BLOCK_FRAGMENT_CHAIN (block)
4108 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4109 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4110 BLOCK_SAME_RANGE (block) = 0;
4111 }
4112 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4113 prev = block;
4114 }
4115 return prev;
4116 }
4117
4118
4119 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4120 and create duplicate blocks. */
4121 /* ??? Need an option to either create block fragments or to create
4122 abstract origin duplicates of a source block. It really depends
4123 on what optimization has been performed. */
4124
4125 void
4126 reorder_blocks (void)
4127 {
4128 tree block = DECL_INITIAL (current_function_decl);
4129
4130 if (block == NULL_TREE)
4131 return;
4132
4133 auto_vec<tree, 10> block_stack;
4134
4135 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4136 clear_block_marks (block);
4137
4138 /* Prune the old trees away, so that they don't get in the way. */
4139 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4140 BLOCK_CHAIN (block) = NULL_TREE;
4141
4142 /* Recreate the block tree from the note nesting. */
4143 reorder_blocks_1 (get_insns (), block, &block_stack);
4144 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4145 }
4146
4147 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4148
4149 void
4150 clear_block_marks (tree block)
4151 {
4152 while (block)
4153 {
4154 TREE_ASM_WRITTEN (block) = 0;
4155 clear_block_marks (BLOCK_SUBBLOCKS (block));
4156 block = BLOCK_CHAIN (block);
4157 }
4158 }
4159
4160 static void
4161 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4162 vec<tree> *p_block_stack)
4163 {
4164 rtx_insn *insn;
4165 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4166
4167 for (insn = insns; insn; insn = NEXT_INSN (insn))
4168 {
4169 if (NOTE_P (insn))
4170 {
4171 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4172 {
4173 tree block = NOTE_BLOCK (insn);
4174 tree origin;
4175
4176 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4177 origin = block;
4178
4179 if (prev_end)
4180 BLOCK_SAME_RANGE (prev_end) = 0;
4181 prev_end = NULL_TREE;
4182
4183 /* If we have seen this block before, that means it now
4184 spans multiple address regions. Create a new fragment. */
4185 if (TREE_ASM_WRITTEN (block))
4186 {
4187 tree new_block = copy_node (block);
4188
4189 BLOCK_SAME_RANGE (new_block) = 0;
4190 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4191 BLOCK_FRAGMENT_CHAIN (new_block)
4192 = BLOCK_FRAGMENT_CHAIN (origin);
4193 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4194
4195 NOTE_BLOCK (insn) = new_block;
4196 block = new_block;
4197 }
4198
4199 if (prev_beg == current_block && prev_beg)
4200 BLOCK_SAME_RANGE (block) = 1;
4201
4202 prev_beg = origin;
4203
4204 BLOCK_SUBBLOCKS (block) = 0;
4205 TREE_ASM_WRITTEN (block) = 1;
4206 /* When there's only one block for the entire function,
4207 current_block == block and we mustn't do this; it
4208 would cause infinite recursion. */
4209 if (block != current_block)
4210 {
4211 tree super;
4212 if (block != origin)
4213 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4214 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4215 (origin))
4216 == current_block);
4217 if (p_block_stack->is_empty ())
4218 super = current_block;
4219 else
4220 {
4221 super = p_block_stack->last ();
4222 gcc_assert (super == current_block
4223 || BLOCK_FRAGMENT_ORIGIN (super)
4224 == current_block);
4225 }
4226 BLOCK_SUPERCONTEXT (block) = super;
4227 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4228 BLOCK_SUBBLOCKS (current_block) = block;
4229 current_block = origin;
4230 }
4231 p_block_stack->safe_push (block);
4232 }
4233 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4234 {
4235 NOTE_BLOCK (insn) = p_block_stack->pop ();
4236 current_block = BLOCK_SUPERCONTEXT (current_block);
4237 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4238 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4239 prev_beg = NULL_TREE;
4240 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4241 ? NOTE_BLOCK (insn) : NULL_TREE;
4242 }
4243 }
4244 else
4245 {
4246 prev_beg = NULL_TREE;
4247 if (prev_end)
4248 BLOCK_SAME_RANGE (prev_end) = 0;
4249 prev_end = NULL_TREE;
4250 }
4251 }
4252 }
4253
4254 /* Reverse the order of elements in the chain T of blocks,
4255 and return the new head of the chain (old last element). */
4256
4257 tree
4258 blocks_nreverse (tree t)
4259 {
4260 tree prev = 0, block, next;
4261 for (block = t; block; block = next)
4262 {
4263 next = BLOCK_CHAIN (block);
4264 BLOCK_CHAIN (block) = prev;
4265 prev = block;
4266 }
4267 return prev;
4268 }
4269
4270 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4271 by modifying the last node in chain 1 to point to chain 2. */
4272
4273 tree
4274 block_chainon (tree op1, tree op2)
4275 {
4276 tree t1;
4277
4278 if (!op1)
4279 return op2;
4280 if (!op2)
4281 return op1;
4282
4283 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4284 continue;
4285 BLOCK_CHAIN (t1) = op2;
4286
4287 #ifdef ENABLE_TREE_CHECKING
4288 {
4289 tree t2;
4290 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4291 gcc_assert (t2 != t1);
4292 }
4293 #endif
4294
4295 return op1;
4296 }
4297
4298 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4299 non-NULL, list them all into VECTOR, in a depth-first preorder
4300 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4301 blocks. */
4302
4303 static int
4304 all_blocks (tree block, tree *vector)
4305 {
4306 int n_blocks = 0;
4307
4308 while (block)
4309 {
4310 TREE_ASM_WRITTEN (block) = 0;
4311
4312 /* Record this block. */
4313 if (vector)
4314 vector[n_blocks] = block;
4315
4316 ++n_blocks;
4317
4318 /* Record the subblocks, and their subblocks... */
4319 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4320 vector ? vector + n_blocks : 0);
4321 block = BLOCK_CHAIN (block);
4322 }
4323
4324 return n_blocks;
4325 }
4326
4327 /* Return a vector containing all the blocks rooted at BLOCK. The
4328 number of elements in the vector is stored in N_BLOCKS_P. The
4329 vector is dynamically allocated; it is the caller's responsibility
4330 to call `free' on the pointer returned. */
4331
4332 static tree *
4333 get_block_vector (tree block, int *n_blocks_p)
4334 {
4335 tree *block_vector;
4336
4337 *n_blocks_p = all_blocks (block, NULL);
4338 block_vector = XNEWVEC (tree, *n_blocks_p);
4339 all_blocks (block, block_vector);
4340
4341 return block_vector;
4342 }
4343
4344 static GTY(()) int next_block_index = 2;
4345
4346 /* Set BLOCK_NUMBER for all the blocks in FN. */
4347
4348 void
4349 number_blocks (tree fn)
4350 {
4351 int i;
4352 int n_blocks;
4353 tree *block_vector;
4354
4355 /* For SDB and XCOFF debugging output, we start numbering the blocks
4356 from 1 within each function, rather than keeping a running
4357 count. */
4358 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4359 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4360 next_block_index = 1;
4361 #endif
4362
4363 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4364
4365 /* The top-level BLOCK isn't numbered at all. */
4366 for (i = 1; i < n_blocks; ++i)
4367 /* We number the blocks from two. */
4368 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4369
4370 free (block_vector);
4371
4372 return;
4373 }
4374
4375 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4376
4377 DEBUG_FUNCTION tree
4378 debug_find_var_in_block_tree (tree var, tree block)
4379 {
4380 tree t;
4381
4382 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4383 if (t == var)
4384 return block;
4385
4386 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4387 {
4388 tree ret = debug_find_var_in_block_tree (var, t);
4389 if (ret)
4390 return ret;
4391 }
4392
4393 return NULL_TREE;
4394 }
4395 \f
4396 /* Keep track of whether we're in a dummy function context. If we are,
4397 we don't want to invoke the set_current_function hook, because we'll
4398 get into trouble if the hook calls target_reinit () recursively or
4399 when the initial initialization is not yet complete. */
4400
4401 static bool in_dummy_function;
4402
4403 /* Invoke the target hook when setting cfun. Update the optimization options
4404 if the function uses different options than the default. */
4405
4406 static void
4407 invoke_set_current_function_hook (tree fndecl)
4408 {
4409 if (!in_dummy_function)
4410 {
4411 tree opts = ((fndecl)
4412 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4413 : optimization_default_node);
4414
4415 if (!opts)
4416 opts = optimization_default_node;
4417
4418 /* Change optimization options if needed. */
4419 if (optimization_current_node != opts)
4420 {
4421 optimization_current_node = opts;
4422 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4423 }
4424
4425 targetm.set_current_function (fndecl);
4426 this_fn_optabs = this_target_optabs;
4427
4428 if (opts != optimization_default_node)
4429 {
4430 init_tree_optimization_optabs (opts);
4431 if (TREE_OPTIMIZATION_OPTABS (opts))
4432 this_fn_optabs = (struct target_optabs *)
4433 TREE_OPTIMIZATION_OPTABS (opts);
4434 }
4435 }
4436 }
4437
4438 /* cfun should never be set directly; use this function. */
4439
4440 void
4441 set_cfun (struct function *new_cfun)
4442 {
4443 if (cfun != new_cfun)
4444 {
4445 cfun = new_cfun;
4446 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4447 }
4448 }
4449
4450 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4451
4452 static vec<function_p> cfun_stack;
4453
4454 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4455 current_function_decl accordingly. */
4456
4457 void
4458 push_cfun (struct function *new_cfun)
4459 {
4460 gcc_assert ((!cfun && !current_function_decl)
4461 || (cfun && current_function_decl == cfun->decl));
4462 cfun_stack.safe_push (cfun);
4463 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4464 set_cfun (new_cfun);
4465 }
4466
4467 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4468
4469 void
4470 pop_cfun (void)
4471 {
4472 struct function *new_cfun = cfun_stack.pop ();
4473 /* When in_dummy_function, we do have a cfun but current_function_decl is
4474 NULL. We also allow pushing NULL cfun and subsequently changing
4475 current_function_decl to something else and have both restored by
4476 pop_cfun. */
4477 gcc_checking_assert (in_dummy_function
4478 || !cfun
4479 || current_function_decl == cfun->decl);
4480 set_cfun (new_cfun);
4481 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4482 }
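/* A typical usage pattern elsewhere in the compiler looks roughly like

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... examine or modify that function ...
     pop_cfun ();

   which keeps cfun and current_function_decl consistent over the nested
   region (FNDECL here is just a placeholder).  */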
4483
4484 /* Return the current value of funcdef_no and increment it. */
4485 int
4486 get_next_funcdef_no (void)
4487 {
4488 return funcdef_no++;
4489 }
4490
4491 /* Return the current value of funcdef_no. */
4492 int
4493 get_last_funcdef_no (void)
4494 {
4495 return funcdef_no;
4496 }
4497
4498 /* Allocate a function structure for FNDECL and set its contents
4499 to the defaults. Set cfun to the newly-allocated object.
4500 Some of the helper functions invoked during initialization assume
4501 that cfun has already been set. Therefore, assign the new object
4502 directly into cfun and invoke the back end hook explicitly at the
4503 very end, rather than initializing a temporary and calling set_cfun
4504 on it.
4505
4506 ABSTRACT_P is true if this is a function that will never be seen by
4507 the middle-end. Such functions are front-end concepts (like C++
4508 function templates) that do not correspond directly to functions
4509 placed in object files. */
4510
4511 void
4512 allocate_struct_function (tree fndecl, bool abstract_p)
4513 {
4514 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4515
4516 cfun = ggc_cleared_alloc<function> ();
4517
4518 init_eh_for_function ();
4519
4520 if (init_machine_status)
4521 cfun->machine = (*init_machine_status) ();
4522
4523 #ifdef OVERRIDE_ABI_FORMAT
4524 OVERRIDE_ABI_FORMAT (fndecl);
4525 #endif
4526
4527 if (fndecl != NULL_TREE)
4528 {
4529 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4530 cfun->decl = fndecl;
4531 current_function_funcdef_no = get_next_funcdef_no ();
4532 }
4533
4534 invoke_set_current_function_hook (fndecl);
4535
4536 if (fndecl != NULL_TREE)
4537 {
4538 tree result = DECL_RESULT (fndecl);
4539 if (!abstract_p && aggregate_value_p (result, fndecl))
4540 {
4541 #ifdef PCC_STATIC_STRUCT_RETURN
4542 cfun->returns_pcc_struct = 1;
4543 #endif
4544 cfun->returns_struct = 1;
4545 }
4546
4547 cfun->stdarg = stdarg_p (fntype);
4548
4549 /* Assume all registers in stdarg functions need to be saved. */
4550 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4551 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4552
4553 /* ??? This could be set on a per-function basis by the front-end
4554 but is this worth the hassle? */
4555 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4556 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4557 }
4558 }
4559
4560 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4561 instead of just setting it. */
4562
4563 void
4564 push_struct_function (tree fndecl)
4565 {
4566 /* When in_dummy_function we might be in the middle of a pop_cfun and
4567 current_function_decl and cfun may not match. */
4568 gcc_assert (in_dummy_function
4569 || (!cfun && !current_function_decl)
4570 || (cfun && current_function_decl == cfun->decl));
4571 cfun_stack.safe_push (cfun);
4572 current_function_decl = fndecl;
4573 allocate_struct_function (fndecl, false);
4574 }
4575
4576 /* Reset crtl and other non-struct-function variables to defaults as
4577 appropriate for emitting rtl at the start of a function. */
4578
4579 static void
4580 prepare_function_start (void)
4581 {
4582 gcc_assert (!crtl->emit.x_last_insn);
4583 init_temp_slots ();
4584 init_emit ();
4585 init_varasm_status ();
4586 init_expr ();
4587 default_rtl_profile ();
4588
4589 if (flag_stack_usage_info)
4590 {
4591 cfun->su = ggc_cleared_alloc<stack_usage> ();
4592 cfun->su->static_stack_size = -1;
4593 }
4594
4595 cse_not_expected = ! optimize;
4596
4597 /* Caller save not needed yet. */
4598 caller_save_needed = 0;
4599
4600 /* We haven't done register allocation yet. */
4601 reg_renumber = 0;
4602
4603 /* Indicate that we have not instantiated virtual registers yet. */
4604 virtuals_instantiated = 0;
4605
4606 /* Indicate that we want CONCATs now. */
4607 generating_concat_p = 1;
4608
4609 /* Indicate we have no need of a frame pointer yet. */
4610 frame_pointer_needed = 0;
4611 }
4612
4613 /* Initialize the rtl expansion mechanism so that we can do simple things
4614 like generate sequences. This is used to provide a context during global
4615 initialization of some passes. You must call expand_dummy_function_end
4616 to exit this context. */
4617
4618 void
4619 init_dummy_function_start (void)
4620 {
4621 gcc_assert (!in_dummy_function);
4622 in_dummy_function = true;
4623 push_struct_function (NULL_TREE);
4624 prepare_function_start ();
4625 }
4626
4627 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4628 and initialize static variables for generating RTL for the statements
4629 of the function. */
4630
4631 void
4632 init_function_start (tree subr)
4633 {
4634 if (subr && DECL_STRUCT_FUNCTION (subr))
4635 set_cfun (DECL_STRUCT_FUNCTION (subr));
4636 else
4637 allocate_struct_function (subr, false);
4638
4639 /* Initialize backend, if needed. */
4640 initialize_rtl ();
4641
4642 prepare_function_start ();
4643 decide_function_section (subr);
4644
4645 /* Warn if the return value is an aggregate type,
4646 regardless of which calling convention we are using for it. */
4647 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4648 warning (OPT_Waggregate_return, "function returns an aggregate");
4649 }
4650
4651 /* Expand code to verify the stack_protect_guard. This is invoked at
4652 the end of a function to be protected. */
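/* Conceptually the emitted RTL behaves like

     if (<guard slot in this frame> == <global guard>)
       goto ok;
     <stack_protect_fail> ();   -- typically a call to __stack_chk_fail
   ok:

   where the comparison comes from the target's stack_protect_test pattern
   when one is provided, and from emit_cmp_and_jump_insns otherwise.  */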
4653
4654 #ifndef HAVE_stack_protect_test
4655 # define HAVE_stack_protect_test 0
4656 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4657 #endif
4658
4659 void
4660 stack_protect_epilogue (void)
4661 {
4662 tree guard_decl = targetm.stack_protect_guard ();
4663 rtx label = gen_label_rtx ();
4664 rtx x, y, tmp;
4665
4666 x = expand_normal (crtl->stack_protect_guard);
4667 y = expand_normal (guard_decl);
4668
4669 /* Allow the target to compare Y with X without leaking either into
4670 a register. */
4671 switch ((int) (HAVE_stack_protect_test != 0))
4672 {
4673 case 1:
4674 tmp = gen_stack_protect_test (x, y, label);
4675 if (tmp)
4676 {
4677 emit_insn (tmp);
4678 break;
4679 }
4680 /* FALLTHRU */
4681
4682 default:
4683 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4684 break;
4685 }
4686
4687 /* The noreturn predictor has been moved to the tree level. The rtl-level
4688 predictors estimate this branch at about 20%, which isn't enough to get
4689 things moved out of line. Since this is the only extant case of adding
4690 a noreturn function at the rtl level, it doesn't seem worth doing anything
4691 except adding the prediction by hand. */
4692 tmp = get_last_insn ();
4693 if (JUMP_P (tmp))
4694 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
4695
4696 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4697 free_temp_slots ();
4698 emit_label (label);
4699 }
4700 \f
4701 /* Start the RTL for a new function, and set variables used for
4702 emitting RTL.
4703 SUBR is the FUNCTION_DECL node. */
4706
4707 void
4708 expand_function_start (tree subr)
4709 {
4710 /* Make sure volatile mem refs aren't considered
4711 valid operands of arithmetic insns. */
4712 init_recog_no_volatile ();
4713
4714 crtl->profile
4715 = (profile_flag
4716 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4717
4718 crtl->limit_stack
4719 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4720
4721 /* Make the label for return statements to jump to. Do not special
4722 case machines with special return instructions -- they will be
4723 handled later during jump, ifcvt, or epilogue creation. */
4724 return_label = gen_label_rtx ();
4725
4726 /* Initialize rtx used to return the value. */
4727 /* Do this before assign_parms so that we copy the struct value address
4728 before any library calls that assign parms might generate. */
4729
4730 /* Decide whether to return the value in memory or in a register. */
4731 if (aggregate_value_p (DECL_RESULT (subr), subr))
4732 {
4733 /* Returning something that won't go in a register. */
4734 rtx value_address = 0;
4735
4736 #ifdef PCC_STATIC_STRUCT_RETURN
4737 if (cfun->returns_pcc_struct)
4738 {
4739 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4740 value_address = assemble_static_space (size);
4741 }
4742 else
4743 #endif
4744 {
4745 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4746 /* Expect to be passed the address of a place to store the value.
4747 If it is passed as an argument, assign_parms will take care of
4748 it. */
4749 if (sv)
4750 {
4751 value_address = gen_reg_rtx (Pmode);
4752 emit_move_insn (value_address, sv);
4753 }
4754 }
4755 if (value_address)
4756 {
4757 rtx x = value_address;
4758 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4759 {
4760 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4761 set_mem_attributes (x, DECL_RESULT (subr), 1);
4762 }
4763 SET_DECL_RTL (DECL_RESULT (subr), x);
4764 }
4765 }
4766 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4767 /* If return mode is void, this decl rtl should not be used. */
4768 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4769 else
4770 {
4771 /* Compute the return value into a pseudo reg, which we will copy
4772 into the true return register after the cleanups are done. */
4773 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4774 if (TYPE_MODE (return_type) != BLKmode
4775 && targetm.calls.return_in_msb (return_type))
4776 /* expand_function_end will insert the appropriate padding in
4777 this case. Use the return value's natural (unpadded) mode
4778 within the function proper. */
4779 SET_DECL_RTL (DECL_RESULT (subr),
4780 gen_reg_rtx (TYPE_MODE (return_type)));
4781 else
4782 {
4783 /* In order to figure out what mode to use for the pseudo, we
4784 figure out what the mode of the eventual return register will
4785 actually be, and use that. */
4786 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4787
4788 /* Structures that are returned in registers are not
4789 aggregate_value_p, so we may see a PARALLEL or a REG. */
4790 if (REG_P (hard_reg))
4791 SET_DECL_RTL (DECL_RESULT (subr),
4792 gen_reg_rtx (GET_MODE (hard_reg)));
4793 else
4794 {
4795 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4796 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4797 }
4798 }
4799
4800 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4801 result to the real return register(s). */
4802 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4803 }
4804
4805 /* Initialize rtx for parameters and local variables.
4806 In some cases this requires emitting insns. */
4807 assign_parms (subr);
4808
4809 /* If function gets a static chain arg, store it. */
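  /* (The static chain is how a nested function, or a construct lowered to
     one, reaches its enclosing frame; targetm.calls.static_chain says
     where the caller passes it, typically a fixed register or an incoming
     stack slot.)  */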
4810 if (cfun->static_chain_decl)
4811 {
4812 tree parm = cfun->static_chain_decl;
4813 rtx local, chain, insn;
4814
4815 local = gen_reg_rtx (Pmode);
4816 chain = targetm.calls.static_chain (current_function_decl, true);
4817
4818 set_decl_incoming_rtl (parm, chain, false);
4819 SET_DECL_RTL (parm, local);
4820 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4821
4822 insn = emit_move_insn (local, chain);
4823
4824 /* Mark the register as eliminable, similar to parameters. */
4825 if (MEM_P (chain)
4826 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4827 set_dst_reg_note (insn, REG_EQUIV, chain, local);
4828
4829 /* If we aren't optimizing, save the static chain onto the stack. */
4830 if (!optimize)
4831 {
4832 tree saved_static_chain_decl
4833 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
4834 DECL_NAME (parm), TREE_TYPE (parm));
4835 rtx saved_static_chain_rtx
4836 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4837 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
4838 emit_move_insn (saved_static_chain_rtx, chain);
4839 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
4840 DECL_HAS_VALUE_EXPR_P (parm) = 1;
4841 }
4842 }
4843
4844 /* If the function receives a non-local goto, then store the
4845 bits we need to restore the frame pointer. */
4846 if (cfun->nonlocal_goto_save_area)
4847 {
4848 tree t_save;
4849 rtx r_save;
4850
4851 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4852 gcc_assert (DECL_RTL_SET_P (var));
4853
4854 t_save = build4 (ARRAY_REF,
4855 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
4856 cfun->nonlocal_goto_save_area,
4857 integer_zero_node, NULL_TREE, NULL_TREE);
4858 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4859 gcc_assert (GET_MODE (r_save) == Pmode);
4860
4861 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4862 update_nonlocal_goto_save_area ();
4863 }
4864
4865 /* The following was moved from init_function_start.
4866 The move is supposed to make sdb output more accurate. */
4867 /* Indicate the beginning of the function body,
4868 as opposed to parm setup. */
4869 emit_note (NOTE_INSN_FUNCTION_BEG);
4870
4871 gcc_assert (NOTE_P (get_last_insn ()));
4872
4873 parm_birth_insn = get_last_insn ();
4874
4875 if (crtl->profile)
4876 {
4877 #ifdef PROFILE_HOOK
4878 PROFILE_HOOK (current_function_funcdef_no);
4879 #endif
4880 }
4881
4882 /* If we are doing generic stack checking, the probe should go here. */
4883 if (flag_stack_check == GENERIC_STACK_CHECK)
4884 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4885 }
4886 \f
4887 /* Undo the effects of init_dummy_function_start. */
4888 void
4889 expand_dummy_function_end (void)
4890 {
4891 gcc_assert (in_dummy_function);
4892
4893 /* End any sequences that failed to be closed due to syntax errors. */
4894 while (in_sequence_p ())
4895 end_sequence ();
4896
4897 /* Outside a function body, we can't compute a type's actual size
4898 until the next function's body starts. */
4899
4900 free_after_parsing (cfun);
4901 free_after_compilation (cfun);
4902 pop_cfun ();
4903 in_dummy_function = false;
4904 }
4905
4906 /* Call DOIT for each hard register used as a return value from
4907 the current function. */
4908
4909 void
4910 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4911 {
4912 rtx outgoing = crtl->return_rtx;
4913
4914 if (! outgoing)
4915 return;
4916
4917 if (REG_P (outgoing))
4918 (*doit) (outgoing, arg);
4919 else if (GET_CODE (outgoing) == PARALLEL)
4920 {
4921 int i;
4922
4923 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4924 {
4925 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4926
4927 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4928 (*doit) (x, arg);
4929 }
4930 }
4931 }
4932
4933 static void
4934 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4935 {
4936 emit_clobber (reg);
4937 }
4938
4939 void
4940 clobber_return_register (void)
4941 {
4942 diddle_return_value (do_clobber_return_reg, NULL);
4943
4944 /* In case we use a pseudo to return the value, clobber it too. */
4945 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4946 {
4947 tree decl_result = DECL_RESULT (current_function_decl);
4948 rtx decl_rtl = DECL_RTL (decl_result);
4949 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4950 {
4951 do_clobber_return_reg (decl_rtl, NULL);
4952 }
4953 }
4954 }
4955
4956 static void
4957 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4958 {
4959 emit_use (reg);
4960 }
4961
4962 static void
4963 use_return_register (void)
4964 {
4965 diddle_return_value (do_use_return_reg, NULL);
4966 }
4967
4968 /* Possibly warn about unused parameters. */
4969 void
4970 do_warn_unused_parameter (tree fn)
4971 {
4972 tree decl;
4973
4974 for (decl = DECL_ARGUMENTS (fn);
4975 decl; decl = DECL_CHAIN (decl))
4976 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4977 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4978 && !TREE_NO_WARNING (decl))
4979 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4980 }
4981
4982 /* Set the location of the insn chain starting at INSN to LOC. */
4983
4984 static void
4985 set_insn_locations (rtx insn, int loc)
4986 {
4987 while (insn != NULL_RTX)
4988 {
4989 if (INSN_P (insn))
4990 INSN_LOCATION (insn) = loc;
4991 insn = NEXT_INSN (insn);
4992 }
4993 }
4994
4995 /* Generate RTL for the end of the current function. */
4996
4997 void
4998 expand_function_end (void)
4999 {
5000 rtx clobber_after;
5001
5002 /* If arg_pointer_save_area was referenced only from a nested
5003 function, we will not have initialized it yet. Do that now. */
5004 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5005 get_arg_pointer_save_area ();
5006
5007 /* If we are doing generic stack checking and this function makes calls,
5008 do a stack probe at the start of the function to ensure we have enough
5009 space for another stack frame. */
5010 if (flag_stack_check == GENERIC_STACK_CHECK)
5011 {
5012 rtx_insn *insn, *seq;
5013
5014 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5015 if (CALL_P (insn))
5016 {
5017 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5018 start_sequence ();
5019 if (STACK_CHECK_MOVING_SP)
5020 anti_adjust_stack_and_probe (max_frame_size, true);
5021 else
5022 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5023 seq = get_insns ();
5024 end_sequence ();
5025 set_insn_locations (seq, prologue_location);
5026 emit_insn_before (seq, stack_check_probe_note);
5027 break;
5028 }
5029 }
5030
5031 /* End any sequences that failed to be closed due to syntax errors. */
5032 while (in_sequence_p ())
5033 end_sequence ();
5034
5035 clear_pending_stack_adjust ();
5036 do_pending_stack_adjust ();
5037
5038 /* Output a line number for the end of the function.
5039 SDB depends on this. */
5040 set_curr_insn_location (input_location);
5041
5042 /* Before the return label (if any), clobber the return
5043 registers so that they are not propagated live to the rest of
5044 the function. This can only happen with functions that drop
5045 through; if there had been a return statement, there would
5046 have either been a return rtx, or a jump to the return label.
5047
5048 We delay the actual code generation until after current_function_value_rtx
5049 is computed. */
5050 clobber_after = get_last_insn ();
5051
5052 /* Output the label for the actual return from the function. */
5053 emit_label (return_label);
5054
5055 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5056 {
5057 /* Let except.c know where it should emit the call to unregister
5058 the function context for sjlj exceptions. */
5059 if (flag_exceptions)
5060 sjlj_emit_function_exit_after (get_last_insn ());
5061 }
5062 else
5063 {
5064 /* We want to ensure that instructions that may trap are not
5065 moved into the epilogue by scheduling, because we don't
5066 always emit unwind information for the epilogue. */
5067 if (cfun->can_throw_non_call_exceptions)
5068 emit_insn (gen_blockage ());
5069 }
5070
5071 /* If this is an implementation of throw, do what's necessary to
5072 communicate between __builtin_eh_return and the epilogue. */
5073 expand_eh_return ();
5074
5075 /* If scalar return value was computed in a pseudo-reg, or was a named
5076 return value that got dumped to the stack, copy that to the hard
5077 return register. */
5078 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5079 {
5080 tree decl_result = DECL_RESULT (current_function_decl);
5081 rtx decl_rtl = DECL_RTL (decl_result);
5082
5083 if (REG_P (decl_rtl)
5084 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5085 : DECL_REGISTER (decl_result))
5086 {
5087 rtx real_decl_rtl = crtl->return_rtx;
5088
5089 /* This should be set in assign_parms. */
5090 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5091
5092 /* If this is a BLKmode structure being returned in registers,
5093 then use the mode computed in expand_return. Note that if
5094 decl_rtl is memory, then its mode may have been changed,
5095 but that crtl->return_rtx has not. */
5096 if (GET_MODE (real_decl_rtl) == BLKmode)
5097 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5098
5099 /* If a non-BLKmode return value should be padded at the least
5100 significant end of the register, shift it left by the appropriate
5101 amount. BLKmode results are handled using the group load/store
5102 machinery. */
5103 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5104 && REG_P (real_decl_rtl)
5105 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5106 {
5107 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5108 REGNO (real_decl_rtl)),
5109 decl_rtl);
5110 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5111 }
5112 /* If a named return value dumped decl_result to memory, then
5113 we may need to re-do the PROMOTE_MODE signed/unsigned
5114 extension. */
5115 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5116 {
5117 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5118 promote_function_mode (TREE_TYPE (decl_result),
5119 GET_MODE (decl_rtl), &unsignedp,
5120 TREE_TYPE (current_function_decl), 1);
5121
5122 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5123 }
5124 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5125 {
5126 /* If expand_function_start has created a PARALLEL for decl_rtl,
5127 move the result to the real return registers. Otherwise, do
5128 a group load from decl_rtl for a named return. */
5129 if (GET_CODE (decl_rtl) == PARALLEL)
5130 emit_group_move (real_decl_rtl, decl_rtl);
5131 else
5132 emit_group_load (real_decl_rtl, decl_rtl,
5133 TREE_TYPE (decl_result),
5134 int_size_in_bytes (TREE_TYPE (decl_result)));
5135 }
5136 /* In the case of complex integer modes smaller than a word, we'll
5137 need to generate some non-trivial bitfield insertions. Do that
5138 on a pseudo and not the hard register. */
5139 else if (GET_CODE (decl_rtl) == CONCAT
5140 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5141 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5142 {
5143 int old_generating_concat_p;
5144 rtx tmp;
5145
5146 old_generating_concat_p = generating_concat_p;
5147 generating_concat_p = 0;
5148 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5149 generating_concat_p = old_generating_concat_p;
5150
5151 emit_move_insn (tmp, decl_rtl);
5152 emit_move_insn (real_decl_rtl, tmp);
5153 }
5154 else
5155 emit_move_insn (real_decl_rtl, decl_rtl);
5156 }
5157 }
5158
5159 /* If returning a structure, arrange to return the address of the value
5160 in a place where debuggers expect to find it.
5161
5162 If returning a structure PCC style,
5163 the caller also depends on this value.
5164 And cfun->returns_pcc_struct is not necessarily set. */
5165 if (cfun->returns_struct
5166 || cfun->returns_pcc_struct)
5167 {
5168 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5169 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5170 rtx outgoing;
5171
5172 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5173 type = TREE_TYPE (type);
5174 else
5175 value_address = XEXP (value_address, 0);
5176
5177 outgoing = targetm.calls.function_value (build_pointer_type (type),
5178 current_function_decl, true);
5179
5180 /* Mark this as a function return value so integrate will delete the
5181 assignment and USE below when inlining this function. */
5182 REG_FUNCTION_VALUE_P (outgoing) = 1;
5183
5184 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5185 value_address = convert_memory_address (GET_MODE (outgoing),
5186 value_address);
5187
5188 emit_move_insn (outgoing, value_address);
5189
5190 /* Show return register used to hold result (in this case the address
5191 of the result). */
5192 crtl->return_rtx = outgoing;
5193 }
5194
5195 /* Emit the actual code to clobber the return register. Don't emit
5196 it if clobber_after is a barrier; in that case the previous basic block
5197 certainly doesn't fall through into the exit block. */
5198 if (!BARRIER_P (clobber_after))
5199 {
5200 rtx seq;
5201
5202 start_sequence ();
5203 clobber_return_register ();
5204 seq = get_insns ();
5205 end_sequence ();
5206
5207 emit_insn_after (seq, clobber_after);
5208 }
5209
5210 /* Output the label for the naked return from the function. */
5211 if (naked_return_label)
5212 emit_label (naked_return_label);
5213
5214 /* @@@ This is a kludge. We want to ensure that instructions that
5215 may trap are not moved into the epilogue by scheduling, because
5216 we don't always emit unwind information for the epilogue. */
5217 if (cfun->can_throw_non_call_exceptions
5218 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5219 emit_insn (gen_blockage ());
5220
5221 /* If stack protection is enabled for this function, check the guard. */
5222 if (crtl->stack_protect_guard)
5223 stack_protect_epilogue ();
5224
5225 /* If we had calls to alloca, and this machine needs
5226 an accurate stack pointer to exit the function,
5227 insert some code to save and restore the stack pointer. */
5228 if (! EXIT_IGNORE_STACK
5229 && cfun->calls_alloca)
5230 {
5231 rtx tem = 0, seq;
5232
5233 start_sequence ();
5234 emit_stack_save (SAVE_FUNCTION, &tem);
5235 seq = get_insns ();
5236 end_sequence ();
5237 emit_insn_before (seq, parm_birth_insn);
5238
5239 emit_stack_restore (SAVE_FUNCTION, tem);
5240 }
5241
5242 /* ??? This should no longer be necessary, since the old "stupid" register
5243 allocator is no longer with us, but there are some parts of the compiler
5244 (e.g. reload_combine, and sh mach_dep_reorg) that still try to compute
5245 their own lifetime info instead of using the general framework. */
5246 use_return_register ();
5247 }
5248
5249 rtx
5250 get_arg_pointer_save_area (void)
5251 {
5252 rtx ret = arg_pointer_save_area;
5253
5254 if (! ret)
5255 {
5256 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5257 arg_pointer_save_area = ret;
5258 }
5259
5260 if (! crtl->arg_pointer_save_area_init)
5261 {
5262 rtx seq;
5263
5264 /* Save the arg pointer at the beginning of the function. The
5265 generated stack slot may not be a valid memory address, so we
5266 have to check it and fix it if necessary. */
5267 start_sequence ();
5268 emit_move_insn (validize_mem (copy_rtx (ret)),
5269 crtl->args.internal_arg_pointer);
5270 seq = get_insns ();
5271 end_sequence ();
5272
5273 push_topmost_sequence ();
5274 emit_insn_after (seq, entry_of_function ());
5275 pop_topmost_sequence ();
5276
5277 crtl->arg_pointer_save_area_init = true;
5278 }
5279
5280 return ret;
5281 }
5282 \f
5283 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5284 for the first time. */
5285
5286 static void
5287 record_insns (rtx insns, rtx end, htab_t *hashp)
5288 {
5289 rtx tmp;
5290 htab_t hash = *hashp;
5291
5292 if (hash == NULL)
5293 *hashp = hash
5294 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5295
5296 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5297 {
5298 void **slot = htab_find_slot (hash, tmp, INSERT);
5299 gcc_assert (*slot == NULL);
5300 *slot = tmp;
5301 }
5302 }
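/* Usage sketch (illustrative): after generating a prologue or epilogue
   sequence SEQ, callers in this file do
   record_insns (seq, NULL, &prologue_insn_hash);
   so that contains () and prologue_epilogue_contains () below can later
   recognize those insns when other code must treat prologue or epilogue
   insns specially. */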
5303
5304 /* INSN has been duplicated as, or replaced by, COPY, perhaps when duplicating
5305 a basic block, splitting, or running peepholes. If INSN is a prologue or
5306 epilogue insn, then record COPY as well. */
5307
5308 void
5309 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5310 {
5311 htab_t hash;
5312 void **slot;
5313
5314 hash = epilogue_insn_hash;
5315 if (!hash || !htab_find (hash, insn))
5316 {
5317 hash = prologue_insn_hash;
5318 if (!hash || !htab_find (hash, insn))
5319 return;
5320 }
5321
5322 slot = htab_find_slot (hash, copy, INSERT);
5323 gcc_assert (*slot == NULL);
5324 *slot = copy;
5325 }
5326
5327 /* Determine whether INSN, or any insn contained in it, is recorded in
5328 HASH. Because we can be running after reorg, SEQUENCE rtl is possible. */
5329
5330 static bool
5331 contains (const_rtx insn, htab_t hash)
5332 {
5333 if (hash == NULL)
5334 return false;
5335
5336 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5337 {
5338 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5339 int i;
5340 for (i = seq->len () - 1; i >= 0; i--)
5341 if (htab_find (hash, seq->element (i)))
5342 return true;
5343 return false;
5344 }
5345
5346 return htab_find (hash, insn) != NULL;
5347 }
5348
5349 int
5350 prologue_epilogue_contains (const_rtx insn)
5351 {
5352 if (contains (insn, prologue_insn_hash))
5353 return 1;
5354 if (contains (insn, epilogue_insn_hash))
5355 return 1;
5356 return 0;
5357 }
5358
5359 #ifdef HAVE_return
5360 /* Insert use of return register before the end of BB. */
5361
5362 static void
5363 emit_use_return_register_into_block (basic_block bb)
5364 {
5365 rtx seq, insn;
5366 start_sequence ();
5367 use_return_register ();
5368 seq = get_insns ();
5369 end_sequence ();
5370 insn = BB_END (bb);
5371 #ifdef HAVE_cc0
5372 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5373 insn = prev_cc0_setter (insn);
5374 #endif
5375 emit_insn_before (seq, insn);
5376 }
5377
5378
5379 /* Create a return pattern, either simple_return or return, depending on
5380 simple_p. */
5381
5382 static rtx
5383 gen_return_pattern (bool simple_p)
5384 {
5385 #ifdef HAVE_simple_return
5386 return simple_p ? gen_simple_return () : gen_return ();
5387 #else
5388 gcc_assert (!simple_p);
5389 return gen_return ();
5390 #endif
5391 }
5392
5393 /* Insert an appropriate return pattern at the end of block BB. This
5394 also means updating block_for_insn appropriately. SIMPLE_P is
5395 the same as in gen_return_pattern and passed to it. */
5396
5397 void
5398 emit_return_into_block (bool simple_p, basic_block bb)
5399 {
5400 rtx jump, pat;
5401 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5402 pat = PATTERN (jump);
5403 if (GET_CODE (pat) == PARALLEL)
5404 pat = XVECEXP (pat, 0, 0);
5405 gcc_assert (ANY_RETURN_P (pat));
5406 JUMP_LABEL (jump) = pat;
5407 }
5408 #endif
5409
5410 /* Set JUMP_LABEL for a return insn. */
5411
5412 void
5413 set_return_jump_label (rtx returnjump)
5414 {
5415 rtx pat = PATTERN (returnjump);
5416 if (GET_CODE (pat) == PARALLEL)
5417 pat = XVECEXP (pat, 0, 0);
5418 if (ANY_RETURN_P (pat))
5419 JUMP_LABEL (returnjump) = pat;
5420 else
5421 JUMP_LABEL (returnjump) = ret_rtx;
5422 }
5423
5424 #if defined (HAVE_return) || defined (HAVE_simple_return)
5425 /* Return true if there are any active insns between HEAD and TAIL. */
5426 bool
5427 active_insn_between (rtx head, rtx tail)
5428 {
5429 while (tail)
5430 {
5431 if (active_insn_p (tail))
5432 return true;
5433 if (tail == head)
5434 return false;
5435 tail = PREV_INSN (tail);
5436 }
5437 return false;
5438 }
5439
5440 /* LAST_BB is a block that exits and is empty of active instructions.
5441 Examine its predecessors for jumps that can be converted to
5442 (conditional) returns. */
5443 vec<edge>
5444 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5445 vec<edge> unconverted ATTRIBUTE_UNUSED)
5446 {
5447 int i;
5448 basic_block bb;
5449 rtx label;
5450 edge_iterator ei;
5451 edge e;
5452 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
5453
5454 FOR_EACH_EDGE (e, ei, last_bb->preds)
5455 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5456 src_bbs.quick_push (e->src);
5457
5458 label = BB_HEAD (last_bb);
5459
5460 FOR_EACH_VEC_ELT (src_bbs, i, bb)
5461 {
5462 rtx jump = BB_END (bb);
5463
5464 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5465 continue;
5466
5467 e = find_edge (bb, last_bb);
5468
5469 /* If we have an unconditional jump, we can replace that
5470 with a simple return instruction. */
5471 if (simplejump_p (jump))
5472 {
5473 /* The use of the return register might be present in the exit
5474 fallthru block. Either:
5475 - removing the use is safe, and we should remove the use in
5476 the exit fallthru block, or
5477 - removing the use is not safe, and we should add it here.
5478 For now, we conservatively choose the latter. Either choice
5479 helps crossjumping.
5480 emit_use_return_register_into_block (bb);
5481
5482 emit_return_into_block (simple_p, bb);
5483 delete_insn (jump);
5484 }
5485
5486 /* If we have a conditional jump branching to the last
5487 block, we can try to replace that with a conditional
5488 return instruction. */
5489 else if (condjump_p (jump))
5490 {
5491 rtx dest;
5492
5493 if (simple_p)
5494 dest = simple_return_rtx;
5495 else
5496 dest = ret_rtx;
5497 if (!redirect_jump (jump, dest, 0))
5498 {
5499 #ifdef HAVE_simple_return
5500 if (simple_p)
5501 {
5502 if (dump_file)
5503 fprintf (dump_file,
5504 "Failed to redirect bb %d branch.\n", bb->index);
5505 unconverted.safe_push (e);
5506 }
5507 #endif
5508 continue;
5509 }
5510
5511 /* See comment in simplejump_p case above. */
5512 emit_use_return_register_into_block (bb);
5513
5514 /* If this block has only one successor, it both jumps
5515 and falls through to the fallthru block, so we can't
5516 delete the edge. */
5517 if (single_succ_p (bb))
5518 continue;
5519 }
5520 else
5521 {
5522 #ifdef HAVE_simple_return
5523 if (simple_p)
5524 {
5525 if (dump_file)
5526 fprintf (dump_file,
5527 "Failed to redirect bb %d branch.\n", bb->index);
5528 unconverted.safe_push (e);
5529 }
5530 #endif
5531 continue;
5532 }
5533
5534 /* Fix up the CFG for the successful change we just made. */
5535 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
5536 e->flags &= ~EDGE_CROSSING;
5537 }
5538 src_bbs.release ();
5539 return unconverted;
5540 }
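/* Rough illustration of the conversion above (the RTL shapes are sketched
   for exposition, not taken from a dump): an unconditional
   (jump_insn (set (pc) (label_ref LAST_BB_LABEL)))
   targeting the empty last block is replaced by (jump_insn (return)) or
   (jump_insn (simple_return)), and its edge is redirected to the exit
   block; a conditional jump is instead redirected to ret_rtx or
   simple_return_rtx. */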
5541
5542 /* Emit a return insn for the exit fallthru block. */
5543 basic_block
5544 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5545 {
5546 basic_block last_bb = exit_fallthru_edge->src;
5547
5548 if (JUMP_P (BB_END (last_bb)))
5549 {
5550 last_bb = split_edge (exit_fallthru_edge);
5551 exit_fallthru_edge = single_succ_edge (last_bb);
5552 }
5553 emit_barrier_after (BB_END (last_bb));
5554 emit_return_into_block (simple_p, last_bb);
5555 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5556 return last_bb;
5557 }
5558 #endif
5559
5560
5561 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5562 this into place with notes indicating where the prologue ends and where
5563 the epilogue begins. Update the basic block information when possible.
5564
5565 Notes on epilogue placement:
5566 There are several kinds of edges to the exit block:
5567 * a single fallthru edge from LAST_BB
5568 * possibly, edges from blocks containing sibcalls
5569 * possibly, fake edges from infinite loops
5570
5571 The epilogue is always emitted on the fallthru edge from the last basic
5572 block in the function, LAST_BB, into the exit block.
5573
5574 If LAST_BB is empty except for a label, it is the target of every
5575 other basic block in the function that ends in a return. If a
5576 target has a return or simple_return pattern (possibly with
5577 conditional variants), these basic blocks can be changed so that a
5578 return insn is emitted into them, and their target is adjusted to
5579 the real exit block.
5580
5581 Notes on shrink wrapping: We implement a fairly conservative
5582 version of shrink-wrapping rather than the textbook one. We only
5583 generate a single prologue and a single epilogue. This is
5584 sufficient to catch a number of interesting cases involving early
5585 exits.
5586
5587 First, we identify the blocks that require the prologue to occur before
5588 them. These are the ones that modify a call-saved register, or reference
5589 any of the stack or frame pointer registers. To simplify things, we then
5590 mark everything reachable from these blocks as also requiring a prologue.
5591 This takes care of loops automatically, and avoids the need to examine
5592 whether MEMs reference the frame, since it is sufficient to check for
5593 occurrences of the stack or frame pointer.
5594
5595 We then compute the set of blocks for which the need for a prologue
5596 is anticipatable (borrowing terminology from the shrink-wrapping
5597 description in Muchnick's book). These are the blocks which either
5598 require a prologue themselves, or those that have only successors
5599 where the prologue is anticipatable. The prologue needs to be
5600 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5601 is not. For the moment, we ensure that only one such edge exists.
5602
5603 The epilogue is placed as described above, but we make a
5604 distinction between inserting return and simple_return patterns
5605 when modifying other blocks that end in a return. Blocks that end
5606 in a sibcall omit the sibcall_epilogue if the block is not in
5607 ANTIC. */
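/* A hedged illustration of the shrink-wrapping described above (the example
   function and the name heavy_work are invented for exposition):
   int f (int x)
   {
     if (x == 0)
       return 0;
     return heavy_work (x);
   }
   The early-exit path touches neither call-saved registers nor the frame,
   so it can reach a simple_return without running the prologue; the
   prologue is inserted only on the edge into the block containing the
   call, while the epilogue remains on the fallthru edge into the exit
   block. */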
5608
5609 static void
5610 thread_prologue_and_epilogue_insns (void)
5611 {
5612 bool inserted;
5613 #ifdef HAVE_simple_return
5614 vec<edge> unconverted_simple_returns = vNULL;
5615 bitmap_head bb_flags;
5616 #endif
5617 rtx_insn *returnjump;
5618 rtx seq ATTRIBUTE_UNUSED;
5619 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
5620 rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
5621 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5622 edge_iterator ei;
5623
5624 df_analyze ();
5625
5626 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5627
5628 inserted = false;
5629 seq = NULL_RTX;
5630 epilogue_end = NULL;
5631 returnjump = NULL;
5632
5633 /* Can't deal with multiple successors of the entry block at the
5634 moment. A function should always have at least one entry
5635 point. */
5636 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5637 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5638 orig_entry_edge = entry_edge;
5639
5640 split_prologue_seq = NULL_RTX;
5641 if (flag_split_stack
5642 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5643 == NULL))
5644 {
5645 #ifndef HAVE_split_stack_prologue
5646 gcc_unreachable ();
5647 #else
5648 gcc_assert (HAVE_split_stack_prologue);
5649
5650 start_sequence ();
5651 emit_insn (gen_split_stack_prologue ());
5652 split_prologue_seq = get_insns ();
5653 end_sequence ();
5654
5655 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5656 set_insn_locations (split_prologue_seq, prologue_location);
5657 #endif
5658 }
5659
5660 prologue_seq = NULL_RTX;
5661 #ifdef HAVE_prologue
5662 if (HAVE_prologue)
5663 {
5664 start_sequence ();
5665 seq = gen_prologue ();
5666 emit_insn (seq);
5667
5668 /* Insert an explicit USE for the frame pointer
5669 if profiling is on and the frame pointer is required. */
5670 if (crtl->profile && frame_pointer_needed)
5671 emit_use (hard_frame_pointer_rtx);
5672
5673 /* Retain a map of the prologue insns. */
5674 record_insns (seq, NULL, &prologue_insn_hash);
5675 emit_note (NOTE_INSN_PROLOGUE_END);
5676
5677 /* Ensure that instructions are not moved into the prologue when
5678 profiling is on. The call to the profiling routine can be
5679 emitted within the live range of a call-clobbered register. */
5680 if (!targetm.profile_before_prologue () && crtl->profile)
5681 emit_insn (gen_blockage ());
5682
5683 prologue_seq = get_insns ();
5684 end_sequence ();
5685 set_insn_locations (prologue_seq, prologue_location);
5686 }
5687 #endif
5688
5689 #ifdef HAVE_simple_return
5690 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5691
5692 /* Try to perform a kind of shrink-wrapping, making sure the
5693 prologue/epilogue is emitted only around those parts of the
5694 function that require it. */
5695
5696 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
5697 #endif
5698
5699 if (split_prologue_seq != NULL_RTX)
5700 {
5701 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
5702 inserted = true;
5703 }
5704 if (prologue_seq != NULL_RTX)
5705 {
5706 insert_insn_on_edge (prologue_seq, entry_edge);
5707 inserted = true;
5708 }
5709
5710 /* If the exit block has no non-fake predecessors, we don't need
5711 an epilogue. */
5712 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5713 if ((e->flags & EDGE_FAKE) == 0)
5714 break;
5715 if (e == NULL)
5716 goto epilogue_done;
5717
5718 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5719
5720 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5721
5722 #ifdef HAVE_simple_return
5723 if (entry_edge != orig_entry_edge)
5724 exit_fallthru_edge
5725 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
5726 &unconverted_simple_returns,
5727 &returnjump);
5728 #endif
5729 #ifdef HAVE_return
5730 if (HAVE_return)
5731 {
5732 if (exit_fallthru_edge == NULL)
5733 goto epilogue_done;
5734
5735 if (optimize)
5736 {
5737 basic_block last_bb = exit_fallthru_edge->src;
5738
5739 if (LABEL_P (BB_HEAD (last_bb))
5740 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
5741 convert_jumps_to_returns (last_bb, false, vNULL);
5742
5743 if (EDGE_COUNT (last_bb->preds) != 0
5744 && single_succ_p (last_bb))
5745 {
5746 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
5747 epilogue_end = returnjump = BB_END (last_bb);
5748 #ifdef HAVE_simple_return
5749 /* Emitting the return may add a basic block.
5750 Fix bb_flags for the added block. */
5751 if (last_bb != exit_fallthru_edge->src)
5752 bitmap_set_bit (&bb_flags, last_bb->index);
5753 #endif
5754 goto epilogue_done;
5755 }
5756 }
5757 }
5758 #endif
5759
5760 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5761 this marker for the splits of EH_RETURN patterns, and nothing else
5762 uses the flag in the meantime. */
5763 epilogue_completed = 1;
5764
5765 #ifdef HAVE_eh_return
5766 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5767 some targets, these get split to a special version of the epilogue
5768 code. In order to be able to properly annotate these with unwind
5769 info, try to split them now. If we get a valid split, drop an
5770 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5771 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5772 {
5773 rtx_insn *prev, *last, *trial;
5774
5775 if (e->flags & EDGE_FALLTHRU)
5776 continue;
5777 last = BB_END (e->src);
5778 if (!eh_returnjump_p (last))
5779 continue;
5780
5781 prev = PREV_INSN (last);
5782 trial = try_split (PATTERN (last), last, 1);
5783 if (trial == last)
5784 continue;
5785
5786 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5787 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5788 }
5789 #endif
5790
5791 /* If nothing falls through into the exit block, we don't need an
5792 epilogue. */
5793
5794 if (exit_fallthru_edge == NULL)
5795 goto epilogue_done;
5796
5797 #ifdef HAVE_epilogue
5798 if (HAVE_epilogue)
5799 {
5800 start_sequence ();
5801 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5802 seq = gen_epilogue ();
5803 if (seq)
5804 emit_jump_insn (seq);
5805
5806 /* Retain a map of the epilogue insns. */
5807 record_insns (seq, NULL, &epilogue_insn_hash);
5808 set_insn_locations (seq, epilogue_location);
5809
5810 seq = get_insns ();
5811 returnjump = get_last_insn ();
5812 end_sequence ();
5813
5814 insert_insn_on_edge (seq, exit_fallthru_edge);
5815 inserted = true;
5816
5817 if (JUMP_P (returnjump))
5818 set_return_jump_label (returnjump);
5819 }
5820 else
5821 #endif
5822 {
5823 basic_block cur_bb;
5824
5825 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
5826 goto epilogue_done;
5827 /* We have a fall-through edge to the exit block, the source is not
5828 at the end of the function, and there will be an assembler epilogue
5829 at the end of the function.
5830 We can't use force_nonfallthru here, because that would try to
5831 use return. Inserting a jump 'by hand' is extremely messy, so
5832 we take advantage of cfg_layout_finalize using
5833 fixup_fallthru_exit_predecessor. */
5834 cfg_layout_initialize (0);
5835 FOR_EACH_BB_FN (cur_bb, cfun)
5836 if (cur_bb->index >= NUM_FIXED_BLOCKS
5837 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5838 cur_bb->aux = cur_bb->next_bb;
5839 cfg_layout_finalize ();
5840 }
5841
5842 epilogue_done:
5843
5844 default_rtl_profile ();
5845
5846 if (inserted)
5847 {
5848 sbitmap blocks;
5849
5850 commit_edge_insertions ();
5851
5852 /* Look for basic blocks within the prologue insns. */
5853 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
5854 bitmap_clear (blocks);
5855 bitmap_set_bit (blocks, entry_edge->dest->index);
5856 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
5857 find_many_sub_basic_blocks (blocks);
5858 sbitmap_free (blocks);
5859
5860 /* The epilogue insns we inserted may cause the exit edge to no longer
5861 be fallthru. */
5862 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5863 {
5864 if (((e->flags & EDGE_FALLTHRU) != 0)
5865 && returnjump_p (BB_END (e->src)))
5866 e->flags &= ~EDGE_FALLTHRU;
5867 }
5868 }
5869
5870 #ifdef HAVE_simple_return
5871 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags, returnjump,
5872 unconverted_simple_returns);
5873 #endif
5874
5875 #ifdef HAVE_sibcall_epilogue
5876 /* Emit sibling epilogues before any sibling call sites. */
5877 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
5878 ei_safe_edge (ei));
5879 )
5880 {
5881 basic_block bb = e->src;
5882 rtx_insn *insn = BB_END (bb);
5883 rtx ep_seq;
5884
5885 if (!CALL_P (insn)
5886 || ! SIBLING_CALL_P (insn)
5887 #ifdef HAVE_simple_return
5888 || (entry_edge != orig_entry_edge
5889 && !bitmap_bit_p (&bb_flags, bb->index))
5890 #endif
5891 )
5892 {
5893 ei_next (&ei);
5894 continue;
5895 }
5896
5897 ep_seq = gen_sibcall_epilogue ();
5898 if (ep_seq)
5899 {
5900 start_sequence ();
5901 emit_note (NOTE_INSN_EPILOGUE_BEG);
5902 emit_insn (ep_seq);
5903 seq = get_insns ();
5904 end_sequence ();
5905
5906 /* Retain a map of the epilogue insns. Used in life analysis to
5907 avoid getting rid of sibcall epilogue insns. Do this before we
5908 actually emit the sequence. */
5909 record_insns (seq, NULL, &epilogue_insn_hash);
5910 set_insn_locations (seq, epilogue_location);
5911
5912 emit_insn_before (seq, insn);
5913 }
5914 ei_next (&ei);
5915 }
5916 #endif
5917
5918 #ifdef HAVE_epilogue
5919 if (epilogue_end)
5920 {
5921 rtx_insn *insn, *next;
5922
5923 /* Move any line notes that appear after the epilogue. There is no
5924 need, however, to be strict about the existence of such a note.
5925 Also possibly move
5926 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5927 info generation. */
5928 for (insn = epilogue_end; insn; insn = next)
5929 {
5930 next = NEXT_INSN (insn);
5931 if (NOTE_P (insn)
5932 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5933 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5934 }
5935 }
5936 #endif
5937
5938 #ifdef HAVE_simple_return
5939 bitmap_clear (&bb_flags);
5940 #endif
5941
5942 /* Threading the prologue and epilogue changes the artificial refs
5943 in the entry and exit blocks. */
5944 epilogue_completed = 1;
5945 df_update_entry_exit_and_calls ();
5946 }
5947
5948 /* Reposition the prologue-end and epilogue-begin notes after
5949 instruction scheduling. */
5950
5951 void
5952 reposition_prologue_and_epilogue_notes (void)
5953 {
5954 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \
5955 || defined (HAVE_sibcall_epilogue)
5956 /* Since the hash table is created on demand, the fact that it is
5957 non-null is a signal that it is non-empty. */
5958 if (prologue_insn_hash != NULL)
5959 {
5960 size_t len = htab_elements (prologue_insn_hash);
5961 rtx_insn *insn, *last = NULL, *note = NULL;
5962
5963 /* Scan from the beginning until we reach the last prologue insn. */
5964 /* ??? While we do have the CFG intact, there are two problems:
5965 (1) The prologue can contain loops (typically probing the stack),
5966 which means that the end of the prologue isn't in the first bb.
5967 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
5968 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5969 {
5970 if (NOTE_P (insn))
5971 {
5972 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5973 note = insn;
5974 }
5975 else if (contains (insn, prologue_insn_hash))
5976 {
5977 last = insn;
5978 if (--len == 0)
5979 break;
5980 }
5981 }
5982
5983 if (last)
5984 {
5985 if (note == NULL)
5986 {
5987 /* Scan forward looking for the PROLOGUE_END note. It should
5988 be right at the beginning of the block, possibly with other
5989 insn notes that got moved there. */
5990 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
5991 {
5992 if (NOTE_P (note)
5993 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5994 break;
5995 }
5996 }
5997
5998 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5999 if (LABEL_P (last))
6000 last = NEXT_INSN (last);
6001 reorder_insns (note, note, last);
6002 }
6003 }
6004
6005 if (epilogue_insn_hash != NULL)
6006 {
6007 edge_iterator ei;
6008 edge e;
6009
6010 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6011 {
6012 rtx_insn *insn, *first = NULL, *note = NULL;
6013 basic_block bb = e->src;
6014
6015 /* Scan from the beginning until we reach the first epilogue insn. */
6016 FOR_BB_INSNS (bb, insn)
6017 {
6018 if (NOTE_P (insn))
6019 {
6020 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6021 {
6022 note = insn;
6023 if (first != NULL)
6024 break;
6025 }
6026 }
6027 else if (first == NULL && contains (insn, epilogue_insn_hash))
6028 {
6029 first = insn;
6030 if (note != NULL)
6031 break;
6032 }
6033 }
6034
6035 if (note)
6036 {
6037 /* If the function has a single basic block, and no real
6038 epilogue insns (e.g. sibcall with no cleanup), the
6039 epilogue note can get scheduled before the prologue
6040 note. If we have frame related prologue insns, having
6041 them scanned during the epilogue will result in a crash.
6042 In this case re-order the epilogue note to just before
6043 the last insn in the block. */
6044 if (first == NULL)
6045 first = BB_END (bb);
6046
6047 if (PREV_INSN (first) != note)
6048 reorder_insns (note, note, PREV_INSN (first));
6049 }
6050 }
6051 }
6052 #endif /* HAVE_prologue or HAVE_epilogue */
6053 }
6054
6055 /* Returns the name of the function declared by FNDECL. */
6056 const char *
6057 fndecl_name (tree fndecl)
6058 {
6059 if (fndecl == NULL)
6060 return "(nofn)";
6061 return lang_hooks.decl_printable_name (fndecl, 2);
6062 }
6063
6064 /* Returns the name of function FN. */
6065 const char *
6066 function_name (struct function *fn)
6067 {
6068 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6069 return fndecl_name (fndecl);
6070 }
6071
6072 /* Returns the name of the current function. */
6073 const char *
6074 current_function_name (void)
6075 {
6076 return function_name (cfun);
6077 }
6078 \f
6079
6080 static unsigned int
6081 rest_of_handle_check_leaf_regs (void)
6082 {
6083 #ifdef LEAF_REGISTERS
6084 crtl->uses_only_leaf_regs
6085 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6086 #endif
6087 return 0;
6088 }
6089
6090 /* Insert a TYPE into the used types hash table of CFUN. */
6091
6092 static void
6093 used_types_insert_helper (tree type, struct function *func)
6094 {
6095 if (type != NULL && func != NULL)
6096 {
6097 void **slot;
6098
6099 if (func->used_types_hash == NULL)
6100 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
6101 htab_eq_pointer, NULL);
6102 slot = htab_find_slot (func->used_types_hash, type, INSERT);
6103 if (*slot == NULL)
6104 *slot = type;
6105 }
6106 }
6107
6108 /* Given a type T, insert it into the used types hash table of cfun. */
6109 void
6110 used_types_insert (tree t)
6111 {
6112 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6113 if (TYPE_NAME (t))
6114 break;
6115 else
6116 t = TREE_TYPE (t);
6117 if (TREE_CODE (t) == ERROR_MARK)
6118 return;
6119 if (TYPE_NAME (t) == NULL_TREE
6120 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6121 t = TYPE_MAIN_VARIANT (t);
6122 if (debug_info_level > DINFO_LEVEL_NONE)
6123 {
6124 if (cfun)
6125 used_types_insert_helper (t, cfun);
6126 else
6127 {
6128 /* So this might be a type referenced by a global variable.
6129 Record that type so that we can later decide to emit its
6130 debug information. */
6131 vec_safe_push (types_used_by_cur_var_decl, t);
6132 }
6133 }
6134 }
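/* Worked example (illustrative): for a variable declared as
   struct foo **p;
   the loop above strips the two unnamed pointer types and records
   struct foo (its main variant, so that qualified variants such as
   const struct foo do not create duplicate entries); a typedef'd
   pointer type, which has a TYPE_NAME, is recorded as-is. */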
6135
6136 /* Helper to hash a struct types_used_by_vars_entry. */
6137
6138 static hashval_t
6139 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6140 {
6141 gcc_assert (entry && entry->var_decl && entry->type);
6142
6143 return iterative_hash_object (entry->type,
6144 iterative_hash_object (entry->var_decl, 0));
6145 }
6146
6147 /* Hash function of the types_used_by_vars_entry hash table. */
6148
6149 hashval_t
6150 types_used_by_vars_do_hash (const void *x)
6151 {
6152 const struct types_used_by_vars_entry *entry =
6153 (const struct types_used_by_vars_entry *) x;
6154
6155 return hash_types_used_by_vars_entry (entry);
6156 }
6157
6158 /* Equality function of the types_used_by_vars_entry hash table. */
6159
6160 int
6161 types_used_by_vars_eq (const void *x1, const void *x2)
6162 {
6163 const struct types_used_by_vars_entry *e1 =
6164 (const struct types_used_by_vars_entry *) x1;
6165 const struct types_used_by_vars_entry *e2 =
6166 (const struct types_used_by_vars_entry *)x2;
6167
6168 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6169 }
6170
6171 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6172
6173 void
6174 types_used_by_var_decl_insert (tree type, tree var_decl)
6175 {
6176 if (type != NULL && var_decl != NULL)
6177 {
6178 void **slot;
6179 struct types_used_by_vars_entry e;
6180 e.var_decl = var_decl;
6181 e.type = type;
6182 if (types_used_by_vars_hash == NULL)
6183 types_used_by_vars_hash =
6184 htab_create_ggc (37, types_used_by_vars_do_hash,
6185 types_used_by_vars_eq, NULL);
6186 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
6187 hash_types_used_by_vars_entry (&e), INSERT);
6188 if (*slot == NULL)
6189 {
6190 struct types_used_by_vars_entry *entry;
6191 entry = ggc_alloc<types_used_by_vars_entry> ();
6192 entry->type = type;
6193 entry->var_decl = var_decl;
6194 *slot = entry;
6195 }
6196 }
6197 }
6198
6199 namespace {
6200
6201 const pass_data pass_data_leaf_regs =
6202 {
6203 RTL_PASS, /* type */
6204 "*leaf_regs", /* name */
6205 OPTGROUP_NONE, /* optinfo_flags */
6206 TV_NONE, /* tv_id */
6207 0, /* properties_required */
6208 0, /* properties_provided */
6209 0, /* properties_destroyed */
6210 0, /* todo_flags_start */
6211 0, /* todo_flags_finish */
6212 };
6213
6214 class pass_leaf_regs : public rtl_opt_pass
6215 {
6216 public:
6217 pass_leaf_regs (gcc::context *ctxt)
6218 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6219 {}
6220
6221 /* opt_pass methods: */
6222 virtual unsigned int execute (function *)
6223 {
6224 return rest_of_handle_check_leaf_regs ();
6225 }
6226
6227 }; // class pass_leaf_regs
6228
6229 } // anon namespace
6230
6231 rtl_opt_pass *
6232 make_pass_leaf_regs (gcc::context *ctxt)
6233 {
6234 return new pass_leaf_regs (ctxt);
6235 }
6236
6237 static unsigned int
6238 rest_of_handle_thread_prologue_and_epilogue (void)
6239 {
6240 if (optimize)
6241 cleanup_cfg (CLEANUP_EXPENSIVE);
6242
6243 /* On some machines, the prologue and epilogue code, or parts thereof,
6244 can be represented as RTL. Doing so lets us schedule insns between
6245 it and the rest of the code and also allows delayed branch
6246 scheduling to operate in the epilogue. */
6247 thread_prologue_and_epilogue_insns ();
6248
6249 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6250 see PR57320. */
6251 cleanup_cfg (0);
6252
6253 /* The stack usage info is finalized during prologue expansion. */
6254 if (flag_stack_usage_info)
6255 output_stack_usage ();
6256
6257 return 0;
6258 }
6259
6260 namespace {
6261
6262 const pass_data pass_data_thread_prologue_and_epilogue =
6263 {
6264 RTL_PASS, /* type */
6265 "pro_and_epilogue", /* name */
6266 OPTGROUP_NONE, /* optinfo_flags */
6267 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6268 0, /* properties_required */
6269 0, /* properties_provided */
6270 0, /* properties_destroyed */
6271 0, /* todo_flags_start */
6272 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6273 };
6274
6275 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6276 {
6277 public:
6278 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6279 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6280 {}
6281
6282 /* opt_pass methods: */
6283 virtual unsigned int execute (function *)
6284 {
6285 return rest_of_handle_thread_prologue_and_epilogue ();
6286 }
6287
6288 }; // class pass_thread_prologue_and_epilogue
6289
6290 } // anon namespace
6291
6292 rtl_opt_pass *
6293 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6294 {
6295 return new pass_thread_prologue_and_epilogue (ctxt);
6296 }
6297 \f
6298
6299 /* This mini-pass fixes fall-out from SSA in asm statements that have
6300 in-out constraints. Say you start with
6301
6302 orig = inout;
6303 asm ("": "+mr" (inout));
6304 use (orig);
6305
6306 which is transformed very early to use explicit output and match operands:
6307
6308 orig = inout;
6309 asm ("": "=mr" (inout) : "0" (inout));
6310 use (orig);
6311
6312 Or, after SSA and copyprop,
6313
6314 asm ("": "=mr" (inout_2) : "0" (inout_1));
6315 use (inout_1);
6316
6317 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6318 they represent two separate values, so they will get different pseudo
6319 registers during expansion. Then, since the two operands need to match
6320 per the constraints, but use different pseudo registers, reload can
6321 only register a reload for these operands. But reloads can only be
6322 satisfied by hardregs, not by memory, so we need a register for this
6323 reload, just because we are presented with non-matching operands.
6324 So, even though we allow memory for this operand, no memory can be
6325 used for it, just because the two operands don't match. This can
6326 cause reload failures on register-starved targets.
6327
6328 So it's a symptom of reload not being able to use memory for reloads
6329 or, alternatively it's also a symptom of both operands not coming into
6330 reload as matching (in which case the pseudo could go to memory just
6331 fine, as the alternative allows it, and no reload would be necessary).
6332 We fix the latter problem here, by transforming
6333
6334 asm ("": "=mr" (inout_2) : "0" (inout_1));
6335
6336 back to
6337
6338 inout_2 = inout_1;
6339 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6340
6341 static void
6342 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6343 {
6344 int i;
6345 bool changed = false;
6346 rtx op = SET_SRC (p_sets[0]);
6347 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6348 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6349 bool *output_matched = XALLOCAVEC (bool, noutputs);
6350
6351 memset (output_matched, 0, noutputs * sizeof (bool));
6352 for (i = 0; i < ninputs; i++)
6353 {
6354 rtx input, output;
6355 rtx_insn *insns;
6356 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6357 char *end;
6358 int match, j;
6359
6360 if (*constraint == '%')
6361 constraint++;
6362
6363 match = strtoul (constraint, &end, 10);
6364 if (end == constraint)
6365 continue;
6366
6367 gcc_assert (match < noutputs);
6368 output = SET_DEST (p_sets[match]);
6369 input = RTVEC_ELT (inputs, i);
6370 /* Only do the transformation for pseudos. */
6371 if (! REG_P (output)
6372 || rtx_equal_p (output, input)
6373 || (GET_MODE (input) != VOIDmode
6374 && GET_MODE (input) != GET_MODE (output)))
6375 continue;
6376
6377 /* We can't do anything if the output is also used as input,
6378 as we're going to overwrite it. */
6379 for (j = 0; j < ninputs; j++)
6380 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6381 break;
6382 if (j != ninputs)
6383 continue;
6384
6385 /* Avoid changing the same input several times. For
6386 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6387 only change `in' once (to out1), rather than changing it
6388 first to out1 and afterwards to out2. */
6389 if (i > 0)
6390 {
6391 for (j = 0; j < noutputs; j++)
6392 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6393 break;
6394 if (j != noutputs)
6395 continue;
6396 }
6397 output_matched[match] = true;
6398
6399 start_sequence ();
6400 emit_move_insn (output, input);
6401 insns = get_insns ();
6402 end_sequence ();
6403 emit_insn_before (insns, insn);
6404
6405 /* Now replace all mentions of the input with output. We can't
6406 just replace the occurrence in inputs[i], as the register might
6407 also be used in some other input (or even in an address of an
6408 output), which would mean possibly increasing the number of
6409 inputs by one (namely 'output' in addition), which might pose
6410 a too complicated problem for reload to solve. E.g. this situation:
6411
6412 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6413
6414 Here 'input' is used in two occurrences as input (once for the
6415 input operand, once for the address in the second output operand).
6416 If we would replace only the occurrence of the input operand (to
6417 make the matching) we would be left with this:
6418
6419 output = input
6420 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6421
6422 Now we suddenly have two different input values (containing the same
6423 value, but different pseudos) where we formerly had only one.
6424 With more complicated asms this might lead to reload failures
6425 which wouldn't have happened without this pass. So, iterate over
6426 all operands and replace all occurrences of the register used. */
6427 for (j = 0; j < noutputs; j++)
6428 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6429 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6430 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6431 input, output);
6432 for (j = 0; j < ninputs; j++)
6433 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6434 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6435 input, output);
6436
6437 changed = true;
6438 }
6439
6440 if (changed)
6441 df_insn_rescan (insn);
6442 }
6443
6444 namespace {
6445
6446 const pass_data pass_data_match_asm_constraints =
6447 {
6448 RTL_PASS, /* type */
6449 "asmcons", /* name */
6450 OPTGROUP_NONE, /* optinfo_flags */
6451 TV_NONE, /* tv_id */
6452 0, /* properties_required */
6453 0, /* properties_provided */
6454 0, /* properties_destroyed */
6455 0, /* todo_flags_start */
6456 0, /* todo_flags_finish */
6457 };
6458
6459 class pass_match_asm_constraints : public rtl_opt_pass
6460 {
6461 public:
6462 pass_match_asm_constraints (gcc::context *ctxt)
6463 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6464 {}
6465
6466 /* opt_pass methods: */
6467 virtual unsigned int execute (function *);
6468
6469 }; // class pass_match_asm_constraints
6470
6471 unsigned
6472 pass_match_asm_constraints::execute (function *fun)
6473 {
6474 basic_block bb;
6475 rtx_insn *insn;
6476 rtx pat, *p_sets;
6477 int noutputs;
6478
6479 if (!crtl->has_asm_statement)
6480 return 0;
6481
6482 df_set_flags (DF_DEFER_INSN_RESCAN);
6483 FOR_EACH_BB_FN (bb, fun)
6484 {
6485 FOR_BB_INSNS (bb, insn)
6486 {
6487 if (!INSN_P (insn))
6488 continue;
6489
6490 pat = PATTERN (insn);
6491 if (GET_CODE (pat) == PARALLEL)
6492 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6493 else if (GET_CODE (pat) == SET)
6494 p_sets = &PATTERN (insn), noutputs = 1;
6495 else
6496 continue;
6497
6498 if (GET_CODE (*p_sets) == SET
6499 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6500 match_asm_constraints_1 (insn, p_sets, noutputs);
6501 }
6502 }
6503
6504 return TODO_df_finish;
6505 }
6506
6507 } // anon namespace
6508
6509 rtl_opt_pass *
6510 make_pass_match_asm_constraints (gcc::context *ctxt)
6511 {
6512 return new pass_match_asm_constraints (ctxt);
6513 }
6514
6515
6516 #include "gt-function.h"