1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "rtl.h"
40 #include "tree.h"
41 #include "gimple-expr.h"
42 #include "cfghooks.h"
43 #include "df.h"
44 #include "memmodel.h"
45 #include "tm_p.h"
46 #include "stringpool.h"
47 #include "expmed.h"
48 #include "optabs.h"
49 #include "regs.h"
50 #include "emit-rtl.h"
51 #include "recog.h"
52 #include "rtl-error.h"
53 #include "alias.h"
54 #include "fold-const.h"
55 #include "stor-layout.h"
56 #include "varasm.h"
57 #include "except.h"
58 #include "dojump.h"
59 #include "explow.h"
60 #include "calls.h"
61 #include "expr.h"
62 #include "optabs-tree.h"
63 #include "output.h"
64 #include "langhooks.h"
65 #include "common/common-target.h"
66 #include "gimplify.h"
67 #include "tree-pass.h"
68 #include "cfgrtl.h"
69 #include "cfganal.h"
70 #include "cfgbuild.h"
71 #include "cfgcleanup.h"
72 #include "cfgexpand.h"
73 #include "shrink-wrap.h"
74 #include "toplev.h"
75 #include "rtl-iter.h"
76 #include "tree-dfa.h"
77 #include "tree-ssa.h"
78 #include "stringpool.h"
79 #include "attribs.h"
80 #include "gimple.h"
81 #include "options.h"
82 #include "function-abi.h"
83
84 /* So we can assign to cfun in this file. */
85 #undef cfun
86
87 #ifndef STACK_ALIGNMENT_NEEDED
88 #define STACK_ALIGNMENT_NEEDED 1
89 #endif
90
91 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
92
 93 /* Round a value down to the largest multiple of the required alignment
 94    that does not exceed it.  Avoid using division in case the value is
 95    negative.  Assume the alignment is a power of two.  */
96 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
97
98 /* Similar, but round to the next highest integer that meets the
99 alignment. */
100 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
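
/* Illustrative example (not part of the original sources): with an
   8-byte alignment, FLOOR_ROUND (13, 8) yields 8 and CEIL_ROUND (13, 8)
   yields 16; values that are already a multiple of the alignment are
   returned unchanged by both macros.  */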
101
102 /* Nonzero once virtual register instantiation has been done.
103 assign_stack_local uses frame_pointer_rtx when this is nonzero.
104 calls.c:emit_library_call_value_1 uses it to set up
105 post-instantiation libcalls. */
106 int virtuals_instantiated;
107
108 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
109 static GTY(()) int funcdef_no;
110
 111 /* This variable holds a pointer to the function used to create the
 112    target specific, per-function data structures.  */
113 struct machine_function * (*init_machine_status) (void);
114
115 /* The currently compiled function. */
116 struct function *cfun = 0;
117
118 /* These hashes record the prologue and epilogue insns. */
119
120 struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
121 {
122 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
123 static bool equal (rtx a, rtx b) { return a == b; }
124 };
125
126 static GTY((cache))
127 hash_table<insn_cache_hasher> *prologue_insn_hash;
128 static GTY((cache))
129 hash_table<insn_cache_hasher> *epilogue_insn_hash;
130 \f
131
132 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
133 vec<tree, va_gc> *types_used_by_cur_var_decl;
134
135 /* Forward declarations. */
136
137 static class temp_slot *find_temp_slot_from_address (rtx);
138 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
139 static void pad_below (struct args_size *, machine_mode, tree);
140 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
141 static int all_blocks (tree, tree *);
142 static tree *get_block_vector (tree, int *);
143 extern tree debug_find_var_in_block_tree (tree, tree);
144 /* We always define `record_insns' even if it's not used so that we
145 can always export `prologue_epilogue_contains'. */
146 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
147 ATTRIBUTE_UNUSED;
148 static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
149 static void prepare_function_start (void);
150 static void do_clobber_return_reg (rtx, void *);
151 static void do_use_return_reg (rtx, void *);
152
153 \f
 154 /* Stack of nested functions; keeps track of the cfun stack.  */
156
157 static vec<function *> function_context_stack;
158
159 /* Save the current context for compilation of a nested function.
160 This is called from language-specific code. */
161
162 void
163 push_function_context (void)
164 {
165 if (cfun == 0)
166 allocate_struct_function (NULL, false);
167
168 function_context_stack.safe_push (cfun);
169 set_cfun (NULL);
170 }
171
172 /* Restore the last saved context, at the end of a nested function.
173 This function is called from language-specific code. */
174
175 void
176 pop_function_context (void)
177 {
178 struct function *p = function_context_stack.pop ();
179 set_cfun (p);
180 current_function_decl = p->decl;
181
182 /* Reset variables that have known state during rtx generation. */
183 virtuals_instantiated = 0;
184 generating_concat_p = 1;
185 }
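
/* A hypothetical sketch of how a front end pairs the two functions
   above when compiling a nested function (illustrative only; real
   front ends interleave their own language-specific state saving):

     push_function_context ();
     ... set up and compile the nested function's body ...
     pop_function_context ();

   push_function_context saves the current cfun and clears it;
   pop_function_context restores it afterwards.  */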
186
187 /* Clear out all parts of the state in F that can safely be discarded
188 after the function has been parsed, but not compiled, to let
189 garbage collection reclaim the memory. */
190
191 void
192 free_after_parsing (struct function *f)
193 {
194 f->language = 0;
195 }
196
197 /* Clear out all parts of the state in F that can safely be discarded
198 after the function has been compiled, to let garbage collection
199 reclaim the memory. */
200
201 void
202 free_after_compilation (struct function *f)
203 {
204 prologue_insn_hash = NULL;
205 epilogue_insn_hash = NULL;
206
207 free (crtl->emit.regno_pointer_align);
208
209 memset (crtl, 0, sizeof (struct rtl_data));
210 f->eh = NULL;
211 f->machine = NULL;
212 f->cfg = NULL;
213 f->curr_properties &= ~PROP_cfg;
214
215 regno_reg_rtx = NULL;
216 }
217 \f
218 /* Return size needed for stack frame based on slots so far allocated.
219 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
220 the caller may have to do that. */
221
222 poly_int64
223 get_frame_size (void)
224 {
225 if (FRAME_GROWS_DOWNWARD)
226 return -frame_offset;
227 else
228 return frame_offset;
229 }
230
231 /* Issue an error message and return TRUE if frame OFFSET overflows in
 232    the signed target pointer arithmetic for function FUNC.  Otherwise
233 return FALSE. */
234
235 bool
236 frame_offset_overflow (poly_int64 offset, tree func)
237 {
238 poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
239 unsigned HOST_WIDE_INT limit
240 = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
241 /* Leave room for the fixed part of the frame. */
242 - 64 * UNITS_PER_WORD);
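  /* Purely as an illustration (actual values are target-dependent):
     with a 32-bit Pmode and 4-byte words the limit above works out to
     0x80000000 - 256 bytes of local objects.  */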
243
244 if (!coeffs_in_range_p (size, 0U, limit))
245 {
246 unsigned HOST_WIDE_INT hwisize;
247 if (size.is_constant (&hwisize))
248 error_at (DECL_SOURCE_LOCATION (func),
249 "total size of local objects %wu exceeds maximum %wu",
250 hwisize, limit);
251 else
252 error_at (DECL_SOURCE_LOCATION (func),
253 "total size of local objects exceeds maximum %wu",
254 limit);
255 return true;
256 }
257
258 return false;
259 }
260
261 /* Return the minimum spill slot alignment for a register of mode MODE. */
262
263 unsigned int
264 spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
265 {
266 return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
267 }
268
269 /* Return stack slot alignment in bits for TYPE and MODE. */
270
271 static unsigned int
272 get_stack_local_alignment (tree type, machine_mode mode)
273 {
274 unsigned int alignment;
275
276 if (mode == BLKmode)
277 alignment = BIGGEST_ALIGNMENT;
278 else
279 alignment = GET_MODE_ALIGNMENT (mode);
280
 281   /* Allow the front end to (possibly) increase the alignment of this
282 stack slot. */
283 if (! type)
284 type = lang_hooks.types.type_for_mode (mode, 0);
285
286 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
287 }
288
289 /* Determine whether it is possible to fit a stack slot of size SIZE and
290 alignment ALIGNMENT into an area in the stack frame that starts at
291 frame offset START and has a length of LENGTH. If so, store the frame
292 offset to be used for the stack slot in *POFFSET and return true;
293 return false otherwise. This function will extend the frame size when
294 given a start/length pair that lies at the end of the frame. */
295
296 static bool
297 try_fit_stack_local (poly_int64 start, poly_int64 length,
298 poly_int64 size, unsigned int alignment,
299 poly_int64_pod *poffset)
300 {
301 poly_int64 this_frame_offset;
302 int frame_off, frame_alignment, frame_phase;
303
304 /* Calculate how many bytes the start of local variables is off from
305 stack alignment. */
306 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
307 frame_off = targetm.starting_frame_offset () % frame_alignment;
308 frame_phase = frame_off ? frame_alignment - frame_off : 0;
309
310 /* Round the frame offset to the specified alignment. */
311
312 if (FRAME_GROWS_DOWNWARD)
313 this_frame_offset
314 = (aligned_lower_bound (start + length - size - frame_phase, alignment)
315 + frame_phase);
316 else
317 this_frame_offset
318 = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;
319
320 /* See if it fits. If this space is at the edge of the frame,
321 consider extending the frame to make it fit. Our caller relies on
322 this when allocating a new slot. */
323 if (maybe_lt (this_frame_offset, start))
324 {
325 if (known_eq (frame_offset, start))
326 frame_offset = this_frame_offset;
327 else
328 return false;
329 }
330 else if (maybe_gt (this_frame_offset + size, start + length))
331 {
332 if (known_eq (frame_offset, start + length))
333 frame_offset = this_frame_offset + size;
334 else
335 return false;
336 }
337
338 *poffset = this_frame_offset;
339 return true;
340 }
341
342 /* Create a new frame_space structure describing free space in the stack
343 frame beginning at START and ending at END, and chain it into the
344 function's frame_space_list. */
345
346 static void
347 add_frame_space (poly_int64 start, poly_int64 end)
348 {
349 class frame_space *space = ggc_alloc<frame_space> ();
350 space->next = crtl->frame_space_list;
351 crtl->frame_space_list = space;
352 space->start = start;
353 space->length = end - start;
354 }
355
356 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
357 with machine mode MODE.
358
359 ALIGN controls the amount of alignment for the address of the slot:
360 0 means according to MODE,
361 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
362 -2 means use BITS_PER_UNIT,
363 positive specifies alignment boundary in bits.
364
365 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
366 alignment and ASLK_RECORD_PAD bit set if we should remember
367 extra space we allocated for alignment purposes. When we are
368 called from assign_stack_temp_for_type, it is not set so we don't
369 track the same stack slot in two independent lists.
370
371 We do not round to stack_boundary here. */
372
373 rtx
374 assign_stack_local_1 (machine_mode mode, poly_int64 size,
375 int align, int kind)
376 {
377 rtx x, addr;
378 poly_int64 bigend_correction = 0;
379 poly_int64 slot_offset = 0, old_frame_offset;
380 unsigned int alignment, alignment_in_bits;
381
382 if (align == 0)
383 {
384 alignment = get_stack_local_alignment (NULL, mode);
385 alignment /= BITS_PER_UNIT;
386 }
387 else if (align == -1)
388 {
389 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
390 size = aligned_upper_bound (size, alignment);
391 }
392 else if (align == -2)
393 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
394 else
395 alignment = align / BITS_PER_UNIT;
396
397 alignment_in_bits = alignment * BITS_PER_UNIT;
398
399 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
400 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
401 {
402 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
403 alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
404 }
405
406 if (SUPPORTS_STACK_ALIGNMENT)
407 {
408 if (crtl->stack_alignment_estimated < alignment_in_bits)
409 {
410 if (!crtl->stack_realign_processed)
411 crtl->stack_alignment_estimated = alignment_in_bits;
412 else
413 {
414 /* If stack is realigned and stack alignment value
415 hasn't been finalized, it is OK not to increase
416 stack_alignment_estimated. The bigger alignment
417 requirement is recorded in stack_alignment_needed
418 below. */
419 gcc_assert (!crtl->stack_realign_finalized);
420 if (!crtl->stack_realign_needed)
421 {
422 /* It is OK to reduce the alignment as long as the
423 requested size is 0 or the estimated stack
424 alignment >= mode alignment. */
425 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
426 || known_eq (size, 0)
427 || (crtl->stack_alignment_estimated
428 >= GET_MODE_ALIGNMENT (mode)));
429 alignment_in_bits = crtl->stack_alignment_estimated;
430 alignment = alignment_in_bits / BITS_PER_UNIT;
431 }
432 }
433 }
434 }
435
436 if (crtl->stack_alignment_needed < alignment_in_bits)
437 crtl->stack_alignment_needed = alignment_in_bits;
438 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
439 crtl->max_used_stack_slot_alignment = alignment_in_bits;
440
441 if (mode != BLKmode || maybe_ne (size, 0))
442 {
443 if (kind & ASLK_RECORD_PAD)
444 {
445 class frame_space **psp;
446
447 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
448 {
449 class frame_space *space = *psp;
450 if (!try_fit_stack_local (space->start, space->length, size,
451 alignment, &slot_offset))
452 continue;
453 *psp = space->next;
454 if (known_gt (slot_offset, space->start))
455 add_frame_space (space->start, slot_offset);
456 if (known_lt (slot_offset + size, space->start + space->length))
457 add_frame_space (slot_offset + size,
458 space->start + space->length);
459 goto found_space;
460 }
461 }
462 }
463 else if (!STACK_ALIGNMENT_NEEDED)
464 {
465 slot_offset = frame_offset;
466 goto found_space;
467 }
468
469 old_frame_offset = frame_offset;
470
471 if (FRAME_GROWS_DOWNWARD)
472 {
473 frame_offset -= size;
474 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
475
476 if (kind & ASLK_RECORD_PAD)
477 {
478 if (known_gt (slot_offset, frame_offset))
479 add_frame_space (frame_offset, slot_offset);
480 if (known_lt (slot_offset + size, old_frame_offset))
481 add_frame_space (slot_offset + size, old_frame_offset);
482 }
483 }
484 else
485 {
486 frame_offset += size;
487 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
488
489 if (kind & ASLK_RECORD_PAD)
490 {
491 if (known_gt (slot_offset, old_frame_offset))
492 add_frame_space (old_frame_offset, slot_offset);
493 if (known_lt (slot_offset + size, frame_offset))
494 add_frame_space (slot_offset + size, frame_offset);
495 }
496 }
497
498 found_space:
499 /* On a big-endian machine, if we are allocating more space than we will use,
500 use the least significant bytes of those that are allocated. */
501 if (mode != BLKmode)
502 {
503 /* The slot size can sometimes be smaller than the mode size;
504 e.g. the rs6000 port allocates slots with a vector mode
505 that have the size of only one element. However, the slot
 506 	 size must always be ordered wrt the mode size, in the
507 same way as for a subreg. */
508 gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
509 if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
510 bigend_correction = size - GET_MODE_SIZE (mode);
511 }
512
513 /* If we have already instantiated virtual registers, return the actual
514 address relative to the frame pointer. */
515 if (virtuals_instantiated)
516 addr = plus_constant (Pmode, frame_pointer_rtx,
517 trunc_int_for_mode
518 (slot_offset + bigend_correction
519 + targetm.starting_frame_offset (), Pmode));
520 else
521 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
522 trunc_int_for_mode
523 (slot_offset + bigend_correction,
524 Pmode));
525
526 x = gen_rtx_MEM (mode, addr);
527 set_mem_align (x, alignment_in_bits);
528 MEM_NOTRAP_P (x) = 1;
529
530 vec_safe_push (stack_slot_list, x);
531
532 if (frame_offset_overflow (frame_offset, current_function_decl))
533 frame_offset = 0;
534
535 return x;
536 }
537
 538 /* Wrapper around assign_stack_local_1 that passes ASLK_RECORD_PAD for KIND.  */
539
540 rtx
541 assign_stack_local (machine_mode mode, poly_int64 size, int align)
542 {
543 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
544 }
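
/* A minimal usage sketch (hypothetical caller, not taken from this
   file): a pass that needs an 8-byte DImode spill area with the mode's
   default alignment could simply do

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   and then use SLOT as an ordinary MEM operand.  */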
545 \f
546 /* In order to evaluate some expressions, such as function calls returning
547 structures in memory, we need to temporarily allocate stack locations.
548 We record each allocated temporary in the following structure.
549
550 Associated with each temporary slot is a nesting level. When we pop up
551 one level, all temporaries associated with the previous level are freed.
552 Normally, all temporaries are freed after the execution of the statement
553 in which they were created. However, if we are inside a ({...}) grouping,
554 the result may be in a temporary and hence must be preserved. If the
555 result could be in a temporary, we preserve it if we can determine which
556 one it is in. If we cannot determine which temporary may contain the
557 result, all temporaries are preserved. A temporary is preserved by
558 pretending it was allocated at the previous nesting level. */
559
560 class GTY(()) temp_slot {
561 public:
562 /* Points to next temporary slot. */
563 class temp_slot *next;
564 /* Points to previous temporary slot. */
565 class temp_slot *prev;
 566   /* The rtx used to reference the slot.  */
567 rtx slot;
568 /* The size, in units, of the slot. */
569 poly_int64 size;
570 /* The type of the object in the slot, or zero if it doesn't correspond
571 to a type. We use this to determine whether a slot can be reused.
572 It can be reused if objects of the type of the new slot will always
573 conflict with objects of the type of the old slot. */
574 tree type;
575 /* The alignment (in bits) of the slot. */
576 unsigned int align;
577 /* Nonzero if this temporary is currently in use. */
578 char in_use;
579 /* Nesting level at which this slot is being used. */
580 int level;
581 /* The offset of the slot from the frame_pointer, including extra space
582 for alignment. This info is for combine_temp_slots. */
583 poly_int64 base_offset;
584 /* The size of the slot, including extra space for alignment. This
585 info is for combine_temp_slots. */
586 poly_int64 full_size;
587 };
588
589 /* Entry for the below hash table. */
590 struct GTY((for_user)) temp_slot_address_entry {
591 hashval_t hash;
592 rtx address;
593 class temp_slot *temp_slot;
594 };
595
596 struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
597 {
598 static hashval_t hash (temp_slot_address_entry *);
599 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
600 };
601
602 /* A table of addresses that represent a stack slot. The table is a mapping
603 from address RTXen to a temp slot. */
604 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
605 static size_t n_temp_slots_in_use;
606
607 /* Removes temporary slot TEMP from LIST. */
608
609 static void
610 cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
611 {
612 if (temp->next)
613 temp->next->prev = temp->prev;
614 if (temp->prev)
615 temp->prev->next = temp->next;
616 else
617 *list = temp->next;
618
619 temp->prev = temp->next = NULL;
620 }
621
622 /* Inserts temporary slot TEMP to LIST. */
623
624 static void
625 insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
626 {
627 temp->next = *list;
628 if (*list)
629 (*list)->prev = temp;
630 temp->prev = NULL;
631 *list = temp;
632 }
633
634 /* Returns the list of used temp slots at LEVEL. */
635
636 static class temp_slot **
637 temp_slots_at_level (int level)
638 {
639 if (level >= (int) vec_safe_length (used_temp_slots))
640 vec_safe_grow_cleared (used_temp_slots, level + 1, true);
641
642 return &(*used_temp_slots)[level];
643 }
644
645 /* Returns the maximal temporary slot level. */
646
647 static int
648 max_slot_level (void)
649 {
650 if (!used_temp_slots)
651 return -1;
652
653 return used_temp_slots->length () - 1;
654 }
655
656 /* Moves temporary slot TEMP to LEVEL. */
657
658 static void
659 move_slot_to_level (class temp_slot *temp, int level)
660 {
661 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
662 insert_slot_to_list (temp, temp_slots_at_level (level));
663 temp->level = level;
664 }
665
666 /* Make temporary slot TEMP available. */
667
668 static void
669 make_slot_available (class temp_slot *temp)
670 {
671 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
672 insert_slot_to_list (temp, &avail_temp_slots);
673 temp->in_use = 0;
674 temp->level = -1;
675 n_temp_slots_in_use--;
676 }
677
678 /* Compute the hash value for an address -> temp slot mapping.
679 The value is cached on the mapping entry. */
680 static hashval_t
681 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
682 {
683 int do_not_record = 0;
684 return hash_rtx (t->address, GET_MODE (t->address),
685 &do_not_record, NULL, false);
686 }
687
688 /* Return the hash value for an address -> temp slot mapping. */
689 hashval_t
690 temp_address_hasher::hash (temp_slot_address_entry *t)
691 {
692 return t->hash;
693 }
694
695 /* Compare two address -> temp slot mapping entries. */
696 bool
697 temp_address_hasher::equal (temp_slot_address_entry *t1,
698 temp_slot_address_entry *t2)
699 {
700 return exp_equiv_p (t1->address, t2->address, 0, true);
701 }
702
 703 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
704 static void
705 insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
706 {
707 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
708 t->address = copy_rtx (address);
709 t->temp_slot = temp_slot;
710 t->hash = temp_slot_address_compute_hash (t);
711 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
712 }
713
714 /* Remove an address -> temp slot mapping entry if the temp slot is
715 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
716 int
717 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
718 {
719 const struct temp_slot_address_entry *t = *slot;
720 if (! t->temp_slot->in_use)
721 temp_slot_address_table->clear_slot (slot);
722 return 1;
723 }
724
725 /* Remove all mappings of addresses to unused temp slots. */
726 static void
727 remove_unused_temp_slot_addresses (void)
728 {
729 /* Use quicker clearing if there aren't any active temp slots. */
730 if (n_temp_slots_in_use)
731 temp_slot_address_table->traverse
732 <void *, remove_unused_temp_slot_addresses_1> (NULL);
733 else
734 temp_slot_address_table->empty ();
735 }
736
737 /* Find the temp slot corresponding to the object at address X. */
738
739 static class temp_slot *
740 find_temp_slot_from_address (rtx x)
741 {
742 class temp_slot *p;
743 struct temp_slot_address_entry tmp, *t;
744
745 /* First try the easy way:
746 See if X exists in the address -> temp slot mapping. */
747 tmp.address = x;
748 tmp.temp_slot = NULL;
749 tmp.hash = temp_slot_address_compute_hash (&tmp);
750 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
751 if (t)
752 return t->temp_slot;
753
754 /* If we have a sum involving a register, see if it points to a temp
755 slot. */
756 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
757 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
758 return p;
759 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
760 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
761 return p;
762
763 /* Last resort: Address is a virtual stack var address. */
764 poly_int64 offset;
765 if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
766 {
767 int i;
768 for (i = max_slot_level (); i >= 0; i--)
769 for (p = *temp_slots_at_level (i); p; p = p->next)
770 if (known_in_range_p (offset, p->base_offset, p->full_size))
771 return p;
772 }
773
774 return NULL;
775 }
776 \f
777 /* Allocate a temporary stack slot and record it for possible later
778 reuse.
779
780 MODE is the machine mode to be given to the returned rtx.
781
782 SIZE is the size in units of the space required. We do no rounding here
783 since assign_stack_local will do any required rounding.
784
785 TYPE is the type that will be used for the stack slot. */
786
787 rtx
788 assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
789 {
790 unsigned int align;
791 class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
792 rtx slot;
793
794 gcc_assert (known_size_p (size));
795
796 align = get_stack_local_alignment (type, mode);
797
798 /* Try to find an available, already-allocated temporary of the proper
799 mode which meets the size and alignment requirements. Choose the
800 smallest one with the closest alignment.
801
802 If assign_stack_temp is called outside of the tree->rtl expansion,
803 we cannot reuse the stack slots (that may still refer to
804 VIRTUAL_STACK_VARS_REGNUM). */
805 if (!virtuals_instantiated)
806 {
807 for (p = avail_temp_slots; p; p = p->next)
808 {
809 if (p->align >= align
810 && known_ge (p->size, size)
811 && GET_MODE (p->slot) == mode
812 && objects_must_conflict_p (p->type, type)
813 && (best_p == 0
814 || (known_eq (best_p->size, p->size)
815 ? best_p->align > p->align
816 : known_ge (best_p->size, p->size))))
817 {
818 if (p->align == align && known_eq (p->size, size))
819 {
820 selected = p;
821 cut_slot_from_list (selected, &avail_temp_slots);
822 best_p = 0;
823 break;
824 }
825 best_p = p;
826 }
827 }
828 }
829
830 /* Make our best, if any, the one to use. */
831 if (best_p)
832 {
833 selected = best_p;
834 cut_slot_from_list (selected, &avail_temp_slots);
835
836 /* If there are enough aligned bytes left over, make them into a new
837 temp_slot so that the extra bytes don't get wasted. Do this only
838 for BLKmode slots, so that we can be sure of the alignment. */
839 if (GET_MODE (best_p->slot) == BLKmode)
840 {
841 int alignment = best_p->align / BITS_PER_UNIT;
842 poly_int64 rounded_size = aligned_upper_bound (size, alignment);
843
844 if (known_ge (best_p->size - rounded_size, alignment))
845 {
846 p = ggc_alloc<temp_slot> ();
847 p->in_use = 0;
848 p->size = best_p->size - rounded_size;
849 p->base_offset = best_p->base_offset + rounded_size;
850 p->full_size = best_p->full_size - rounded_size;
851 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
852 p->align = best_p->align;
853 p->type = best_p->type;
854 insert_slot_to_list (p, &avail_temp_slots);
855
856 vec_safe_push (stack_slot_list, p->slot);
857
858 best_p->size = rounded_size;
859 best_p->full_size = rounded_size;
860 }
861 }
862 }
863
864 /* If we still didn't find one, make a new temporary. */
865 if (selected == 0)
866 {
867 poly_int64 frame_offset_old = frame_offset;
868
869 p = ggc_alloc<temp_slot> ();
870
871 /* We are passing an explicit alignment request to assign_stack_local.
872 One side effect of that is assign_stack_local will not round SIZE
873 to ensure the frame offset remains suitably aligned.
874
875 So for requests which depended on the rounding of SIZE, we go ahead
876 and round it now. We also make sure ALIGNMENT is at least
877 BIGGEST_ALIGNMENT. */
878 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
879 p->slot = assign_stack_local_1 (mode,
880 (mode == BLKmode
881 ? aligned_upper_bound (size,
882 (int) align
883 / BITS_PER_UNIT)
884 : size),
885 align, 0);
886
887 p->align = align;
888
889 /* The following slot size computation is necessary because we don't
890 know the actual size of the temporary slot until assign_stack_local
891 has performed all the frame alignment and size rounding for the
892 requested temporary. Note that extra space added for alignment
893 can be either above or below this stack slot depending on which
894 way the frame grows. We include the extra space if and only if it
895 is above this slot. */
896 if (FRAME_GROWS_DOWNWARD)
897 p->size = frame_offset_old - frame_offset;
898 else
899 p->size = size;
900
901 /* Now define the fields used by combine_temp_slots. */
902 if (FRAME_GROWS_DOWNWARD)
903 {
904 p->base_offset = frame_offset;
905 p->full_size = frame_offset_old - frame_offset;
906 }
907 else
908 {
909 p->base_offset = frame_offset_old;
910 p->full_size = frame_offset - frame_offset_old;
911 }
912
913 selected = p;
914 }
915
916 p = selected;
917 p->in_use = 1;
918 p->type = type;
919 p->level = temp_slot_level;
920 n_temp_slots_in_use++;
921
922 pp = temp_slots_at_level (p->level);
923 insert_slot_to_list (p, pp);
924 insert_temp_slot_address (XEXP (p->slot, 0), p);
925
926 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
927 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
928 vec_safe_push (stack_slot_list, slot);
929
930 /* If we know the alias set for the memory that will be used, use
931 it. If there's no TYPE, then we don't know anything about the
932 alias set for the memory. */
933 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
934 set_mem_align (slot, align);
935
936 /* If a type is specified, set the relevant flags. */
937 if (type != 0)
938 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
939 MEM_NOTRAP_P (slot) = 1;
940
941 return slot;
942 }
943
944 /* Allocate a temporary stack slot and record it for possible later
 945    reuse.  The first two arguments are the same as in the preceding function.  */
946
947 rtx
948 assign_stack_temp (machine_mode mode, poly_int64 size)
949 {
950 return assign_stack_temp_for_type (mode, size, NULL_TREE);
951 }
952 \f
953 /* Assign a temporary.
954 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
955 and so that should be used in error messages. In either case, we
 956    allocate a temporary of the given type.
957 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
958 it is 0 if a register is OK.
 959    DONT_PROMOTE is 1 if we should not promote values in registers
960 to wider modes. */
961
962 rtx
963 assign_temp (tree type_or_decl, int memory_required,
964 int dont_promote ATTRIBUTE_UNUSED)
965 {
966 tree type, decl;
967 machine_mode mode;
968 #ifdef PROMOTE_MODE
969 int unsignedp;
970 #endif
971
972 if (DECL_P (type_or_decl))
973 decl = type_or_decl, type = TREE_TYPE (decl);
974 else
975 decl = NULL, type = type_or_decl;
976
977 mode = TYPE_MODE (type);
978 #ifdef PROMOTE_MODE
979 unsignedp = TYPE_UNSIGNED (type);
980 #endif
981
982 /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
983 end. See also create_tmp_var for the gimplification-time check. */
984 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
985
986 if (mode == BLKmode || memory_required)
987 {
988 poly_int64 size;
989 rtx tmp;
990
991 /* Unfortunately, we don't yet know how to allocate variable-sized
992 temporaries. However, sometimes we can find a fixed upper limit on
993 the size, so try that instead. */
994 if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
995 size = max_int_size_in_bytes (type);
996
997 /* Zero sized arrays are a GNU C extension. Set size to 1 to avoid
998 problems with allocating the stack space. */
999 if (known_eq (size, 0))
1000 size = 1;
1001
1002 /* The size of the temporary may be too large to fit into an integer. */
1003 /* ??? Not sure this should happen except for user silliness, so limit
1004 this to things that aren't compiler-generated temporaries. The
1005 rest of the time we'll die in assign_stack_temp_for_type. */
1006 if (decl
1007 && !known_size_p (size)
1008 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1009 {
1010 error ("size of variable %q+D is too large", decl);
1011 size = 1;
1012 }
1013
1014 tmp = assign_stack_temp_for_type (mode, size, type);
1015 return tmp;
1016 }
1017
1018 #ifdef PROMOTE_MODE
1019 if (! dont_promote)
1020 mode = promote_mode (type, mode, &unsignedp);
1021 #endif
1022
1023 return gen_reg_rtx (mode);
1024 }
1025 \f
1026 /* Combine temporary stack slots which are adjacent on the stack.
1027
1028 This allows for better use of already allocated stack space. This is only
1029 done for BLKmode slots because we can be sure that we won't have alignment
1030 problems in this case. */
1031
1032 static void
1033 combine_temp_slots (void)
1034 {
1035 class temp_slot *p, *q, *next, *next_q;
1036 int num_slots;
1037
1038 /* We can't combine slots, because the information about which slot
1039 is in which alias set will be lost. */
1040 if (flag_strict_aliasing)
1041 return;
1042
 1043   /* If there are a lot of temp slots, don't do anything unless
 1044      high levels of optimization are enabled.  */
1045 if (! flag_expensive_optimizations)
1046 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1047 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1048 return;
1049
1050 for (p = avail_temp_slots; p; p = next)
1051 {
1052 int delete_p = 0;
1053
1054 next = p->next;
1055
1056 if (GET_MODE (p->slot) != BLKmode)
1057 continue;
1058
1059 for (q = p->next; q; q = next_q)
1060 {
1061 int delete_q = 0;
1062
1063 next_q = q->next;
1064
1065 if (GET_MODE (q->slot) != BLKmode)
1066 continue;
1067
1068 if (known_eq (p->base_offset + p->full_size, q->base_offset))
1069 {
1070 /* Q comes after P; combine Q into P. */
1071 p->size += q->size;
1072 p->full_size += q->full_size;
1073 delete_q = 1;
1074 }
1075 else if (known_eq (q->base_offset + q->full_size, p->base_offset))
1076 {
1077 /* P comes after Q; combine P into Q. */
1078 q->size += p->size;
1079 q->full_size += p->full_size;
1080 delete_p = 1;
1081 break;
1082 }
1083 if (delete_q)
1084 cut_slot_from_list (q, &avail_temp_slots);
1085 }
1086
1087 /* Either delete P or advance past it. */
1088 if (delete_p)
1089 cut_slot_from_list (p, &avail_temp_slots);
1090 }
1091 }
1092 \f
1093 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1094 slot that previously was known by OLD_RTX. */
1095
1096 void
1097 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1098 {
1099 class temp_slot *p;
1100
1101 if (rtx_equal_p (old_rtx, new_rtx))
1102 return;
1103
1104 p = find_temp_slot_from_address (old_rtx);
1105
 1106   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
 1107      NEW_RTX is a register, see if one operand of the PLUS is a
 1108      temporary location; if so, NEW_RTX points into it.  Otherwise,
 1109      if both OLD_RTX and NEW_RTX are a PLUS and there is a register
 1110      in common between them, try a recursive call on those
 1111      values.  */
1112 if (p == 0)
1113 {
1114 if (GET_CODE (old_rtx) != PLUS)
1115 return;
1116
1117 if (REG_P (new_rtx))
1118 {
1119 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1120 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1121 return;
1122 }
1123 else if (GET_CODE (new_rtx) != PLUS)
1124 return;
1125
1126 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1127 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1128 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1129 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1130 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1131 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1132 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1133 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1134
1135 return;
1136 }
1137
1138 /* Otherwise add an alias for the temp's address. */
1139 insert_temp_slot_address (new_rtx, p);
1140 }
1141
1142 /* If X could be a reference to a temporary slot, mark that slot as
 1143    belonging to the level one higher than the current level.  If X
1144 matched one of our slots, just mark that one. Otherwise, we can't
1145 easily predict which it is, so upgrade all of them.
1146
1147 This is called when an ({...}) construct occurs and a statement
1148 returns a value in memory. */
1149
1150 void
1151 preserve_temp_slots (rtx x)
1152 {
1153 class temp_slot *p = 0, *next;
1154
1155 if (x == 0)
1156 return;
1157
1158 /* If X is a register that is being used as a pointer, see if we have
1159 a temporary slot we know it points to. */
1160 if (REG_P (x) && REG_POINTER (x))
1161 p = find_temp_slot_from_address (x);
1162
1163 /* If X is not in memory or is at a constant address, it cannot be in
1164 a temporary slot. */
1165 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1166 return;
1167
1168 /* First see if we can find a match. */
1169 if (p == 0)
1170 p = find_temp_slot_from_address (XEXP (x, 0));
1171
1172 if (p != 0)
1173 {
1174 if (p->level == temp_slot_level)
1175 move_slot_to_level (p, temp_slot_level - 1);
1176 return;
1177 }
1178
1179 /* Otherwise, preserve all non-kept slots at this level. */
1180 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1181 {
1182 next = p->next;
1183 move_slot_to_level (p, temp_slot_level - 1);
1184 }
1185 }
1186
1187 /* Free all temporaries used so far. This is normally called at the
1188 end of generating code for a statement. */
1189
1190 void
1191 free_temp_slots (void)
1192 {
1193 class temp_slot *p, *next;
1194 bool some_available = false;
1195
1196 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1197 {
1198 next = p->next;
1199 make_slot_available (p);
1200 some_available = true;
1201 }
1202
1203 if (some_available)
1204 {
1205 remove_unused_temp_slot_addresses ();
1206 combine_temp_slots ();
1207 }
1208 }
1209
1210 /* Push deeper into the nesting level for stack temporaries. */
1211
1212 void
1213 push_temp_slots (void)
1214 {
1215 temp_slot_level++;
1216 }
1217
1218 /* Pop a temporary nesting level. All slots in use in the current level
1219 are freed. */
1220
1221 void
1222 pop_temp_slots (void)
1223 {
1224 free_temp_slots ();
1225 temp_slot_level--;
1226 }
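
/* A hypothetical sketch of how the temporary-slot machinery above is
   typically driven while expanding a statement (illustrative only, not
   a call sequence taken from this file):

     push_temp_slots ();
     rtx tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode));
     ... emit code that uses TMP ...
     preserve_temp_slots (result);  -- only if RESULT must outlive the level
     pop_temp_slots ();

   where RESULT stands for whatever rtx, if any, carries the statement's
   value.  pop_temp_slots frees every slot still in use at the current
   nesting level.  */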
1227
1228 /* Initialize temporary slots. */
1229
1230 void
1231 init_temp_slots (void)
1232 {
1233 /* We have not allocated any temporaries yet. */
1234 avail_temp_slots = 0;
1235 vec_alloc (used_temp_slots, 0);
1236 temp_slot_level = 0;
1237 n_temp_slots_in_use = 0;
1238
1239 /* Set up the table to map addresses to temp slots. */
1240 if (! temp_slot_address_table)
1241 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1242 else
1243 temp_slot_address_table->empty ();
1244 }
1245 \f
1246 /* Functions and data structures to keep track of the values hard regs
1247 had at the start of the function. */
1248
1249 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
 1250    and has_hard_reg_initial_val.  */
1251 struct GTY(()) initial_value_pair {
1252 rtx hard_reg;
1253 rtx pseudo;
1254 };
1255 /* ??? This could be a VEC but there is currently no way to define an
1256 opaque VEC type. This could be worked around by defining struct
1257 initial_value_pair in function.h. */
1258 struct GTY(()) initial_value_struct {
1259 int num_entries;
1260 int max_entries;
1261 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1262 };
1263
1264 /* If a pseudo represents an initial hard reg (or expression), return
1265 it, else return NULL_RTX. */
1266
1267 rtx
1268 get_hard_reg_initial_reg (rtx reg)
1269 {
1270 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1271 int i;
1272
1273 if (ivs == 0)
1274 return NULL_RTX;
1275
1276 for (i = 0; i < ivs->num_entries; i++)
1277 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1278 return ivs->entries[i].hard_reg;
1279
1280 return NULL_RTX;
1281 }
1282
1283 /* Make sure that there's a pseudo register of mode MODE that stores the
1284 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1285
1286 rtx
1287 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1288 {
1289 struct initial_value_struct *ivs;
1290 rtx rv;
1291
1292 rv = has_hard_reg_initial_val (mode, regno);
1293 if (rv)
1294 return rv;
1295
1296 ivs = crtl->hard_reg_initial_vals;
1297 if (ivs == 0)
1298 {
1299 ivs = ggc_alloc<initial_value_struct> ();
1300 ivs->num_entries = 0;
1301 ivs->max_entries = 5;
1302 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1303 crtl->hard_reg_initial_vals = ivs;
1304 }
1305
1306 if (ivs->num_entries >= ivs->max_entries)
1307 {
1308 ivs->max_entries += 5;
1309 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1310 ivs->max_entries);
1311 }
1312
1313 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1314 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1315
1316 return ivs->entries[ivs->num_entries++].pseudo;
1317 }
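
/* For instance, a target expander that needs the value the link
   register had on entry could do (LR_REGNUM is a hypothetical register
   number, used here only to illustrate the API):

     rtx entry_lr = get_hard_reg_initial_val (Pmode, LR_REGNUM);

   emit_initial_value_sets (see below) later emits the corresponding
   copies from the hard registers at the function's entry point.  */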
1318
1319 /* See if get_hard_reg_initial_val has been used to create a pseudo
1320 for the initial value of hard register REGNO in mode MODE. Return
1321 the associated pseudo if so, otherwise return NULL. */
1322
1323 rtx
1324 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1325 {
1326 struct initial_value_struct *ivs;
1327 int i;
1328
1329 ivs = crtl->hard_reg_initial_vals;
1330 if (ivs != 0)
1331 for (i = 0; i < ivs->num_entries; i++)
1332 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1333 && REGNO (ivs->entries[i].hard_reg) == regno)
1334 return ivs->entries[i].pseudo;
1335
1336 return NULL_RTX;
1337 }
1338
1339 unsigned int
1340 emit_initial_value_sets (void)
1341 {
1342 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1343 int i;
1344 rtx_insn *seq;
1345
1346 if (ivs == 0)
1347 return 0;
1348
1349 start_sequence ();
1350 for (i = 0; i < ivs->num_entries; i++)
1351 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1352 seq = get_insns ();
1353 end_sequence ();
1354
1355 emit_insn_at_entry (seq);
1356 return 0;
1357 }
1358
1359 /* Return the hardreg-pseudoreg initial values pair entry I and
1360 TRUE if I is a valid entry, or FALSE if I is not a valid entry. */
1361 bool
1362 initial_value_entry (int i, rtx *hreg, rtx *preg)
1363 {
1364 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1365 if (!ivs || i >= ivs->num_entries)
1366 return false;
1367
1368 *hreg = ivs->entries[i].hard_reg;
1369 *preg = ivs->entries[i].pseudo;
1370 return true;
1371 }
1372 \f
1373 /* These routines are responsible for converting virtual register references
1374 to the actual hard register references once RTL generation is complete.
1375
 1376    The following five variables are used for communication between the
1377 routines. They contain the offsets of the virtual registers from their
1378 respective hard registers. */
1379
1380 static poly_int64 in_arg_offset;
1381 static poly_int64 var_offset;
1382 static poly_int64 dynamic_offset;
1383 static poly_int64 out_arg_offset;
1384 static poly_int64 cfa_offset;
1385
1386 /* In most machines, the stack pointer register is equivalent to the bottom
1387 of the stack. */
1388
1389 #ifndef STACK_POINTER_OFFSET
1390 #define STACK_POINTER_OFFSET 0
1391 #endif
1392
1393 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1394 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1395 #endif
1396
1397 /* If not defined, pick an appropriate default for the offset of dynamically
1398 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1399 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1400
1401 #ifndef STACK_DYNAMIC_OFFSET
1402
1403 /* The bottom of the stack points to the actual arguments. If
1404 REG_PARM_STACK_SPACE is defined, this includes the space for the register
 1405    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1406 stack space for register parameters is not pushed by the caller, but
1407 rather part of the fixed stack areas and hence not included in
1408 `crtl->outgoing_args_size'. Nevertheless, we must allow
1409 for it when allocating stack dynamic objects. */
1410
1411 #ifdef INCOMING_REG_PARM_STACK_SPACE
1412 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1413 ((ACCUMULATE_OUTGOING_ARGS \
1414 ? (crtl->outgoing_args_size \
1415 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1416 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1417 : 0) + (STACK_POINTER_OFFSET))
1418 #else
1419 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1420 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
1421 + (STACK_POINTER_OFFSET))
1422 #endif
1423 #endif
1424
1425 \f
 1426 /* Given a piece of RTX and a pointer to a poly_int64, if the RTX
1427 is a virtual register, return the equivalent hard register and set the
1428 offset indirectly through the pointer. Otherwise, return 0. */
1429
1430 static rtx
1431 instantiate_new_reg (rtx x, poly_int64_pod *poffset)
1432 {
1433 rtx new_rtx;
1434 poly_int64 offset;
1435
1436 if (x == virtual_incoming_args_rtx)
1437 {
1438 if (stack_realign_drap)
1439 {
1440 /* Replace virtual_incoming_args_rtx with internal arg
1441 pointer if DRAP is used to realign stack. */
1442 new_rtx = crtl->args.internal_arg_pointer;
1443 offset = 0;
1444 }
1445 else
1446 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1447 }
1448 else if (x == virtual_stack_vars_rtx)
1449 new_rtx = frame_pointer_rtx, offset = var_offset;
1450 else if (x == virtual_stack_dynamic_rtx)
1451 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1452 else if (x == virtual_outgoing_args_rtx)
1453 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1454 else if (x == virtual_cfa_rtx)
1455 {
1456 #ifdef FRAME_POINTER_CFA_OFFSET
1457 new_rtx = frame_pointer_rtx;
1458 #else
1459 new_rtx = arg_pointer_rtx;
1460 #endif
1461 offset = cfa_offset;
1462 }
1463 else if (x == virtual_preferred_stack_boundary_rtx)
1464 {
1465 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1466 offset = 0;
1467 }
1468 else
1469 return NULL_RTX;
1470
1471 *poffset = offset;
1472 return new_rtx;
1473 }
1474
1475 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1476 registers present inside of *LOC. The expression is simplified,
1477 as much as possible, but is not to be considered "valid" in any sense
1478 implied by the target. Return true if any change is made. */
1479
1480 static bool
1481 instantiate_virtual_regs_in_rtx (rtx *loc)
1482 {
1483 if (!*loc)
1484 return false;
1485 bool changed = false;
1486 subrtx_ptr_iterator::array_type array;
1487 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1488 {
1489 rtx *loc = *iter;
1490 if (rtx x = *loc)
1491 {
1492 rtx new_rtx;
1493 poly_int64 offset;
1494 switch (GET_CODE (x))
1495 {
1496 case REG:
1497 new_rtx = instantiate_new_reg (x, &offset);
1498 if (new_rtx)
1499 {
1500 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1501 changed = true;
1502 }
1503 iter.skip_subrtxes ();
1504 break;
1505
1506 case PLUS:
1507 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1508 if (new_rtx)
1509 {
1510 XEXP (x, 0) = new_rtx;
1511 *loc = plus_constant (GET_MODE (x), x, offset, true);
1512 changed = true;
1513 iter.skip_subrtxes ();
1514 break;
1515 }
1516
1517 /* FIXME -- from old code */
1518 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1519 we can commute the PLUS and SUBREG because pointers into the
1520 frame are well-behaved. */
1521 break;
1522
1523 default:
1524 break;
1525 }
1526 }
1527 }
1528 return changed;
1529 }
1530
1531 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1532 matches the predicate for insn CODE operand OPERAND. */
1533
1534 static int
1535 safe_insn_predicate (int code, int operand, rtx x)
1536 {
1537 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1538 }
1539
1540 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1541 registers present inside of insn. The result will be a valid insn. */
1542
1543 static void
1544 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1545 {
1546 poly_int64 offset;
1547 int insn_code, i;
1548 bool any_change = false;
1549 rtx set, new_rtx, x;
1550 rtx_insn *seq;
1551
1552 /* There are some special cases to be handled first. */
1553 set = single_set (insn);
1554 if (set)
1555 {
1556 /* We're allowed to assign to a virtual register. This is interpreted
1557 to mean that the underlying register gets assigned the inverse
1558 transformation. This is used, for example, in the handling of
1559 non-local gotos. */
1560 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1561 if (new_rtx)
1562 {
1563 start_sequence ();
1564
1565 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1566 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1567 gen_int_mode (-offset, GET_MODE (new_rtx)));
1568 x = force_operand (x, new_rtx);
1569 if (x != new_rtx)
1570 emit_move_insn (new_rtx, x);
1571
1572 seq = get_insns ();
1573 end_sequence ();
1574
1575 emit_insn_before (seq, insn);
1576 delete_insn (insn);
1577 return;
1578 }
1579
1580 /* Handle a straight copy from a virtual register by generating a
1581 new add insn. The difference between this and falling through
1582 to the generic case is avoiding a new pseudo and eliminating a
1583 move insn in the initial rtl stream. */
1584 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1585 if (new_rtx
1586 && maybe_ne (offset, 0)
1587 && REG_P (SET_DEST (set))
1588 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1589 {
1590 start_sequence ();
1591
1592 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1593 gen_int_mode (offset,
1594 GET_MODE (SET_DEST (set))),
1595 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1596 if (x != SET_DEST (set))
1597 emit_move_insn (SET_DEST (set), x);
1598
1599 seq = get_insns ();
1600 end_sequence ();
1601
1602 emit_insn_before (seq, insn);
1603 delete_insn (insn);
1604 return;
1605 }
1606
1607 extract_insn (insn);
1608 insn_code = INSN_CODE (insn);
1609
1610 /* Handle a plus involving a virtual register by determining if the
1611 operands remain valid if they're modified in place. */
1612 poly_int64 delta;
1613 if (GET_CODE (SET_SRC (set)) == PLUS
1614 && recog_data.n_operands >= 3
1615 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1616 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1617 && poly_int_rtx_p (recog_data.operand[2], &delta)
1618 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1619 {
1620 offset += delta;
1621
1622 /* If the sum is zero, then replace with a plain move. */
1623 if (known_eq (offset, 0)
1624 && REG_P (SET_DEST (set))
1625 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1626 {
1627 start_sequence ();
1628 emit_move_insn (SET_DEST (set), new_rtx);
1629 seq = get_insns ();
1630 end_sequence ();
1631
1632 emit_insn_before (seq, insn);
1633 delete_insn (insn);
1634 return;
1635 }
1636
1637 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1638
1639 /* Using validate_change and apply_change_group here leaves
1640 recog_data in an invalid state. Since we know exactly what
1641 we want to check, do those two by hand. */
1642 if (safe_insn_predicate (insn_code, 1, new_rtx)
1643 && safe_insn_predicate (insn_code, 2, x))
1644 {
1645 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1646 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1647 any_change = true;
1648
1649 /* Fall through into the regular operand fixup loop in
1650 order to take care of operands other than 1 and 2. */
1651 }
1652 }
1653 }
1654 else
1655 {
1656 extract_insn (insn);
1657 insn_code = INSN_CODE (insn);
1658 }
1659
1660 /* In the general case, we expect virtual registers to appear only in
1661 operands, and then only as either bare registers or inside memories. */
1662 for (i = 0; i < recog_data.n_operands; ++i)
1663 {
1664 x = recog_data.operand[i];
1665 switch (GET_CODE (x))
1666 {
1667 case MEM:
1668 {
1669 rtx addr = XEXP (x, 0);
1670
1671 if (!instantiate_virtual_regs_in_rtx (&addr))
1672 continue;
1673
1674 start_sequence ();
1675 x = replace_equiv_address (x, addr, true);
1676 /* It may happen that the address with the virtual reg
1677 was valid (e.g. based on the virtual stack reg, which might
1678 be acceptable to the predicates with all offsets), whereas
1679 the address now isn't anymore, for instance when the address
1680 is still offsetted, but the base reg isn't virtual-stack-reg
1681 anymore. Below we would do a force_reg on the whole operand,
1682 but this insn might actually only accept memory. Hence,
1683 before doing that last resort, try to reload the address into
1684 a register, so this operand stays a MEM. */
1685 if (!safe_insn_predicate (insn_code, i, x))
1686 {
1687 addr = force_reg (GET_MODE (addr), addr);
1688 x = replace_equiv_address (x, addr, true);
1689 }
1690 seq = get_insns ();
1691 end_sequence ();
1692 if (seq)
1693 emit_insn_before (seq, insn);
1694 }
1695 break;
1696
1697 case REG:
1698 new_rtx = instantiate_new_reg (x, &offset);
1699 if (new_rtx == NULL)
1700 continue;
1701 if (known_eq (offset, 0))
1702 x = new_rtx;
1703 else
1704 {
1705 start_sequence ();
1706
1707 /* Careful, special mode predicates may have stuff in
1708 insn_data[insn_code].operand[i].mode that isn't useful
1709 to us for computing a new value. */
1710 /* ??? Recognize address_operand and/or "p" constraints
 1711 	     to see if (plus new offset) is a valid address before we put
1712 this through expand_simple_binop. */
1713 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1714 gen_int_mode (offset, GET_MODE (x)),
1715 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1716 seq = get_insns ();
1717 end_sequence ();
1718 emit_insn_before (seq, insn);
1719 }
1720 break;
1721
1722 case SUBREG:
1723 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1724 if (new_rtx == NULL)
1725 continue;
1726 if (maybe_ne (offset, 0))
1727 {
1728 start_sequence ();
1729 new_rtx = expand_simple_binop
1730 (GET_MODE (new_rtx), PLUS, new_rtx,
1731 gen_int_mode (offset, GET_MODE (new_rtx)),
1732 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1733 seq = get_insns ();
1734 end_sequence ();
1735 emit_insn_before (seq, insn);
1736 }
1737 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1738 GET_MODE (new_rtx), SUBREG_BYTE (x));
1739 gcc_assert (x);
1740 break;
1741
1742 default:
1743 continue;
1744 }
1745
1746 /* At this point, X contains the new value for the operand.
1747 Validate the new value vs the insn predicate. Note that
1748 asm insns will have insn_code -1 here. */
1749 if (!safe_insn_predicate (insn_code, i, x))
1750 {
1751 start_sequence ();
1752 if (REG_P (x))
1753 {
1754 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1755 x = copy_to_reg (x);
1756 }
1757 else
1758 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1759 seq = get_insns ();
1760 end_sequence ();
1761 if (seq)
1762 emit_insn_before (seq, insn);
1763 }
1764
1765 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1766 any_change = true;
1767 }
1768
1769 if (any_change)
1770 {
1771 /* Propagate operand changes into the duplicates. */
1772 for (i = 0; i < recog_data.n_dups; ++i)
1773 *recog_data.dup_loc[i]
1774 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1775
1776 /* Force re-recognition of the instruction for validation. */
1777 INSN_CODE (insn) = -1;
1778 }
1779
1780 if (asm_noperands (PATTERN (insn)) >= 0)
1781 {
1782 if (!check_asm_operands (PATTERN (insn)))
1783 {
1784 error_for_asm (insn, "impossible constraint in %<asm%>");
1785 /* For asm goto, instead of fixing up all the edges
1786 just clear the template and clear input operands
1787 (asm goto doesn't have any output operands). */
1788 if (JUMP_P (insn))
1789 {
1790 rtx asm_op = extract_asm_operands (PATTERN (insn));
1791 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1792 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1793 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1794 }
1795 else
1796 delete_insn (insn);
1797 }
1798 }
1799 else
1800 {
1801 if (recog_memoized (insn) < 0)
1802 fatal_insn_not_found (insn);
1803 }
1804 }
1805
1806 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1807 do any instantiation required. */
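/* Illustrative example only (target details vary): a variable whose
   DECL_RTL is (mem:SI (plus (reg virtual-stack-vars) (const_int -4)))
   has that address rewritten here so the base becomes the real frame
   or hard frame pointer and the constant is adjusted by this
   function's frame offset.  */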
1808
1809 void
1810 instantiate_decl_rtl (rtx x)
1811 {
1812 rtx addr;
1813
1814 if (x == 0)
1815 return;
1816
1817 /* If this is a CONCAT, recurse for the pieces. */
1818 if (GET_CODE (x) == CONCAT)
1819 {
1820 instantiate_decl_rtl (XEXP (x, 0));
1821 instantiate_decl_rtl (XEXP (x, 1));
1822 return;
1823 }
1824
1825 /* If this is not a MEM, no need to do anything. Similarly if the
1826 address is a constant or a register that is not a virtual register. */
1827 if (!MEM_P (x))
1828 return;
1829
1830 addr = XEXP (x, 0);
1831 if (CONSTANT_P (addr)
1832 || (REG_P (addr)
1833 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1834 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1835 return;
1836
1837 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1838 }
1839
1840 /* Helper for instantiate_decls called via walk_tree: Process all decls
1841 in the given DECL_VALUE_EXPR. */
1842
1843 static tree
1844 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1845 {
1846 tree t = *tp;
1847 if (! EXPR_P (t))
1848 {
1849 *walk_subtrees = 0;
1850 if (DECL_P (t))
1851 {
1852 if (DECL_RTL_SET_P (t))
1853 instantiate_decl_rtl (DECL_RTL (t));
1854 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1855 && DECL_INCOMING_RTL (t))
1856 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1857 if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
1858 && DECL_HAS_VALUE_EXPR_P (t))
1859 {
1860 tree v = DECL_VALUE_EXPR (t);
1861 walk_tree (&v, instantiate_expr, NULL, NULL);
1862 }
1863 }
1864 }
1865 return NULL;
1866 }
1867
1868 /* Subroutine of instantiate_decls: Process all decls in the given
1869 BLOCK node and all its subblocks. */
1870
1871 static void
1872 instantiate_decls_1 (tree let)
1873 {
1874 tree t;
1875
1876 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1877 {
1878 if (DECL_RTL_SET_P (t))
1879 instantiate_decl_rtl (DECL_RTL (t));
1880 if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
1881 {
1882 tree v = DECL_VALUE_EXPR (t);
1883 walk_tree (&v, instantiate_expr, NULL, NULL);
1884 }
1885 }
1886
1887 /* Process all subblocks. */
1888 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1889 instantiate_decls_1 (t);
1890 }
1891
1892 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1893 all virtual registers in their DECL_RTL's. */
1894
1895 static void
1896 instantiate_decls (tree fndecl)
1897 {
1898 tree decl;
1899 unsigned ix;
1900
1901 /* Process all parameters of the function. */
1902 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1903 {
1904 instantiate_decl_rtl (DECL_RTL (decl));
1905 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1906 if (DECL_HAS_VALUE_EXPR_P (decl))
1907 {
1908 tree v = DECL_VALUE_EXPR (decl);
1909 walk_tree (&v, instantiate_expr, NULL, NULL);
1910 }
1911 }
1912
1913 if ((decl = DECL_RESULT (fndecl))
1914 && TREE_CODE (decl) == RESULT_DECL)
1915 {
1916 if (DECL_RTL_SET_P (decl))
1917 instantiate_decl_rtl (DECL_RTL (decl));
1918 if (DECL_HAS_VALUE_EXPR_P (decl))
1919 {
1920 tree v = DECL_VALUE_EXPR (decl);
1921 walk_tree (&v, instantiate_expr, NULL, NULL);
1922 }
1923 }
1924
1925 /* Process the saved static chain if it exists. */
1926 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1927 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1928 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1929
1930 /* Now process all variables defined in the function or its subblocks. */
1931 if (DECL_INITIAL (fndecl))
1932 instantiate_decls_1 (DECL_INITIAL (fndecl));
1933
1934 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1935 if (DECL_RTL_SET_P (decl))
1936 instantiate_decl_rtl (DECL_RTL (decl));
1937 vec_free (cfun->local_decls);
1938 }
1939
1940 /* Pass through the INSNS of function FNDECL and convert virtual register
1941 references to hard register references. */
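/* In outline: every virtual register still mentioned in the insn
   stream is rewritten as its hard counterpart plus one of the constant
   offsets computed at the start of this function (in_arg_offset,
   var_offset, dynamic_offset, out_arg_offset, cfa_offset); the per-insn
   rewriting itself is done by instantiate_virtual_regs_in_insn and
   instantiate_virtual_regs_in_rtx above.  */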
1942
1943 static unsigned int
1944 instantiate_virtual_regs (void)
1945 {
1946 rtx_insn *insn;
1947
1948 /* Compute the offsets to use for this function. */
1949 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1950 var_offset = targetm.starting_frame_offset ();
1951 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1952 out_arg_offset = STACK_POINTER_OFFSET;
1953 #ifdef FRAME_POINTER_CFA_OFFSET
1954 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1955 #else
1956 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1957 #endif
1958
1959 /* Initialize recognition, indicating that volatile is OK. */
1960 init_recog ();
1961
1962 /* Scan through all the insns, instantiating every virtual register still
1963 present. */
1964 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1965 if (INSN_P (insn))
1966 {
1967 /* These patterns in the instruction stream can never be recognized.
1968 Fortunately, they shouldn't contain virtual registers either. */
1969 if (GET_CODE (PATTERN (insn)) == USE
1970 || GET_CODE (PATTERN (insn)) == CLOBBER
1971 || GET_CODE (PATTERN (insn)) == ASM_INPUT
1972 || DEBUG_MARKER_INSN_P (insn))
1973 continue;
1974 else if (DEBUG_BIND_INSN_P (insn))
1975 instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
1976 else
1977 instantiate_virtual_regs_in_insn (insn);
1978
1979 if (insn->deleted ())
1980 continue;
1981
1982 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1983
1984 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1985 if (CALL_P (insn))
1986 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1987 }
1988
1989 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1990 instantiate_decls (current_function_decl);
1991
1992 targetm.instantiate_decls ();
1993
1994 /* Indicate that, from now on, assign_stack_local should use
1995 frame_pointer_rtx. */
1996 virtuals_instantiated = 1;
1997
1998 return 0;
1999 }
2000
2001 namespace {
2002
2003 const pass_data pass_data_instantiate_virtual_regs =
2004 {
2005 RTL_PASS, /* type */
2006 "vregs", /* name */
2007 OPTGROUP_NONE, /* optinfo_flags */
2008 TV_NONE, /* tv_id */
2009 0, /* properties_required */
2010 0, /* properties_provided */
2011 0, /* properties_destroyed */
2012 0, /* todo_flags_start */
2013 0, /* todo_flags_finish */
2014 };
2015
2016 class pass_instantiate_virtual_regs : public rtl_opt_pass
2017 {
2018 public:
2019 pass_instantiate_virtual_regs (gcc::context *ctxt)
2020 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2021 {}
2022
2023 /* opt_pass methods: */
2024 virtual unsigned int execute (function *)
2025 {
2026 return instantiate_virtual_regs ();
2027 }
2028
2029 }; // class pass_instantiate_virtual_regs
2030
2031 } // anon namespace
2032
2033 rtl_opt_pass *
2034 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2035 {
2036 return new pass_instantiate_virtual_regs (ctxt);
2037 }
2038
2039 \f
2040 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2041 This means a type for which function calls must pass an address to the
2042 function or get an address back from the function.
2043 EXP may be a type node or an expression (whose type is tested). */
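/* For example, assign_parms_augmented_arg_list below calls
   aggregate_value_p (DECL_RESULT (fndecl), fndecl) to decide whether a
   hidden return-slot pointer must be prepended to the parameter list;
   a plain int result normally yields 0 there, while a struct that the
   target returns in memory yields 1.  */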
2044
2045 int
2046 aggregate_value_p (const_tree exp, const_tree fntype)
2047 {
2048 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2049 int i, regno, nregs;
2050 rtx reg;
2051
2052 if (fntype)
2053 switch (TREE_CODE (fntype))
2054 {
2055 case CALL_EXPR:
2056 {
2057 tree fndecl = get_callee_fndecl (fntype);
2058 if (fndecl)
2059 fntype = TREE_TYPE (fndecl);
2060 else if (CALL_EXPR_FN (fntype))
2061 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2062 else
2063 /* For internal functions, assume nothing needs to be
2064 returned in memory. */
2065 return 0;
2066 }
2067 break;
2068 case FUNCTION_DECL:
2069 fntype = TREE_TYPE (fntype);
2070 break;
2071 case FUNCTION_TYPE:
2072 case METHOD_TYPE:
2073 break;
2074 case IDENTIFIER_NODE:
2075 fntype = NULL_TREE;
2076 break;
2077 default:
2078 /* We don't expect other tree types here. */
2079 gcc_unreachable ();
2080 }
2081
2082 if (VOID_TYPE_P (type))
2083 return 0;
2084
2085 /* If a record should be passed the same as its first (and only) member,
2086 don't pass it as an aggregate. */
2087 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2088 return aggregate_value_p (first_field (type), fntype);
2089
2090 /* If the front end has decided that this needs to be passed by
2091 reference, do so. */
2092 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2093 && DECL_BY_REFERENCE (exp))
2094 return 1;
2095
2096 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2097 if (fntype && TREE_ADDRESSABLE (fntype))
2098 return 1;
2099
2100 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2101 and thus can't be returned in registers. */
2102 if (TREE_ADDRESSABLE (type))
2103 return 1;
2104
2105 if (TYPE_EMPTY_P (type))
2106 return 0;
2107
2108 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2109 return 1;
2110
2111 if (targetm.calls.return_in_memory (type, fntype))
2112 return 1;
2113
2114 /* Make sure we have suitable call-clobbered regs to return
2115 the value in; if not, we must return it in memory. */
2116 reg = hard_function_value (type, 0, fntype, 0);
2117
2118 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2119 it is OK. */
2120 if (!REG_P (reg))
2121 return 0;
2122
2123 /* Use the default ABI if the type of the function isn't known.
2124 The scheme for handling interoperability between different ABIs
2125 requires us to be able to tell when we're calling a function with
2126 a nondefault ABI. */
2127 const predefined_function_abi &abi = (fntype
2128 ? fntype_abi (fntype)
2129 : default_function_abi);
2130 regno = REGNO (reg);
2131 nregs = hard_regno_nregs (regno, TYPE_MODE (type));
2132 for (i = 0; i < nregs; i++)
2133 if (!fixed_regs[regno + i] && !abi.clobbers_full_reg_p (regno + i))
2134 return 1;
2135
2136 return 0;
2137 }
2138 \f
2139 /* Return true if we should assign DECL a pseudo register; false if it
2140 should live on the local stack. */
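/* As the comments below spell out, the intent at -O0 is that named
   user variables stay on the stack (so the debugger can find them)
   while DECL_IGNORED_P or anonymous temporaries may go in pseudos;
   with optimization enabled most non-addressable, non-BLKmode decls
   end up in pseudos.  */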
2141
2142 bool
2143 use_register_for_decl (const_tree decl)
2144 {
2145 if (TREE_CODE (decl) == SSA_NAME)
2146 {
2147 /* We often try to use the SSA_NAME, instead of its underlying
2148 decl, to get type information and guide decisions, to avoid
2149 differences of behavior between anonymous and named
2150 variables, but in this one case we have to go for the actual
2151 variable if there is one. The main reason is that, at least
2152 at -O0, we want to place user variables on the stack, but we
2153 don't mind using pseudos for anonymous or ignored temps.
2154 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
2155 should go in pseudos, whereas their corresponding variables
2156 might have to go on the stack. So, disregarding the decl
2157 here would negatively impact debug info at -O0, enable
2158 coalescing between SSA_NAMEs that ought to get different
2159 stack/pseudo assignments, and get the incoming argument
2160 processing thoroughly confused by PARM_DECLs expected to live
2161 in stack slots but assigned to pseudos. */
2162 if (!SSA_NAME_VAR (decl))
2163 return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
2164 && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));
2165
2166 decl = SSA_NAME_VAR (decl);
2167 }
2168
2169 /* Honor volatile. */
2170 if (TREE_SIDE_EFFECTS (decl))
2171 return false;
2172
2173 /* Honor addressability. */
2174 if (TREE_ADDRESSABLE (decl))
2175 return false;
2176
2177 /* RESULT_DECLs are a bit special in that they're assigned without
2178 regard to use_register_for_decl, but we generally only store in
2179 them. If we coalesce their SSA NAMEs, we'd better return a
2180 result that matches the assignment in expand_function_start. */
2181 if (TREE_CODE (decl) == RESULT_DECL)
2182 {
2183 /* If it's not an aggregate, we're going to use a REG or a
2184 PARALLEL containing a REG. */
2185 if (!aggregate_value_p (decl, current_function_decl))
2186 return true;
2187
2188 /* If expand_function_start determines the return value, we'll
2189 use MEM if it's not by reference. */
2190 if (cfun->returns_pcc_struct
2191 || (targetm.calls.struct_value_rtx
2192 (TREE_TYPE (current_function_decl), 1)))
2193 return DECL_BY_REFERENCE (decl);
2194
2195 /* Otherwise, we're taking an extra all.function_result_decl
2196 argument. It's set up in assign_parms_augmented_arg_list,
2197 under the (negated) conditions above, and then it's used to
2198 set up the RESULT_DECL rtl in assign_parms, after looping
2199 over all parameters. Now, if the RESULT_DECL is not by
2200 reference, we'll use a MEM either way. */
2201 if (!DECL_BY_REFERENCE (decl))
2202 return false;
2203
2204 /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
2205 the function_result_decl's assignment. Since it's a pointer,
2206 we can short-circuit a number of the tests below, and we must
2207 duplicate them because we don't have the
2208 function_result_decl to test. */
2209 if (!targetm.calls.allocate_stack_slots_for_args ())
2210 return true;
2211 /* We don't set DECL_IGNORED_P for the function_result_decl. */
2212 if (optimize)
2213 return true;
2214 /* We don't set DECL_REGISTER for the function_result_decl. */
2215 return false;
2216 }
2217
2218 /* Only register-like things go in registers. */
2219 if (DECL_MODE (decl) == BLKmode)
2220 return false;
2221
2222 /* If -ffloat-store is specified, don't put explicit float variables
2223 into registers. */
2224 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2225 propagates values across these stores, and it probably shouldn't. */
2226 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2227 return false;
2228
2229 if (!targetm.calls.allocate_stack_slots_for_args ())
2230 return true;
2231
2232 /* If we're not interested in tracking debugging information for
2233 this decl, then we can certainly put it in a register. */
2234 if (DECL_IGNORED_P (decl))
2235 return true;
2236
2237 if (optimize)
2238 return true;
2239
2240 if (!DECL_REGISTER (decl))
2241 return false;
2242
2243 /* When not optimizing, disregard register keyword for types that
2244 could have methods, otherwise the methods won't be callable from
2245 the debugger. */
2246 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
2247 return false;
2248
2249 return true;
2250 }
2251
2252 /* Structures to communicate between the subroutines of assign_parms.
2253 The first holds data persistent across all parameters, the second
2254 is cleared out for each parameter. */
2255
2256 struct assign_parm_data_all
2257 {
2258 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2259 should become a job of the target or otherwise encapsulated. */
2260 CUMULATIVE_ARGS args_so_far_v;
2261 cumulative_args_t args_so_far;
2262 struct args_size stack_args_size;
2263 tree function_result_decl;
2264 tree orig_fnargs;
2265 rtx_insn *first_conversion_insn;
2266 rtx_insn *last_conversion_insn;
2267 HOST_WIDE_INT pretend_args_size;
2268 HOST_WIDE_INT extra_pretend_bytes;
2269 int reg_parm_stack_space;
2270 };
2271
2272 struct assign_parm_data_one
2273 {
2274 tree nominal_type;
2275 function_arg_info arg;
2276 rtx entry_parm;
2277 rtx stack_parm;
2278 machine_mode nominal_mode;
2279 machine_mode passed_mode;
2280 struct locate_and_pad_arg_data locate;
2281 int partial;
2282 };
2283
2284 /* A subroutine of assign_parms. Initialize ALL. */
2285
2286 static void
2287 assign_parms_initialize_all (struct assign_parm_data_all *all)
2288 {
2289 tree fntype ATTRIBUTE_UNUSED;
2290
2291 memset (all, 0, sizeof (*all));
2292
2293 fntype = TREE_TYPE (current_function_decl);
2294
2295 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2296 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2297 #else
2298 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2299 current_function_decl, -1);
2300 #endif
2301 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2302
2303 #ifdef INCOMING_REG_PARM_STACK_SPACE
2304 all->reg_parm_stack_space
2305 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2306 #endif
2307 }
2308
2309 /* If ARGS contains entries with complex types, split the entry into two
2310 entries of the component type. The vector is updated in place when
2311 substitutions are needed. */
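/* For instance, if the target's split_complex_arg hook accepts
   _Complex double, a PARM_DECL of that type is replaced here by two
   PARM_DECLs of type double: the original entry is rewritten to hold
   the real part and a synthetic, nameless decl for the imaginary part
   is inserted immediately after it.  */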
2312
2313 static void
2314 split_complex_args (vec<tree> *args)
2315 {
2316 unsigned i;
2317 tree p;
2318
2319 FOR_EACH_VEC_ELT (*args, i, p)
2320 {
2321 tree type = TREE_TYPE (p);
2322 if (TREE_CODE (type) == COMPLEX_TYPE
2323 && targetm.calls.split_complex_arg (type))
2324 {
2325 tree decl;
2326 tree subtype = TREE_TYPE (type);
2327 bool addressable = TREE_ADDRESSABLE (p);
2328
2329 /* Rewrite the PARM_DECL's type with its component. */
2330 p = copy_node (p);
2331 TREE_TYPE (p) = subtype;
2332 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2333 SET_DECL_MODE (p, VOIDmode);
2334 DECL_SIZE (p) = NULL;
2335 DECL_SIZE_UNIT (p) = NULL;
2336 /* If this arg must go in memory, put it in a pseudo here.
2337 We can't allow it to go in memory as per normal parms,
2338 because the usual place might not have the imag part
2339 adjacent to the real part. */
2340 DECL_ARTIFICIAL (p) = addressable;
2341 DECL_IGNORED_P (p) = addressable;
2342 TREE_ADDRESSABLE (p) = 0;
2343 layout_decl (p, 0);
2344 (*args)[i] = p;
2345
2346 /* Build a second synthetic decl. */
2347 decl = build_decl (EXPR_LOCATION (p),
2348 PARM_DECL, NULL_TREE, subtype);
2349 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2350 DECL_ARTIFICIAL (decl) = addressable;
2351 DECL_IGNORED_P (decl) = addressable;
2352 layout_decl (decl, 0);
2353 args->safe_insert (++i, decl);
2354 }
2355 }
2356 }
2357
2358 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2359 the hidden struct return argument, and (abi willing) complex args.
2360 Return the new parameter list. */
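/* For example, for a function returning a large struct on a target
   that provides no struct_value_rtx register, the list gains a leading
   artificial ".result_ptr" PARM_DECL (recorded in
   all->function_result_decl) through which the caller passes the
   address of the return slot.  */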
2361
2362 static vec<tree>
2363 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2364 {
2365 tree fndecl = current_function_decl;
2366 tree fntype = TREE_TYPE (fndecl);
2367 vec<tree> fnargs = vNULL;
2368 tree arg;
2369
2370 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2371 fnargs.safe_push (arg);
2372
2373 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2374
2375 /* If struct value address is treated as the first argument, make it so. */
2376 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2377 && ! cfun->returns_pcc_struct
2378 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2379 {
2380 tree type = build_pointer_type (TREE_TYPE (fntype));
2381 tree decl;
2382
2383 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2384 PARM_DECL, get_identifier (".result_ptr"), type);
2385 DECL_ARG_TYPE (decl) = type;
2386 DECL_ARTIFICIAL (decl) = 1;
2387 DECL_NAMELESS (decl) = 1;
2388 TREE_CONSTANT (decl) = 1;
2389 /* We don't set DECL_IGNORED_P or DECL_REGISTER here. If this
2390 changes, the end of the RESULT_DECL handling block in
2391 use_register_for_decl must be adjusted to match. */
2392
2393 DECL_CHAIN (decl) = all->orig_fnargs;
2394 all->orig_fnargs = decl;
2395 fnargs.safe_insert (0, decl);
2396
2397 all->function_result_decl = decl;
2398 }
2399
2400 /* If the target wants to split complex arguments into scalars, do so. */
2401 if (targetm.calls.split_complex_arg)
2402 split_complex_args (&fnargs);
2403
2404 return fnargs;
2405 }
2406
2407 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2408 data for the parameter. Incorporate ABI specifics such as pass-by-
2409 reference and type promotion. */
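/* A typical (target-dependent) promotion case, shown only as a sketch:
   a char or short parameter keeps its narrow QImode or HImode in
   data->nominal_mode, while data->arg.mode may become SImode if
   promote_function_mode below says the ABI passes it widened.  */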
2410
2411 static void
2412 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2413 struct assign_parm_data_one *data)
2414 {
2415 int unsignedp;
2416
2417 #ifndef BROKEN_VALUE_INITIALIZATION
2418 *data = assign_parm_data_one ();
2419 #else
2420 /* Old versions of GCC used to miscompile the above by only initializing
2421 the members with explicit constructors and copying garbage
2422 to the other members. */
2423 assign_parm_data_one zero_data = {};
2424 *data = zero_data;
2425 #endif
2426
2427 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2428 if (!cfun->stdarg)
2429 data->arg.named = 1; /* No variadic parms. */
2430 else if (DECL_CHAIN (parm))
2431 data->arg.named = 1; /* Not the last non-variadic parm. */
2432 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2433 data->arg.named = 1; /* Only variadic ones are unnamed. */
2434 else
2435 data->arg.named = 0; /* Treat as variadic. */
2436
2437 data->nominal_type = TREE_TYPE (parm);
2438 data->arg.type = DECL_ARG_TYPE (parm);
2439
2440 /* Look out for errors propagating this far. Also, if the parameter's
2441 type is void then its value doesn't matter. */
2442 if (TREE_TYPE (parm) == error_mark_node
2443 /* This can happen after weird syntax errors
2444 or if an enum type is defined among the parms. */
2445 || TREE_CODE (parm) != PARM_DECL
2446 || data->arg.type == NULL
2447 || VOID_TYPE_P (data->nominal_type))
2448 {
2449 data->nominal_type = data->arg.type = void_type_node;
2450 data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
2451 return;
2452 }
2453
2454 /* Find mode of arg as it is passed, and mode of arg as it should be
2455 during execution of this function. */
2456 data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
2457 data->nominal_mode = TYPE_MODE (data->nominal_type);
2458
2459 /* If the parm is to be passed as a transparent union or record, use the
2460 type of the first field for the tests below. We have already verified
2461 that the modes are the same. */
2462 if (RECORD_OR_UNION_TYPE_P (data->arg.type)
2463 && TYPE_TRANSPARENT_AGGR (data->arg.type))
2464 data->arg.type = TREE_TYPE (first_field (data->arg.type));
2465
2466 /* See if this arg was passed by invisible reference. */
2467 if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
2468 {
2469 data->nominal_type = data->arg.type;
2470 data->passed_mode = data->nominal_mode = data->arg.mode;
2471 }
2472
2473 /* Find mode as it is passed by the ABI. */
2474 unsignedp = TYPE_UNSIGNED (data->arg.type);
2475 data->arg.mode
2476 = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
2477 TREE_TYPE (current_function_decl), 0);
2478 }
2479
2480 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2481
2482 static void
2483 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2484 struct assign_parm_data_one *data, bool no_rtl)
2485 {
2486 int varargs_pretend_bytes = 0;
2487
2488 function_arg_info last_named_arg = data->arg;
2489 last_named_arg.named = true;
2490 targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
2491 &varargs_pretend_bytes, no_rtl);
2492
2493 /* If the back-end has requested extra stack space, record how much is
2494 needed. Do not change pretend_args_size otherwise since it may be
2495 nonzero from an earlier partial argument. */
2496 if (varargs_pretend_bytes > 0)
2497 all->pretend_args_size = varargs_pretend_bytes;
2498 }
2499
2500 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2501 the incoming location of the current parameter. */
2502
2503 static void
2504 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2505 struct assign_parm_data_one *data)
2506 {
2507 HOST_WIDE_INT pretend_bytes = 0;
2508 rtx entry_parm;
2509 bool in_regs;
2510
2511 if (data->arg.mode == VOIDmode)
2512 {
2513 data->entry_parm = data->stack_parm = const0_rtx;
2514 return;
2515 }
2516
2517 targetm.calls.warn_parameter_passing_abi (all->args_so_far,
2518 data->arg.type);
2519
2520 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2521 data->arg);
2522 if (entry_parm == 0)
2523 data->arg.mode = data->passed_mode;
2524
2525 /* Determine parm's home in the stack, in case it arrives in the stack
2526 or we should pretend it did. Compute the stack position and rtx where
2527 the argument arrives and its size.
2528
2529 There is one complexity here: If this was a parameter that would
2530 have been passed in registers, but wasn't only because it is
2531 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2532 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2533 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2534 as it was the previous time. */
2535 in_regs = (entry_parm != 0);
2536 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2537 in_regs = true;
2538 #endif
2539 if (!in_regs && !data->arg.named)
2540 {
2541 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2542 {
2543 rtx tem;
2544 function_arg_info named_arg = data->arg;
2545 named_arg.named = true;
2546 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2547 named_arg);
2548 in_regs = tem != NULL;
2549 }
2550 }
2551
2552 /* If this parameter was passed both in registers and in the stack, use
2553 the copy on the stack. */
2554 if (targetm.calls.must_pass_in_stack (data->arg))
2555 entry_parm = 0;
2556
2557 if (entry_parm)
2558 {
2559 int partial;
2560
2561 partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
2562 data->partial = partial;
2563
2564 /* The caller might already have allocated stack space for the
2565 register parameters. */
2566 if (partial != 0 && all->reg_parm_stack_space == 0)
2567 {
2568 /* Part of this argument is passed in registers and part
2569 is passed on the stack. Ask the prologue code to extend
2570 the stack part so that we can recreate the full value.
2571
2572 PRETEND_BYTES is the size of the registers we need to store.
2573 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2574 stack space that the prologue should allocate.
2575
2576 Internally, gcc assumes that the argument pointer is aligned
2577 to STACK_BOUNDARY bits. This is used both for alignment
2578 optimizations (see init_emit) and to locate arguments that are
2579 aligned to more than PARM_BOUNDARY bits. We must preserve this
2580 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2581 a stack boundary. */
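/* A worked example with assumed numbers: if 12 bytes of the
   argument were passed in registers and STACK_BYTES is 16, then
   pretend_args_size below becomes CEIL_ROUND (12, 16) = 16, i.e.
   the prologue is asked for one full 16-byte-aligned block to
   hold the register part.  */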
2582
2583 /* We assume at most one partial arg, and it must be the first
2584 argument on the stack. */
2585 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2586
2587 pretend_bytes = partial;
2588 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2589
2590 /* We want to align relative to the actual stack pointer, so
2591 don't include this in the stack size until later. */
2592 all->extra_pretend_bytes = all->pretend_args_size;
2593 }
2594 }
2595
2596 locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
2597 all->reg_parm_stack_space,
2598 entry_parm ? data->partial : 0, current_function_decl,
2599 &all->stack_args_size, &data->locate);
2600
2601 /* Update parm_stack_boundary if this parameter is passed in the
2602 stack. */
2603 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2604 crtl->parm_stack_boundary = data->locate.boundary;
2605
2606 /* Adjust offsets to include the pretend args. */
2607 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2608 data->locate.slot_offset.constant += pretend_bytes;
2609 data->locate.offset.constant += pretend_bytes;
2610
2611 data->entry_parm = entry_parm;
2612 }
2613
2614 /* A subroutine of assign_parms. If there is actually space on the stack
2615 for this parm, count it in stack_args_size and return true. */
2616
2617 static bool
2618 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2619 struct assign_parm_data_one *data)
2620 {
2621 /* Trivially true if we've no incoming register. */
2622 if (data->entry_parm == NULL)
2623 ;
2624 /* Also true if we're partially in registers and partially not,
2625 since we've arranged to drop the entire argument on the stack. */
2626 else if (data->partial != 0)
2627 ;
2628 /* Also true if the target says that it's passed in both registers
2629 and on the stack. */
2630 else if (GET_CODE (data->entry_parm) == PARALLEL
2631 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2632 ;
2633 /* Also true if the target says that there's stack allocated for
2634 all register parameters. */
2635 else if (all->reg_parm_stack_space > 0)
2636 ;
2637 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2638 else
2639 return false;
2640
2641 all->stack_args_size.constant += data->locate.size.constant;
2642 if (data->locate.size.var)
2643 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2644
2645 return true;
2646 }
2647
2648 /* A subroutine of assign_parms. Given that this parameter is allocated
2649 stack space by the ABI, find it. */
2650
2651 static void
2652 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2653 {
2654 rtx offset_rtx, stack_parm;
2655 unsigned int align, boundary;
2656
2657 /* If we're passing this arg using a reg, make its stack home the
2658 aligned stack slot. */
2659 if (data->entry_parm)
2660 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2661 else
2662 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2663
2664 stack_parm = crtl->args.internal_arg_pointer;
2665 if (offset_rtx != const0_rtx)
2666 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2667 stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);
2668
2669 if (!data->arg.pass_by_reference)
2670 {
2671 set_mem_attributes (stack_parm, parm, 1);
2672 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2673 while promoted mode's size is needed. */
2674 if (data->arg.mode != BLKmode
2675 && data->arg.mode != DECL_MODE (parm))
2676 {
2677 set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
2678 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2679 {
2680 poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
2681 data->arg.mode);
2682 if (maybe_ne (offset, 0))
2683 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2684 }
2685 }
2686 }
2687
2688 boundary = data->locate.boundary;
2689 align = BITS_PER_UNIT;
2690
2691 /* If we're padding upward, we know that the alignment of the slot
2692 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2693 intentionally forcing upward padding. Otherwise we have to come
2694 up with a guess at the alignment based on OFFSET_RTX. */
2695 poly_int64 offset;
2696 if (data->locate.where_pad == PAD_NONE || data->entry_parm)
2697 align = boundary;
2698 else if (data->locate.where_pad == PAD_UPWARD)
2699 {
2700 align = boundary;
2701 /* If the argument offset is actually more aligned than the nominal
2702 stack slot boundary, take advantage of that excess alignment.
2703 Don't make any assumptions if STACK_POINTER_OFFSET is in use. */
2704 if (poly_int_rtx_p (offset_rtx, &offset)
2705 && known_eq (STACK_POINTER_OFFSET, 0))
2706 {
2707 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2708 if (offset_align == 0 || offset_align > STACK_BOUNDARY)
2709 offset_align = STACK_BOUNDARY;
2710 align = MAX (align, offset_align);
2711 }
2712 }
2713 else if (poly_int_rtx_p (offset_rtx, &offset))
2714 {
2715 align = least_bit_hwi (boundary);
2716 unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
2717 if (offset_align != 0)
2718 align = MIN (align, offset_align);
2719 }
2720 set_mem_align (stack_parm, align);
2721
2722 if (data->entry_parm)
2723 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2724
2725 data->stack_parm = stack_parm;
2726 }
2727
2728 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2729 always valid and contiguous. */
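/* For example, an argument passed partly in registers and partly on
   the stack (data->partial != 0) has its register portion pushed into
   the stack slot below, so the rest of the compiler can treat the
   whole value as having arrived in memory.  */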
2730
2731 static void
2732 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2733 {
2734 rtx entry_parm = data->entry_parm;
2735 rtx stack_parm = data->stack_parm;
2736
2737 /* If this parm was passed part in regs and part in memory, pretend it
2738 arrived entirely in memory by pushing the register-part onto the stack.
2739 In the special case of a DImode or DFmode that is split, we could put
2740 it together in a pseudoreg directly, but for now that's not worth
2741 bothering with. */
2742 if (data->partial != 0)
2743 {
2744 /* Handle calls that pass values in multiple non-contiguous
2745 locations. The Irix 6 ABI has examples of this. */
2746 if (GET_CODE (entry_parm) == PARALLEL)
2747 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2748 data->arg.type, int_size_in_bytes (data->arg.type));
2749 else
2750 {
2751 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2752 move_block_from_reg (REGNO (entry_parm),
2753 validize_mem (copy_rtx (stack_parm)),
2754 data->partial / UNITS_PER_WORD);
2755 }
2756
2757 entry_parm = stack_parm;
2758 }
2759
2760 /* If we didn't decide this parm came in a register, by default it came
2761 on the stack. */
2762 else if (entry_parm == NULL)
2763 entry_parm = stack_parm;
2764
2765 /* When an argument is passed in multiple locations, we can't make use
2766 of this information, but we can save some copying if the whole argument
2767 is passed in a single register. */
2768 else if (GET_CODE (entry_parm) == PARALLEL
2769 && data->nominal_mode != BLKmode
2770 && data->passed_mode != BLKmode)
2771 {
2772 size_t i, len = XVECLEN (entry_parm, 0);
2773
2774 for (i = 0; i < len; i++)
2775 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2776 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2777 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2778 == data->passed_mode)
2779 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2780 {
2781 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2782 break;
2783 }
2784 }
2785
2786 data->entry_parm = entry_parm;
2787 }
2788
2789 /* A subroutine of assign_parms. Reconstitute any values which were
2790 passed in multiple registers and would fit in a single register. */
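/* For example, a value arriving as a (parallel ...) of two word-sized
   registers whose overall mode is a single multi-word integer mode can
   be reassembled into one pseudo of that mode with emit_group_store,
   instead of being spilled to the stack slot first.  */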
2791
2792 static void
2793 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2794 {
2795 rtx entry_parm = data->entry_parm;
2796
2797 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2798 This can be done with register operations rather than on the
2799 stack, even if we will store the reconstituted parameter on the
2800 stack later. */
2801 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2802 {
2803 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2804 emit_group_store (parmreg, entry_parm, data->arg.type,
2805 GET_MODE_SIZE (GET_MODE (entry_parm)));
2806 entry_parm = parmreg;
2807 }
2808
2809 data->entry_parm = entry_parm;
2810 }
2811
2812 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2813 always valid and properly aligned. */
2814
2815 static void
2816 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2817 {
2818 rtx stack_parm = data->stack_parm;
2819
2820 /* If we can't trust the parm stack slot to be aligned enough for its
2821 ultimate type, don't use that slot after entry. We'll make another
2822 stack slot, if we need one. */
2823 if (stack_parm
2824 && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
2825 && ((optab_handler (movmisalign_optab, data->nominal_mode)
2826 != CODE_FOR_nothing)
2827 || targetm.slow_unaligned_access (data->nominal_mode,
2828 MEM_ALIGN (stack_parm))))
2829 || (data->nominal_type
2830 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2831 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2832 stack_parm = NULL;
2833
2834 /* If parm was passed in memory, and we need to convert it on entry,
2835 don't store it back in that same slot. */
2836 else if (data->entry_parm == stack_parm
2837 && data->nominal_mode != BLKmode
2838 && data->nominal_mode != data->passed_mode)
2839 stack_parm = NULL;
2840
2841 /* If stack protection is in effect for this function, don't leave any
2842 pointers in their passed stack slots. */
2843 else if (crtl->stack_protect_guard
2844 && (flag_stack_protect == 2
2845 || data->arg.pass_by_reference
2846 || POINTER_TYPE_P (data->nominal_type)))
2847 stack_parm = NULL;
2848
2849 data->stack_parm = stack_parm;
2850 }
2851
2852 /* A subroutine of assign_parms. Return true if the current parameter
2853 should be stored as a BLKmode in the current frame. */
2854
2855 static bool
2856 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2857 {
2858 if (data->nominal_mode == BLKmode)
2859 return true;
2860 if (GET_MODE (data->entry_parm) == BLKmode)
2861 return true;
2862
2863 #ifdef BLOCK_REG_PADDING
2864 /* Only assign_parm_setup_block knows how to deal with register arguments
2865 that are padded at the least significant end. */
2866 if (REG_P (data->entry_parm)
2867 && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
2868 && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
2869 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
2870 return true;
2871 #endif
2872
2873 return false;
2874 }
2875
2876 /* A subroutine of assign_parms. Arrange for the parameter to be
2877 present and valid in DATA->STACK_RTL. */
2878
2879 static void
2880 assign_parm_setup_block (struct assign_parm_data_all *all,
2881 tree parm, struct assign_parm_data_one *data)
2882 {
2883 rtx entry_parm = data->entry_parm;
2884 rtx stack_parm = data->stack_parm;
2885 rtx target_reg = NULL_RTX;
2886 bool in_conversion_seq = false;
2887 HOST_WIDE_INT size;
2888 HOST_WIDE_INT size_stored;
2889
2890 if (GET_CODE (entry_parm) == PARALLEL)
2891 entry_parm = emit_group_move_into_temps (entry_parm);
2892
2893 /* If we want the parameter in a pseudo, don't use a stack slot. */
2894 if (is_gimple_reg (parm) && use_register_for_decl (parm))
2895 {
2896 tree def = ssa_default_def (cfun, parm);
2897 gcc_assert (def);
2898 machine_mode mode = promote_ssa_mode (def, NULL);
2899 rtx reg = gen_reg_rtx (mode);
2900 if (GET_CODE (reg) != CONCAT)
2901 stack_parm = reg;
2902 else
2903 {
2904 target_reg = reg;
2905 /* Avoid allocating a stack slot, if there isn't one
2906 preallocated by the ABI. It might seem like we should
2907 always prefer a pseudo, but converting between
2908 floating-point and integer modes goes through the stack
2909 on various machines, so it's better to use the reserved
2910 stack slot than to risk wasting it and allocating more
2911 for the conversion. */
2912 if (stack_parm == NULL_RTX)
2913 {
2914 int save = generating_concat_p;
2915 generating_concat_p = 0;
2916 stack_parm = gen_reg_rtx (mode);
2917 generating_concat_p = save;
2918 }
2919 }
2920 data->stack_parm = NULL;
2921 }
2922
2923 size = int_size_in_bytes (data->arg.type);
2924 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2925 if (stack_parm == 0)
2926 {
2927 HOST_WIDE_INT parm_align
2928 = (STRICT_ALIGNMENT
2929 ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));
2930
2931 SET_DECL_ALIGN (parm, parm_align);
2932 if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
2933 {
2934 rtx allocsize = gen_int_mode (size_stored, Pmode);
2935 get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
2936 stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
2937 MAX_SUPPORTED_STACK_ALIGNMENT);
2938 rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
2939 DECL_ALIGN (parm));
2940 mark_reg_pointer (addr, DECL_ALIGN (parm));
2941 stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
2942 MEM_NOTRAP_P (stack_parm) = 1;
2943 }
2944 else
2945 stack_parm = assign_stack_local (BLKmode, size_stored,
2946 DECL_ALIGN (parm));
2947 if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
2948 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2949 set_mem_attributes (stack_parm, parm, 1);
2950 }
2951
2952 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2953 calls that pass values in multiple non-contiguous locations. */
2954 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2955 {
2956 rtx mem;
2957
2958 /* Note that we will be storing an integral number of words.
2959 So we have to be careful to ensure that we allocate an
2960 integral number of words. We do this above when we call
2961 assign_stack_local if space was not allocated in the argument
2962 list. If it was, this will not work if PARM_BOUNDARY is not
2963 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2964 if it becomes a problem. Exception is when BLKmode arrives
2965 with arguments not conforming to word_mode. */
2966
2967 if (data->stack_parm == 0)
2968 ;
2969 else if (GET_CODE (entry_parm) == PARALLEL)
2970 ;
2971 else
2972 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2973
2974 mem = validize_mem (copy_rtx (stack_parm));
2975
2976 /* Handle values in multiple non-contiguous locations. */
2977 if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
2978 emit_group_store (mem, entry_parm, data->arg.type, size);
2979 else if (GET_CODE (entry_parm) == PARALLEL)
2980 {
2981 push_to_sequence2 (all->first_conversion_insn,
2982 all->last_conversion_insn);
2983 emit_group_store (mem, entry_parm, data->arg.type, size);
2984 all->first_conversion_insn = get_insns ();
2985 all->last_conversion_insn = get_last_insn ();
2986 end_sequence ();
2987 in_conversion_seq = true;
2988 }
2989
2990 else if (size == 0)
2991 ;
2992
2993 /* If SIZE is that of a mode no bigger than a word, just use
2994 that mode's store operation. */
2995 else if (size <= UNITS_PER_WORD)
2996 {
2997 unsigned int bits = size * BITS_PER_UNIT;
2998 machine_mode mode = int_mode_for_size (bits, 0).else_blk ();
2999
3000 if (mode != BLKmode
3001 #ifdef BLOCK_REG_PADDING
3002 && (size == UNITS_PER_WORD
3003 || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3004 != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
3005 #endif
3006 )
3007 {
3008 rtx reg;
3009
3010 /* We are really truncating a word_mode value containing
3011 SIZE bytes into a value of mode MODE. If such an
3012 operation requires no actual instructions, we can refer
3013 to the value directly in mode MODE, otherwise we must
3014 start with the register in word_mode and explicitly
3015 convert it. */
3016 if (mode == word_mode
3017 || TRULY_NOOP_TRUNCATION_MODES_P (mode, word_mode))
3018 reg = gen_rtx_REG (mode, REGNO (entry_parm));
3019 else
3020 {
3021 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3022 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
3023 }
3024 emit_move_insn (change_address (mem, mode, 0), reg);
3025 }
3026
3027 #ifdef BLOCK_REG_PADDING
3028 /* Storing the register in memory as a full word, as
3029 move_block_from_reg below would do, and then using the
3030 MEM in a smaller mode, has the effect of shifting right
3031 if BYTES_BIG_ENDIAN. If we're bypassing memory, the
3032 shifting must be explicit. */
3033 else if (!MEM_P (mem))
3034 {
3035 rtx x;
3036
3037 /* If the assert below fails, we should have taken the
3038 mode != BLKmode path above, unless we have downward
3039 padding of smaller-than-word arguments on a machine
3040 with little-endian bytes, which would likely require
3041 additional changes to work correctly. */
3042 gcc_checking_assert (BYTES_BIG_ENDIAN
3043 && (BLOCK_REG_PADDING (mode,
3044 data->arg.type, 1)
3045 == PAD_UPWARD));
3046
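/* Illustrative numbers only: with 8-byte words and a 3-byte argument
   padded upward on a big-endian target, the value sits in the most
   significant bytes of the word, so it is shifted right by
   (UNITS_PER_WORD - size) * BITS_PER_UNIT = (8 - 3) * 8 = 40 bits
   before the narrow lowpart is taken.  */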
3047 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3048
3049 x = gen_rtx_REG (word_mode, REGNO (entry_parm));
3050 x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
3051 NULL_RTX, 1);
3052 x = force_reg (word_mode, x);
3053 x = gen_lowpart_SUBREG (GET_MODE (mem), x);
3054
3055 emit_move_insn (mem, x);
3056 }
3057 #endif
3058
3059 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
3060 machine must be aligned to the left before storing
3061 to memory. Note that the previous test doesn't
3062 handle all cases (e.g. SIZE == 3). */
3063 else if (size != UNITS_PER_WORD
3064 #ifdef BLOCK_REG_PADDING
3065 && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
3066 == PAD_DOWNWARD)
3067 #else
3068 && BYTES_BIG_ENDIAN
3069 #endif
3070 )
3071 {
3072 rtx tem, x;
3073 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
3074 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
3075
3076 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
3077 tem = change_address (mem, word_mode, 0);
3078 emit_move_insn (tem, x);
3079 }
3080 else
3081 move_block_from_reg (REGNO (entry_parm), mem,
3082 size_stored / UNITS_PER_WORD);
3083 }
3084 else if (!MEM_P (mem))
3085 {
3086 gcc_checking_assert (size > UNITS_PER_WORD);
3087 #ifdef BLOCK_REG_PADDING
3088 gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
3089 data->arg.type, 0)
3090 == PAD_UPWARD);
3091 #endif
3092 emit_move_insn (mem, entry_parm);
3093 }
3094 else
3095 move_block_from_reg (REGNO (entry_parm), mem,
3096 size_stored / UNITS_PER_WORD);
3097 }
3098 else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->arg.type))
3099 {
3100 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3101 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
3102 BLOCK_OP_NORMAL);
3103 all->first_conversion_insn = get_insns ();
3104 all->last_conversion_insn = get_last_insn ();
3105 end_sequence ();
3106 in_conversion_seq = true;
3107 }
3108
3109 if (target_reg)
3110 {
3111 if (!in_conversion_seq)
3112 emit_move_insn (target_reg, stack_parm);
3113 else
3114 {
3115 push_to_sequence2 (all->first_conversion_insn,
3116 all->last_conversion_insn);
3117 emit_move_insn (target_reg, stack_parm);
3118 all->first_conversion_insn = get_insns ();
3119 all->last_conversion_insn = get_last_insn ();
3120 end_sequence ();
3121 }
3122 stack_parm = target_reg;
3123 }
3124
3125 data->stack_parm = stack_parm;
3126 set_parm_rtl (parm, stack_parm);
3127 }
3128
3129 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3130 parameter. Get it there. Perform all ABI specified conversions. */
3131
3132 static void
3133 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3134 struct assign_parm_data_one *data)
3135 {
3136 rtx parmreg, validated_mem;
3137 rtx equiv_stack_parm;
3138 machine_mode promoted_nominal_mode;
3139 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3140 bool did_conversion = false;
3141 bool need_conversion, moved;
3142 enum insn_code icode;
3143 rtx rtl;
3144
3145 /* Store the parm in a pseudoregister during the function, but we may
3146 need to do it in a wider mode. Using 2 here makes the result
3147 consistent with promote_decl_mode and thus expand_expr_real_1. */
3148 promoted_nominal_mode
3149 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3150 TREE_TYPE (current_function_decl), 2);
3151
3152 parmreg = gen_reg_rtx (promoted_nominal_mode);
3153 if (!DECL_ARTIFICIAL (parm))
3154 mark_user_reg (parmreg);
3155
3156 /* If this was an item that we received a pointer to,
3157 set rtl appropriately. */
3158 if (data->arg.pass_by_reference)
3159 {
3160 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
3161 set_mem_attributes (rtl, parm, 1);
3162 }
3163 else
3164 rtl = parmreg;
3165
3166 assign_parm_remove_parallels (data);
3167
3168 /* Copy the value into the register, thus bridging between
3169 assign_parm_find_data_types and expand_expr_real_1. */
3170
3171 equiv_stack_parm = data->stack_parm;
3172 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3173
3174 need_conversion = (data->nominal_mode != data->passed_mode
3175 || promoted_nominal_mode != data->arg.mode);
3176 moved = false;
3177
3178 if (need_conversion
3179 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3180 && data->nominal_mode == data->passed_mode
3181 && data->nominal_mode == GET_MODE (data->entry_parm))
3182 {
3183 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3184 mode, by the caller. We now have to convert it to
3185 NOMINAL_MODE, if different. However, PARMREG may be in
3186 a different mode than NOMINAL_MODE if it is being stored
3187 promoted.
3188
3189 If ENTRY_PARM is a hard register, it might be in a register
3190 not valid for operating in its mode (e.g., an odd-numbered
3191 register for a DFmode). In that case, moves are the only
3192 thing valid, so we can't do a convert from there. This
3193 occurs when the calling sequence allows such misaligned
3194 usages.
3195
3196 In addition, the conversion may involve a call, which could
3197 clobber parameters which haven't been copied to pseudo
3198 registers yet.
3199
3200 First, we try to emit an insn which performs the necessary
3201 conversion. We verify that this insn does not clobber any
3202 hard registers. */
3203
3204 rtx op0, op1;
3205
3206 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3207 unsignedp);
3208
3209 op0 = parmreg;
3210 op1 = validated_mem;
3211 if (icode != CODE_FOR_nothing
3212 && insn_operand_matches (icode, 0, op0)
3213 && insn_operand_matches (icode, 1, op1))
3214 {
3215 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3216 rtx_insn *insn, *insns;
3217 rtx t = op1;
3218 HARD_REG_SET hardregs;
3219
3220 start_sequence ();
3221 /* If op1 is a hard register that is likely spilled, first
3222 force it into a pseudo, otherwise combiner might extend
3223 its lifetime too much. */
3224 if (GET_CODE (t) == SUBREG)
3225 t = SUBREG_REG (t);
3226 if (REG_P (t)
3227 && HARD_REGISTER_P (t)
3228 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3229 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3230 {
3231 t = gen_reg_rtx (GET_MODE (op1));
3232 emit_move_insn (t, op1);
3233 }
3234 else
3235 t = op1;
3236 rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3237 data->passed_mode, unsignedp);
3238 emit_insn (pat);
3239 insns = get_insns ();
3240
3241 moved = true;
3242 CLEAR_HARD_REG_SET (hardregs);
3243 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3244 {
3245 if (INSN_P (insn))
3246 note_stores (insn, record_hard_reg_sets, &hardregs);
3247 if (!hard_reg_set_empty_p (hardregs))
3248 moved = false;
3249 }
3250
3251 end_sequence ();
3252
3253 if (moved)
3254 {
3255 emit_insn (insns);
3256 if (equiv_stack_parm != NULL_RTX)
3257 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3258 equiv_stack_parm);
3259 }
3260 }
3261 }
3262
3263 if (moved)
3264 /* Nothing to do. */
3265 ;
3266 else if (need_conversion)
3267 {
3268 /* We did not have an insn to convert directly, or the sequence
3269 generated appeared unsafe. We must first copy the parm to a
3270 pseudo reg, and save the conversion until after all
3271 parameters have been moved. */
3272
3273 int save_tree_used;
3274 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3275
3276 emit_move_insn (tempreg, validated_mem);
3277
3278 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3279 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3280
3281 if (partial_subreg_p (tempreg)
3282 && GET_MODE (tempreg) == data->nominal_mode
3283 && REG_P (SUBREG_REG (tempreg))
3284 && data->nominal_mode == data->passed_mode
3285 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
3286 {
3287 /* The argument is already sign/zero extended, so note it
3288 into the subreg. */
3289 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3290 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3291 }
3292
3293 /* TREE_USED gets set erroneously during expand_assignment. */
3294 save_tree_used = TREE_USED (parm);
3295 SET_DECL_RTL (parm, rtl);
3296 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3297 SET_DECL_RTL (parm, NULL_RTX);
3298 TREE_USED (parm) = save_tree_used;
3299 all->first_conversion_insn = get_insns ();
3300 all->last_conversion_insn = get_last_insn ();
3301 end_sequence ();
3302
3303 did_conversion = true;
3304 }
3305 else if (MEM_P (data->entry_parm)
3306 && GET_MODE_ALIGNMENT (promoted_nominal_mode)
3307 > MEM_ALIGN (data->entry_parm)
3308 && (((icode = optab_handler (movmisalign_optab,
3309 promoted_nominal_mode))
3310 != CODE_FOR_nothing)
3311 || targetm.slow_unaligned_access (promoted_nominal_mode,
3312 MEM_ALIGN (data->entry_parm))))
3313 {
3314 if (icode != CODE_FOR_nothing)
3315 emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
3316 else
3317 rtl = parmreg = extract_bit_field (validated_mem,
3318 GET_MODE_BITSIZE (promoted_nominal_mode), 0,
3319 unsignedp, parmreg,
3320 promoted_nominal_mode, VOIDmode, false, NULL);
3321 }
3322 else
3323 emit_move_insn (parmreg, validated_mem);
3324
3325 /* If we were passed a pointer but the actual value can safely live
3326 in a register, retrieve it and use it directly. */
3327 if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3328 {
3329 /* We can't use nominal_mode, because it will have been set to
3330 Pmode above. We must use the actual mode of the parm. */
3331 if (use_register_for_decl (parm))
3332 {
3333 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3334 mark_user_reg (parmreg);
3335 }
3336 else
3337 {
3338 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3339 TYPE_MODE (TREE_TYPE (parm)),
3340 TYPE_ALIGN (TREE_TYPE (parm)));
3341 parmreg
3342 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3343 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3344 align);
3345 set_mem_attributes (parmreg, parm, 1);
3346 }
3347
3348 /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
3349 the debug info in case it is not legitimate. */
3350 if (GET_MODE (parmreg) != GET_MODE (rtl))
3351 {
3352 rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
3353 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3354
3355 push_to_sequence2 (all->first_conversion_insn,
3356 all->last_conversion_insn);
3357 emit_move_insn (tempreg, rtl);
3358 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3359 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
3360 tempreg);
3361 all->first_conversion_insn = get_insns ();
3362 all->last_conversion_insn = get_last_insn ();
3363 end_sequence ();
3364
3365 did_conversion = true;
3366 }
3367 else
3368 emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
3369
3370 rtl = parmreg;
3371
3372 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3373 now the parm. */
3374 data->stack_parm = NULL;
3375 }
3376
3377 set_parm_rtl (parm, rtl);
3378
3379 /* Mark the register as eliminable if we did no conversion and it was
3380 copied from memory at a fixed offset, and the arg pointer was not
3381 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3382 offset formed an invalid address, such memory-equivalences as we
3383 make here would screw up life analysis for it. */
3384 if (data->nominal_mode == data->passed_mode
3385 && !did_conversion
3386 && data->stack_parm != 0
3387 && MEM_P (data->stack_parm)
3388 && data->locate.offset.var == 0
3389 && reg_mentioned_p (virtual_incoming_args_rtx,
3390 XEXP (data->stack_parm, 0)))
3391 {
3392 rtx_insn *linsn = get_last_insn ();
3393 rtx_insn *sinsn;
3394 rtx set;
3395
3396 /* Mark complex types separately. */
3397 if (GET_CODE (parmreg) == CONCAT)
3398 {
3399 scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
3400 int regnor = REGNO (XEXP (parmreg, 0));
3401 int regnoi = REGNO (XEXP (parmreg, 1));
3402 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3403 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3404 GET_MODE_SIZE (submode));
3405
3406 /* Scan backwards for the set of the real and
3407 imaginary parts. */
3408 for (sinsn = linsn; sinsn != 0;
3409 sinsn = prev_nonnote_insn (sinsn))
3410 {
3411 set = single_set (sinsn);
3412 if (set == 0)
3413 continue;
3414
3415 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3416 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3417 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3418 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3419 }
3420 }
3421 else
3422 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3423 }
3424
3425 /* For pointer data type, suggest pointer register. */
3426 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3427 mark_reg_pointer (parmreg,
3428 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3429 }
3430
3431 /* A subroutine of assign_parms. Allocate stack space to hold the current
3432 parameter. Get it there. Perform all ABI specified conversions. */
3433
3434 static void
3435 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3436 struct assign_parm_data_one *data)
3437 {
3438 /* Value must be stored in the stack slot STACK_PARM during function
3439 execution. */
3440 bool to_conversion = false;
3441
3442 assign_parm_remove_parallels (data);
3443
3444 if (data->arg.mode != data->nominal_mode)
3445 {
3446 /* Conversion is required. */
3447 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3448
3449 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3450
3451 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3452 to_conversion = true;
3453
3454 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3455 TYPE_UNSIGNED (TREE_TYPE (parm)));
3456
3457 if (data->stack_parm)
3458 {
3459 poly_int64 offset
3460 = subreg_lowpart_offset (data->nominal_mode,
3461 GET_MODE (data->stack_parm));
3462 /* ??? This may need a big-endian conversion on sparc64. */
3463 data->stack_parm
3464 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3465 if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
3466 set_mem_offset (data->stack_parm,
3467 MEM_OFFSET (data->stack_parm) + offset);
3468 }
3469 }
3470
3471 if (data->entry_parm != data->stack_parm)
3472 {
3473 rtx src, dest;
3474
3475 if (data->stack_parm == 0)
3476 {
3477 int align = STACK_SLOT_ALIGNMENT (data->arg.type,
3478 GET_MODE (data->entry_parm),
3479 TYPE_ALIGN (data->arg.type));
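/* If the slot's natural alignment is below the mode's alignment and the
target either provides a movmisalign pattern for the mode or penalizes
unaligned accesses, allocate the slot fully aligned for the mode instead. */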
3480 if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
3481 && ((optab_handler (movmisalign_optab,
3482 GET_MODE (data->entry_parm))
3483 != CODE_FOR_nothing)
3484 || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
3485 align)))
3486 align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
3487 data->stack_parm
3488 = assign_stack_local (GET_MODE (data->entry_parm),
3489 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3490 align);
3491 align = MEM_ALIGN (data->stack_parm);
3492 set_mem_attributes (data->stack_parm, parm, 1);
3493 set_mem_align (data->stack_parm, align);
3494 }
3495
3496 dest = validize_mem (copy_rtx (data->stack_parm));
3497 src = validize_mem (copy_rtx (data->entry_parm));
3498
3499 if (TYPE_EMPTY_P (data->arg.type))
3500 /* Empty types don't really need to be copied. */;
3501 else if (MEM_P (src))
3502 {
3503 /* Use a block move to handle potentially misaligned entry_parm. */
3504 if (!to_conversion)
3505 push_to_sequence2 (all->first_conversion_insn,
3506 all->last_conversion_insn);
3507 to_conversion = true;
3508
3509 emit_block_move (dest, src,
3510 GEN_INT (int_size_in_bytes (data->arg.type)),
3511 BLOCK_OP_NORMAL);
3512 }
3513 else
3514 {
3515 if (!REG_P (src))
3516 src = force_reg (GET_MODE (src), src);
3517 emit_move_insn (dest, src);
3518 }
3519 }
3520
3521 if (to_conversion)
3522 {
3523 all->first_conversion_insn = get_insns ();
3524 all->last_conversion_insn = get_last_insn ();
3525 end_sequence ();
3526 }
3527
3528 set_parm_rtl (parm, data->stack_parm);
3529 }
3530
3531 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3532 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3533
3534 static void
3535 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3536 vec<tree> fnargs)
3537 {
3538 tree parm;
3539 tree orig_fnargs = all->orig_fnargs;
3540 unsigned i = 0;
3541
3542 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3543 {
3544 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3545 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3546 {
3547 rtx tmp, real, imag;
3548 scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3549
3550 real = DECL_RTL (fnargs[i]);
3551 imag = DECL_RTL (fnargs[i + 1]);
3552 if (inner != GET_MODE (real))
3553 {
3554 real = gen_lowpart_SUBREG (inner, real);
3555 imag = gen_lowpart_SUBREG (inner, imag);
3556 }
3557
3558 if (TREE_ADDRESSABLE (parm))
3559 {
3560 rtx rmem, imem;
3561 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3562 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3563 DECL_MODE (parm),
3564 TYPE_ALIGN (TREE_TYPE (parm)));
3565
3566 /* split_complex_arg put the real and imag parts in
3567 pseudos. Move them to memory. */
3568 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3569 set_mem_attributes (tmp, parm, 1);
3570 rmem = adjust_address_nv (tmp, inner, 0);
3571 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3572 push_to_sequence2 (all->first_conversion_insn,
3573 all->last_conversion_insn);
3574 emit_move_insn (rmem, real);
3575 emit_move_insn (imem, imag);
3576 all->first_conversion_insn = get_insns ();
3577 all->last_conversion_insn = get_last_insn ();
3578 end_sequence ();
3579 }
3580 else
3581 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3582 set_parm_rtl (parm, tmp);
3583
3584 real = DECL_INCOMING_RTL (fnargs[i]);
3585 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3586 if (inner != GET_MODE (real))
3587 {
3588 real = gen_lowpart_SUBREG (inner, real);
3589 imag = gen_lowpart_SUBREG (inner, imag);
3590 }
3591 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3592 set_decl_incoming_rtl (parm, tmp, false);
3593 i++;
3594 }
3595 }
3596 }
3597
3598 /* Assign RTL expressions to the function's parameters. This may involve
3599 copying them into registers and using those registers as the DECL_RTL. */
3600
3601 static void
3602 assign_parms (tree fndecl)
3603 {
3604 struct assign_parm_data_all all;
3605 tree parm;
3606 vec<tree> fnargs;
3607 unsigned i;
3608
3609 crtl->args.internal_arg_pointer
3610 = targetm.calls.internal_arg_pointer ();
3611
3612 assign_parms_initialize_all (&all);
3613 fnargs = assign_parms_augmented_arg_list (&all);
3614
3615 FOR_EACH_VEC_ELT (fnargs, i, parm)
3616 {
3617 struct assign_parm_data_one data;
3618
3619 /* Extract the type of PARM; adjust it according to ABI. */
3620 assign_parm_find_data_types (&all, parm, &data);
3621
3622 /* Early out for errors and void parameters. */
3623 if (data.passed_mode == VOIDmode)
3624 {
3625 SET_DECL_RTL (parm, const0_rtx);
3626 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3627 continue;
3628 }
3629
3630 /* Estimate stack alignment from parameter alignment. */
3631 if (SUPPORTS_STACK_ALIGNMENT)
3632 {
3633 unsigned int align
3634 = targetm.calls.function_arg_boundary (data.arg.mode,
3635 data.arg.type);
3636 align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
3637 if (TYPE_ALIGN (data.nominal_type) > align)
3638 align = MINIMUM_ALIGNMENT (data.nominal_type,
3639 TYPE_MODE (data.nominal_type),
3640 TYPE_ALIGN (data.nominal_type));
3641 if (crtl->stack_alignment_estimated < align)
3642 {
3643 gcc_assert (!crtl->stack_realign_processed);
3644 crtl->stack_alignment_estimated = align;
3645 }
3646 }
3647
3648 /* Find out where the parameter arrives in this function. */
3649 assign_parm_find_entry_rtl (&all, &data);
3650
3651 /* Find out where stack space for this parameter might be. */
3652 if (assign_parm_is_stack_parm (&all, &data))
3653 {
3654 assign_parm_find_stack_rtl (parm, &data);
3655 assign_parm_adjust_entry_rtl (&data);
3656 /* For arguments that occupy no space in the parameter
3657 passing area, have non-zero size, and have their address taken,
3658 force creation of a stack slot so that they have a distinct
3659 address from other parameters. */
3660 if (TYPE_EMPTY_P (data.arg.type)
3661 && TREE_ADDRESSABLE (parm)
3662 && data.entry_parm == data.stack_parm
3663 && MEM_P (data.entry_parm)
3664 && int_size_in_bytes (data.arg.type))
3665 data.stack_parm = NULL_RTX;
3666 }
3667 /* Record permanently how this parm was passed. */
3668 if (data.arg.pass_by_reference)
3669 {
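/* For a pass-by-reference argument, ENTRY_PARM holds the address;
record the incoming RTL as a MEM of the referenced type at that address. */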
3670 rtx incoming_rtl
3671 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
3672 data.entry_parm);
3673 set_decl_incoming_rtl (parm, incoming_rtl, true);
3674 }
3675 else
3676 set_decl_incoming_rtl (parm, data.entry_parm, false);
3677
3678 assign_parm_adjust_stack_rtl (&data);
3679
3680 if (assign_parm_setup_block_p (&data))
3681 assign_parm_setup_block (&all, parm, &data);
3682 else if (data.arg.pass_by_reference || use_register_for_decl (parm))
3683 assign_parm_setup_reg (&all, parm, &data);
3684 else
3685 assign_parm_setup_stack (&all, parm, &data);
3686
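/* In a stdarg function, once the last named parameter has been handled,
set up the save area for the anonymous (variadic) arguments. */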
3687 if (cfun->stdarg && !DECL_CHAIN (parm))
3688 assign_parms_setup_varargs (&all, &data, false);
3689
3690 /* Update info on where next arg arrives in registers. */
3691 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3692 }
3693
3694 if (targetm.calls.split_complex_arg)
3695 assign_parms_unsplit_complex (&all, fnargs);
3696
3697 fnargs.release ();
3698
3699 /* Output all parameter conversion instructions (possibly including calls)
3700 now that all parameters have been copied out of hard registers. */
3701 emit_insn (all.first_conversion_insn);
3702
3703 /* Estimate reload stack alignment from scalar return mode. */
3704 if (SUPPORTS_STACK_ALIGNMENT)
3705 {
3706 if (DECL_RESULT (fndecl))
3707 {
3708 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3709 machine_mode mode = TYPE_MODE (type);
3710
3711 if (mode != BLKmode
3712 && mode != VOIDmode
3713 && !AGGREGATE_TYPE_P (type))
3714 {
3715 unsigned int align = GET_MODE_ALIGNMENT (mode);
3716 if (crtl->stack_alignment_estimated < align)
3717 {
3718 gcc_assert (!crtl->stack_realign_processed);
3719 crtl->stack_alignment_estimated = align;
3720 }
3721 }
3722 }
3723 }
3724
3725 /* If we are receiving a struct value address as the first argument, set up
3726 the RTL for the function result. As this might require code to convert
3727 the transmitted address to Pmode, we do this here to ensure that possible
3728 preliminary conversions of the address have been emitted already. */
3729 if (all.function_result_decl)
3730 {
3731 tree result = DECL_RESULT (current_function_decl);
3732 rtx addr = DECL_RTL (all.function_result_decl);
3733 rtx x;
3734
3735 if (DECL_BY_REFERENCE (result))
3736 {
3737 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3738 x = addr;
3739 }
3740 else
3741 {
3742 SET_DECL_VALUE_EXPR (result,
3743 build1 (INDIRECT_REF, TREE_TYPE (result),
3744 all.function_result_decl));
3745 addr = convert_memory_address (Pmode, addr);
3746 x = gen_rtx_MEM (DECL_MODE (result), addr);
3747 set_mem_attributes (x, result, 1);
3748 }
3749
3750 DECL_HAS_VALUE_EXPR_P (result) = 1;
3751
3752 set_parm_rtl (result, x);
3753 }
3754
3755 /* We have aligned all the args, so add space for the pretend args. */
3756 crtl->args.pretend_args_size = all.pretend_args_size;
3757 all.stack_args_size.constant += all.extra_pretend_bytes;
3758 crtl->args.size = all.stack_args_size.constant;
3759
3760 /* Adjust function incoming argument size for alignment and
3761 minimum length. */
3762
3763 crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
3764 crtl->args.size = aligned_upper_bound (crtl->args.size,
3765 PARM_BOUNDARY / BITS_PER_UNIT);
3766
3767 if (ARGS_GROW_DOWNWARD)
3768 {
3769 crtl->args.arg_offset_rtx
3770 = (all.stack_args_size.var == 0
3771 ? gen_int_mode (-all.stack_args_size.constant, Pmode)
3772 : expand_expr (size_diffop (all.stack_args_size.var,
3773 size_int (-all.stack_args_size.constant)),
3774 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3775 }
3776 else
3777 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3778
3779 /* See how many bytes, if any, of its args a function should try to pop
3780 on return. */
3781
3782 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3783 TREE_TYPE (fndecl),
3784 crtl->args.size);
3785
3786 /* For a stdarg.h function, save info about the
3787 regs and stack space used by the named args. */
3788
3789 crtl->args.info = all.args_so_far_v;
3790
3791 /* Set the rtx used for the function return value. Put this in its
3792 own variable so any optimizers that need this information don't have
3793 to include tree.h. Do this here so it gets done when an inlined
3794 function gets output. */
3795
3796 crtl->return_rtx
3797 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3798 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3799
3800 /* If scalar return value was computed in a pseudo-reg, or was a named
3801 return value that got dumped to the stack, copy that to the hard
3802 return register. */
3803 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3804 {
3805 tree decl_result = DECL_RESULT (fndecl);
3806 rtx decl_rtl = DECL_RTL (decl_result);
3807
3808 if (REG_P (decl_rtl)
3809 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3810 : DECL_REGISTER (decl_result))
3811 {
3812 rtx real_decl_rtl;
3813
3814 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3815 fndecl, true);
3816 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3817 /* The delay slot scheduler assumes that crtl->return_rtx
3818 holds the hard register containing the return value, not a
3819 temporary pseudo. */
3820 crtl->return_rtx = real_decl_rtl;
3821 }
3822 }
3823 }
3824
3825 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3826 For all seen types, gimplify their sizes. */
3827
3828 static tree
3829 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3830 {
3831 tree t = *tp;
3832
3833 *walk_subtrees = 0;
3834 if (TYPE_P (t))
3835 {
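/* Look through pointer types so we also reach variably modified
pointed-to types; for any other type whose size is not yet a
gimplified constant, gimplify its sizes now. */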
3836 if (POINTER_TYPE_P (t))
3837 *walk_subtrees = 1;
3838 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3839 && !TYPE_SIZES_GIMPLIFIED (t))
3840 {
3841 gimplify_type_sizes (t, (gimple_seq *) data);
3842 *walk_subtrees = 1;
3843 }
3844 }
3845
3846 return NULL;
3847 }
3848
3849 /* Gimplify the parameter list for current_function_decl. This involves
3850 evaluating SAVE_EXPRs of variable sized parameters and generating code
3851 to implement callee-copies reference parameters. Returns a sequence of
3852 statements to add to the beginning of the function. */
3853
3854 gimple_seq
3855 gimplify_parameters (gimple_seq *cleanup)
3856 {
3857 struct assign_parm_data_all all;
3858 tree parm;
3859 gimple_seq stmts = NULL;
3860 vec<tree> fnargs;
3861 unsigned i;
3862
3863 assign_parms_initialize_all (&all);
3864 fnargs = assign_parms_augmented_arg_list (&all);
3865
3866 FOR_EACH_VEC_ELT (fnargs, i, parm)
3867 {
3868 struct assign_parm_data_one data;
3869
3870 /* Extract the type of PARM; adjust it according to ABI. */
3871 assign_parm_find_data_types (&all, parm, &data);
3872
3873 /* Early out for errors and void parameters. */
3874 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3875 continue;
3876
3877 /* Update info on where next arg arrives in registers. */
3878 targetm.calls.function_arg_advance (all.args_so_far, data.arg);
3879
3880 /* ??? Once upon a time variable_size stuffed parameter list
3881 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3882 turned out to be less than manageable in the gimple world.
3883 Now we have to hunt them down ourselves. */
3884 walk_tree_without_duplicates (&data.arg.type,
3885 gimplify_parm_type, &stmts);
3886
3887 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3888 {
3889 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3890 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3891 }
3892
3893 if (data.arg.pass_by_reference)
3894 {
3895 tree type = TREE_TYPE (data.arg.type);
3896 function_arg_info orig_arg (type, data.arg.named);
3897 if (reference_callee_copied (&all.args_so_far_v, orig_arg))
3898 {
3899 tree local, t;
3900
3901 /* For constant-sized objects, this is trivial; for
3902 variable-sized objects, we have to play games. */
3903 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3904 && !(flag_stack_check == GENERIC_STACK_CHECK
3905 && compare_tree_int (DECL_SIZE_UNIT (parm),
3906 STACK_CHECK_MAX_VAR_SIZE) > 0))
3907 {
3908 local = create_tmp_var (type, get_name (parm));
3909 DECL_IGNORED_P (local) = 0;
3910 /* If PARM was addressable, move that flag over
3911 to the local copy, as its address will be taken,
3912 not the PARM's. Keep the parm's address-taken flag set,
3913 as we'll query that flag during gimplification. */
3914 if (TREE_ADDRESSABLE (parm))
3915 TREE_ADDRESSABLE (local) = 1;
3916 if (DECL_NOT_GIMPLE_REG_P (parm))
3917 DECL_NOT_GIMPLE_REG_P (local) = 1;
3918
3919 if (!is_gimple_reg (local)
3920 && flag_stack_reuse != SR_NONE)
3921 {
3922 tree clobber = build_clobber (type);
3923 gimple *clobber_stmt;
3924 clobber_stmt = gimple_build_assign (local, clobber);
3925 gimple_seq_add_stmt (cleanup, clobber_stmt);
3926 }
3927 }
3928 else
3929 {
3930 tree ptr_type, addr;
3931
3932 ptr_type = build_pointer_type (type);
3933 addr = create_tmp_reg (ptr_type, get_name (parm));
3934 DECL_IGNORED_P (addr) = 0;
3935 local = build_fold_indirect_ref (addr);
3936
3937 t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
3938 DECL_ALIGN (parm),
3939 max_int_size_in_bytes (type));
3940 /* The call has been built for a variable-sized object. */
3941 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3942 t = fold_convert (ptr_type, t);
3943 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3944 gimplify_and_add (t, &stmts);
3945 }
3946
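/* Copy the incoming argument into the callee copy; the DECL_VALUE_EXPR
set just below makes later references to PARM use LOCAL instead. */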
3947 gimplify_assign (local, parm, &stmts);
3948
3949 SET_DECL_VALUE_EXPR (parm, local);
3950 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3951 }
3952 }
3953 }
3954
3955 fnargs.release ();
3956
3957 return stmts;
3958 }
3959 \f
3960 /* Compute the size and offset from the start of the stacked arguments for a
3961 parm passed in mode PASSED_MODE and with type TYPE.
3962
3963 INITIAL_OFFSET_PTR points to the current offset into the stacked
3964 arguments.
3965
3966 The starting offset and size for this parm are returned in
3967 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3968 nonzero, the offset is that of stack slot, which is returned in
3969 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3970 padding required from the initial offset ptr to the stack slot.
3971
3972 IN_REGS is nonzero if the argument will be passed in registers. It will
3973 never be set if REG_PARM_STACK_SPACE is not defined.
3974
3975 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3976 for arguments which are passed in registers.
3977
3978 FNDECL is the function in which the argument was defined.
3979
3980 There are two types of rounding that are done. The first, controlled by
3981 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3982 argument list to be aligned to the specific boundary (in bits). This
3983 rounding affects the initial and starting offsets, but not the argument
3984 size.
3985
3986 The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3987 optionally rounds the size of the parm to PARM_BOUNDARY. The
3988 initial offset is not affected by this rounding, while the size always
3989 is and the starting offset may be. */
3990
3991 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3992 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3993 callers pass in the total size of args so far as
3994 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3995
3996 void
3997 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
3998 int reg_parm_stack_space, int partial,
3999 tree fndecl ATTRIBUTE_UNUSED,
4000 struct args_size *initial_offset_ptr,
4001 struct locate_and_pad_arg_data *locate)
4002 {
4003 tree sizetree;
4004 pad_direction where_pad;
4005 unsigned int boundary, round_boundary;
4006 int part_size_in_regs;
4007
4008 /* If we have found a stack parm before we reach the end of the
4009 area reserved for registers, skip that area. */
4010 if (! in_regs)
4011 {
4012 if (reg_parm_stack_space > 0)
4013 {
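/* If the offset so far has a variable part, or its poly_int constant
part cannot be compared against REG_PARM_STACK_SPACE at compile time,
fall back to a runtime MAX_EXPR; otherwise take the maximum directly. */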
4014 if (initial_offset_ptr->var
4015 || !ordered_p (initial_offset_ptr->constant,
4016 reg_parm_stack_space))
4017 {
4018 initial_offset_ptr->var
4019 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4020 ssize_int (reg_parm_stack_space));
4021 initial_offset_ptr->constant = 0;
4022 }
4023 else
4024 initial_offset_ptr->constant
4025 = ordered_max (initial_offset_ptr->constant,
4026 reg_parm_stack_space);
4027 }
4028 }
4029
4030 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4031
4032 sizetree = (type
4033 ? arg_size_in_bytes (type)
4034 : size_int (GET_MODE_SIZE (passed_mode)));
4035 where_pad = targetm.calls.function_arg_padding (passed_mode, type);
4036 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4037 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4038 type);
4039 locate->where_pad = where_pad;
4040
4041 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4042 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4043 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4044
4045 locate->boundary = boundary;
4046
4047 if (SUPPORTS_STACK_ALIGNMENT)
4048 {
4049 /* stack_alignment_estimated can't change after stack has been
4050 realigned. */
4051 if (crtl->stack_alignment_estimated < boundary)
4052 {
4053 if (!crtl->stack_realign_processed)
4054 crtl->stack_alignment_estimated = boundary;
4055 else
4056 {
4057 /* If stack is realigned and stack alignment value
4058 hasn't been finalized, it is OK not to increase
4059 stack_alignment_estimated. The bigger alignment
4060 requirement is recorded in stack_alignment_needed
4061 below. */
4062 gcc_assert (!crtl->stack_realign_finalized
4063 && crtl->stack_realign_needed);
4064 }
4065 }
4066 }
4067
4068 if (ARGS_GROW_DOWNWARD)
4069 {
4070 locate->slot_offset.constant = -initial_offset_ptr->constant;
4071 if (initial_offset_ptr->var)
4072 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4073 initial_offset_ptr->var);
4074
4075 {
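/* Arguments grow downward, so the slot sits below the current offset:
round the size up to the rounding boundary when padding applies,
then subtract it from the slot offset. */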
4076 tree s2 = sizetree;
4077 if (where_pad != PAD_NONE
4078 && (!tree_fits_uhwi_p (sizetree)
4079 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4080 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4081 SUB_PARM_SIZE (locate->slot_offset, s2);
4082 }
4083
4084 locate->slot_offset.constant += part_size_in_regs;
4085
4086 if (!in_regs || reg_parm_stack_space > 0)
4087 pad_to_arg_alignment (&locate->slot_offset, boundary,
4088 &locate->alignment_pad);
4089
4090 locate->size.constant = (-initial_offset_ptr->constant
4091 - locate->slot_offset.constant);
4092 if (initial_offset_ptr->var)
4093 locate->size.var = size_binop (MINUS_EXPR,
4094 size_binop (MINUS_EXPR,
4095 ssize_int (0),
4096 initial_offset_ptr->var),
4097 locate->slot_offset.var);
4098
4099 /* Pad_below needs the pre-rounded size to know how much to pad
4100 below. */
4101 locate->offset = locate->slot_offset;
4102 if (where_pad == PAD_DOWNWARD)
4103 pad_below (&locate->offset, passed_mode, sizetree);
4104
4105 }
4106 else
4107 {
4108 if (!in_regs || reg_parm_stack_space > 0)
4109 pad_to_arg_alignment (initial_offset_ptr, boundary,
4110 &locate->alignment_pad);
4111 locate->slot_offset = *initial_offset_ptr;
4112
4113 #ifdef PUSH_ROUNDING
4114 if (passed_mode != BLKmode)
4115 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4116 #endif
4117
4118 /* Pad_below needs the pre-rounded size to know how much to pad below
4119 so this must be done before rounding up. */
4120 locate->offset = locate->slot_offset;
4121 if (where_pad == PAD_DOWNWARD)
4122 pad_below (&locate->offset, passed_mode, sizetree);
4123
4124 if (where_pad != PAD_NONE
4125 && (!tree_fits_uhwi_p (sizetree)
4126 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4127 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4128
4129 ADD_PARM_SIZE (locate->size, sizetree);
4130
4131 locate->size.constant -= part_size_in_regs;
4132 }
4133
4134 locate->offset.constant
4135 += targetm.calls.function_arg_offset (passed_mode, type);
4136 }
4137
4138 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4139 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4140
4141 static void
4142 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4143 struct args_size *alignment_pad)
4144 {
4145 tree save_var = NULL_TREE;
4146 poly_int64 save_constant = 0;
4147 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4148 poly_int64 sp_offset = STACK_POINTER_OFFSET;
4149
4150 #ifdef SPARC_STACK_BOUNDARY_HACK
4151 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4152 the real alignment of %sp. However, when it does this, the
4153 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4154 if (SPARC_STACK_BOUNDARY_HACK)
4155 sp_offset = 0;
4156 #endif
4157
4158 if (boundary > PARM_BOUNDARY)
4159 {
4160 save_var = offset_ptr->var;
4161 save_constant = offset_ptr->constant;
4162 }
4163
4164 alignment_pad->var = NULL_TREE;
4165 alignment_pad->constant = 0;
4166
4167 if (boundary > BITS_PER_UNIT)
4168 {
4169 int misalign;
4170 if (offset_ptr->var
4171 || !known_misalignment (offset_ptr->constant + sp_offset,
4172 boundary_in_bytes, &misalign))
4173 {
4174 tree sp_offset_tree = ssize_int (sp_offset);
4175 tree offset = size_binop (PLUS_EXPR,
4176 ARGS_SIZE_TREE (*offset_ptr),
4177 sp_offset_tree);
4178 tree rounded;
4179 if (ARGS_GROW_DOWNWARD)
4180 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4181 else
4182 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4183
4184 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4185 /* ARGS_SIZE_TREE includes constant term. */
4186 offset_ptr->constant = 0;
4187 if (boundary > PARM_BOUNDARY)
4188 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4189 save_var);
4190 }
4191 else
4192 {
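/* The offset is a compile-time constant with a known misalignment:
round it down to the boundary when arguments grow downward,
otherwise round it up to the next boundary. */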
4193 if (ARGS_GROW_DOWNWARD)
4194 offset_ptr->constant -= misalign;
4195 else
4196 offset_ptr->constant += -misalign & (boundary_in_bytes - 1);
4197
4198 if (boundary > PARM_BOUNDARY)
4199 alignment_pad->constant = offset_ptr->constant - save_constant;
4200 }
4201 }
4202 }
4203
4204 static void
4205 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4206 {
4207 unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
4208 int misalign;
4209 if (passed_mode != BLKmode
4210 && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
4211 offset_ptr->constant += -misalign & (align - 1);
4212 else
4213 {
4214 if (TREE_CODE (sizetree) != INTEGER_CST
4215 || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
4216 {
4217 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4218 tree s2 = round_up (sizetree, align);
4219 /* Add it in. */
4220 ADD_PARM_SIZE (*offset_ptr, s2);
4221 SUB_PARM_SIZE (*offset_ptr, sizetree);
4222 }
4223 }
4224 }
4225 \f
4226
4227 /* True if register REGNO was alive at a place where `setjmp' was
4228 called and was set more than once or is an argument. Such regs may
4229 be clobbered by `longjmp'. */
4230
4231 static bool
4232 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4233 {
4234 /* There appear to be cases where some local vars never reach the
4235 backend but have bogus regnos. */
4236 if (regno >= max_reg_num ())
4237 return false;
4238
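/* A pseudo is suspect if it is assigned more than once or is live out of
the entry block (an incoming argument), and it is live across a setjmp. */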
4239 return ((REG_N_SETS (regno) > 1
4240 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4241 regno))
4242 && REGNO_REG_SET_P (setjmp_crosses, regno));
4243 }
4244
4245 /* Walk the tree of blocks describing the binding levels within a
4246 function and warn about variables that might be killed by setjmp or
4247 vfork. This is done after flow analysis and before register
4248 allocation, since register allocation will map the pseudo-regs to hard
4249 regs. */
4250
4251 static void
4252 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4253 {
4254 tree decl, sub;
4255
4256 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4257 {
4258 if (VAR_P (decl)
4259 && DECL_RTL_SET_P (decl)
4260 && REG_P (DECL_RTL (decl))
4261 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4262 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4263 " %<longjmp%> or %<vfork%>", decl);
4264 }
4265
4266 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4267 setjmp_vars_warning (setjmp_crosses, sub);
4268 }
4269
4270 /* Do the appropriate part of setjmp_vars_warning
4271 but for arguments instead of local variables. */
4272
4273 static void
4274 setjmp_args_warning (bitmap setjmp_crosses)
4275 {
4276 tree decl;
4277 for (decl = DECL_ARGUMENTS (current_function_decl);
4278 decl; decl = DECL_CHAIN (decl))
4279 if (DECL_RTL (decl) != 0
4280 && REG_P (DECL_RTL (decl))
4281 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4282 warning (OPT_Wclobbered,
4283 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4284 decl);
4285 }
4286
4287 /* Generate warning messages for variables live across setjmp. */
4288
4289 void
4290 generate_setjmp_warnings (void)
4291 {
4292 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4293
4294 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4295 || bitmap_empty_p (setjmp_crosses))
4296 return;
4297
4298 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4299 setjmp_args_warning (setjmp_crosses);
4300 }
4301
4302 \f
4303 /* Reverse the order of elements in the fragment chain T of blocks,
4304 and return the new head of the chain (old last element).
4305 In addition to that clear BLOCK_SAME_RANGE flags when needed
4306 and adjust BLOCK_SUPERCONTEXT from the super fragment to
4307 its super fragment origin. */
4308
4309 static tree
4310 block_fragments_nreverse (tree t)
4311 {
4312 tree prev = 0, block, next, prev_super = 0;
4313 tree super = BLOCK_SUPERCONTEXT (t);
4314 if (BLOCK_FRAGMENT_ORIGIN (super))
4315 super = BLOCK_FRAGMENT_ORIGIN (super);
4316 for (block = t; block; block = next)
4317 {
4318 next = BLOCK_FRAGMENT_CHAIN (block);
4319 BLOCK_FRAGMENT_CHAIN (block) = prev;
4320 if ((prev && !BLOCK_SAME_RANGE (prev))
4321 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4322 != prev_super))
4323 BLOCK_SAME_RANGE (block) = 0;
4324 prev_super = BLOCK_SUPERCONTEXT (block);
4325 BLOCK_SUPERCONTEXT (block) = super;
4326 prev = block;
4327 }
4328 t = BLOCK_FRAGMENT_ORIGIN (t);
4329 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4330 != prev_super)
4331 BLOCK_SAME_RANGE (t) = 0;
4332 BLOCK_SUPERCONTEXT (t) = super;
4333 return prev;
4334 }
4335
4336 /* Reverse the order of elements in the chain T of blocks,
4337 and return the new head of the chain (old last element).
4338 Also do the same on subblocks and reverse the order of elements
4339 in BLOCK_FRAGMENT_CHAIN as well. */
4340
4341 static tree
4342 blocks_nreverse_all (tree t)
4343 {
4344 tree prev = 0, block, next;
4345 for (block = t; block; block = next)
4346 {
4347 next = BLOCK_CHAIN (block);
4348 BLOCK_CHAIN (block) = prev;
4349 if (BLOCK_FRAGMENT_CHAIN (block)
4350 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4351 {
4352 BLOCK_FRAGMENT_CHAIN (block)
4353 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4354 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4355 BLOCK_SAME_RANGE (block) = 0;
4356 }
4357 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4358 prev = block;
4359 }
4360 return prev;
4361 }
4362
4363
4364 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4365 and create duplicate blocks. */
4366 /* ??? Need an option to either create block fragments or to create
4367 abstract origin duplicates of a source block. It really depends
4368 on what optimization has been performed. */
4369
4370 void
4371 reorder_blocks (void)
4372 {
4373 tree block = DECL_INITIAL (current_function_decl);
4374
4375 if (block == NULL_TREE)
4376 return;
4377
4378 auto_vec<tree, 10> block_stack;
4379
4380 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4381 clear_block_marks (block);
4382
4383 /* Prune the old trees away, so that they don't get in the way. */
4384 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4385 BLOCK_CHAIN (block) = NULL_TREE;
4386
4387 /* Recreate the block tree from the note nesting. */
4388 reorder_blocks_1 (get_insns (), block, &block_stack);
4389 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4390 }
4391
4392 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4393
4394 void
4395 clear_block_marks (tree block)
4396 {
4397 while (block)
4398 {
4399 TREE_ASM_WRITTEN (block) = 0;
4400 clear_block_marks (BLOCK_SUBBLOCKS (block));
4401 block = BLOCK_CHAIN (block);
4402 }
4403 }
4404
4405 static void
4406 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4407 vec<tree> *p_block_stack)
4408 {
4409 rtx_insn *insn;
4410 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4411
4412 for (insn = insns; insn; insn = NEXT_INSN (insn))
4413 {
4414 if (NOTE_P (insn))
4415 {
4416 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4417 {
4418 tree block = NOTE_BLOCK (insn);
4419 tree origin;
4420
4421 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4422 origin = block;
4423
4424 if (prev_end)
4425 BLOCK_SAME_RANGE (prev_end) = 0;
4426 prev_end = NULL_TREE;
4427
4428 /* If we have seen this block before, that means it now
4429 spans multiple address regions. Create a new fragment. */
4430 if (TREE_ASM_WRITTEN (block))
4431 {
4432 tree new_block = copy_node (block);
4433
4434 BLOCK_SAME_RANGE (new_block) = 0;
4435 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4436 BLOCK_FRAGMENT_CHAIN (new_block)
4437 = BLOCK_FRAGMENT_CHAIN (origin);
4438 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4439
4440 NOTE_BLOCK (insn) = new_block;
4441 block = new_block;
4442 }
4443
4444 if (prev_beg == current_block && prev_beg)
4445 BLOCK_SAME_RANGE (block) = 1;
4446
4447 prev_beg = origin;
4448
4449 BLOCK_SUBBLOCKS (block) = 0;
4450 TREE_ASM_WRITTEN (block) = 1;
4451 /* When there's only one block for the entire function,
4452 current_block == block, and we mustn't do this; it
4453 would cause infinite recursion. */
4454 if (block != current_block)
4455 {
4456 tree super;
4457 if (block != origin)
4458 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4459 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4460 (origin))
4461 == current_block);
4462 if (p_block_stack->is_empty ())
4463 super = current_block;
4464 else
4465 {
4466 super = p_block_stack->last ();
4467 gcc_assert (super == current_block
4468 || BLOCK_FRAGMENT_ORIGIN (super)
4469 == current_block);
4470 }
4471 BLOCK_SUPERCONTEXT (block) = super;
4472 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4473 BLOCK_SUBBLOCKS (current_block) = block;
4474 current_block = origin;
4475 }
4476 p_block_stack->safe_push (block);
4477 }
4478 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4479 {
4480 NOTE_BLOCK (insn) = p_block_stack->pop ();
4481 current_block = BLOCK_SUPERCONTEXT (current_block);
4482 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4483 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4484 prev_beg = NULL_TREE;
4485 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4486 ? NOTE_BLOCK (insn) : NULL_TREE;
4487 }
4488 }
4489 else
4490 {
4491 prev_beg = NULL_TREE;
4492 if (prev_end)
4493 BLOCK_SAME_RANGE (prev_end) = 0;
4494 prev_end = NULL_TREE;
4495 }
4496 }
4497 }
4498
4499 /* Reverse the order of elements in the chain T of blocks,
4500 and return the new head of the chain (old last element). */
4501
4502 tree
4503 blocks_nreverse (tree t)
4504 {
4505 tree prev = 0, block, next;
4506 for (block = t; block; block = next)
4507 {
4508 next = BLOCK_CHAIN (block);
4509 BLOCK_CHAIN (block) = prev;
4510 prev = block;
4511 }
4512 return prev;
4513 }
4514
4515 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4516 by modifying the last node in chain 1 to point to chain 2. */
4517
4518 tree
4519 block_chainon (tree op1, tree op2)
4520 {
4521 tree t1;
4522
4523 if (!op1)
4524 return op2;
4525 if (!op2)
4526 return op1;
4527
4528 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4529 continue;
4530 BLOCK_CHAIN (t1) = op2;
4531
4532 #ifdef ENABLE_TREE_CHECKING
4533 {
4534 tree t2;
4535 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4536 gcc_assert (t2 != t1);
4537 }
4538 #endif
4539
4540 return op1;
4541 }
4542
4543 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4544 non-NULL, list them all into VECTOR, in a depth-first preorder
4545 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4546 blocks. */
4547
4548 static int
4549 all_blocks (tree block, tree *vector)
4550 {
4551 int n_blocks = 0;
4552
4553 while (block)
4554 {
4555 TREE_ASM_WRITTEN (block) = 0;
4556
4557 /* Record this block. */
4558 if (vector)
4559 vector[n_blocks] = block;
4560
4561 ++n_blocks;
4562
4563 /* Record the subblocks, and their subblocks... */
4564 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4565 vector ? vector + n_blocks : 0);
4566 block = BLOCK_CHAIN (block);
4567 }
4568
4569 return n_blocks;
4570 }
4571
4572 /* Return a vector containing all the blocks rooted at BLOCK. The
4573 number of elements in the vector is stored in N_BLOCKS_P. The
4574 vector is dynamically allocated; it is the caller's responsibility
4575 to call `free' on the pointer returned. */
4576
4577 static tree *
4578 get_block_vector (tree block, int *n_blocks_p)
4579 {
4580 tree *block_vector;
4581
4582 *n_blocks_p = all_blocks (block, NULL);
4583 block_vector = XNEWVEC (tree, *n_blocks_p);
4584 all_blocks (block, block_vector);
4585
4586 return block_vector;
4587 }
4588
4589 static GTY(()) int next_block_index = 2;
4590
4591 /* Set BLOCK_NUMBER for all the blocks in FN. */
4592
4593 void
4594 number_blocks (tree fn)
4595 {
4596 int i;
4597 int n_blocks;
4598 tree *block_vector;
4599
4600 /* For XCOFF debugging output, we start numbering the blocks
4601 from 1 within each function, rather than keeping a running
4602 count. */
4603 #if defined (XCOFF_DEBUGGING_INFO)
4604 if (write_symbols == XCOFF_DEBUG)
4605 next_block_index = 1;
4606 #endif
4607
4608 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4609
4610 /* The top-level BLOCK isn't numbered at all. */
4611 for (i = 1; i < n_blocks; ++i)
4612 /* We number the blocks from two. */
4613 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4614
4615 free (block_vector);
4616
4617 return;
4618 }
4619
4620 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4621
4622 DEBUG_FUNCTION tree
4623 debug_find_var_in_block_tree (tree var, tree block)
4624 {
4625 tree t;
4626
4627 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4628 if (t == var)
4629 return block;
4630
4631 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4632 {
4633 tree ret = debug_find_var_in_block_tree (var, t);
4634 if (ret)
4635 return ret;
4636 }
4637
4638 return NULL_TREE;
4639 }
4640 \f
4641 /* Keep track of whether we're in a dummy function context. If we are,
4642 we don't want to invoke the set_current_function hook, because we'll
4643 get into trouble if the hook calls target_reinit () recursively or
4644 when the initial initialization is not yet complete. */
4645
4646 static bool in_dummy_function;
4647
4648 /* Invoke the target hook when setting cfun. Update the optimization options
4649 if the function uses different options than the default. */
4650
4651 static void
4652 invoke_set_current_function_hook (tree fndecl)
4653 {
4654 if (!in_dummy_function)
4655 {
4656 tree opts = ((fndecl)
4657 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4658 : optimization_default_node);
4659
4660 if (!opts)
4661 opts = optimization_default_node;
4662
4663 /* Change optimization options if needed. */
4664 if (optimization_current_node != opts)
4665 {
4666 optimization_current_node = opts;
4667 cl_optimization_restore (&global_options, &global_options_set,
4668 TREE_OPTIMIZATION (opts));
4669 }
4670
4671 targetm.set_current_function (fndecl);
4672 this_fn_optabs = this_target_optabs;
4673
4674 /* Initialize global alignment variables once the optimization options are in effect. */
4675 parse_alignment_opts ();
4676
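/* If this function uses non-default optimization options, use the optab
set computed for that options node instead of the target default. */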
4677 if (opts != optimization_default_node)
4678 {
4679 init_tree_optimization_optabs (opts);
4680 if (TREE_OPTIMIZATION_OPTABS (opts))
4681 this_fn_optabs = (struct target_optabs *)
4682 TREE_OPTIMIZATION_OPTABS (opts);
4683 }
4684 }
4685 }
4686
4687 /* cfun should never be set directly; use this function. */
4688
4689 void
4690 set_cfun (struct function *new_cfun, bool force)
4691 {
4692 if (cfun != new_cfun || force)
4693 {
4694 cfun = new_cfun;
4695 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4696 redirect_edge_var_map_empty ();
4697 }
4698 }
4699
4700 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4701
4702 static vec<function *> cfun_stack;
4703
4704 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4705 current_function_decl accordingly. */
4706
4707 void
4708 push_cfun (struct function *new_cfun)
4709 {
4710 gcc_assert ((!cfun && !current_function_decl)
4711 || (cfun && current_function_decl == cfun->decl));
4712 cfun_stack.safe_push (cfun);
4713 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4714 set_cfun (new_cfun);
4715 }
4716
4717 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4718
4719 void
4720 pop_cfun (void)
4721 {
4722 struct function *new_cfun = cfun_stack.pop ();
4723 /* When in_dummy_function, we do have a cfun but current_function_decl is
4724 NULL. We also allow pushing NULL cfun and subsequently changing
4725 current_function_decl to something else and have both restored by
4726 pop_cfun. */
4727 gcc_checking_assert (in_dummy_function
4728 || !cfun
4729 || current_function_decl == cfun->decl);
4730 set_cfun (new_cfun);
4731 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4732 }
4733
4734 /* Return the current value of funcdef_no and increment it. */
4735 int
4736 get_next_funcdef_no (void)
4737 {
4738 return funcdef_no++;
4739 }
4740
4741 /* Return the current value of funcdef_no. */
4742 int
4743 get_last_funcdef_no (void)
4744 {
4745 return funcdef_no;
4746 }
4747
4748 /* Allocate and initialize the stack usage info data structure for the
4749 current function. */
4750 static void
4751 allocate_stack_usage_info (void)
4752 {
4753 gcc_assert (!cfun->su);
4754 cfun->su = ggc_cleared_alloc<stack_usage> ();
4755 cfun->su->static_stack_size = -1;
4756 }
4757
4758 /* Allocate a function structure for FNDECL and set its contents
4759 to the defaults. Set cfun to the newly-allocated object.
4760 Some of the helper functions invoked during initialization assume
4761 that cfun has already been set. Therefore, assign the new object
4762 directly into cfun and invoke the back end hook explicitly at the
4763 very end, rather than initializing a temporary and calling set_cfun
4764 on it.
4765
4766 ABSTRACT_P is true if this is a function that will never be seen by
4767 the middle-end. Such functions are front-end concepts (like C++
4768 function templates) that do not correspond directly to functions
4769 placed in object files. */
4770
4771 void
4772 allocate_struct_function (tree fndecl, bool abstract_p)
4773 {
4774 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4775
4776 cfun = ggc_cleared_alloc<function> ();
4777
4778 init_eh_for_function ();
4779
4780 if (init_machine_status)
4781 cfun->machine = (*init_machine_status) ();
4782
4783 #ifdef OVERRIDE_ABI_FORMAT
4784 OVERRIDE_ABI_FORMAT (fndecl);
4785 #endif
4786
4787 if (fndecl != NULL_TREE)
4788 {
4789 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4790 cfun->decl = fndecl;
4791 current_function_funcdef_no = get_next_funcdef_no ();
4792 }
4793
4794 invoke_set_current_function_hook (fndecl);
4795
4796 if (fndecl != NULL_TREE)
4797 {
4798 tree result = DECL_RESULT (fndecl);
4799
4800 if (!abstract_p)
4801 {
4802 /* Now that we have activated any function-specific attributes
4803 that might affect layout, particularly vector modes, relayout
4804 each of the parameters and the result. */
4805 relayout_decl (result);
4806 for (tree parm = DECL_ARGUMENTS (fndecl); parm;
4807 parm = DECL_CHAIN (parm))
4808 relayout_decl (parm);
4809
4810 /* Similarly relayout the function decl. */
4811 targetm.target_option.relayout_function (fndecl);
4812 }
4813
4814 if (!abstract_p && aggregate_value_p (result, fndecl))
4815 {
4816 #ifdef PCC_STATIC_STRUCT_RETURN
4817 cfun->returns_pcc_struct = 1;
4818 #endif
4819 cfun->returns_struct = 1;
4820 }
4821
4822 cfun->stdarg = stdarg_p (fntype);
4823
4824 /* Assume all registers in stdarg functions need to be saved. */
4825 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4826 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4827
4828 /* ??? This could be set on a per-function basis by the front-end
4829 but is this worth the hassle? */
4830 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4831 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4832
4833 if (!profile_flag && !flag_instrument_function_entry_exit)
4834 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4835
4836 if (flag_callgraph_info)
4837 allocate_stack_usage_info ();
4838 }
4839
4840 /* Don't enable begin stmt markers if var-tracking at assignments is
4841 disabled. The markers make little sense without the variable
4842 binding annotations among them. */
4843 cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
4844 && MAY_HAVE_DEBUG_MARKER_STMTS;
4845 }
4846
4847 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4848 instead of just setting it. */
4849
4850 void
4851 push_struct_function (tree fndecl)
4852 {
4853 /* When in_dummy_function we might be in the middle of a pop_cfun and
4854 current_function_decl and cfun may not match. */
4855 gcc_assert (in_dummy_function
4856 || (!cfun && !current_function_decl)
4857 || (cfun && current_function_decl == cfun->decl));
4858 cfun_stack.safe_push (cfun);
4859 current_function_decl = fndecl;
4860 allocate_struct_function (fndecl, false);
4861 }
4862
4863 /* Reset crtl and other non-struct-function variables to defaults as
4864 appropriate for emitting rtl at the start of a function. */
4865
4866 static void
4867 prepare_function_start (void)
4868 {
4869 gcc_assert (!get_last_insn ());
4870
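/* A dummy function has no decl to derive a call ABI from, so fall back
to the target's default function ABI. */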
4871 if (in_dummy_function)
4872 crtl->abi = &default_function_abi;
4873 else
4874 crtl->abi = &fndecl_abi (cfun->decl).base_abi ();
4875
4876 init_temp_slots ();
4877 init_emit ();
4878 init_varasm_status ();
4879 init_expr ();
4880 default_rtl_profile ();
4881
4882 if (flag_stack_usage_info && !flag_callgraph_info)
4883 allocate_stack_usage_info ();
4884
4885 cse_not_expected = ! optimize;
4886
4887 /* Caller save not needed yet. */
4888 caller_save_needed = 0;
4889
4890 /* We haven't done register allocation yet. */
4891 reg_renumber = 0;
4892
4893 /* Indicate that we have not instantiated virtual registers yet. */
4894 virtuals_instantiated = 0;
4895
4896 /* Indicate that we want CONCATs now. */
4897 generating_concat_p = 1;
4898
4899 /* Indicate we have no need of a frame pointer yet. */
4900 frame_pointer_needed = 0;
4901 }
4902
4903 void
4904 push_dummy_function (bool with_decl)
4905 {
4906 tree fn_decl, fn_type, fn_result_decl;
4907
4908 gcc_assert (!in_dummy_function);
4909 in_dummy_function = true;
4910
4911 if (with_decl)
4912 {
4913 fn_type = build_function_type_list (void_type_node, NULL_TREE);
4914 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4915 fn_type);
4916 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4917 NULL_TREE, void_type_node);
4918 DECL_RESULT (fn_decl) = fn_result_decl;
4919 }
4920 else
4921 fn_decl = NULL_TREE;
4922
4923 push_struct_function (fn_decl);
4924 }
4925
4926 /* Initialize the rtl expansion mechanism so that we can do simple things
4927 like generate sequences. This is used to provide a context during global
4928 initialization of some passes. You must call expand_dummy_function_end
4929 to exit this context. */
4930
4931 void
4932 init_dummy_function_start (void)
4933 {
4934 push_dummy_function (false);
4935 prepare_function_start ();
4936 }
4937
4938 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4939 and initialize static variables for generating RTL for the statements
4940 of the function. */
4941
4942 void
4943 init_function_start (tree subr)
4944 {
4945 /* Initialize backend, if needed. */
4946 initialize_rtl ();
4947
4948 prepare_function_start ();
4949 decide_function_section (subr);
4950
4951 /* Warn if this value is an aggregate type,
4952 regardless of which calling convention we are using for it. */
4953 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4954 warning (OPT_Waggregate_return, "function returns an aggregate");
4955 }
4956
4957 /* Expand code to verify the stack_protect_guard. This is invoked at
4958 the end of a function to be protected. */
4959
4960 void
4961 stack_protect_epilogue (void)
4962 {
4963 tree guard_decl = crtl->stack_protect_guard_decl;
4964 rtx_code_label *label = gen_label_rtx ();
4965 rtx x, y;
4966 rtx_insn *seq = NULL;
4967
4968 x = expand_normal (crtl->stack_protect_guard);
4969
4970 if (targetm.have_stack_protect_combined_test () && guard_decl)
4971 {
4972 gcc_assert (DECL_P (guard_decl));
4973 y = DECL_RTL (guard_decl);
4974 /* Allow the target to compute address of Y and compare it with X without
4975 leaking Y into a register. This combined address + compare pattern
4976 allows the target to prevent spilling of any intermediate results by
4977 splitting it after register allocation. */
4978 seq = targetm.gen_stack_protect_combined_test (x, y, label);
4979 }
4980 else
4981 {
4982 if (guard_decl)
4983 y = expand_normal (guard_decl);
4984 else
4985 y = const0_rtx;
4986
4987 /* Allow the target to compare Y with X without leaking either into
4988 a register. */
4989 if (targetm.have_stack_protect_test ())
4990 seq = targetm.gen_stack_protect_test (x, y, label);
4991 }
4992
4993 if (seq)
4994 emit_insn (seq);
4995 else
4996 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4997
4998 /* The noreturn predictor has been moved to the tree level. The rtl-level
4999 predictors estimate this branch at about 20%, which isn't enough to get
5000 things moved out of line. Since this is the only extant case of adding
5001 a noreturn function at the rtl level, it doesn't seem worth doing anything
5002 except adding the prediction by hand. */
5003 rtx_insn *tmp = get_last_insn ();
5004 if (JUMP_P (tmp))
5005 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
5006
5007 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
5008 free_temp_slots ();
5009 emit_label (label);
5010 }
5011 \f
5012 /* Start the RTL for a new function, and set variables used for
5013 emitting RTL.
5014 SUBR is the FUNCTION_DECL node. */
5017
5018 void
5019 expand_function_start (tree subr)
5020 {
5021 /* Make sure volatile mem refs aren't considered
5022 valid operands of arithmetic insns. */
5023 init_recog_no_volatile ();
5024
5025 crtl->profile
5026 = (profile_flag
5027 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5028
5029 crtl->limit_stack
5030 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
5031
5032 /* Make the label for return statements to jump to. Do not special
5033 case machines with special return instructions -- they will be
5034 handled later during jump, ifcvt, or epilogue creation. */
5035 return_label = gen_label_rtx ();
5036
5037 /* Initialize rtx used to return the value. */
5038 /* Do this before assign_parms so that we copy the struct value address
5039 before any library calls that assign parms might generate. */
5040
5041 /* Decide whether to return the value in memory or in a register. */
5042 tree res = DECL_RESULT (subr);
5043 if (aggregate_value_p (res, subr))
5044 {
5045 /* Returning something that won't go in a register. */
5046 rtx value_address = 0;
5047
5048 #ifdef PCC_STATIC_STRUCT_RETURN
5049 if (cfun->returns_pcc_struct)
5050 {
5051 int size = int_size_in_bytes (TREE_TYPE (res));
5052 value_address = assemble_static_space (size);
5053 }
5054 else
5055 #endif
5056 {
5057 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5058 /* Expect to be passed the address of a place to store the value.
5059 If it is passed as an argument, assign_parms will take care of
5060 it. */
5061 if (sv)
5062 {
5063 value_address = gen_reg_rtx (Pmode);
5064 emit_move_insn (value_address, sv);
5065 }
5066 }
5067 if (value_address)
5068 {
5069 rtx x = value_address;
5070 if (!DECL_BY_REFERENCE (res))
5071 {
5072 x = gen_rtx_MEM (DECL_MODE (res), x);
5073 set_mem_attributes (x, res, 1);
5074 }
5075 set_parm_rtl (res, x);
5076 }
5077 }
5078 else if (DECL_MODE (res) == VOIDmode)
5079 /* If return mode is void, this decl rtl should not be used. */
5080 set_parm_rtl (res, NULL_RTX);
5081 else
5082 {
5083 /* Compute the return values into a pseudo reg, which we will copy
5084 into the true return register after the cleanups are done. */
5085 tree return_type = TREE_TYPE (res);
5086
5087 /* If we may coalesce this result, make sure it has the expected mode
5088 in case it was promoted. But we need not bother about BLKmode. */
5089 machine_mode promoted_mode
5090 = flag_tree_coalesce_vars && is_gimple_reg (res)
5091 ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
5092 : BLKmode;
5093
5094 if (promoted_mode != BLKmode)
5095 set_parm_rtl (res, gen_reg_rtx (promoted_mode));
5096 else if (TYPE_MODE (return_type) != BLKmode
5097 && targetm.calls.return_in_msb (return_type))
5098 /* expand_function_end will insert the appropriate padding in
5099 this case. Use the return value's natural (unpadded) mode
5100 within the function proper. */
5101 set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
5102 else
5103 {
5104 /* In order to figure out what mode to use for the pseudo, we
5105 figure out what the mode of the eventual return register will
5106 actually be, and use that. */
5107 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5108
5109 /* Structures that are returned in registers are not
5110 aggregate_value_p, so we may see a PARALLEL or a REG. */
5111 if (REG_P (hard_reg))
5112 set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
5113 else
5114 {
5115 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5116 set_parm_rtl (res, gen_group_rtx (hard_reg));
5117 }
5118 }
5119
5120 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5121 result to the real return register(s). */
5122 DECL_REGISTER (res) = 1;
5123 }
5124
5125 /* Initialize rtx for parameters and local variables.
5126 In some cases this requires emitting insns. */
5127 assign_parms (subr);
5128
5129 /* If function gets a static chain arg, store it. */
5130 if (cfun->static_chain_decl)
5131 {
5132 tree parm = cfun->static_chain_decl;
5133 rtx local, chain;
5134 rtx_insn *insn;
5135 int unsignedp;
5136
5137 local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
5138 chain = targetm.calls.static_chain (current_function_decl, true);
5139
5140 set_decl_incoming_rtl (parm, chain, false);
5141 set_parm_rtl (parm, local);
5142 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5143
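/* LOCAL may have a promoted mode; if it differs from the incoming
chain's mode, convert, otherwise a plain move suffices. */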
5144 if (GET_MODE (local) != GET_MODE (chain))
5145 {
5146 convert_move (local, chain, unsignedp);
5147 insn = get_last_insn ();
5148 }
5149 else
5150 insn = emit_move_insn (local, chain);
5151
5152 /* Mark the register as eliminable, similar to parameters. */
5153 if (MEM_P (chain)
5154 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5155 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5156
5157 /* If we aren't optimizing, save the static chain onto the stack. */
5158 if (!optimize)
5159 {
5160 tree saved_static_chain_decl
5161 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5162 DECL_NAME (parm), TREE_TYPE (parm));
5163 rtx saved_static_chain_rtx
5164 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5165 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5166 emit_move_insn (saved_static_chain_rtx, chain);
5167 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5168 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5169 }
5170 }
5171
5172 /* The following was moved from init_function_start.
5173 The move was supposed to make sdb output more accurate. */
5174 /* Indicate the beginning of the function body,
5175 as opposed to parm setup. */
5176 emit_note (NOTE_INSN_FUNCTION_BEG);
5177
5178 gcc_assert (NOTE_P (get_last_insn ()));
5179
5180 parm_birth_insn = get_last_insn ();
5181
5182 /* If the function receives a non-local goto, then store the
5183 bits we need to restore the frame pointer. */
5184 if (cfun->nonlocal_goto_save_area)
5185 {
5186 tree t_save;
5187 rtx r_save;
5188
5189 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5190 gcc_assert (DECL_RTL_SET_P (var));
5191
5192 t_save = build4 (ARRAY_REF,
5193 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5194 cfun->nonlocal_goto_save_area,
5195 integer_zero_node, NULL_TREE, NULL_TREE);
5196 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5197 gcc_assert (GET_MODE (r_save) == Pmode);
5198
5199 emit_move_insn (r_save, hard_frame_pointer_rtx);
5200 update_nonlocal_goto_save_area ();
5201 }
5202
5203 if (crtl->profile)
5204 {
5205 #ifdef PROFILE_HOOK
5206 PROFILE_HOOK (current_function_funcdef_no);
5207 #endif
5208 }
5209
5210 /* If we are doing generic stack checking, the probe should go here. */
5211 if (flag_stack_check == GENERIC_STACK_CHECK)
5212 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5213 }
5214 \f
5215 void
5216 pop_dummy_function (void)
5217 {
5218 pop_cfun ();
5219 in_dummy_function = false;
5220 }
5221
5222 /* Undo the effects of init_dummy_function_start. */
5223 void
5224 expand_dummy_function_end (void)
5225 {
5226 gcc_assert (in_dummy_function);
5227
5228 /* End any sequences that failed to be closed due to syntax errors. */
5229 while (in_sequence_p ())
5230 end_sequence ();
5231
5232 /* Outside function body, can't compute type's actual size
5233 until next function's body starts. */
5234
5235 free_after_parsing (cfun);
5236 free_after_compilation (cfun);
5237 pop_dummy_function ();
5238 }
5239
5240 /* Helper for diddle_return_value. */
5241
5242 void
5243 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5244 {
5245 if (! outgoing)
5246 return;
5247
5248 if (REG_P (outgoing))
5249 (*doit) (outgoing, arg);
5250 else if (GET_CODE (outgoing) == PARALLEL)
5251 {
5252 int i;
5253
5254 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5255 {
5256 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5257
5258 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5259 (*doit) (x, arg);
5260 }
5261 }
5262 }
5263
5264 /* Call DOIT for each hard register used as a return value from
5265 the current function. */
5266
5267 void
5268 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5269 {
5270 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5271 }
5272
5273 static void
5274 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5275 {
5276 emit_clobber (reg);
5277 }
5278
5279 void
5280 clobber_return_register (void)
5281 {
5282 diddle_return_value (do_clobber_return_reg, NULL);
5283
5284 /* In case we do use a pseudo to return the value, clobber it too. */
5285 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5286 {
5287 tree decl_result = DECL_RESULT (current_function_decl);
5288 rtx decl_rtl = DECL_RTL (decl_result);
5289 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5290 {
5291 do_clobber_return_reg (decl_rtl, NULL);
5292 }
5293 }
5294 }
5295
5296 static void
5297 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5298 {
5299 emit_use (reg);
5300 }
5301
5302 static void
5303 use_return_register (void)
5304 {
5305 diddle_return_value (do_use_return_reg, NULL);
5306 }
5307
5308 /* Generate RTL for the end of the current function. */
5309
5310 void
5311 expand_function_end (void)
5312 {
5313 /* If arg_pointer_save_area was referenced only from a nested
5314 function, we will not have initialized it yet. Do that now. */
5315 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5316 get_arg_pointer_save_area ();
5317
5318 /* If we are doing generic stack checking and this function makes calls,
5319 do a stack probe at the start of the function to ensure we have enough
5320 space for another stack frame. */
5321 if (flag_stack_check == GENERIC_STACK_CHECK)
5322 {
5323 rtx_insn *insn, *seq;
5324
5325 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5326 if (CALL_P (insn))
5327 {
5328 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5329 start_sequence ();
5330 if (STACK_CHECK_MOVING_SP)
5331 anti_adjust_stack_and_probe (max_frame_size, true);
5332 else
5333 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5334 seq = get_insns ();
5335 end_sequence ();
5336 set_insn_locations (seq, prologue_location);
5337 emit_insn_before (seq, stack_check_probe_note);
5338 break;
5339 }
5340 }
5341
5342 /* End any sequences that failed to be closed due to syntax errors. */
5343 while (in_sequence_p ())
5344 end_sequence ();
5345
5346 clear_pending_stack_adjust ();
5347 do_pending_stack_adjust ();
5348
5349 /* Output a line number for the end of the function.
5350 SDB depended on this. */
5351 set_curr_insn_location (input_location);
5352
5353 /* Before the return label (if any), clobber the return
5354 registers so that they are not propagated live to the rest of
5355 the function. This can only happen with functions that drop
5356 through; if there had been a return statement, there would
5357 have either been a return rtx, or a jump to the return label.
5358
5359 We delay actual code generation until after current_function_value_rtx
5360 has been computed. */
5361 rtx_insn *clobber_after = get_last_insn ();
5362
5363 /* Output the label for the actual return from the function. */
5364 emit_label (return_label);
5365
5366 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5367 {
5368 /* Let except.c know where it should emit the call to unregister
5369 the function context for sjlj exceptions. */
5370 if (flag_exceptions)
5371 sjlj_emit_function_exit_after (get_last_insn ());
5372 }
5373
5374 /* If this is an implementation of throw, do what's necessary to
5375 communicate between __builtin_eh_return and the epilogue. */
5376 expand_eh_return ();
5377
5378 /* If stack protection is enabled for this function, check the guard. */
5379 if (crtl->stack_protect_guard
5380 && targetm.stack_protect_runtime_enabled_p ()
5381 && naked_return_label == NULL_RTX)
5382 stack_protect_epilogue ();
5383
5384 /* If scalar return value was computed in a pseudo-reg, or was a named
5385 return value that got dumped to the stack, copy that to the hard
5386 return register. */
5387 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5388 {
5389 tree decl_result = DECL_RESULT (current_function_decl);
5390 rtx decl_rtl = DECL_RTL (decl_result);
5391
5392 if (REG_P (decl_rtl)
5393 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5394 : DECL_REGISTER (decl_result))
5395 {
5396 rtx real_decl_rtl = crtl->return_rtx;
5397 complex_mode cmode;
5398
5399 /* This should be set in assign_parms. */
5400 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5401
5402 /* If this is a BLKmode structure being returned in registers,
5403 then use the mode computed in expand_return. Note that if
5404 decl_rtl is memory, then its mode may have been changed,
5405 but that of crtl->return_rtx has not. */
5406 if (GET_MODE (real_decl_rtl) == BLKmode)
5407 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5408
5409 /* If a non-BLKmode return value should be padded at the least
5410 significant end of the register, shift it left by the appropriate
5411 amount. BLKmode results are handled using the group load/store
5412 machinery. */
5413 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5414 && REG_P (real_decl_rtl)
5415 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5416 {
5417 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5418 REGNO (real_decl_rtl)),
5419 decl_rtl);
5420 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5421 }
5422 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5423 {
5424 /* If expand_function_start has created a PARALLEL for decl_rtl,
5425 move the result to the real return registers. Otherwise, do
5426 a group load from decl_rtl for a named return. */
5427 if (GET_CODE (decl_rtl) == PARALLEL)
5428 emit_group_move (real_decl_rtl, decl_rtl);
5429 else
5430 emit_group_load (real_decl_rtl, decl_rtl,
5431 TREE_TYPE (decl_result),
5432 int_size_in_bytes (TREE_TYPE (decl_result)));
5433 }
5434 /* In the case of complex integer modes smaller than a word, we'll
5435 need to generate some non-trivial bitfield insertions. Do that
5436 on a pseudo and not the hard register. */
5437 else if (GET_CODE (decl_rtl) == CONCAT
5438 && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
5439 && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
5440 {
5441 int old_generating_concat_p;
5442 rtx tmp;
5443
5444 old_generating_concat_p = generating_concat_p;
5445 generating_concat_p = 0;
5446 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5447 generating_concat_p = old_generating_concat_p;
5448
5449 emit_move_insn (tmp, decl_rtl);
5450 emit_move_insn (real_decl_rtl, tmp);
5451 }
5452 /* If a named return value dumped decl_return to memory, then
5453 we may need to re-do the PROMOTE_MODE signed/unsigned
5454 extension. */
5455 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5456 {
5457 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5458 promote_function_mode (TREE_TYPE (decl_result),
5459 GET_MODE (decl_rtl), &unsignedp,
5460 TREE_TYPE (current_function_decl), 1);
5461
5462 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5463 }
5464 else
5465 emit_move_insn (real_decl_rtl, decl_rtl);
5466 }
5467 }
5468
5469 /* If returning a structure, arrange to return the address of the value
5470 in a place where debuggers expect to find it.
5471
5472 If returning a structure PCC style,
5473 the caller also depends on this value.
5474 And cfun->returns_pcc_struct is not necessarily set. */
5475 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5476 && !targetm.calls.omit_struct_return_reg)
5477 {
5478 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5479 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5480 rtx outgoing;
5481
5482 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5483 type = TREE_TYPE (type);
5484 else
5485 value_address = XEXP (value_address, 0);
5486
5487 outgoing = targetm.calls.function_value (build_pointer_type (type),
5488 current_function_decl, true);
5489
5490 /* Mark this as a function return value so integrate will delete the
5491 assignment and USE below when inlining this function. */
5492 REG_FUNCTION_VALUE_P (outgoing) = 1;
5493
5494 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5495 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
5496 value_address = convert_memory_address (mode, value_address);
5497
5498 emit_move_insn (outgoing, value_address);
5499
5500 /* Show return register used to hold result (in this case the address
5501 of the result). */
5502 crtl->return_rtx = outgoing;
5503 }
5504
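/* Illustrative sketch only, not part of the compiler: a function that
   returns an aggregate by value.  On typical targets the caller passes a
   hidden pointer to the result slot; the code above copies that address
   into the function-value register on exit so debuggers (and PCC-style
   callers) can find the result.  The names are made up.

     struct big { long v[8]; };

     struct big
     make_big (long x)
     {
       struct big b = { { x } };
       return b;
     }
*/
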
5505 /* Emit the actual code to clobber the return register. Don't emit
5506 it if clobber_after is a barrier; in that case the previous basic
5507 block certainly doesn't fall through into the exit block. */
5508 if (!BARRIER_P (clobber_after))
5509 {
5510 start_sequence ();
5511 clobber_return_register ();
5512 rtx_insn *seq = get_insns ();
5513 end_sequence ();
5514
5515 emit_insn_after (seq, clobber_after);
5516 }
5517
5518 /* Output the label for the naked return from the function. */
5519 if (naked_return_label)
5520 emit_label (naked_return_label);
5521
5522 /* @@@ This is a kludge. We want to ensure that instructions that
5523 may trap are not moved into the epilogue by scheduling, because
5524 we don't always emit unwind information for the epilogue. */
5525 if (cfun->can_throw_non_call_exceptions
5526 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5527 emit_insn (gen_blockage ());
5528
5529 /* If stack protection is enabled for this function, check the guard. */
5530 if (crtl->stack_protect_guard
5531 && targetm.stack_protect_runtime_enabled_p ()
5532 && naked_return_label)
5533 stack_protect_epilogue ();
5534
5535 /* If we had calls to alloca, and this machine needs
5536 an accurate stack pointer to exit the function,
5537 insert some code to save and restore the stack pointer. */
5538 if (! EXIT_IGNORE_STACK
5539 && cfun->calls_alloca)
5540 {
5541 rtx tem = 0;
5542
5543 start_sequence ();
5544 emit_stack_save (SAVE_FUNCTION, &tem);
5545 rtx_insn *seq = get_insns ();
5546 end_sequence ();
5547 emit_insn_before (seq, parm_birth_insn);
5548
5549 emit_stack_restore (SAVE_FUNCTION, tem);
5550 }
5551
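/* Illustrative sketch only, not part of the compiler: a function whose
   variable-sized allocation changes the stack pointer, so on targets
   where EXIT_IGNORE_STACK is zero the save/restore emitted above is what
   lets it exit with the stack pointer it entered with.  COPY_LEN is made
   up; the __builtin_* calls are GCC built-ins.

     __SIZE_TYPE__
     copy_len (const char *s)
     {
       __SIZE_TYPE__ n = __builtin_strlen (s);
       char *buf = __builtin_alloca (n + 1);
       __builtin_memcpy (buf, s, n + 1);
       return __builtin_strlen (buf);
     }
*/
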
5552 /* ??? This should no longer be necessary now that the old 'stupid' register
5553 allocator is gone, but there are some parts of the compiler (e.g.
5554 reload_combine and sh mach_dep_reorg) that still try to compute their own
5555 lifetime info instead of using the general framework. */
5556 use_return_register ();
5557 }
5558
5559 rtx
5560 get_arg_pointer_save_area (void)
5561 {
5562 rtx ret = arg_pointer_save_area;
5563
5564 if (! ret)
5565 {
5566 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5567 arg_pointer_save_area = ret;
5568 }
5569
5570 if (! crtl->arg_pointer_save_area_init)
5571 {
5572 /* Save the arg pointer at the beginning of the function. The
5573 generated stack slot may not be a valid memory address, so we
5574 have to check it and fix it if necessary. */
5575 start_sequence ();
5576 emit_move_insn (validize_mem (copy_rtx (ret)),
5577 crtl->args.internal_arg_pointer);
5578 rtx_insn *seq = get_insns ();
5579 end_sequence ();
5580
5581 push_topmost_sequence ();
5582 emit_insn_after (seq, entry_of_function ());
5583 pop_topmost_sequence ();
5584
5585 crtl->arg_pointer_save_area_init = true;
5586 }
5587
5588 return ret;
5589 }
5590 \f
5591
5592 /* If debugging dumps are requested, dump information about how the
5593 target handled -fstack-check=clash for the prologue.
5594
5595 PROBES describes what if any probes were emitted.
5596
5597 RESIDUALS indicates if the prologue had any residual allocation
5598 (i.e. total allocation was not a multiple of PROBE_INTERVAL). */
5599
5600 void
5601 dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
5602 {
5603 if (!dump_file)
5604 return;
5605
5606 switch (probes)
5607 {
5608 case NO_PROBE_NO_FRAME:
5609 fprintf (dump_file,
5610 "Stack clash no probe no stack adjustment in prologue.\n");
5611 break;
5612 case NO_PROBE_SMALL_FRAME:
5613 fprintf (dump_file,
5614 "Stack clash no probe small stack adjustment in prologue.\n");
5615 break;
5616 case PROBE_INLINE:
5617 fprintf (dump_file, "Stack clash inline probes in prologue.\n");
5618 break;
5619 case PROBE_LOOP:
5620 fprintf (dump_file, "Stack clash probe loop in prologue.\n");
5621 break;
5622 }
5623
5624 if (residuals)
5625 fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
5626 else
5627 fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
5628
5629 if (frame_pointer_needed)
5630 fprintf (dump_file, "Stack clash frame pointer needed.\n");
5631 else
5632 fprintf (dump_file, "Stack clash no frame pointer needed.\n");
5633
5634 if (TREE_THIS_VOLATILE (cfun->decl))
5635 fprintf (dump_file,
5636 "Stack clash noreturn prologue, assuming no implicit"
5637 " probes in caller.\n");
5638 else
5639 fprintf (dump_file,
5640 "Stack clash not noreturn prologue.\n");
5641 }
5642
5643 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5644 for the first time. */
5645
5646 static void
5647 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5648 {
5649 rtx_insn *tmp;
5650 hash_table<insn_cache_hasher> *hash = *hashp;
5651
5652 if (hash == NULL)
5653 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5654
5655 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5656 {
5657 rtx *slot = hash->find_slot (tmp, INSERT);
5658 gcc_assert (*slot == NULL);
5659 *slot = tmp;
5660 }
5661 }
5662
5663 /* INSN has been duplicated or replaced as COPY, perhaps by duplicating a
5664 basic block, splitting or peepholes. If INSN is a prologue or epilogue
5665 insn, then record COPY as well. */
5666
5667 void
5668 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5669 {
5670 hash_table<insn_cache_hasher> *hash;
5671 rtx *slot;
5672
5673 hash = epilogue_insn_hash;
5674 if (!hash || !hash->find (insn))
5675 {
5676 hash = prologue_insn_hash;
5677 if (!hash || !hash->find (insn))
5678 return;
5679 }
5680
5681 slot = hash->find_slot (copy, INSERT);
5682 gcc_assert (*slot == NULL);
5683 *slot = copy;
5684 }
5685
5686 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5687 we can be running after reorg, SEQUENCE rtl is possible. */
5688
5689 static bool
5690 contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
5691 {
5692 if (hash == NULL)
5693 return false;
5694
5695 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5696 {
5697 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5698 int i;
5699 for (i = seq->len () - 1; i >= 0; i--)
5700 if (hash->find (seq->element (i)))
5701 return true;
5702 return false;
5703 }
5704
5705 return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
5706 }
5707
5708 int
5709 prologue_contains (const rtx_insn *insn)
5710 {
5711 return contains (insn, prologue_insn_hash);
5712 }
5713
5714 int
5715 epilogue_contains (const rtx_insn *insn)
5716 {
5717 return contains (insn, epilogue_insn_hash);
5718 }
5719
5720 int
5721 prologue_epilogue_contains (const rtx_insn *insn)
5722 {
5723 if (contains (insn, prologue_insn_hash))
5724 return 1;
5725 if (contains (insn, epilogue_insn_hash))
5726 return 1;
5727 return 0;
5728 }
5729
5730 void
5731 record_prologue_seq (rtx_insn *seq)
5732 {
5733 record_insns (seq, NULL, &prologue_insn_hash);
5734 }
5735
5736 void
5737 record_epilogue_seq (rtx_insn *seq)
5738 {
5739 record_insns (seq, NULL, &epilogue_insn_hash);
5740 }
5741
5742 /* Set JUMP_LABEL for a return insn. */
5743
5744 void
5745 set_return_jump_label (rtx_insn *returnjump)
5746 {
5747 rtx pat = PATTERN (returnjump);
5748 if (GET_CODE (pat) == PARALLEL)
5749 pat = XVECEXP (pat, 0, 0);
5750 if (ANY_RETURN_P (pat))
5751 JUMP_LABEL (returnjump) = pat;
5752 else
5753 JUMP_LABEL (returnjump) = ret_rtx;
5754 }
5755
5756 /* Return a sequence to be used as the split prologue for the current
5757 function, or NULL. */
5758
5759 static rtx_insn *
5760 make_split_prologue_seq (void)
5761 {
5762 if (!flag_split_stack
5763 || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
5764 return NULL;
5765
5766 start_sequence ();
5767 emit_insn (targetm.gen_split_stack_prologue ());
5768 rtx_insn *seq = get_insns ();
5769 end_sequence ();
5770
5771 record_insns (seq, NULL, &prologue_insn_hash);
5772 set_insn_locations (seq, prologue_location);
5773
5774 return seq;
5775 }
5776
5777 /* Return a sequence to be used as the prologue for the current function,
5778 or NULL. */
5779
5780 static rtx_insn *
5781 make_prologue_seq (void)
5782 {
5783 if (!targetm.have_prologue ())
5784 return NULL;
5785
5786 start_sequence ();
5787 rtx_insn *seq = targetm.gen_prologue ();
5788 emit_insn (seq);
5789
5790 /* Insert an explicit USE for the frame pointer
5791 if profiling is on and the frame pointer is required. */
5792 if (crtl->profile && frame_pointer_needed)
5793 emit_use (hard_frame_pointer_rtx);
5794
5795 /* Retain a map of the prologue insns. */
5796 record_insns (seq, NULL, &prologue_insn_hash);
5797 emit_note (NOTE_INSN_PROLOGUE_END);
5798
5799 /* Ensure that instructions are not moved into the prologue when
5800 profiling is on. The call to the profiling routine can be
5801 emitted within the live range of a call-clobbered register. */
5802 if (!targetm.profile_before_prologue () && crtl->profile)
5803 emit_insn (gen_blockage ());
5804
5805 seq = get_insns ();
5806 end_sequence ();
5807 set_insn_locations (seq, prologue_location);
5808
5809 return seq;
5810 }
5811
5812 /* Return a sequence to be used as the epilogue for the current function,
5813 or NULL. */
5814
5815 static rtx_insn *
5816 make_epilogue_seq (void)
5817 {
5818 if (!targetm.have_epilogue ())
5819 return NULL;
5820
5821 start_sequence ();
5822 emit_note (NOTE_INSN_EPILOGUE_BEG);
5823 rtx_insn *seq = targetm.gen_epilogue ();
5824 if (seq)
5825 emit_jump_insn (seq);
5826
5827 /* Retain a map of the epilogue insns. */
5828 record_insns (seq, NULL, &epilogue_insn_hash);
5829 set_insn_locations (seq, epilogue_location);
5830
5831 seq = get_insns ();
5832 rtx_insn *returnjump = get_last_insn ();
5833 end_sequence ();
5834
5835 if (JUMP_P (returnjump))
5836 set_return_jump_label (returnjump);
5837
5838 return seq;
5839 }
5840
5841
5842 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5843 this into place with notes indicating where the prologue ends and where
5844 the epilogue begins. Update the basic block information when possible.
5845
5846 Notes on epilogue placement:
5847 There are several kinds of edges to the exit block:
5848 * a single fallthru edge from LAST_BB
5849 * possibly, edges from blocks containing sibcalls
5850 * possibly, fake edges from infinite loops
5851
5852 The epilogue is always emitted on the fallthru edge from the last basic
5853 block in the function, LAST_BB, into the exit block.
5854
5855 If LAST_BB is empty except for a label, it is the target of every
5856 other basic block in the function that ends in a return. If a
5857 target has a return or simple_return pattern (possibly with
5858 conditional variants), these basic blocks can be changed so that a
5859 return insn is emitted into them, and their target is adjusted to
5860 the real exit block.
5861
5862 Notes on shrink wrapping: We implement a fairly conservative
5863 version of shrink-wrapping rather than the textbook one. We only
5864 generate a single prologue and a single epilogue. This is
5865 sufficient to catch a number of interesting cases involving early
5866 exits.
5867
5868 First, we identify the blocks that require the prologue to occur before
5869 them. These are the ones that modify a call-saved register, or reference
5870 any of the stack or frame pointer registers. To simplify things, we then
5871 mark everything reachable from these blocks as also requiring a prologue.
5872 This takes care of loops automatically, and avoids the need to examine
5873 whether MEMs reference the frame, since it is sufficient to check for
5874 occurrences of the stack or frame pointer.
5875
5876 We then compute the set of blocks for which the need for a prologue
5877 is anticipatable (borrowing terminology from the shrink-wrapping
5878 description in Muchnick's book). These are the blocks which either
5879 require a prologue themselves, or those that have only successors
5880 where the prologue is anticipatable. The prologue needs to be
5881 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5882 is not. For the moment, we ensure that only one such edge exists.
5883
5884 The epilogue is placed as described above, but we make a
5885 distinction between inserting return and simple_return patterns
5886 when modifying other blocks that end in a return. Blocks that end
5887 in a sibcall omit the sibcall_epilogue if the block is not in
5888 ANTIC. */
5889
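/* Illustrative sketch only, not part of the compiler: a function with a
   cheap early exit.  Only the second path needs a stack frame, so the
   conservative shrink-wrapping described above can sink the prologue onto
   that path and leave the early return prologue-free.  The names are made
   up.

     struct table;
     extern void fill_cache (struct table *, int *);
     extern int search (const int *, int);

     int
     lookup (struct table *t, int key)
     {
       if (t == 0)
         return -1;                // early exit, no prologue required
       int buf[32];
       fill_cache (t, buf);
       return search (buf, key);   // this path needs the full frame
     }
*/
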
5890 void
5891 thread_prologue_and_epilogue_insns (void)
5892 {
5893 df_analyze ();
5894
5895 /* Can't deal with multiple successors of the entry block at the
5896 moment. Function should always have at least one entry
5897 point. */
5898 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5899
5900 edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5901 edge orig_entry_edge = entry_edge;
5902
5903 rtx_insn *split_prologue_seq = make_split_prologue_seq ();
5904 rtx_insn *prologue_seq = make_prologue_seq ();
5905 rtx_insn *epilogue_seq = make_epilogue_seq ();
5906
5907 /* Try to perform a kind of shrink-wrapping, making sure the
5908 prologue/epilogue is emitted only around those parts of the
5909 function that require it. */
5910 try_shrink_wrapping (&entry_edge, prologue_seq);
5911
5912 /* If the target can handle splitting the prologue/epilogue into separate
5913 components, try to shrink-wrap these components separately. */
5914 try_shrink_wrapping_separate (entry_edge->dest);
5915
5916 /* If that did anything for any component we now need to generate the
5917 "main" prologue again. Because some targets require some of these
5918 to be called in a specific order (i386 requires the split prologue
5919 to be first, for example), we create all three sequences again here.
5920 If this does not work for some target, that target should not enable
5921 separate shrink-wrapping. */
5922 if (crtl->shrink_wrapped_separate)
5923 {
5924 split_prologue_seq = make_split_prologue_seq ();
5925 prologue_seq = make_prologue_seq ();
5926 epilogue_seq = make_epilogue_seq ();
5927 }
5928
5929 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5930
5931 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5932 this marker for the splits of EH_RETURN patterns, and nothing else
5933 uses the flag in the meantime. */
5934 epilogue_completed = 1;
5935
5936 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5937 some targets, these get split to a special version of the epilogue
5938 code. In order to be able to properly annotate these with unwind
5939 info, try to split them now. If we get a valid split, drop an
5940 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5941 edge e;
5942 edge_iterator ei;
5943 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5944 {
5945 rtx_insn *prev, *last, *trial;
5946
5947 if (e->flags & EDGE_FALLTHRU)
5948 continue;
5949 last = BB_END (e->src);
5950 if (!eh_returnjump_p (last))
5951 continue;
5952
5953 prev = PREV_INSN (last);
5954 trial = try_split (PATTERN (last), last, 1);
5955 if (trial == last)
5956 continue;
5957
5958 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5959 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5960 }
5961
5962 edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5963
5964 if (exit_fallthru_edge)
5965 {
5966 if (epilogue_seq)
5967 {
5968 insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
5969 commit_edge_insertions ();
5970
5971 /* The epilogue insns we inserted may cause the exit edge to no longer
5972 be fallthru. */
5973 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5974 {
5975 if (((e->flags & EDGE_FALLTHRU) != 0)
5976 && returnjump_p (BB_END (e->src)))
5977 e->flags &= ~EDGE_FALLTHRU;
5978 }
5979 }
5980 else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
5981 {
5982 /* We have a fall-through edge to the exit block, the source is not
5983 at the end of the function, and there will be an assembler epilogue
5984 at the end of the function.
5985 We can't use force_nonfallthru here, because that would try to
5986 use return. Inserting a jump 'by hand' is extremely messy, so
5987 we take advantage of cfg_layout_finalize using
5988 fixup_fallthru_exit_predecessor. */
5989 cfg_layout_initialize (0);
5990 basic_block cur_bb;
5991 FOR_EACH_BB_FN (cur_bb, cfun)
5992 if (cur_bb->index >= NUM_FIXED_BLOCKS
5993 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5994 cur_bb->aux = cur_bb->next_bb;
5995 cfg_layout_finalize ();
5996 }
5997 }
5998
5999 /* Insert the prologue. */
6000
6001 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6002
6003 if (split_prologue_seq || prologue_seq)
6004 {
6005 rtx_insn *split_prologue_insn = split_prologue_seq;
6006 if (split_prologue_seq)
6007 {
6008 while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
6009 split_prologue_insn = NEXT_INSN (split_prologue_insn);
6010 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
6011 }
6012
6013 rtx_insn *prologue_insn = prologue_seq;
6014 if (prologue_seq)
6015 {
6016 while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
6017 prologue_insn = NEXT_INSN (prologue_insn);
6018 insert_insn_on_edge (prologue_seq, entry_edge);
6019 }
6020
6021 commit_edge_insertions ();
6022
6023 /* Look for basic blocks within the prologue insns. */
6024 if (split_prologue_insn
6025 && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
6026 split_prologue_insn = NULL;
6027 if (prologue_insn
6028 && BLOCK_FOR_INSN (prologue_insn) == NULL)
6029 prologue_insn = NULL;
6030 if (split_prologue_insn || prologue_insn)
6031 {
6032 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
6033 bitmap_clear (blocks);
6034 if (split_prologue_insn)
6035 bitmap_set_bit (blocks,
6036 BLOCK_FOR_INSN (split_prologue_insn)->index);
6037 if (prologue_insn)
6038 bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
6039 find_many_sub_basic_blocks (blocks);
6040 }
6041 }
6042
6043 default_rtl_profile ();
6044
6045 /* Emit sibling epilogues before any sibling call sites. */
6046 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6047 (e = ei_safe_edge (ei));
6048 ei_next (&ei))
6049 {
6050 /* Skip those already handled, i.e. the ones that run without a prologue. */
6051 if (e->flags & EDGE_IGNORE)
6052 {
6053 e->flags &= ~EDGE_IGNORE;
6054 continue;
6055 }
6056
6057 rtx_insn *insn = BB_END (e->src);
6058
6059 if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
6060 continue;
6061
6062 if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
6063 {
6064 start_sequence ();
6065 emit_note (NOTE_INSN_EPILOGUE_BEG);
6066 emit_insn (ep_seq);
6067 rtx_insn *seq = get_insns ();
6068 end_sequence ();
6069
6070 /* Retain a map of the epilogue insns. Used in life analysis to
6071 avoid getting rid of sibcall epilogue insns. Do this before we
6072 actually emit the sequence. */
6073 record_insns (seq, NULL, &epilogue_insn_hash);
6074 set_insn_locations (seq, epilogue_location);
6075
6076 emit_insn_before (seq, insn);
6077 }
6078 }
6079
6080 if (epilogue_seq)
6081 {
6082 rtx_insn *insn, *next;
6083
6084 /* Similarly, move any line notes that appear after the epilogue.
6085 There is no need, however, to be quite so anal about the existence
6086 of such a note. Also possibly move
6087 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
6088 info generation. */
6089 for (insn = epilogue_seq; insn; insn = next)
6090 {
6091 next = NEXT_INSN (insn);
6092 if (NOTE_P (insn)
6093 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6094 reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
6095 }
6096 }
6097
6098 /* Threading the prologue and epilogue changes the artificial refs
6099 in the entry and exit blocks. */
6100 epilogue_completed = 1;
6101 df_update_entry_exit_and_calls ();
6102 }
6103
6104 /* Reposition the prologue-end and epilogue-begin notes after
6105 instruction scheduling. */
6106
6107 void
6108 reposition_prologue_and_epilogue_notes (void)
6109 {
6110 if (!targetm.have_prologue ()
6111 && !targetm.have_epilogue ()
6112 && !targetm.have_sibcall_epilogue ())
6113 return;
6114
6115 /* Since the hash table is created on demand, the fact that it is
6116 non-null is a signal that it is non-empty. */
6117 if (prologue_insn_hash != NULL)
6118 {
6119 size_t len = prologue_insn_hash->elements ();
6120 rtx_insn *insn, *last = NULL, *note = NULL;
6121
6122 /* Scan from the beginning until we reach the last prologue insn. */
6123 /* ??? While we do have the CFG intact, there are two problems:
6124 (1) The prologue can contain loops (typically probing the stack),
6125 which means that the end of the prologue isn't in the first bb.
6126 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6127 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6128 {
6129 if (NOTE_P (insn))
6130 {
6131 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6132 note = insn;
6133 }
6134 else if (contains (insn, prologue_insn_hash))
6135 {
6136 last = insn;
6137 if (--len == 0)
6138 break;
6139 }
6140 }
6141
6142 if (last)
6143 {
6144 if (note == NULL)
6145 {
6146 /* Scan forward looking for the PROLOGUE_END note. It should
6147 be right at the beginning of the block, possibly with other
6148 insn notes that got moved there. */
6149 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6150 {
6151 if (NOTE_P (note)
6152 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6153 break;
6154 }
6155 }
6156
6157 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6158 if (LABEL_P (last))
6159 last = NEXT_INSN (last);
6160 reorder_insns (note, note, last);
6161 }
6162 }
6163
6164 if (epilogue_insn_hash != NULL)
6165 {
6166 edge_iterator ei;
6167 edge e;
6168
6169 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6170 {
6171 rtx_insn *insn, *first = NULL, *note = NULL;
6172 basic_block bb = e->src;
6173
6174 /* Scan from the beginning until we reach the first epilogue insn. */
6175 FOR_BB_INSNS (bb, insn)
6176 {
6177 if (NOTE_P (insn))
6178 {
6179 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6180 {
6181 note = insn;
6182 if (first != NULL)
6183 break;
6184 }
6185 }
6186 else if (first == NULL && contains (insn, epilogue_insn_hash))
6187 {
6188 first = insn;
6189 if (note != NULL)
6190 break;
6191 }
6192 }
6193
6194 if (note)
6195 {
6196 /* If the function has a single basic block, and no real
6197 epilogue insns (e.g. sibcall with no cleanup), the
6198 epilogue note can get scheduled before the prologue
6199 note. If we have frame related prologue insns, having
6200 them scanned during the epilogue will result in a crash.
6201 In this case re-order the epilogue note to just before
6202 the last insn in the block. */
6203 if (first == NULL)
6204 first = BB_END (bb);
6205
6206 if (PREV_INSN (first) != note)
6207 reorder_insns (note, note, PREV_INSN (first));
6208 }
6209 }
6210 }
6211 }
6212
6213 /* Returns the name of the function declared by FNDECL. */
6214 const char *
6215 fndecl_name (tree fndecl)
6216 {
6217 if (fndecl == NULL)
6218 return "(nofn)";
6219 return lang_hooks.decl_printable_name (fndecl, 1);
6220 }
6221
6222 /* Returns the name of function FN. */
6223 const char *
6224 function_name (struct function *fn)
6225 {
6226 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6227 return fndecl_name (fndecl);
6228 }
6229
6230 /* Returns the name of the current function. */
6231 const char *
6232 current_function_name (void)
6233 {
6234 return function_name (cfun);
6235 }
6236 \f
6237
6238 static unsigned int
6239 rest_of_handle_check_leaf_regs (void)
6240 {
6241 #ifdef LEAF_REGISTERS
6242 crtl->uses_only_leaf_regs
6243 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6244 #endif
6245 return 0;
6246 }
6247
6248 /* Insert a TYPE into the used types hash table of CFUN. */
6249
6250 static void
6251 used_types_insert_helper (tree type, struct function *func)
6252 {
6253 if (type != NULL && func != NULL)
6254 {
6255 if (func->used_types_hash == NULL)
6256 func->used_types_hash = hash_set<tree>::create_ggc (37);
6257
6258 func->used_types_hash->add (type);
6259 }
6260 }
6261
6262 /* Given a type, insert it into the used types hash table of cfun. */
6263 void
6264 used_types_insert (tree t)
6265 {
6266 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6267 if (TYPE_NAME (t))
6268 break;
6269 else
6270 t = TREE_TYPE (t);
6271 if (TREE_CODE (t) == ERROR_MARK)
6272 return;
6273 if (TYPE_NAME (t) == NULL_TREE
6274 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6275 t = TYPE_MAIN_VARIANT (t);
6276 if (debug_info_level > DINFO_LEVEL_NONE)
6277 {
6278 if (cfun)
6279 used_types_insert_helper (t, cfun);
6280 else
6281 {
6282 /* So this might be a type referenced by a global variable.
6283 Record that type so that we can later decide to emit its
6284 debug information. */
6285 vec_safe_push (types_used_by_cur_var_decl, t);
6286 }
6287 }
6288 }
6289
6290 /* Helper to hash a struct types_used_by_vars_entry. */
6291
6292 static hashval_t
6293 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6294 {
6295 gcc_assert (entry && entry->var_decl && entry->type);
6296
6297 return iterative_hash_object (entry->type,
6298 iterative_hash_object (entry->var_decl, 0));
6299 }
6300
6301 /* Hash function of the types_used_by_vars_entry hash table. */
6302
6303 hashval_t
6304 used_type_hasher::hash (types_used_by_vars_entry *entry)
6305 {
6306 return hash_types_used_by_vars_entry (entry);
6307 }
6308
6309 /* Equality function of the types_used_by_vars_entry hash table. */
6310
6311 bool
6312 used_type_hasher::equal (types_used_by_vars_entry *e1,
6313 types_used_by_vars_entry *e2)
6314 {
6315 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6316 }
6317
6318 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6319
6320 void
6321 types_used_by_var_decl_insert (tree type, tree var_decl)
6322 {
6323 if (type != NULL && var_decl != NULL)
6324 {
6325 types_used_by_vars_entry **slot;
6326 struct types_used_by_vars_entry e;
6327 e.var_decl = var_decl;
6328 e.type = type;
6329 if (types_used_by_vars_hash == NULL)
6330 types_used_by_vars_hash
6331 = hash_table<used_type_hasher>::create_ggc (37);
6332
6333 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6334 if (*slot == NULL)
6335 {
6336 struct types_used_by_vars_entry *entry;
6337 entry = ggc_alloc<types_used_by_vars_entry> ();
6338 entry->type = type;
6339 entry->var_decl = var_decl;
6340 *slot = entry;
6341 }
6342 }
6343 }
6344
6345 namespace {
6346
6347 const pass_data pass_data_leaf_regs =
6348 {
6349 RTL_PASS, /* type */
6350 "*leaf_regs", /* name */
6351 OPTGROUP_NONE, /* optinfo_flags */
6352 TV_NONE, /* tv_id */
6353 0, /* properties_required */
6354 0, /* properties_provided */
6355 0, /* properties_destroyed */
6356 0, /* todo_flags_start */
6357 0, /* todo_flags_finish */
6358 };
6359
6360 class pass_leaf_regs : public rtl_opt_pass
6361 {
6362 public:
6363 pass_leaf_regs (gcc::context *ctxt)
6364 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6365 {}
6366
6367 /* opt_pass methods: */
6368 virtual unsigned int execute (function *)
6369 {
6370 return rest_of_handle_check_leaf_regs ();
6371 }
6372
6373 }; // class pass_leaf_regs
6374
6375 } // anon namespace
6376
6377 rtl_opt_pass *
6378 make_pass_leaf_regs (gcc::context *ctxt)
6379 {
6380 return new pass_leaf_regs (ctxt);
6381 }
6382
6383 static unsigned int
6384 rest_of_handle_thread_prologue_and_epilogue (void)
6385 {
6386 /* prepare_shrink_wrap is sensitive to the block structure of the control
6387 flow graph, so clean it up first. */
6388 if (optimize)
6389 cleanup_cfg (0);
6390
6391 /* On some machines, the prologue and epilogue code, or parts thereof,
6392 can be represented as RTL. Doing so lets us schedule insns between
6393 it and the rest of the code and also allows delayed branch
6394 scheduling to operate in the epilogue. */
6395 thread_prologue_and_epilogue_insns ();
6396
6397 /* Some non-cold blocks may now be only reachable from cold blocks.
6398 Fix that up. */
6399 fixup_partitions ();
6400
6401 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6402 see PR57320. */
6403 cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
6404
6405 /* The stack usage info is finalized during prologue expansion. */
6406 if (flag_stack_usage_info || flag_callgraph_info)
6407 output_stack_usage ();
6408
6409 return 0;
6410 }
6411
6412 /* Record a final call to CALLEE at LOCATION. */
6413
6414 void
6415 record_final_call (tree callee, location_t location)
6416 {
6417 struct callinfo_callee datum = { location, callee };
6418 vec_safe_push (cfun->su->callees, datum);
6419 }
6420
6421 /* Record a dynamic allocation made for DECL_OR_EXP. */
6422
6423 void
6424 record_dynamic_alloc (tree decl_or_exp)
6425 {
6426 struct callinfo_dalloc datum;
6427
6428 if (DECL_P (decl_or_exp))
6429 {
6430 datum.location = DECL_SOURCE_LOCATION (decl_or_exp);
6431 const char *name = lang_hooks.decl_printable_name (decl_or_exp, 2);
6432 const char *dot = strrchr (name, '.');
6433 if (dot)
6434 name = dot + 1;
6435 datum.name = ggc_strdup (name);
6436 }
6437 else
6438 {
6439 datum.location = EXPR_LOCATION (decl_or_exp);
6440 datum.name = NULL;
6441 }
6442
6443 vec_safe_push (cfun->su->dallocs, datum);
6444 }
6445
6446 namespace {
6447
6448 const pass_data pass_data_thread_prologue_and_epilogue =
6449 {
6450 RTL_PASS, /* type */
6451 "pro_and_epilogue", /* name */
6452 OPTGROUP_NONE, /* optinfo_flags */
6453 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6454 0, /* properties_required */
6455 0, /* properties_provided */
6456 0, /* properties_destroyed */
6457 0, /* todo_flags_start */
6458 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6459 };
6460
6461 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6462 {
6463 public:
6464 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6465 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6466 {}
6467
6468 /* opt_pass methods: */
6469 virtual unsigned int execute (function *)
6470 {
6471 return rest_of_handle_thread_prologue_and_epilogue ();
6472 }
6473
6474 }; // class pass_thread_prologue_and_epilogue
6475
6476 } // anon namespace
6477
6478 rtl_opt_pass *
6479 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6480 {
6481 return new pass_thread_prologue_and_epilogue (ctxt);
6482 }
6483 \f
6484
6485 /* If CONSTRAINT is a matching constraint, then return its number.
6486 Otherwise, return -1. */
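
/* For example, given the implementation below, matching_constraint_num ("0")
   returns 0, matching_constraint_num ("%2") returns 2 after skipping the '%'
   commutativity marker, and matching_constraint_num ("r") returns -1 because
   "r" is not a matching constraint.  */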
6487
6488 static int
6489 matching_constraint_num (const char *constraint)
6490 {
6491 if (*constraint == '%')
6492 constraint++;
6493
6494 if (IN_RANGE (*constraint, '0', '9'))
6495 return strtoul (constraint, NULL, 10);
6496
6497 return -1;
6498 }
6499
6500 /* This mini-pass fixes fall-out from SSA in asm statements that have
6501 in-out constraints. Say you start with
6502
6503 orig = inout;
6504 asm ("": "+mr" (inout));
6505 use (orig);
6506
6507 which is transformed very early to use explicit output and match operands:
6508
6509 orig = inout;
6510 asm ("": "=mr" (inout) : "0" (inout));
6511 use (orig);
6512
6513 Or, after SSA and copyprop,
6514
6515 asm ("": "=mr" (inout_2) : "0" (inout_1));
6516 use (inout_1);
6517
6518 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6519 they represent two separate values, so they will get different pseudo
6520 registers during expansion. Then, since the two operands need to match
6521 per the constraints, but use different pseudo registers, reload can
6522 only register a reload for these operands. But reloads can only be
6523 satisfied by hardregs, not by memory, so we need a register for this
6524 reload, just because we are presented with non-matching operands.
6525 So, even though we allow memory for this operand, no memory can be
6526 used for it, just because the two operands don't match. This can
6527 cause reload failures on register-starved targets.
6528
6529 So it's a symptom of reload not being able to use memory for reloads
6530 or, alternatively it's also a symptom of both operands not coming into
6531 reload as matching (in which case the pseudo could go to memory just
6532 fine, as the alternative allows it, and no reload would be necessary).
6533 We fix the latter problem here, by transforming
6534
6535 asm ("": "=mr" (inout_2) : "0" (inout_1));
6536
6537 back to
6538
6539 inout_2 = inout_1;
6540 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6541
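/* Illustrative sketch only, not part of the compiler: the kind of source
   the example above starts from.  After SSA the two mentions of X can
   become distinct names; this pass re-ties the "0" input to the output so
   the "+mr" operand can still be satisfied by memory.  TOUCH is made up;
   the empty asm mirrors the examples in the comment above.

     long
     touch (long x)
     {
       long orig = x;
       __asm__ ("" : "+mr" (x));   // in-out operand, register or memory
       return x + orig;
     }
*/
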
6542 static void
6543 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6544 {
6545 int i;
6546 bool changed = false;
6547 rtx op = SET_SRC (p_sets[0]);
6548 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6549 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6550 bool *output_matched = XALLOCAVEC (bool, noutputs);
6551
6552 memset (output_matched, 0, noutputs * sizeof (bool));
6553 for (i = 0; i < ninputs; i++)
6554 {
6555 rtx input, output;
6556 rtx_insn *insns;
6557 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6558 int match, j;
6559
6560 match = matching_constraint_num (constraint);
6561 if (match < 0)
6562 continue;
6563
6564 gcc_assert (match < noutputs);
6565 output = SET_DEST (p_sets[match]);
6566 input = RTVEC_ELT (inputs, i);
6567 /* Only do the transformation for pseudos. */
6568 if (! REG_P (output)
6569 || rtx_equal_p (output, input)
6570 || !(REG_P (input) || SUBREG_P (input)
6571 || MEM_P (input) || CONSTANT_P (input))
6572 || !general_operand (input, GET_MODE (output)))
6573 continue;
6574
6575 /* We can't do anything if the output is also used as input,
6576 as we're going to overwrite it. */
6577 for (j = 0; j < ninputs; j++)
6578 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6579 break;
6580 if (j != ninputs)
6581 continue;
6582
6583 /* Avoid changing the same input several times. For
6584 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6585 only change it once (to out1), rather than changing it
6586 first to out1 and afterwards to out2. */
6587 if (i > 0)
6588 {
6589 for (j = 0; j < noutputs; j++)
6590 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6591 break;
6592 if (j != noutputs)
6593 continue;
6594 }
6595 output_matched[match] = true;
6596
6597 start_sequence ();
6598 emit_move_insn (output, copy_rtx (input));
6599 insns = get_insns ();
6600 end_sequence ();
6601 emit_insn_before (insns, insn);
6602
6603 constraint = ASM_OPERANDS_OUTPUT_CONSTRAINT (SET_SRC (p_sets[match]));
6604 bool early_clobber_p = strchr (constraint, '&') != NULL;
6605
6606 /* Now replace all mentions of the input with output. We can't
6607 just replace the occurrence in inputs[i], as the register might
6608 also be used in some other input (or even in an address of an
6609 output), which would mean possibly increasing the number of
6610 inputs by one (namely 'output' in addition), which might pose
6611 a too complicated problem for reload to solve. E.g. this situation:
6612
6613 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6614
6615 Here 'input' is used in two occurrences as input (once for the
6616 input operand, once for the address in the second output operand).
6617 If we would replace only the occurrence of the input operand (to
6618 make the matching) we would be left with this:
6619
6620 output = input
6621 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6622
6623 Now we suddenly have two different input values (containing the same
6624 value, but different pseudos) where we formerly had only one.
6625 With more complicated asms this might lead to reload failures
6626 which wouldn't have happen without this pass. So, iterate over
6627 all operands and replace all occurrences of the register used.
6628
6629 However, if one or more of the 'input' uses have a non-matching
6630 constraint and the matched output operand is an early clobber
6631 operand, then do not replace the input operand, since by definition
6632 it conflicts with the output operand and cannot share the same
6633 register. See PR89313 for details. */
6634
6635 for (j = 0; j < noutputs; j++)
6636 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6637 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6638 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6639 input, output);
6640 for (j = 0; j < ninputs; j++)
6641 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6642 {
6643 if (!early_clobber_p
6644 || match == matching_constraint_num
6645 (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
6646 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6647 input, output);
6648 }
6649
6650 changed = true;
6651 }
6652
6653 if (changed)
6654 df_insn_rescan (insn);
6655 }
6656
6657 /* Add the decl D to the local_decls list of FUN. */
6658
6659 void
6660 add_local_decl (struct function *fun, tree d)
6661 {
6662 gcc_assert (VAR_P (d));
6663 vec_safe_push (fun->local_decls, d);
6664 }
6665
6666 namespace {
6667
6668 const pass_data pass_data_match_asm_constraints =
6669 {
6670 RTL_PASS, /* type */
6671 "asmcons", /* name */
6672 OPTGROUP_NONE, /* optinfo_flags */
6673 TV_NONE, /* tv_id */
6674 0, /* properties_required */
6675 0, /* properties_provided */
6676 0, /* properties_destroyed */
6677 0, /* todo_flags_start */
6678 0, /* todo_flags_finish */
6679 };
6680
6681 class pass_match_asm_constraints : public rtl_opt_pass
6682 {
6683 public:
6684 pass_match_asm_constraints (gcc::context *ctxt)
6685 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6686 {}
6687
6688 /* opt_pass methods: */
6689 virtual unsigned int execute (function *);
6690
6691 }; // class pass_match_asm_constraints
6692
6693 unsigned
6694 pass_match_asm_constraints::execute (function *fun)
6695 {
6696 basic_block bb;
6697 rtx_insn *insn;
6698 rtx pat, *p_sets;
6699 int noutputs;
6700
6701 if (!crtl->has_asm_statement)
6702 return 0;
6703
6704 df_set_flags (DF_DEFER_INSN_RESCAN);
6705 FOR_EACH_BB_FN (bb, fun)
6706 {
6707 FOR_BB_INSNS (bb, insn)
6708 {
6709 if (!INSN_P (insn))
6710 continue;
6711
6712 pat = PATTERN (insn);
6713 if (GET_CODE (pat) == PARALLEL)
6714 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6715 else if (GET_CODE (pat) == SET)
6716 p_sets = &PATTERN (insn), noutputs = 1;
6717 else
6718 continue;
6719
6720 if (GET_CODE (*p_sets) == SET
6721 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6722 match_asm_constraints_1 (insn, p_sets, noutputs);
6723 }
6724 }
6725
6726 return TODO_df_finish;
6727 }
6728
6729 } // anon namespace
6730
6731 rtl_opt_pass *
6732 make_pass_match_asm_constraints (gcc::context *ctxt)
6733 {
6734 return new pass_match_asm_constraints (ctxt);
6735 }
6736
6737
6738 #include "gt-function.h"