1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file handles the generation of rtl code from tree structure
21 at the level of the function as a whole.
22 It creates the rtl expressions for parameters and auto variables
23 and has full responsibility for allocating stack slots.
24
25 `expand_function_start' is called at the beginning of a function,
26 before the function body is parsed, and `expand_function_end' is
27 called after parsing the body.
28
29 Call `assign_stack_local' to allocate a stack slot for a local variable.
30 This is usually done during the RTL generation for the function body,
31 but it can also be done in the reload pass when a pseudo-register does
32 not get a hard register. */
33
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "rtl-error.h"
39 #include "hash-set.h"
40 #include "vec.h"
41 #include "input.h"
42 #include "alias.h"
43 #include "symtab.h"
44 #include "inchash.h"
45 #include "tree.h"
46 #include "fold-const.h"
47 #include "stor-layout.h"
48 #include "varasm.h"
49 #include "stringpool.h"
50 #include "flags.h"
51 #include "except.h"
52 #include "hashtab.h"
53 #include "hard-reg-set.h"
54 #include "function.h"
55 #include "rtl.h"
56 #include "statistics.h"
57 #include "insn-config.h"
58 #include "expmed.h"
59 #include "dojump.h"
60 #include "explow.h"
61 #include "calls.h"
62 #include "emit-rtl.h"
63 #include "stmt.h"
64 #include "expr.h"
65 #include "insn-codes.h"
66 #include "optabs.h"
67 #include "libfuncs.h"
68 #include "regs.h"
69 #include "recog.h"
70 #include "output.h"
71 #include "tm_p.h"
72 #include "langhooks.h"
73 #include "target.h"
74 #include "common/common-target.h"
75 #include "gimple-expr.h"
76 #include "gimplify.h"
77 #include "tree-pass.h"
78 #include "predict.h"
79 #include "dominance.h"
80 #include "cfg.h"
81 #include "cfgrtl.h"
82 #include "cfganal.h"
83 #include "cfgbuild.h"
84 #include "cfgcleanup.h"
85 #include "basic-block.h"
86 #include "df.h"
87 #include "params.h"
88 #include "bb-reorder.h"
89 #include "shrink-wrap.h"
90 #include "toplev.h"
91 #include "rtl-iter.h"
92 #include "tree-chkp.h"
93 #include "rtl-chkp.h"
94
95 /* So we can assign to cfun in this file. */
96 #undef cfun
97
98 #ifndef STACK_ALIGNMENT_NEEDED
99 #define STACK_ALIGNMENT_NEEDED 1
100 #endif
101
102 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
103
104 /* Round a value down to the largest multiple of the required alignment
105 that is not greater than it.  Avoid using division in case the value is
106 negative.  Assume the alignment is a power of two. */
107 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
108
109 /* Similar, but round up to the next multiple of the alignment (the
110 value itself if it is already aligned). */
111 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
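
/* For example, with ALIGN a power of two such as 16, these expand to simple
   bit masking:

     FLOOR_ROUND (37, 16)  == (37 & ~15)        == 32
     CEIL_ROUND  (37, 16)  == ((37 + 15) & ~15) == 48
     FLOOR_ROUND (-20, 16) == (-20 & ~15)       == -32

   Note that FLOOR_ROUND rounds a negative value towards more negative
   values, which is what the frame layout code below wants when
   FRAME_GROWS_DOWNWARD; signed division would round towards zero.  */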
112
113 /* Nonzero once virtual register instantiation has been done.
114 assign_stack_local uses frame_pointer_rtx when this is nonzero.
115 calls.c:emit_library_call_value_1 uses it to set up
116 post-instantiation libcalls. */
117 int virtuals_instantiated;
118
119 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
120 static GTY(()) int funcdef_no;
121
122 /* These variables hold pointers to functions to create and destroy
123 target specific, per-function data structures. */
124 struct machine_function * (*init_machine_status) (void);
125
126 /* The currently compiled function. */
127 struct function *cfun = 0;
128
129 /* These hashes record the prologue and epilogue insns. */
130
131 struct insn_cache_hasher : ggc_cache_hasher<rtx>
132 {
133 static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
134 static bool equal (rtx a, rtx b) { return a == b; }
135 };
136
137 static GTY((cache))
138 hash_table<insn_cache_hasher> *prologue_insn_hash;
139 static GTY((cache))
140 hash_table<insn_cache_hasher> *epilogue_insn_hash;
141 \f
142
143 hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
144 vec<tree, va_gc> *types_used_by_cur_var_decl;
145
146 /* Forward declarations. */
147
148 static struct temp_slot *find_temp_slot_from_address (rtx);
149 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
150 static void pad_below (struct args_size *, machine_mode, tree);
151 static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
152 static int all_blocks (tree, tree *);
153 static tree *get_block_vector (tree, int *);
154 extern tree debug_find_var_in_block_tree (tree, tree);
155 /* We always define `record_insns' even if it's not used so that we
156 can always export `prologue_epilogue_contains'. */
157 static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
158 ATTRIBUTE_UNUSED;
159 static bool contains (const_rtx, hash_table<insn_cache_hasher> *);
160 static void prepare_function_start (void);
161 static void do_clobber_return_reg (rtx, void *);
162 static void do_use_return_reg (rtx, void *);
163 \f
164 /* Stack of nested functions, used to keep track of the cfun stack.
165 It is manipulated by push_function_context and pop_function_context. */
166
167 typedef struct function *function_p;
168
169 static vec<function_p> function_context_stack;
170
171 /* Save the current context for compilation of a nested function.
172 This is called from language-specific code. */
173
174 void
175 push_function_context (void)
176 {
177 if (cfun == 0)
178 allocate_struct_function (NULL, false);
179
180 function_context_stack.safe_push (cfun);
181 set_cfun (NULL);
182 }
183
184 /* Restore the last saved context, at the end of a nested function.
185 This function is called from language-specific code. */
186
187 void
188 pop_function_context (void)
189 {
190 struct function *p = function_context_stack.pop ();
191 set_cfun (p);
192 current_function_decl = p->decl;
193
194 /* Reset variables that have known state during rtx generation. */
195 virtuals_instantiated = 0;
196 generating_concat_p = 1;
197 }
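
/* A minimal usage sketch (hypothetical front-end code, not copied from any
   particular language front end): when a nested function has to be handled
   in the middle of its containing function, the work is bracketed like

     push_function_context ();
     ... parse or otherwise process the nested function ...
     pop_function_context ();

   so that cfun and current_function_decl for the outer function are saved
   on function_context_stack and restored afterwards.  */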
198
199 /* Clear out all parts of the state in F that can safely be discarded
200 after the function has been parsed, but not compiled, to let
201 garbage collection reclaim the memory. */
202
203 void
204 free_after_parsing (struct function *f)
205 {
206 f->language = 0;
207 }
208
209 /* Clear out all parts of the state in F that can safely be discarded
210 after the function has been compiled, to let garbage collection
211 reclaim the memory. */
212
213 void
214 free_after_compilation (struct function *f)
215 {
216 prologue_insn_hash = NULL;
217 epilogue_insn_hash = NULL;
218
219 free (crtl->emit.regno_pointer_align);
220
221 memset (crtl, 0, sizeof (struct rtl_data));
222 f->eh = NULL;
223 f->machine = NULL;
224 f->cfg = NULL;
225
226 regno_reg_rtx = NULL;
227 }
228 \f
229 /* Return size needed for stack frame based on slots so far allocated.
230 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
231 the caller may have to do that. */
232
233 HOST_WIDE_INT
234 get_frame_size (void)
235 {
236 if (FRAME_GROWS_DOWNWARD)
237 return -frame_offset;
238 else
239 return frame_offset;
240 }
241
242 /* Issue an error message and return TRUE if frame OFFSET overflows in
243 the signed target pointer arithmetic for function FUNC.  Otherwise
244 return FALSE. */
245
246 bool
247 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
248 {
249 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
250
251 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
252 /* Leave room for the fixed part of the frame. */
253 - 64 * UNITS_PER_WORD)
254 {
255 error_at (DECL_SOURCE_LOCATION (func),
256 "total size of local objects too large");
257 return TRUE;
258 }
259
260 return FALSE;
261 }
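
/* Worked example, assuming a target with a 32-bit Pmode and UNITS_PER_WORD
   of 4: the limit checked above is

     (1 << 31) - 64 * 4 = 2147483648 - 256 = 2147483392 bytes,

   i.e. any total that cannot be reached with a signed 32-bit offset once a
   small fixed part of the frame has been set aside is rejected.  */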
262
263 /* Return stack slot alignment in bits for TYPE and MODE. */
264
265 static unsigned int
266 get_stack_local_alignment (tree type, machine_mode mode)
267 {
268 unsigned int alignment;
269
270 if (mode == BLKmode)
271 alignment = BIGGEST_ALIGNMENT;
272 else
273 alignment = GET_MODE_ALIGNMENT (mode);
274
275 /* Allow the front end to (possibly) increase the alignment of this
276 stack slot. */
277 if (! type)
278 type = lang_hooks.types.type_for_mode (mode, 0);
279
280 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
281 }
282
283 /* Determine whether it is possible to fit a stack slot of size SIZE and
284 alignment ALIGNMENT into an area in the stack frame that starts at
285 frame offset START and has a length of LENGTH. If so, store the frame
286 offset to be used for the stack slot in *POFFSET and return true;
287 return false otherwise. This function will extend the frame size when
288 given a start/length pair that lies at the end of the frame. */
289
290 static bool
291 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
292 HOST_WIDE_INT size, unsigned int alignment,
293 HOST_WIDE_INT *poffset)
294 {
295 HOST_WIDE_INT this_frame_offset;
296 int frame_off, frame_alignment, frame_phase;
297
298 /* Calculate how many bytes the start of local variables is off from
299 stack alignment. */
300 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
301 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
302 frame_phase = frame_off ? frame_alignment - frame_off : 0;
303
304 /* Round the frame offset to the specified alignment. */
305
306 /* We must be careful here, since FRAME_OFFSET might be negative and
307 division with a negative dividend isn't as well defined as we might
308 like. So we instead assume that ALIGNMENT is a power of two and
309 use logical operations which are unambiguous. */
310 if (FRAME_GROWS_DOWNWARD)
311 this_frame_offset
312 = (FLOOR_ROUND (start + length - size - frame_phase,
313 (unsigned HOST_WIDE_INT) alignment)
314 + frame_phase);
315 else
316 this_frame_offset
317 = (CEIL_ROUND (start - frame_phase,
318 (unsigned HOST_WIDE_INT) alignment)
319 + frame_phase);
320
321 /* See if it fits. If this space is at the edge of the frame,
322 consider extending the frame to make it fit. Our caller relies on
323 this when allocating a new slot. */
324 if (frame_offset == start && this_frame_offset < frame_offset)
325 frame_offset = this_frame_offset;
326 else if (this_frame_offset < start)
327 return false;
328 else if (start + length == frame_offset
329 && this_frame_offset + size > start + length)
330 frame_offset = this_frame_offset + size;
331 else if (this_frame_offset + size > start + length)
332 return false;
333
334 *poffset = this_frame_offset;
335 return true;
336 }
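
/* Small worked example, assuming FRAME_GROWS_DOWNWARD and a target where
   STARTING_FRAME_OFFSET is 0 (so frame_phase is 0): asking whether an
   8-byte slot with 8-byte alignment fits into a free area with START == -40
   and LENGTH == 24 (the bytes [-40, -16)) computes

     this_frame_offset = FLOOR_ROUND (-40 + 24 - 8, 8) = -24

   which lies within the area, so the call succeeds with *POFFSET == -24;
   the caller can then record the leftover bytes [-40, -24) as free space
   with add_frame_space.  */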
337
338 /* Create a new frame_space structure describing free space in the stack
339 frame beginning at START and ending at END, and chain it into the
340 function's frame_space_list. */
341
342 static void
343 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
344 {
345 struct frame_space *space = ggc_alloc<frame_space> ();
346 space->next = crtl->frame_space_list;
347 crtl->frame_space_list = space;
348 space->start = start;
349 space->length = end - start;
350 }
351
352 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
353 with machine mode MODE.
354
355 ALIGN controls the amount of alignment for the address of the slot:
356 0 means according to MODE,
357 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
358 -2 means use BITS_PER_UNIT,
359 positive specifies alignment boundary in bits.
360
361 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
362 alignment and ASLK_RECORD_PAD bit set if we should remember
363 extra space we allocated for alignment purposes. When we are
364 called from assign_stack_temp_for_type, it is not set so we don't
365 track the same stack slot in two independent lists.
366
367 We do not round to stack_boundary here. */
368
369 rtx
370 assign_stack_local_1 (machine_mode mode, HOST_WIDE_INT size,
371 int align, int kind)
372 {
373 rtx x, addr;
374 int bigend_correction = 0;
375 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
376 unsigned int alignment, alignment_in_bits;
377
378 if (align == 0)
379 {
380 alignment = get_stack_local_alignment (NULL, mode);
381 alignment /= BITS_PER_UNIT;
382 }
383 else if (align == -1)
384 {
385 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
386 size = CEIL_ROUND (size, alignment);
387 }
388 else if (align == -2)
389 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
390 else
391 alignment = align / BITS_PER_UNIT;
392
393 alignment_in_bits = alignment * BITS_PER_UNIT;
394
395 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
396 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
397 {
398 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
399 alignment = alignment_in_bits / BITS_PER_UNIT;
400 }
401
402 if (SUPPORTS_STACK_ALIGNMENT)
403 {
404 if (crtl->stack_alignment_estimated < alignment_in_bits)
405 {
406 if (!crtl->stack_realign_processed)
407 crtl->stack_alignment_estimated = alignment_in_bits;
408 else
409 {
410 /* If stack is realigned and stack alignment value
411 hasn't been finalized, it is OK not to increase
412 stack_alignment_estimated. The bigger alignment
413 requirement is recorded in stack_alignment_needed
414 below. */
415 gcc_assert (!crtl->stack_realign_finalized);
416 if (!crtl->stack_realign_needed)
417 {
418 /* It is OK to reduce the alignment as long as the
419 requested size is 0 or the estimated stack
420 alignment >= mode alignment. */
421 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
422 || size == 0
423 || (crtl->stack_alignment_estimated
424 >= GET_MODE_ALIGNMENT (mode)));
425 alignment_in_bits = crtl->stack_alignment_estimated;
426 alignment = alignment_in_bits / BITS_PER_UNIT;
427 }
428 }
429 }
430 }
431
432 if (crtl->stack_alignment_needed < alignment_in_bits)
433 crtl->stack_alignment_needed = alignment_in_bits;
434 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
435 crtl->max_used_stack_slot_alignment = alignment_in_bits;
436
437 if (mode != BLKmode || size != 0)
438 {
439 if (kind & ASLK_RECORD_PAD)
440 {
441 struct frame_space **psp;
442
443 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
444 {
445 struct frame_space *space = *psp;
446 if (!try_fit_stack_local (space->start, space->length, size,
447 alignment, &slot_offset))
448 continue;
449 *psp = space->next;
450 if (slot_offset > space->start)
451 add_frame_space (space->start, slot_offset);
452 if (slot_offset + size < space->start + space->length)
453 add_frame_space (slot_offset + size,
454 space->start + space->length);
455 goto found_space;
456 }
457 }
458 }
459 else if (!STACK_ALIGNMENT_NEEDED)
460 {
461 slot_offset = frame_offset;
462 goto found_space;
463 }
464
465 old_frame_offset = frame_offset;
466
467 if (FRAME_GROWS_DOWNWARD)
468 {
469 frame_offset -= size;
470 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
471
472 if (kind & ASLK_RECORD_PAD)
473 {
474 if (slot_offset > frame_offset)
475 add_frame_space (frame_offset, slot_offset);
476 if (slot_offset + size < old_frame_offset)
477 add_frame_space (slot_offset + size, old_frame_offset);
478 }
479 }
480 else
481 {
482 frame_offset += size;
483 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
484
485 if (kind & ASLK_RECORD_PAD)
486 {
487 if (slot_offset > old_frame_offset)
488 add_frame_space (old_frame_offset, slot_offset);
489 if (slot_offset + size < frame_offset)
490 add_frame_space (slot_offset + size, frame_offset);
491 }
492 }
493
494 found_space:
495 /* On a big-endian machine, if we are allocating more space than we will use,
496 use the least significant bytes of those that are allocated. */
497 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
498 bigend_correction = size - GET_MODE_SIZE (mode);
499
500 /* If we have already instantiated virtual registers, return the actual
501 address relative to the frame pointer. */
502 if (virtuals_instantiated)
503 addr = plus_constant (Pmode, frame_pointer_rtx,
504 trunc_int_for_mode
505 (slot_offset + bigend_correction
506 + STARTING_FRAME_OFFSET, Pmode));
507 else
508 addr = plus_constant (Pmode, virtual_stack_vars_rtx,
509 trunc_int_for_mode
510 (slot_offset + bigend_correction,
511 Pmode));
512
513 x = gen_rtx_MEM (mode, addr);
514 set_mem_align (x, alignment_in_bits);
515 MEM_NOTRAP_P (x) = 1;
516
517 stack_slot_list
518 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
519
520 if (frame_offset_overflow (frame_offset, current_function_decl))
521 frame_offset = 0;
522
523 return x;
524 }
525
526 /* Wrap up assign_stack_local_1 with ASLK_RECORD_PAD as the last parameter. */
527
528 rtx
529 assign_stack_local (machine_mode mode, HOST_WIDE_INT size, int align)
530 {
531 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
532 }
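
/* Typical uses, as a sketch (the variable names are illustrative only):

     rtx word_slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
     rtx blk_slot  = assign_stack_local (BLKmode, 64, 8 * BITS_PER_UNIT);

   The first requests a naturally aligned SImode slot; the second requests a
   64-byte BLKmode slot aligned to 8 bytes.  Both return a MEM whose address
   is based on virtual_stack_vars_rtx until the virtual registers have been
   instantiated.  */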
533 \f
534 /* In order to evaluate some expressions, such as function calls returning
535 structures in memory, we need to temporarily allocate stack locations.
536 We record each allocated temporary in the following structure.
537
538 Associated with each temporary slot is a nesting level. When we pop up
539 one level, all temporaries associated with the previous level are freed.
540 Normally, all temporaries are freed after the execution of the statement
541 in which they were created. However, if we are inside a ({...}) grouping,
542 the result may be in a temporary and hence must be preserved. If the
543 result could be in a temporary, we preserve it if we can determine which
544 one it is in. If we cannot determine which temporary may contain the
545 result, all temporaries are preserved. A temporary is preserved by
546 pretending it was allocated at the previous nesting level. */
547
548 struct GTY(()) temp_slot {
549 /* Points to next temporary slot. */
550 struct temp_slot *next;
551 /* Points to previous temporary slot. */
552 struct temp_slot *prev;
553 /* The rtx used to reference the slot. */
554 rtx slot;
555 /* The size, in units, of the slot. */
556 HOST_WIDE_INT size;
557 /* The type of the object in the slot, or zero if it doesn't correspond
558 to a type. We use this to determine whether a slot can be reused.
559 It can be reused if objects of the type of the new slot will always
560 conflict with objects of the type of the old slot. */
561 tree type;
562 /* The alignment (in bits) of the slot. */
563 unsigned int align;
564 /* Nonzero if this temporary is currently in use. */
565 char in_use;
566 /* Nesting level at which this slot is being used. */
567 int level;
568 /* The offset of the slot from the frame_pointer, including extra space
569 for alignment. This info is for combine_temp_slots. */
570 HOST_WIDE_INT base_offset;
571 /* The size of the slot, including extra space for alignment. This
572 info is for combine_temp_slots. */
573 HOST_WIDE_INT full_size;
574 };
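
/* The nesting-level machinery is normally driven like this (a sketch of the
   usual calling pattern, not code copied from any particular caller):

     push_temp_slots ();
     rtx tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode));
     ... emit code that stores into and reads from TMP ...
     preserve_temp_slots (result_rtx);
     pop_temp_slots ();

   where the preserve_temp_slots call is only needed if RESULT_RTX might
   live in TMP.  Slots that are not preserved become available for reuse as
   soon as the level is popped; preserve_temp_slots moves the matching slot
   (or, when it cannot tell which slot matches, every slot at the current
   level) up one level so that it survives the pop.  */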
575
576 /* Entry for the below hash table. */
577 struct GTY((for_user)) temp_slot_address_entry {
578 hashval_t hash;
579 rtx address;
580 struct temp_slot *temp_slot;
581 };
582
583 struct temp_address_hasher : ggc_hasher<temp_slot_address_entry *>
584 {
585 static hashval_t hash (temp_slot_address_entry *);
586 static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
587 };
588
589 /* A table of addresses that represent a stack slot. The table is a mapping
590 from address RTXen to a temp slot. */
591 static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
592 static size_t n_temp_slots_in_use;
593
594 /* Removes temporary slot TEMP from LIST. */
595
596 static void
597 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
598 {
599 if (temp->next)
600 temp->next->prev = temp->prev;
601 if (temp->prev)
602 temp->prev->next = temp->next;
603 else
604 *list = temp->next;
605
606 temp->prev = temp->next = NULL;
607 }
608
609 /* Inserts temporary slot TEMP into LIST. */
610
611 static void
612 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
613 {
614 temp->next = *list;
615 if (*list)
616 (*list)->prev = temp;
617 temp->prev = NULL;
618 *list = temp;
619 }
620
621 /* Returns the list of used temp slots at LEVEL. */
622
623 static struct temp_slot **
624 temp_slots_at_level (int level)
625 {
626 if (level >= (int) vec_safe_length (used_temp_slots))
627 vec_safe_grow_cleared (used_temp_slots, level + 1);
628
629 return &(*used_temp_slots)[level];
630 }
631
632 /* Returns the maximal temporary slot level. */
633
634 static int
635 max_slot_level (void)
636 {
637 if (!used_temp_slots)
638 return -1;
639
640 return used_temp_slots->length () - 1;
641 }
642
643 /* Moves temporary slot TEMP to LEVEL. */
644
645 static void
646 move_slot_to_level (struct temp_slot *temp, int level)
647 {
648 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
649 insert_slot_to_list (temp, temp_slots_at_level (level));
650 temp->level = level;
651 }
652
653 /* Make temporary slot TEMP available. */
654
655 static void
656 make_slot_available (struct temp_slot *temp)
657 {
658 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
659 insert_slot_to_list (temp, &avail_temp_slots);
660 temp->in_use = 0;
661 temp->level = -1;
662 n_temp_slots_in_use--;
663 }
664
665 /* Compute the hash value for an address -> temp slot mapping.
666 The value is cached on the mapping entry. */
667 static hashval_t
668 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
669 {
670 int do_not_record = 0;
671 return hash_rtx (t->address, GET_MODE (t->address),
672 &do_not_record, NULL, false);
673 }
674
675 /* Return the hash value for an address -> temp slot mapping. */
676 hashval_t
677 temp_address_hasher::hash (temp_slot_address_entry *t)
678 {
679 return t->hash;
680 }
681
682 /* Compare two address -> temp slot mapping entries. */
683 bool
684 temp_address_hasher::equal (temp_slot_address_entry *t1,
685 temp_slot_address_entry *t2)
686 {
687 return exp_equiv_p (t1->address, t2->address, 0, true);
688 }
689
690 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
691 static void
692 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
693 {
694 struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
695 t->address = address;
696 t->temp_slot = temp_slot;
697 t->hash = temp_slot_address_compute_hash (t);
698 *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
699 }
700
701 /* Remove an address -> temp slot mapping entry if the temp slot is
702 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
703 int
704 remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
705 {
706 const struct temp_slot_address_entry *t = *slot;
707 if (! t->temp_slot->in_use)
708 temp_slot_address_table->clear_slot (slot);
709 return 1;
710 }
711
712 /* Remove all mappings of addresses to unused temp slots. */
713 static void
714 remove_unused_temp_slot_addresses (void)
715 {
716 /* Use quicker clearing if there aren't any active temp slots. */
717 if (n_temp_slots_in_use)
718 temp_slot_address_table->traverse
719 <void *, remove_unused_temp_slot_addresses_1> (NULL);
720 else
721 temp_slot_address_table->empty ();
722 }
723
724 /* Find the temp slot corresponding to the object at address X. */
725
726 static struct temp_slot *
727 find_temp_slot_from_address (rtx x)
728 {
729 struct temp_slot *p;
730 struct temp_slot_address_entry tmp, *t;
731
732 /* First try the easy way:
733 See if X exists in the address -> temp slot mapping. */
734 tmp.address = x;
735 tmp.temp_slot = NULL;
736 tmp.hash = temp_slot_address_compute_hash (&tmp);
737 t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
738 if (t)
739 return t->temp_slot;
740
741 /* If we have a sum involving a register, see if it points to a temp
742 slot. */
743 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
744 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
745 return p;
746 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
747 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
748 return p;
749
750 /* Last resort: Address is a virtual stack var address. */
751 if (GET_CODE (x) == PLUS
752 && XEXP (x, 0) == virtual_stack_vars_rtx
753 && CONST_INT_P (XEXP (x, 1)))
754 {
755 int i;
756 for (i = max_slot_level (); i >= 0; i--)
757 for (p = *temp_slots_at_level (i); p; p = p->next)
758 {
759 if (INTVAL (XEXP (x, 1)) >= p->base_offset
760 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
761 return p;
762 }
763 }
764
765 return NULL;
766 }
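
/* For instance, a MEM created by assign_stack_temp has its address recorded
   in the hash table, so looking that exact address up takes the first, fast
   path.  An address such as (plus (reg P) (const_int 8)), where pseudo P is
   known to point into a temp slot, is found through the recursive PLUS
   case, and (plus virtual_stack_vars_rtx (const_int -16)) falls back to the
   scan of the slots' base_offset/full_size ranges.  */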
767 \f
768 /* Allocate a temporary stack slot and record it for possible later
769 reuse.
770
771 MODE is the machine mode to be given to the returned rtx.
772
773 SIZE is the size in units of the space required. We do no rounding here
774 since assign_stack_local will do any required rounding.
775
776 TYPE is the type that will be used for the stack slot. */
777
778 rtx
779 assign_stack_temp_for_type (machine_mode mode, HOST_WIDE_INT size,
780 tree type)
781 {
782 unsigned int align;
783 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
784 rtx slot;
785
786 /* If SIZE is -1 it means that somebody tried to allocate a temporary
787 of a variable size. */
788 gcc_assert (size != -1);
789
790 align = get_stack_local_alignment (type, mode);
791
792 /* Try to find an available, already-allocated temporary of the proper
793 mode which meets the size and alignment requirements. Choose the
794 smallest one with the closest alignment.
795
796 If assign_stack_temp is called outside of the tree->rtl expansion,
797 we cannot reuse the stack slots (that may still refer to
798 VIRTUAL_STACK_VARS_REGNUM). */
799 if (!virtuals_instantiated)
800 {
801 for (p = avail_temp_slots; p; p = p->next)
802 {
803 if (p->align >= align && p->size >= size
804 && GET_MODE (p->slot) == mode
805 && objects_must_conflict_p (p->type, type)
806 && (best_p == 0 || best_p->size > p->size
807 || (best_p->size == p->size && best_p->align > p->align)))
808 {
809 if (p->align == align && p->size == size)
810 {
811 selected = p;
812 cut_slot_from_list (selected, &avail_temp_slots);
813 best_p = 0;
814 break;
815 }
816 best_p = p;
817 }
818 }
819 }
820
821 /* Make our best, if any, the one to use. */
822 if (best_p)
823 {
824 selected = best_p;
825 cut_slot_from_list (selected, &avail_temp_slots);
826
827 /* If there are enough aligned bytes left over, make them into a new
828 temp_slot so that the extra bytes don't get wasted. Do this only
829 for BLKmode slots, so that we can be sure of the alignment. */
830 if (GET_MODE (best_p->slot) == BLKmode)
831 {
832 int alignment = best_p->align / BITS_PER_UNIT;
833 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
834
835 if (best_p->size - rounded_size >= alignment)
836 {
837 p = ggc_alloc<temp_slot> ();
838 p->in_use = 0;
839 p->size = best_p->size - rounded_size;
840 p->base_offset = best_p->base_offset + rounded_size;
841 p->full_size = best_p->full_size - rounded_size;
842 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
843 p->align = best_p->align;
844 p->type = best_p->type;
845 insert_slot_to_list (p, &avail_temp_slots);
846
847 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
848 stack_slot_list);
849
850 best_p->size = rounded_size;
851 best_p->full_size = rounded_size;
852 }
853 }
854 }
855
856 /* If we still didn't find one, make a new temporary. */
857 if (selected == 0)
858 {
859 HOST_WIDE_INT frame_offset_old = frame_offset;
860
861 p = ggc_alloc<temp_slot> ();
862
863 /* We are passing an explicit alignment request to assign_stack_local.
864 One side effect of that is assign_stack_local will not round SIZE
865 to ensure the frame offset remains suitably aligned.
866
867 So for requests which depended on the rounding of SIZE, we go ahead
868 and round it now. We also make sure ALIGNMENT is at least
869 BIGGEST_ALIGNMENT. */
870 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
871 p->slot = assign_stack_local_1 (mode,
872 (mode == BLKmode
873 ? CEIL_ROUND (size,
874 (int) align
875 / BITS_PER_UNIT)
876 : size),
877 align, 0);
878
879 p->align = align;
880
881 /* The following slot size computation is necessary because we don't
882 know the actual size of the temporary slot until assign_stack_local
883 has performed all the frame alignment and size rounding for the
884 requested temporary. Note that extra space added for alignment
885 can be either above or below this stack slot depending on which
886 way the frame grows. We include the extra space if and only if it
887 is above this slot. */
888 if (FRAME_GROWS_DOWNWARD)
889 p->size = frame_offset_old - frame_offset;
890 else
891 p->size = size;
892
893 /* Now define the fields used by combine_temp_slots. */
894 if (FRAME_GROWS_DOWNWARD)
895 {
896 p->base_offset = frame_offset;
897 p->full_size = frame_offset_old - frame_offset;
898 }
899 else
900 {
901 p->base_offset = frame_offset_old;
902 p->full_size = frame_offset - frame_offset_old;
903 }
904
905 selected = p;
906 }
907
908 p = selected;
909 p->in_use = 1;
910 p->type = type;
911 p->level = temp_slot_level;
912 n_temp_slots_in_use++;
913
914 pp = temp_slots_at_level (p->level);
915 insert_slot_to_list (p, pp);
916 insert_temp_slot_address (XEXP (p->slot, 0), p);
917
918 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
919 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
920 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
921
922 /* If we know the alias set for the memory that will be used, use
923 it. If there's no TYPE, then we don't know anything about the
924 alias set for the memory. */
925 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
926 set_mem_align (slot, align);
927
928 /* If a type is specified, set the relevant flags. */
929 if (type != 0)
930 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
931 MEM_NOTRAP_P (slot) = 1;
932
933 return slot;
934 }
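
/* Worked example of the BLKmode splitting above: suppose the best available
   slot is a 64-byte BLKmode slot with 128-bit alignment and the new request
   is for 20 bytes.  With the usual 8-bit BITS_PER_UNIT,

     alignment    = 128 / 8 = 16 bytes
     rounded_size = CEIL_ROUND (20, 16) = 32 bytes

   and since 64 - 32 >= 16, the trailing 32 bytes are split off into a fresh
   temp_slot (at base_offset + 32) and returned to avail_temp_slots, while
   the reused slot is shrunk to 32 bytes.  */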
935
936 /* Allocate a temporary stack slot and record it for possible later
937 reuse.  The two arguments are the same as the first two of the preceding function. */
938
939 rtx
940 assign_stack_temp (machine_mode mode, HOST_WIDE_INT size)
941 {
942 return assign_stack_temp_for_type (mode, size, NULL_TREE);
943 }
944 \f
945 /* Assign a temporary.
946 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
947 and so that decl should be used in error messages.  In either case, we
948 allocate a temporary of the given type.
949 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
950 it is 0 if a register is OK.
951 DONT_PROMOTE is 1 if we should not promote values in register
952 to wider modes. */
953
954 rtx
955 assign_temp (tree type_or_decl, int memory_required,
956 int dont_promote ATTRIBUTE_UNUSED)
957 {
958 tree type, decl;
959 machine_mode mode;
960 #ifdef PROMOTE_MODE
961 int unsignedp;
962 #endif
963
964 if (DECL_P (type_or_decl))
965 decl = type_or_decl, type = TREE_TYPE (decl);
966 else
967 decl = NULL, type = type_or_decl;
968
969 mode = TYPE_MODE (type);
970 #ifdef PROMOTE_MODE
971 unsignedp = TYPE_UNSIGNED (type);
972 #endif
973
974 if (mode == BLKmode || memory_required)
975 {
976 HOST_WIDE_INT size = int_size_in_bytes (type);
977 rtx tmp;
978
979 /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
980 problems with allocating the stack space. */
981 if (size == 0)
982 size = 1;
983
984 /* Unfortunately, we don't yet know how to allocate variable-sized
985 temporaries. However, sometimes we can find a fixed upper limit on
986 the size, so try that instead. */
987 else if (size == -1)
988 size = max_int_size_in_bytes (type);
989
990 /* The size of the temporary may be too large to fit into an integer. */
991 /* ??? Not sure this should happen except for user silliness, so limit
992 this to things that aren't compiler-generated temporaries. The
993 rest of the time we'll die in assign_stack_temp_for_type. */
994 if (decl && size == -1
995 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
996 {
997 error ("size of variable %q+D is too large", decl);
998 size = 1;
999 }
1000
1001 tmp = assign_stack_temp_for_type (mode, size, type);
1002 return tmp;
1003 }
1004
1005 #ifdef PROMOTE_MODE
1006 if (! dont_promote)
1007 mode = promote_mode (type, mode, &unsignedp);
1008 #endif
1009
1010 return gen_reg_rtx (mode);
1011 }
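
/* A sketch of typical uses (illustrative only, TYPE being some tree type):

     rtx t1 = assign_temp (type, 1, 0);
     rtx t2 = assign_temp (type, 0, 0);

   T1 is guaranteed to be addressable stack memory because MEMORY_REQUIRED
   is 1; T2 may come back as a pseudo register when the type's mode is not
   BLKmode, possibly promoted to a wider mode on PROMOTE_MODE targets.  */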
1012 \f
1013 /* Combine temporary stack slots which are adjacent on the stack.
1014
1015 This allows for better use of already allocated stack space. This is only
1016 done for BLKmode slots because we can be sure that we won't have alignment
1017 problems in this case. */
1018
1019 static void
1020 combine_temp_slots (void)
1021 {
1022 struct temp_slot *p, *q, *next, *next_q;
1023 int num_slots;
1024
1025 /* We can't combine slots, because the information about which slot
1026 is in which alias set will be lost. */
1027 if (flag_strict_aliasing)
1028 return;
1029
1030 /* If there are a lot of temp slots, don't do anything unless we are
1031 optimizing at high levels. */
1032 if (! flag_expensive_optimizations)
1033 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1034 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1035 return;
1036
1037 for (p = avail_temp_slots; p; p = next)
1038 {
1039 int delete_p = 0;
1040
1041 next = p->next;
1042
1043 if (GET_MODE (p->slot) != BLKmode)
1044 continue;
1045
1046 for (q = p->next; q; q = next_q)
1047 {
1048 int delete_q = 0;
1049
1050 next_q = q->next;
1051
1052 if (GET_MODE (q->slot) != BLKmode)
1053 continue;
1054
1055 if (p->base_offset + p->full_size == q->base_offset)
1056 {
1057 /* Q comes after P; combine Q into P. */
1058 p->size += q->size;
1059 p->full_size += q->full_size;
1060 delete_q = 1;
1061 }
1062 else if (q->base_offset + q->full_size == p->base_offset)
1063 {
1064 /* P comes after Q; combine P into Q. */
1065 q->size += p->size;
1066 q->full_size += p->full_size;
1067 delete_p = 1;
1068 break;
1069 }
1070 if (delete_q)
1071 cut_slot_from_list (q, &avail_temp_slots);
1072 }
1073
1074 /* Either delete P or advance past it. */
1075 if (delete_p)
1076 cut_slot_from_list (p, &avail_temp_slots);
1077 }
1078 }
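
/* Small example of the merging above: two free BLKmode slots P and Q with

     P: base_offset = -64, full_size = 32
     Q: base_offset = -32, full_size = 32

   satisfy P->base_offset + P->full_size == Q->base_offset, so Q is folded
   into P, leaving a single free slot covering 64 bytes that a later, larger
   request can reuse.  */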
1079 \f
1080 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1081 slot that previously was known by OLD_RTX. */
1082
1083 void
1084 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1085 {
1086 struct temp_slot *p;
1087
1088 if (rtx_equal_p (old_rtx, new_rtx))
1089 return;
1090
1091 p = find_temp_slot_from_address (old_rtx);
1092
1093 /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1094 NEW_RTX is a register, see if one operand of the PLUS is a
1095 temporary location.  If so, NEW_RTX points into it.  Otherwise,
1096 if both OLD_RTX and NEW_RTX are a PLUS, see if there is a register
1097 in common between them.  If so, try a recursive call on those
1098 values. */
1099 if (p == 0)
1100 {
1101 if (GET_CODE (old_rtx) != PLUS)
1102 return;
1103
1104 if (REG_P (new_rtx))
1105 {
1106 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1107 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1108 return;
1109 }
1110 else if (GET_CODE (new_rtx) != PLUS)
1111 return;
1112
1113 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1114 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1115 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1116 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1117 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1118 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1119 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1120 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1121
1122 return;
1123 }
1124
1125 /* Otherwise add an alias for the temp's address. */
1126 insert_temp_slot_address (new_rtx, p);
1127 }
1128
1129 /* If X could be a reference to a temporary slot, mark that slot as
1130 belonging to the level one higher than the current level.  If X
1131 matched one of our slots, just mark that one. Otherwise, we can't
1132 easily predict which it is, so upgrade all of them.
1133
1134 This is called when an ({...}) construct occurs and a statement
1135 returns a value in memory. */
1136
1137 void
1138 preserve_temp_slots (rtx x)
1139 {
1140 struct temp_slot *p = 0, *next;
1141
1142 if (x == 0)
1143 return;
1144
1145 /* If X is a register that is being used as a pointer, see if we have
1146 a temporary slot we know it points to. */
1147 if (REG_P (x) && REG_POINTER (x))
1148 p = find_temp_slot_from_address (x);
1149
1150 /* If X is not in memory or is at a constant address, it cannot be in
1151 a temporary slot. */
1152 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1153 return;
1154
1155 /* First see if we can find a match. */
1156 if (p == 0)
1157 p = find_temp_slot_from_address (XEXP (x, 0));
1158
1159 if (p != 0)
1160 {
1161 if (p->level == temp_slot_level)
1162 move_slot_to_level (p, temp_slot_level - 1);
1163 return;
1164 }
1165
1166 /* Otherwise, preserve all non-kept slots at this level. */
1167 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1168 {
1169 next = p->next;
1170 move_slot_to_level (p, temp_slot_level - 1);
1171 }
1172 }
1173
1174 /* Free all temporaries used so far. This is normally called at the
1175 end of generating code for a statement. */
1176
1177 void
1178 free_temp_slots (void)
1179 {
1180 struct temp_slot *p, *next;
1181 bool some_available = false;
1182
1183 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1184 {
1185 next = p->next;
1186 make_slot_available (p);
1187 some_available = true;
1188 }
1189
1190 if (some_available)
1191 {
1192 remove_unused_temp_slot_addresses ();
1193 combine_temp_slots ();
1194 }
1195 }
1196
1197 /* Push deeper into the nesting level for stack temporaries. */
1198
1199 void
1200 push_temp_slots (void)
1201 {
1202 temp_slot_level++;
1203 }
1204
1205 /* Pop a temporary nesting level. All slots in use in the current level
1206 are freed. */
1207
1208 void
1209 pop_temp_slots (void)
1210 {
1211 free_temp_slots ();
1212 temp_slot_level--;
1213 }
1214
1215 /* Initialize temporary slots. */
1216
1217 void
1218 init_temp_slots (void)
1219 {
1220 /* We have not allocated any temporaries yet. */
1221 avail_temp_slots = 0;
1222 vec_alloc (used_temp_slots, 0);
1223 temp_slot_level = 0;
1224 n_temp_slots_in_use = 0;
1225
1226 /* Set up the table to map addresses to temp slots. */
1227 if (! temp_slot_address_table)
1228 temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
1229 else
1230 temp_slot_address_table->empty ();
1231 }
1232 \f
1233 /* Functions and data structures to keep track of the values hard regs
1234 had at the start of the function. */
1235
1236 /* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
1237 and has_hard_reg_initial_val. */
1238 typedef struct GTY(()) initial_value_pair {
1239 rtx hard_reg;
1240 rtx pseudo;
1241 } initial_value_pair;
1242 /* ??? This could be a VEC but there is currently no way to define an
1243 opaque VEC type. This could be worked around by defining struct
1244 initial_value_pair in function.h. */
1245 typedef struct GTY(()) initial_value_struct {
1246 int num_entries;
1247 int max_entries;
1248 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
1249 } initial_value_struct;
1250
1251 /* If a pseudo represents an initial hard reg (or expression), return
1252 it, else return NULL_RTX. */
1253
1254 rtx
1255 get_hard_reg_initial_reg (rtx reg)
1256 {
1257 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1258 int i;
1259
1260 if (ivs == 0)
1261 return NULL_RTX;
1262
1263 for (i = 0; i < ivs->num_entries; i++)
1264 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1265 return ivs->entries[i].hard_reg;
1266
1267 return NULL_RTX;
1268 }
1269
1270 /* Make sure that there's a pseudo register of mode MODE that stores the
1271 initial value of hard register REGNO. Return an rtx for such a pseudo. */
1272
1273 rtx
1274 get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1275 {
1276 struct initial_value_struct *ivs;
1277 rtx rv;
1278
1279 rv = has_hard_reg_initial_val (mode, regno);
1280 if (rv)
1281 return rv;
1282
1283 ivs = crtl->hard_reg_initial_vals;
1284 if (ivs == 0)
1285 {
1286 ivs = ggc_alloc<initial_value_struct> ();
1287 ivs->num_entries = 0;
1288 ivs->max_entries = 5;
1289 ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
1290 crtl->hard_reg_initial_vals = ivs;
1291 }
1292
1293 if (ivs->num_entries >= ivs->max_entries)
1294 {
1295 ivs->max_entries += 5;
1296 ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
1297 ivs->max_entries);
1298 }
1299
1300 ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
1301 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);
1302
1303 return ivs->entries[ivs->num_entries++].pseudo;
1304 }
1305
1306 /* See if get_hard_reg_initial_val has been used to create a pseudo
1307 for the initial value of hard register REGNO in mode MODE. Return
1308 the associated pseudo if so, otherwise return NULL. */
1309
1310 rtx
1311 has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
1312 {
1313 struct initial_value_struct *ivs;
1314 int i;
1315
1316 ivs = crtl->hard_reg_initial_vals;
1317 if (ivs != 0)
1318 for (i = 0; i < ivs->num_entries; i++)
1319 if (GET_MODE (ivs->entries[i].hard_reg) == mode
1320 && REGNO (ivs->entries[i].hard_reg) == regno)
1321 return ivs->entries[i].pseudo;
1322
1323 return NULL_RTX;
1324 }
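
/* A usage sketch (REGNO_OF_INTEREST stands for whatever hard register a
   backend or builtin needs the entry value of; the names are illustrative):

     rtx entry_val = get_hard_reg_initial_val (Pmode, REGNO_OF_INTEREST);
     emit_move_insn (target, entry_val);

   The first call creates (or reuses) a pseudo holding the value the hard
   register had on entry to the function; emit_initial_value_sets later
   emits the actual copies from the hard registers into those pseudos at the
   function's entry point.  */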
1325
1326 unsigned int
1327 emit_initial_value_sets (void)
1328 {
1329 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1330 int i;
1331 rtx_insn *seq;
1332
1333 if (ivs == 0)
1334 return 0;
1335
1336 start_sequence ();
1337 for (i = 0; i < ivs->num_entries; i++)
1338 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1339 seq = get_insns ();
1340 end_sequence ();
1341
1342 emit_insn_at_entry (seq);
1343 return 0;
1344 }
1345
1346 /* Store the hard reg and pseudo reg of initial-values pair entry I in
1347 *HREG and *PREG.  Return TRUE if I is a valid entry, FALSE otherwise. */
1348 bool
1349 initial_value_entry (int i, rtx *hreg, rtx *preg)
1350 {
1351 struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
1352 if (!ivs || i >= ivs->num_entries)
1353 return false;
1354
1355 *hreg = ivs->entries[i].hard_reg;
1356 *preg = ivs->entries[i].pseudo;
1357 return true;
1358 }
1359 \f
1360 /* These routines are responsible for converting virtual register references
1361 to the actual hard register references once RTL generation is complete.
1362
1363 The following four variables are used for communication between the
1364 routines. They contain the offsets of the virtual registers from their
1365 respective hard registers. */
1366
1367 static int in_arg_offset;
1368 static int var_offset;
1369 static int dynamic_offset;
1370 static int out_arg_offset;
1371 static int cfa_offset;
1372
1373 /* In most machines, the stack pointer register is equivalent to the bottom
1374 of the stack. */
1375
1376 #ifndef STACK_POINTER_OFFSET
1377 #define STACK_POINTER_OFFSET 0
1378 #endif
1379
1380 #if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
1381 #define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
1382 #endif
1383
1384 /* If not defined, pick an appropriate default for the offset of dynamically
1385 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1386 INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1387
1388 #ifndef STACK_DYNAMIC_OFFSET
1389
1390 /* The bottom of the stack points to the actual arguments. If
1391 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1392 parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1393 stack space for register parameters is not pushed by the caller, but
1394 rather part of the fixed stack areas and hence not included in
1395 `crtl->outgoing_args_size'. Nevertheless, we must allow
1396 for it when allocating stack dynamic objects. */
1397
1398 #ifdef INCOMING_REG_PARM_STACK_SPACE
1399 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1400 ((ACCUMULATE_OUTGOING_ARGS \
1401 ? (crtl->outgoing_args_size \
1402 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1403 : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
1404 : 0) + (STACK_POINTER_OFFSET))
1405 #else
1406 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1407 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1408 + (STACK_POINTER_OFFSET))
1409 #endif
1410 #endif
1411
1412 \f
1413 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1414 is a virtual register, return the equivalent hard register and set the
1415 offset indirectly through the pointer. Otherwise, return 0. */
1416
1417 static rtx
1418 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1419 {
1420 rtx new_rtx;
1421 HOST_WIDE_INT offset;
1422
1423 if (x == virtual_incoming_args_rtx)
1424 {
1425 if (stack_realign_drap)
1426 {
1427 /* Replace virtual_incoming_args_rtx with internal arg
1428 pointer if DRAP is used to realign stack. */
1429 new_rtx = crtl->args.internal_arg_pointer;
1430 offset = 0;
1431 }
1432 else
1433 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1434 }
1435 else if (x == virtual_stack_vars_rtx)
1436 new_rtx = frame_pointer_rtx, offset = var_offset;
1437 else if (x == virtual_stack_dynamic_rtx)
1438 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1439 else if (x == virtual_outgoing_args_rtx)
1440 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1441 else if (x == virtual_cfa_rtx)
1442 {
1443 #ifdef FRAME_POINTER_CFA_OFFSET
1444 new_rtx = frame_pointer_rtx;
1445 #else
1446 new_rtx = arg_pointer_rtx;
1447 #endif
1448 offset = cfa_offset;
1449 }
1450 else if (x == virtual_preferred_stack_boundary_rtx)
1451 {
1452 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1453 offset = 0;
1454 }
1455 else
1456 return NULL_RTX;
1457
1458 *poffset = offset;
1459 return new_rtx;
1460 }
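
/* Concrete example (the offsets are target- and frame-specific, so the
   numbers are illustrative only): once the offsets have been computed, an
   occurrence of

     (plus virtual_stack_vars_rtx (const_int 8))

   is rewritten by the callers of this function into

     (plus frame_pointer_rtx (const_int <var_offset + 8>))

   and, likewise, virtual_outgoing_args_rtx becomes stack_pointer_rtx
   displaced by out_arg_offset.  */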
1461
1462 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1463 registers present inside of *LOC. The expression is simplified,
1464 as much as possible, but is not to be considered "valid" in any sense
1465 implied by the target. Return true if any change is made. */
1466
1467 static bool
1468 instantiate_virtual_regs_in_rtx (rtx *loc)
1469 {
1470 if (!*loc)
1471 return false;
1472 bool changed = false;
1473 subrtx_ptr_iterator::array_type array;
1474 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
1475 {
1476 rtx *loc = *iter;
1477 if (rtx x = *loc)
1478 {
1479 rtx new_rtx;
1480 HOST_WIDE_INT offset;
1481 switch (GET_CODE (x))
1482 {
1483 case REG:
1484 new_rtx = instantiate_new_reg (x, &offset);
1485 if (new_rtx)
1486 {
1487 *loc = plus_constant (GET_MODE (x), new_rtx, offset);
1488 changed = true;
1489 }
1490 iter.skip_subrtxes ();
1491 break;
1492
1493 case PLUS:
1494 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1495 if (new_rtx)
1496 {
1497 XEXP (x, 0) = new_rtx;
1498 *loc = plus_constant (GET_MODE (x), x, offset, true);
1499 changed = true;
1500 iter.skip_subrtxes ();
1501 break;
1502 }
1503
1504 /* FIXME -- from old code */
1505 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1506 we can commute the PLUS and SUBREG because pointers into the
1507 frame are well-behaved. */
1508 break;
1509
1510 default:
1511 break;
1512 }
1513 }
1514 }
1515 return changed;
1516 }
1517
1518 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1519 matches the predicate for insn CODE operand OPERAND. */
1520
1521 static int
1522 safe_insn_predicate (int code, int operand, rtx x)
1523 {
1524 return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
1525 }
1526
1527 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1528 registers present inside of insn. The result will be a valid insn. */
1529
1530 static void
1531 instantiate_virtual_regs_in_insn (rtx_insn *insn)
1532 {
1533 HOST_WIDE_INT offset;
1534 int insn_code, i;
1535 bool any_change = false;
1536 rtx set, new_rtx, x;
1537 rtx_insn *seq;
1538
1539 /* There are some special cases to be handled first. */
1540 set = single_set (insn);
1541 if (set)
1542 {
1543 /* We're allowed to assign to a virtual register. This is interpreted
1544 to mean that the underlying register gets assigned the inverse
1545 transformation. This is used, for example, in the handling of
1546 non-local gotos. */
1547 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1548 if (new_rtx)
1549 {
1550 start_sequence ();
1551
1552 instantiate_virtual_regs_in_rtx (&SET_SRC (set));
1553 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1554 gen_int_mode (-offset, GET_MODE (new_rtx)));
1555 x = force_operand (x, new_rtx);
1556 if (x != new_rtx)
1557 emit_move_insn (new_rtx, x);
1558
1559 seq = get_insns ();
1560 end_sequence ();
1561
1562 emit_insn_before (seq, insn);
1563 delete_insn (insn);
1564 return;
1565 }
1566
1567 /* Handle a straight copy from a virtual register by generating a
1568 new add insn. The difference between this and falling through
1569 to the generic case is avoiding a new pseudo and eliminating a
1570 move insn in the initial rtl stream. */
1571 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1572 if (new_rtx && offset != 0
1573 && REG_P (SET_DEST (set))
1574 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1575 {
1576 start_sequence ();
1577
1578 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
1579 gen_int_mode (offset,
1580 GET_MODE (SET_DEST (set))),
1581 SET_DEST (set), 1, OPTAB_LIB_WIDEN);
1582 if (x != SET_DEST (set))
1583 emit_move_insn (SET_DEST (set), x);
1584
1585 seq = get_insns ();
1586 end_sequence ();
1587
1588 emit_insn_before (seq, insn);
1589 delete_insn (insn);
1590 return;
1591 }
1592
1593 extract_insn (insn);
1594 insn_code = INSN_CODE (insn);
1595
1596 /* Handle a plus involving a virtual register by determining if the
1597 operands remain valid if they're modified in place. */
1598 if (GET_CODE (SET_SRC (set)) == PLUS
1599 && recog_data.n_operands >= 3
1600 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1601 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1602 && CONST_INT_P (recog_data.operand[2])
1603 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1604 {
1605 offset += INTVAL (recog_data.operand[2]);
1606
1607 /* If the sum is zero, then replace with a plain move. */
1608 if (offset == 0
1609 && REG_P (SET_DEST (set))
1610 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1611 {
1612 start_sequence ();
1613 emit_move_insn (SET_DEST (set), new_rtx);
1614 seq = get_insns ();
1615 end_sequence ();
1616
1617 emit_insn_before (seq, insn);
1618 delete_insn (insn);
1619 return;
1620 }
1621
1622 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1623
1624 /* Using validate_change and apply_change_group here leaves
1625 recog_data in an invalid state. Since we know exactly what
1626 we want to check, do those two by hand. */
1627 if (safe_insn_predicate (insn_code, 1, new_rtx)
1628 && safe_insn_predicate (insn_code, 2, x))
1629 {
1630 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1631 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1632 any_change = true;
1633
1634 /* Fall through into the regular operand fixup loop in
1635 order to take care of operands other than 1 and 2. */
1636 }
1637 }
1638 }
1639 else
1640 {
1641 extract_insn (insn);
1642 insn_code = INSN_CODE (insn);
1643 }
1644
1645 /* In the general case, we expect virtual registers to appear only in
1646 operands, and then only as either bare registers or inside memories. */
1647 for (i = 0; i < recog_data.n_operands; ++i)
1648 {
1649 x = recog_data.operand[i];
1650 switch (GET_CODE (x))
1651 {
1652 case MEM:
1653 {
1654 rtx addr = XEXP (x, 0);
1655
1656 if (!instantiate_virtual_regs_in_rtx (&addr))
1657 continue;
1658
1659 start_sequence ();
1660 x = replace_equiv_address (x, addr, true);
1661 /* It may happen that the address with the virtual reg
1662 was valid (e.g. based on the virtual stack reg, which might
1663 be acceptable to the predicates with all offsets), whereas
1664 the address now isn't anymore, for instance when the address
1665 still has an offset, but the base reg isn't virtual-stack-reg
1666 anymore. Below we would do a force_reg on the whole operand,
1667 but this insn might actually only accept memory. Hence,
1668 before doing that last resort, try to reload the address into
1669 a register, so this operand stays a MEM. */
1670 if (!safe_insn_predicate (insn_code, i, x))
1671 {
1672 addr = force_reg (GET_MODE (addr), addr);
1673 x = replace_equiv_address (x, addr, true);
1674 }
1675 seq = get_insns ();
1676 end_sequence ();
1677 if (seq)
1678 emit_insn_before (seq, insn);
1679 }
1680 break;
1681
1682 case REG:
1683 new_rtx = instantiate_new_reg (x, &offset);
1684 if (new_rtx == NULL)
1685 continue;
1686 if (offset == 0)
1687 x = new_rtx;
1688 else
1689 {
1690 start_sequence ();
1691
1692 /* Careful, special mode predicates may have stuff in
1693 insn_data[insn_code].operand[i].mode that isn't useful
1694 to us for computing a new value. */
1695 /* ??? Recognize address_operand and/or "p" constraints
1696 to see if (plus new offset) is a valid address before we put
1697 this through expand_simple_binop. */
1698 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1699 gen_int_mode (offset, GET_MODE (x)),
1700 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1701 seq = get_insns ();
1702 end_sequence ();
1703 emit_insn_before (seq, insn);
1704 }
1705 break;
1706
1707 case SUBREG:
1708 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1709 if (new_rtx == NULL)
1710 continue;
1711 if (offset != 0)
1712 {
1713 start_sequence ();
1714 new_rtx = expand_simple_binop
1715 (GET_MODE (new_rtx), PLUS, new_rtx,
1716 gen_int_mode (offset, GET_MODE (new_rtx)),
1717 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1718 seq = get_insns ();
1719 end_sequence ();
1720 emit_insn_before (seq, insn);
1721 }
1722 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1723 GET_MODE (new_rtx), SUBREG_BYTE (x));
1724 gcc_assert (x);
1725 break;
1726
1727 default:
1728 continue;
1729 }
1730
1731 /* At this point, X contains the new value for the operand.
1732 Validate the new value vs the insn predicate. Note that
1733 asm insns will have insn_code -1 here. */
1734 if (!safe_insn_predicate (insn_code, i, x))
1735 {
1736 start_sequence ();
1737 if (REG_P (x))
1738 {
1739 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1740 x = copy_to_reg (x);
1741 }
1742 else
1743 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1744 seq = get_insns ();
1745 end_sequence ();
1746 if (seq)
1747 emit_insn_before (seq, insn);
1748 }
1749
1750 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1751 any_change = true;
1752 }
1753
1754 if (any_change)
1755 {
1756 /* Propagate operand changes into the duplicates. */
1757 for (i = 0; i < recog_data.n_dups; ++i)
1758 *recog_data.dup_loc[i]
1759 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1760
1761 /* Force re-recognition of the instruction for validation. */
1762 INSN_CODE (insn) = -1;
1763 }
1764
1765 if (asm_noperands (PATTERN (insn)) >= 0)
1766 {
1767 if (!check_asm_operands (PATTERN (insn)))
1768 {
1769 error_for_asm (insn, "impossible constraint in %<asm%>");
1770 /* For asm goto, instead of fixing up all the edges
1771 just clear the template and clear input operands
1772 (asm goto doesn't have any output operands). */
1773 if (JUMP_P (insn))
1774 {
1775 rtx asm_op = extract_asm_operands (PATTERN (insn));
1776 ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
1777 ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
1778 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
1779 }
1780 else
1781 delete_insn (insn);
1782 }
1783 }
1784 else
1785 {
1786 if (recog_memoized (insn) < 0)
1787 fatal_insn_not_found (insn);
1788 }
1789 }
1790
1791 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1792 do any instantiation required. */
1793
1794 void
1795 instantiate_decl_rtl (rtx x)
1796 {
1797 rtx addr;
1798
1799 if (x == 0)
1800 return;
1801
1802 /* If this is a CONCAT, recurse for the pieces. */
1803 if (GET_CODE (x) == CONCAT)
1804 {
1805 instantiate_decl_rtl (XEXP (x, 0));
1806 instantiate_decl_rtl (XEXP (x, 1));
1807 return;
1808 }
1809
1810 /* If this is not a MEM, no need to do anything. Similarly if the
1811 address is a constant or a register that is not a virtual register. */
1812 if (!MEM_P (x))
1813 return;
1814
1815 addr = XEXP (x, 0);
1816 if (CONSTANT_P (addr)
1817 || (REG_P (addr)
1818 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1819 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1820 return;
1821
1822 instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
1823 }
1824
1825 /* Helper for instantiate_decls called via walk_tree: Process all decls
1826 in the given DECL_VALUE_EXPR. */
1827
1828 static tree
1829 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1830 {
1831 tree t = *tp;
1832 if (! EXPR_P (t))
1833 {
1834 *walk_subtrees = 0;
1835 if (DECL_P (t))
1836 {
1837 if (DECL_RTL_SET_P (t))
1838 instantiate_decl_rtl (DECL_RTL (t));
1839 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1840 && DECL_INCOMING_RTL (t))
1841 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1842 if ((TREE_CODE (t) == VAR_DECL
1843 || TREE_CODE (t) == RESULT_DECL)
1844 && DECL_HAS_VALUE_EXPR_P (t))
1845 {
1846 tree v = DECL_VALUE_EXPR (t);
1847 walk_tree (&v, instantiate_expr, NULL, NULL);
1848 }
1849 }
1850 }
1851 return NULL;
1852 }
1853
1854 /* Subroutine of instantiate_decls: Process all decls in the given
1855 BLOCK node and all its subblocks. */
1856
1857 static void
1858 instantiate_decls_1 (tree let)
1859 {
1860 tree t;
1861
1862 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1863 {
1864 if (DECL_RTL_SET_P (t))
1865 instantiate_decl_rtl (DECL_RTL (t));
1866 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1867 {
1868 tree v = DECL_VALUE_EXPR (t);
1869 walk_tree (&v, instantiate_expr, NULL, NULL);
1870 }
1871 }
1872
1873 /* Process all subblocks. */
1874 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1875 instantiate_decls_1 (t);
1876 }
1877
1878 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1879 all virtual registers in their DECL_RTL's. */
1880
1881 static void
1882 instantiate_decls (tree fndecl)
1883 {
1884 tree decl;
1885 unsigned ix;
1886
1887 /* Process all parameters of the function. */
1888 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1889 {
1890 instantiate_decl_rtl (DECL_RTL (decl));
1891 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1892 if (DECL_HAS_VALUE_EXPR_P (decl))
1893 {
1894 tree v = DECL_VALUE_EXPR (decl);
1895 walk_tree (&v, instantiate_expr, NULL, NULL);
1896 }
1897 }
1898
1899 if ((decl = DECL_RESULT (fndecl))
1900 && TREE_CODE (decl) == RESULT_DECL)
1901 {
1902 if (DECL_RTL_SET_P (decl))
1903 instantiate_decl_rtl (DECL_RTL (decl));
1904 if (DECL_HAS_VALUE_EXPR_P (decl))
1905 {
1906 tree v = DECL_VALUE_EXPR (decl);
1907 walk_tree (&v, instantiate_expr, NULL, NULL);
1908 }
1909 }
1910
1911 /* Process the saved static chain if it exists. */
1912 decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
1913 if (decl && DECL_HAS_VALUE_EXPR_P (decl))
1914 instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
1915
1916 /* Now process all variables defined in the function or its subblocks. */
1917 instantiate_decls_1 (DECL_INITIAL (fndecl));
1918
1919 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1920 if (DECL_RTL_SET_P (decl))
1921 instantiate_decl_rtl (DECL_RTL (decl));
1922 vec_free (cfun->local_decls);
1923 }
1924
1925 /* Pass through the insns of the current function and convert virtual
1926 register references to hard register references. */
1927
1928 static unsigned int
1929 instantiate_virtual_regs (void)
1930 {
1931 rtx_insn *insn;
1932
1933 /* Compute the offsets to use for this function. */
1934 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1935 var_offset = STARTING_FRAME_OFFSET;
1936 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1937 out_arg_offset = STACK_POINTER_OFFSET;
1938 #ifdef FRAME_POINTER_CFA_OFFSET
1939 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1940 #else
1941 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1942 #endif
1943
1944 /* Initialize recognition, indicating that volatile is OK. */
1945 init_recog ();
1946
1947 /* Scan through all the insns, instantiating every virtual register still
1948 present. */
1949 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1950 if (INSN_P (insn))
1951 {
1952 /* These patterns in the instruction stream can never be recognized.
1953 Fortunately, they shouldn't contain virtual registers either. */
1954 if (GET_CODE (PATTERN (insn)) == USE
1955 || GET_CODE (PATTERN (insn)) == CLOBBER
1956 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1957 continue;
1958 else if (DEBUG_INSN_P (insn))
1959 instantiate_virtual_regs_in_rtx (&INSN_VAR_LOCATION (insn));
1960 else
1961 instantiate_virtual_regs_in_insn (insn);
1962
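/* instantiate_virtual_regs_in_insn may have deleted the insn
(e.g. an asm whose constraints turned out to be impossible). */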
1963 if (insn->deleted ())
1964 continue;
1965
1966 instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));
1967
1968 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1969 if (CALL_P (insn))
1970 instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
1971 }
1972
1973 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1974 instantiate_decls (current_function_decl);
1975
1976 targetm.instantiate_decls ();
1977
1978 /* Indicate that, from now on, assign_stack_local should use
1979 frame_pointer_rtx. */
1980 virtuals_instantiated = 1;
1981
1982 return 0;
1983 }
1984
1985 namespace {
1986
1987 const pass_data pass_data_instantiate_virtual_regs =
1988 {
1989 RTL_PASS, /* type */
1990 "vregs", /* name */
1991 OPTGROUP_NONE, /* optinfo_flags */
1992 TV_NONE, /* tv_id */
1993 0, /* properties_required */
1994 0, /* properties_provided */
1995 0, /* properties_destroyed */
1996 0, /* todo_flags_start */
1997 0, /* todo_flags_finish */
1998 };
1999
2000 class pass_instantiate_virtual_regs : public rtl_opt_pass
2001 {
2002 public:
2003 pass_instantiate_virtual_regs (gcc::context *ctxt)
2004 : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
2005 {}
2006
2007 /* opt_pass methods: */
2008 virtual unsigned int execute (function *)
2009 {
2010 return instantiate_virtual_regs ();
2011 }
2012
2013 }; // class pass_instantiate_virtual_regs
2014
2015 } // anon namespace
2016
2017 rtl_opt_pass *
2018 make_pass_instantiate_virtual_regs (gcc::context *ctxt)
2019 {
2020 return new pass_instantiate_virtual_regs (ctxt);
2021 }
2022
2023 \f
2024 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
2025 This means a type for which function calls must pass an address to the
2026 function or get an address back from the function.
2027 EXP may be a type node or an expression (whose type is tested). */
2028
2029 int
2030 aggregate_value_p (const_tree exp, const_tree fntype)
2031 {
2032 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
2033 int i, regno, nregs;
2034 rtx reg;
2035
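/* FNTYPE may be a CALL_EXPR, a FUNCTION_DECL, a FUNCTION_TYPE or
METHOD_TYPE, or an IDENTIFIER_NODE; reduce it to the underlying
function type, or to NULL_TREE if that cannot be determined. */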
2036 if (fntype)
2037 switch (TREE_CODE (fntype))
2038 {
2039 case CALL_EXPR:
2040 {
2041 tree fndecl = get_callee_fndecl (fntype);
2042 if (fndecl)
2043 fntype = TREE_TYPE (fndecl);
2044 else if (CALL_EXPR_FN (fntype))
2045 fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
2046 else
2047 /* For internal functions, assume nothing needs to be
2048 returned in memory. */
2049 return 0;
2050 }
2051 break;
2052 case FUNCTION_DECL:
2053 fntype = TREE_TYPE (fntype);
2054 break;
2055 case FUNCTION_TYPE:
2056 case METHOD_TYPE:
2057 break;
2058 case IDENTIFIER_NODE:
2059 fntype = NULL_TREE;
2060 break;
2061 default:
2062 /* We don't expect other tree types here. */
2063 gcc_unreachable ();
2064 }
2065
2066 if (VOID_TYPE_P (type))
2067 return 0;
2068
2069 /* If a record should be passed the same as its first (and only) member,
2070 don't pass it as an aggregate. */
2071 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2072 return aggregate_value_p (first_field (type), fntype);
2073
2074 /* If the front end has decided that this needs to be passed by
2075 reference, do so. */
2076 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2077 && DECL_BY_REFERENCE (exp))
2078 return 1;
2079
2080 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2081 if (fntype && TREE_ADDRESSABLE (fntype))
2082 return 1;
2083
2084 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2085 and thus can't be returned in registers. */
2086 if (TREE_ADDRESSABLE (type))
2087 return 1;
2088
2089 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2090 return 1;
2091
2092 if (targetm.calls.return_in_memory (type, fntype))
2093 return 1;
2094
2095 /* Make sure we have suitable call-clobbered regs to return
2096 the value in; if not, we must return it in memory. */
2097 reg = hard_function_value (type, 0, fntype, 0);
2098
2099 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2100 it is OK. */
2101 if (!REG_P (reg))
2102 return 0;
2103
2104 regno = REGNO (reg);
2105 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2106 for (i = 0; i < nregs; i++)
2107 if (! call_used_regs[regno + i])
2108 return 1;
2109
2110 return 0;
2111 }
2112 \f
2113 /* Return true if we should assign DECL a pseudo register; false if it
2114 should live on the local stack. */
2115
2116 bool
2117 use_register_for_decl (const_tree decl)
2118 {
2119 if (!targetm.calls.allocate_stack_slots_for_args ())
2120 return true;
2121
2122 /* Honor volatile. */
2123 if (TREE_SIDE_EFFECTS (decl))
2124 return false;
2125
2126 /* Honor addressability. */
2127 if (TREE_ADDRESSABLE (decl))
2128 return false;
2129
2130 /* Decl is implicitly addressable by bound stores and loads
2131 if it is an aggregate holding bounds. */
2132 if (chkp_function_instrumented_p (current_function_decl)
2133 && TREE_TYPE (decl)
2134 && !BOUNDED_P (decl)
2135 && chkp_type_has_pointer (TREE_TYPE (decl)))
2136 return false;
2137
2138 /* Only register-like things go in registers. */
2139 if (DECL_MODE (decl) == BLKmode)
2140 return false;
2141
2142 /* If -ffloat-store specified, don't put explicit float variables
2143 into registers. */
2144 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2145 propagates values across these stores, and it probably shouldn't. */
2146 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2147 return false;
2148
2149 /* If we're not interested in tracking debugging information for
2150 this decl, then we can certainly put it in a register. */
2151 if (DECL_IGNORED_P (decl))
2152 return true;
2153
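/* When optimizing, any remaining decl may go in a pseudo. When not
optimizing, only an explicit register keyword (checked below) does. */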
2154 if (optimize)
2155 return true;
2156
2157 if (!DECL_REGISTER (decl))
2158 return false;
2159
2160 switch (TREE_CODE (TREE_TYPE (decl)))
2161 {
2162 case RECORD_TYPE:
2163 case UNION_TYPE:
2164 case QUAL_UNION_TYPE:
2165 /* When not optimizing, disregard register keyword for variables with
2166 types containing methods, otherwise the methods won't be callable
2167 from the debugger. */
2168 if (TYPE_METHODS (TYPE_MAIN_VARIANT (TREE_TYPE (decl))))
2169 return false;
2170 break;
2171 default:
2172 break;
2173 }
2174
2175 return true;
2176 }
2177
2178 /* Return true if TYPE should be passed by invisible reference. */
2179
2180 bool
2181 pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
2182 tree type, bool named_arg)
2183 {
2184 if (type)
2185 {
2186 /* If this type contains non-trivial constructors, then it is
2187 forbidden for the middle-end to create any new copies. */
2188 if (TREE_ADDRESSABLE (type))
2189 return true;
2190
2191 /* GCC post 3.4 passes *all* variable sized types by reference. */
2192 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2193 return true;
2194
2195 /* If a record type should be passed the same as its first (and only)
2196 member, use the type and mode of that member. */
2197 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2198 {
2199 type = TREE_TYPE (first_field (type));
2200 mode = TYPE_MODE (type);
2201 }
2202 }
2203
2204 return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
2205 type, named_arg);
2206 }
2207
2208 /* Return true if TYPE, which is passed by reference, should be callee
2209 copied instead of caller copied. */
2210
2211 bool
2212 reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
2213 tree type, bool named_arg)
2214 {
2215 if (type && TREE_ADDRESSABLE (type))
2216 return false;
2217 return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
2218 named_arg);
2219 }
2220
2221 /* Structures to communicate between the subroutines of assign_parms.
2222 The first holds data persistent across all parameters, the second
2223 is cleared out for each parameter. */
2224
2225 struct assign_parm_data_all
2226 {
2227 /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
2228 should become a job of the target or otherwise be encapsulated. */
2229 CUMULATIVE_ARGS args_so_far_v;
2230 cumulative_args_t args_so_far;
2231 struct args_size stack_args_size;
2232 tree function_result_decl;
2233 tree orig_fnargs;
2234 rtx_insn *first_conversion_insn;
2235 rtx_insn *last_conversion_insn;
2236 HOST_WIDE_INT pretend_args_size;
2237 HOST_WIDE_INT extra_pretend_bytes;
2238 int reg_parm_stack_space;
2239 };
2240
2241 struct assign_parm_data_one
2242 {
2243 tree nominal_type;
2244 tree passed_type;
2245 rtx entry_parm;
2246 rtx stack_parm;
2247 machine_mode nominal_mode;
2248 machine_mode passed_mode;
2249 machine_mode promoted_mode;
2250 struct locate_and_pad_arg_data locate;
2251 int partial;
2252 BOOL_BITFIELD named_arg : 1;
2253 BOOL_BITFIELD passed_pointer : 1;
2254 BOOL_BITFIELD on_stack : 1;
2255 BOOL_BITFIELD loaded_in_reg : 1;
2256 };
2257
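/* Collected by assign_parms for each Pointer Bounds Checker (chkp) bounds
argument: the per-parm data, the bounds PARM_DECL, the pointer PARM_DECL
the bounds describe, that pointer's incoming rtl, and the bound's index.
These entries are queued and handled later by assign_bounds. */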
2258 struct bounds_parm_data
2259 {
2260 assign_parm_data_one parm_data;
2261 tree bounds_parm;
2262 tree ptr_parm;
2263 rtx ptr_entry;
2264 int bound_no;
2265 };
2266
2267 /* A subroutine of assign_parms. Initialize ALL. */
2268
2269 static void
2270 assign_parms_initialize_all (struct assign_parm_data_all *all)
2271 {
2272 tree fntype ATTRIBUTE_UNUSED;
2273
2274 memset (all, 0, sizeof (*all));
2275
2276 fntype = TREE_TYPE (current_function_decl);
2277
2278 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2279 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
2280 #else
2281 INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
2282 current_function_decl, -1);
2283 #endif
2284 all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
2285
2286 #ifdef INCOMING_REG_PARM_STACK_SPACE
2287 all->reg_parm_stack_space
2288 = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
2289 #endif
2290 }
2291
2292 /* If ARGS contains entries with complex types, split each such entry
2293 into two entries of the component type. The vector ARGS is
2294 modified in place. */
2295
2296 static void
2297 split_complex_args (vec<tree> *args)
2298 {
2299 unsigned i;
2300 tree p;
2301
2302 FOR_EACH_VEC_ELT (*args, i, p)
2303 {
2304 tree type = TREE_TYPE (p);
2305 if (TREE_CODE (type) == COMPLEX_TYPE
2306 && targetm.calls.split_complex_arg (type))
2307 {
2308 tree decl;
2309 tree subtype = TREE_TYPE (type);
2310 bool addressable = TREE_ADDRESSABLE (p);
2311
2312 /* Rewrite the PARM_DECL's type with its component. */
2313 p = copy_node (p);
2314 TREE_TYPE (p) = subtype;
2315 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2316 DECL_MODE (p) = VOIDmode;
2317 DECL_SIZE (p) = NULL;
2318 DECL_SIZE_UNIT (p) = NULL;
2319 /* If this arg must go in memory, put it in a pseudo here.
2320 We can't allow it to go in memory as per normal parms,
2321 because the usual place might not have the imag part
2322 adjacent to the real part. */
2323 DECL_ARTIFICIAL (p) = addressable;
2324 DECL_IGNORED_P (p) = addressable;
2325 TREE_ADDRESSABLE (p) = 0;
2326 layout_decl (p, 0);
2327 (*args)[i] = p;
2328
2329 /* Build a second synthetic decl. */
2330 decl = build_decl (EXPR_LOCATION (p),
2331 PARM_DECL, NULL_TREE, subtype);
2332 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2333 DECL_ARTIFICIAL (decl) = addressable;
2334 DECL_IGNORED_P (decl) = addressable;
2335 layout_decl (decl, 0);
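/* Insert the imaginary-part decl right after the real part; bumping I
here also makes the loop skip over the new entry. */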
2336 args->safe_insert (++i, decl);
2337 }
2338 }
2339 }
2340
2341 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2342 the hidden struct return argument, and (abi willing) complex args.
2343 Return the new parameter list. */
2344
2345 static vec<tree>
2346 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2347 {
2348 tree fndecl = current_function_decl;
2349 tree fntype = TREE_TYPE (fndecl);
2350 vec<tree> fnargs = vNULL;
2351 tree arg;
2352
2353 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2354 fnargs.safe_push (arg);
2355
2356 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2357
2358 /* If struct value address is treated as the first argument, make it so. */
2359 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2360 && ! cfun->returns_pcc_struct
2361 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2362 {
2363 tree type = build_pointer_type (TREE_TYPE (fntype));
2364 tree decl;
2365
2366 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2367 PARM_DECL, get_identifier (".result_ptr"), type);
2368 DECL_ARG_TYPE (decl) = type;
2369 DECL_ARTIFICIAL (decl) = 1;
2370 DECL_NAMELESS (decl) = 1;
2371 TREE_CONSTANT (decl) = 1;
2372
2373 DECL_CHAIN (decl) = all->orig_fnargs;
2374 all->orig_fnargs = decl;
2375 fnargs.safe_insert (0, decl);
2376
2377 all->function_result_decl = decl;
2378
2379 /* If the function is instrumented, then the bounds of the
2380 passed structure address are passed as the second argument. */
2381 if (chkp_function_instrumented_p (fndecl))
2382 {
2383 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2384 PARM_DECL, get_identifier (".result_bnd"),
2385 pointer_bounds_type_node);
2386 DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
2387 DECL_ARTIFICIAL (decl) = 1;
2388 DECL_NAMELESS (decl) = 1;
2389 TREE_CONSTANT (decl) = 1;
2390
2391 DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
2392 DECL_CHAIN (all->orig_fnargs) = decl;
2393 fnargs.safe_insert (1, decl);
2394 }
2395 }
2396
2397 /* If the target wants to split complex arguments into scalars, do so. */
2398 if (targetm.calls.split_complex_arg)
2399 split_complex_args (&fnargs);
2400
2401 return fnargs;
2402 }
2403
2404 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2405 data for the parameter. Incorporate ABI specifics such as pass-by-
2406 reference and type promotion. */
2407
2408 static void
2409 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2410 struct assign_parm_data_one *data)
2411 {
2412 tree nominal_type, passed_type;
2413 machine_mode nominal_mode, passed_mode, promoted_mode;
2414 int unsignedp;
2415
2416 memset (data, 0, sizeof (*data));
2417
2418 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2419 if (!cfun->stdarg)
2420 data->named_arg = 1; /* No variadic parms. */
2421 else if (DECL_CHAIN (parm))
2422 data->named_arg = 1; /* Not the last non-variadic parm. */
2423 else if (targetm.calls.strict_argument_naming (all->args_so_far))
2424 data->named_arg = 1; /* Only variadic ones are unnamed. */
2425 else
2426 data->named_arg = 0; /* Treat as variadic. */
2427
2428 nominal_type = TREE_TYPE (parm);
2429 passed_type = DECL_ARG_TYPE (parm);
2430
2431 /* Look out for errors propagating this far. Also, if the parameter's
2432 type is void then its value doesn't matter. */
2433 if (TREE_TYPE (parm) == error_mark_node
2434 /* This can happen after weird syntax errors
2435 or if an enum type is defined among the parms. */
2436 || TREE_CODE (parm) != PARM_DECL
2437 || passed_type == NULL
2438 || VOID_TYPE_P (nominal_type))
2439 {
2440 nominal_type = passed_type = void_type_node;
2441 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2442 goto egress;
2443 }
2444
2445 /* Find mode of arg as it is passed, and mode of arg as it should be
2446 during execution of this function. */
2447 passed_mode = TYPE_MODE (passed_type);
2448 nominal_mode = TYPE_MODE (nominal_type);
2449
2450 /* If the parm is to be passed as a transparent union or record, use the
2451 type of the first field for the tests below. We have already verified
2452 that the modes are the same. */
2453 if ((TREE_CODE (passed_type) == UNION_TYPE
2454 || TREE_CODE (passed_type) == RECORD_TYPE)
2455 && TYPE_TRANSPARENT_AGGR (passed_type))
2456 passed_type = TREE_TYPE (first_field (passed_type));
2457
2458 /* See if this arg was passed by invisible reference. */
2459 if (pass_by_reference (&all->args_so_far_v, passed_mode,
2460 passed_type, data->named_arg))
2461 {
2462 passed_type = nominal_type = build_pointer_type (passed_type);
2463 data->passed_pointer = true;
2464 passed_mode = nominal_mode = TYPE_MODE (nominal_type);
2465 }
2466
2467 /* Find mode as it is passed by the ABI. */
2468 unsignedp = TYPE_UNSIGNED (passed_type);
2469 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2470 TREE_TYPE (current_function_decl), 0);
2471
2472 egress:
2473 data->nominal_type = nominal_type;
2474 data->passed_type = passed_type;
2475 data->nominal_mode = nominal_mode;
2476 data->passed_mode = passed_mode;
2477 data->promoted_mode = promoted_mode;
2478 }
2479
2480 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2481
2482 static void
2483 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2484 struct assign_parm_data_one *data, bool no_rtl)
2485 {
2486 int varargs_pretend_bytes = 0;
2487
2488 targetm.calls.setup_incoming_varargs (all->args_so_far,
2489 data->promoted_mode,
2490 data->passed_type,
2491 &varargs_pretend_bytes, no_rtl);
2492
2493 /* If the back-end has requested extra stack space, record how much is
2494 needed. Do not change pretend_args_size otherwise since it may be
2495 nonzero from an earlier partial argument. */
2496 if (varargs_pretend_bytes > 0)
2497 all->pretend_args_size = varargs_pretend_bytes;
2498 }
2499
2500 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2501 the incoming location of the current parameter. */
2502
2503 static void
2504 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2505 struct assign_parm_data_one *data)
2506 {
2507 HOST_WIDE_INT pretend_bytes = 0;
2508 rtx entry_parm;
2509 bool in_regs;
2510
2511 if (data->promoted_mode == VOIDmode)
2512 {
2513 data->entry_parm = data->stack_parm = const0_rtx;
2514 return;
2515 }
2516
2517 entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
2518 data->promoted_mode,
2519 data->passed_type,
2520 data->named_arg);
2521
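/* The parm does not arrive in a register; it will live in its stack
slot, so revert to the unpromoted mode in which it was passed. */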
2522 if (entry_parm == 0)
2523 data->promoted_mode = data->passed_mode;
2524
2525 /* Determine parm's home in the stack, in case it arrives in the stack
2526 or we should pretend it did. Compute the stack position and rtx where
2527 the argument arrives and its size.
2528
2529 There is one complexity here: If this was a parameter that would
2530 have been passed in registers, but wasn't only because it is
2531 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2532 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2533 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2534 as it was the previous time. */
2535 in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
2536 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2537 in_regs = true;
2538 #endif
2539 if (!in_regs && !data->named_arg)
2540 {
2541 if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
2542 {
2543 rtx tem;
2544 tem = targetm.calls.function_incoming_arg (all->args_so_far,
2545 data->promoted_mode,
2546 data->passed_type, true);
2547 in_regs = tem != NULL;
2548 }
2549 }
2550
2551 /* If this parameter was passed both in registers and in the stack, use
2552 the copy on the stack. */
2553 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2554 data->passed_type))
2555 entry_parm = 0;
2556
2557 if (entry_parm)
2558 {
2559 int partial;
2560
2561 partial = targetm.calls.arg_partial_bytes (all->args_so_far,
2562 data->promoted_mode,
2563 data->passed_type,
2564 data->named_arg);
2565 data->partial = partial;
2566
2567 /* The caller might already have allocated stack space for the
2568 register parameters. */
2569 if (partial != 0 && all->reg_parm_stack_space == 0)
2570 {
2571 /* Part of this argument is passed in registers and part
2572 is passed on the stack. Ask the prologue code to extend
2573 the stack part so that we can recreate the full value.
2574
2575 PRETEND_BYTES is the size of the registers we need to store.
2576 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2577 stack space that the prologue should allocate.
2578
2579 Internally, gcc assumes that the argument pointer is aligned
2580 to STACK_BOUNDARY bits. This is used both for alignment
2581 optimizations (see init_emit) and to locate arguments that are
2582 aligned to more than PARM_BOUNDARY bits. We must preserve this
2583 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2584 a stack boundary. */
2585
2586 /* We assume at most one partial arg, and it must be the first
2587 argument on the stack. */
2588 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2589
2590 pretend_bytes = partial;
2591 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2592
2593 /* We want to align relative to the actual stack pointer, so
2594 don't include this in the stack size until later. */
2595 all->extra_pretend_bytes = all->pretend_args_size;
2596 }
2597 }
2598
2599 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2600 all->reg_parm_stack_space,
2601 entry_parm ? data->partial : 0, current_function_decl,
2602 &all->stack_args_size, &data->locate);
2603
2604 /* Update parm_stack_boundary if this parameter is passed in the
2605 stack. */
2606 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2607 crtl->parm_stack_boundary = data->locate.boundary;
2608
2609 /* Adjust offsets to include the pretend args. */
2610 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2611 data->locate.slot_offset.constant += pretend_bytes;
2612 data->locate.offset.constant += pretend_bytes;
2613
2614 data->entry_parm = entry_parm;
2615 }
2616
2617 /* A subroutine of assign_parms. If there is actually space on the stack
2618 for this parm, count it in stack_args_size and return true. */
2619
2620 static bool
2621 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2622 struct assign_parm_data_one *data)
2623 {
2624 /* Bounds are never passed on the stack to keep compatibility
2625 with non-instrumented code. */
2626 if (POINTER_BOUNDS_TYPE_P (data->passed_type))
2627 return false;
2628 /* Trivially true if we've no incoming register. */
2629 else if (data->entry_parm == NULL)
2630 ;
2631 /* Also true if we're partially in registers and partially not,
2632 since we've arranged to drop the entire argument on the stack. */
2633 else if (data->partial != 0)
2634 ;
2635 /* Also true if the target says that it's passed in both registers
2636 and on the stack. */
2637 else if (GET_CODE (data->entry_parm) == PARALLEL
2638 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2639 ;
2640 /* Also true if the target says that there's stack allocated for
2641 all register parameters. */
2642 else if (all->reg_parm_stack_space > 0)
2643 ;
2644 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2645 else
2646 return false;
2647
2648 all->stack_args_size.constant += data->locate.size.constant;
2649 if (data->locate.size.var)
2650 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2651
2652 return true;
2653 }
2654
2655 /* A subroutine of assign_parms. Given that this parameter is allocated
2656 stack space by the ABI, find it. */
2657
2658 static void
2659 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2660 {
2661 rtx offset_rtx, stack_parm;
2662 unsigned int align, boundary;
2663
2664 /* If we're passing this arg using a reg, make its stack home the
2665 aligned stack slot. */
2666 if (data->entry_parm)
2667 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2668 else
2669 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2670
2671 stack_parm = crtl->args.internal_arg_pointer;
2672 if (offset_rtx != const0_rtx)
2673 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2674 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2675
2676 if (!data->passed_pointer)
2677 {
2678 set_mem_attributes (stack_parm, parm, 1);
2679 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2680 while the promoted mode's size is needed. */
2681 if (data->promoted_mode != BLKmode
2682 && data->promoted_mode != DECL_MODE (parm))
2683 {
2684 set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
2685 if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
2686 {
2687 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2688 data->promoted_mode);
2689 if (offset)
2690 set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
2691 }
2692 }
2693 }
2694
2695 boundary = data->locate.boundary;
2696 align = BITS_PER_UNIT;
2697
2698 /* If we're padding upward, we know that the alignment of the slot
2699 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2700 intentionally forcing upward padding. Otherwise we have to come
2701 up with a guess at the alignment based on OFFSET_RTX. */
2702 if (data->locate.where_pad != downward || data->entry_parm)
2703 align = boundary;
2704 else if (CONST_INT_P (offset_rtx))
2705 {
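/* Guess the alignment from the offset: keep only the least
significant set bit of (offset-in-bits | boundary). */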
2706 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2707 align = align & -align;
2708 }
2709 set_mem_align (stack_parm, align);
2710
2711 if (data->entry_parm)
2712 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2713
2714 data->stack_parm = stack_parm;
2715 }
2716
2717 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2718 always valid and contiguous. */
2719
2720 static void
2721 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2722 {
2723 rtx entry_parm = data->entry_parm;
2724 rtx stack_parm = data->stack_parm;
2725
2726 /* If this parm was passed part in regs and part in memory, pretend it
2727 arrived entirely in memory by pushing the register-part onto the stack.
2728 In the special case of a DImode or DFmode that is split, we could put
2729 it together in a pseudoreg directly, but for now that's not worth
2730 bothering with. */
2731 if (data->partial != 0)
2732 {
2733 /* Handle calls that pass values in multiple non-contiguous
2734 locations. The Irix 6 ABI has examples of this. */
2735 if (GET_CODE (entry_parm) == PARALLEL)
2736 emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
2737 data->passed_type,
2738 int_size_in_bytes (data->passed_type));
2739 else
2740 {
2741 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2742 move_block_from_reg (REGNO (entry_parm),
2743 validize_mem (copy_rtx (stack_parm)),
2744 data->partial / UNITS_PER_WORD);
2745 }
2746
2747 entry_parm = stack_parm;
2748 }
2749
2750 /* If we didn't decide this parm came in a register, by default it came
2751 on the stack. */
2752 else if (entry_parm == NULL)
2753 entry_parm = stack_parm;
2754
2755 /* When an argument is passed in multiple locations, we can't make use
2756 of this information, but we can save some copying if the whole argument
2757 is passed in a single register. */
2758 else if (GET_CODE (entry_parm) == PARALLEL
2759 && data->nominal_mode != BLKmode
2760 && data->passed_mode != BLKmode)
2761 {
2762 size_t i, len = XVECLEN (entry_parm, 0);
2763
2764 for (i = 0; i < len; i++)
2765 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2766 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2767 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2768 == data->passed_mode)
2769 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2770 {
2771 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2772 break;
2773 }
2774 }
2775
2776 data->entry_parm = entry_parm;
2777 }
2778
2779 /* A subroutine of assign_parms. Reconstitute any values which were
2780 passed in multiple registers and would fit in a single register. */
2781
2782 static void
2783 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2784 {
2785 rtx entry_parm = data->entry_parm;
2786
2787 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2788 This can be done with register operations rather than on the
2789 stack, even if we will store the reconstituted parameter on the
2790 stack later. */
2791 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2792 {
2793 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2794 emit_group_store (parmreg, entry_parm, data->passed_type,
2795 GET_MODE_SIZE (GET_MODE (entry_parm)));
2796 entry_parm = parmreg;
2797 }
2798
2799 data->entry_parm = entry_parm;
2800 }
2801
2802 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2803 always valid and properly aligned. */
2804
2805 static void
2806 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2807 {
2808 rtx stack_parm = data->stack_parm;
2809
2810 /* If we can't trust the parm stack slot to be aligned enough for its
2811 ultimate type, don't use that slot after entry. We'll make another
2812 stack slot, if we need one. */
2813 if (stack_parm
2814 && ((STRICT_ALIGNMENT
2815 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2816 || (data->nominal_type
2817 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2818 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2819 stack_parm = NULL;
2820
2821 /* If parm was passed in memory, and we need to convert it on entry,
2822 don't store it back in that same slot. */
2823 else if (data->entry_parm == stack_parm
2824 && data->nominal_mode != BLKmode
2825 && data->nominal_mode != data->passed_mode)
2826 stack_parm = NULL;
2827
2828 /* If stack protection is in effect for this function, don't leave any
2829 pointers in their passed stack slots. */
2830 else if (crtl->stack_protect_guard
2831 && (flag_stack_protect == 2
2832 || data->passed_pointer
2833 || POINTER_TYPE_P (data->nominal_type)))
2834 stack_parm = NULL;
2835
2836 data->stack_parm = stack_parm;
2837 }
2838
2839 /* A subroutine of assign_parms. Return true if the current parameter
2840 should be stored as a BLKmode in the current frame. */
2841
2842 static bool
2843 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2844 {
2845 if (data->nominal_mode == BLKmode)
2846 return true;
2847 if (GET_MODE (data->entry_parm) == BLKmode)
2848 return true;
2849
2850 #ifdef BLOCK_REG_PADDING
2851 /* Only assign_parm_setup_block knows how to deal with register arguments
2852 that are padded at the least significant end. */
2853 if (REG_P (data->entry_parm)
2854 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2855 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2856 == (BYTES_BIG_ENDIAN ? upward : downward)))
2857 return true;
2858 #endif
2859
2860 return false;
2861 }
2862
2863 /* A subroutine of assign_parms. Arrange for the parameter to be
2864 present and valid in DATA->STACK_PARM. */
2865
2866 static void
2867 assign_parm_setup_block (struct assign_parm_data_all *all,
2868 tree parm, struct assign_parm_data_one *data)
2869 {
2870 rtx entry_parm = data->entry_parm;
2871 rtx stack_parm = data->stack_parm;
2872 HOST_WIDE_INT size;
2873 HOST_WIDE_INT size_stored;
2874
2875 if (GET_CODE (entry_parm) == PARALLEL)
2876 entry_parm = emit_group_move_into_temps (entry_parm);
2877
2878 size = int_size_in_bytes (data->passed_type);
2879 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2880 if (stack_parm == 0)
2881 {
2882 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2883 stack_parm = assign_stack_local (BLKmode, size_stored,
2884 DECL_ALIGN (parm));
2885 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2886 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2887 set_mem_attributes (stack_parm, parm, 1);
2888 }
2889
2890 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2891 calls that pass values in multiple non-contiguous locations. */
2892 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2893 {
2894 rtx mem;
2895
2896 /* Note that we will be storing an integral number of words.
2897 So we have to be careful to ensure that we allocate an
2898 integral number of words. We do this above when we call
2899 assign_stack_local if space was not allocated in the argument
2900 list. If it was, this will not work if PARM_BOUNDARY is not
2901 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2902 if it becomes a problem. The exception is when a BLKmode argument
2903 arrives in a form not conforming to word_mode. */
2904
2905 if (data->stack_parm == 0)
2906 ;
2907 else if (GET_CODE (entry_parm) == PARALLEL)
2908 ;
2909 else
2910 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2911
2912 mem = validize_mem (copy_rtx (stack_parm));
2913
2914 /* Handle values in multiple non-contiguous locations. */
2915 if (GET_CODE (entry_parm) == PARALLEL)
2916 {
2917 push_to_sequence2 (all->first_conversion_insn,
2918 all->last_conversion_insn);
2919 emit_group_store (mem, entry_parm, data->passed_type, size);
2920 all->first_conversion_insn = get_insns ();
2921 all->last_conversion_insn = get_last_insn ();
2922 end_sequence ();
2923 }
2924
2925 else if (size == 0)
2926 ;
2927
2928 /* If SIZE is that of a mode no bigger than a word, just use
2929 that mode's store operation. */
2930 else if (size <= UNITS_PER_WORD)
2931 {
2932 machine_mode mode
2933 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2934
2935 if (mode != BLKmode
2936 #ifdef BLOCK_REG_PADDING
2937 && (size == UNITS_PER_WORD
2938 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2939 != (BYTES_BIG_ENDIAN ? upward : downward)))
2940 #endif
2941 )
2942 {
2943 rtx reg;
2944
2945 /* We are really truncating a word_mode value containing
2946 SIZE bytes into a value of mode MODE. If such an
2947 operation requires no actual instructions, we can refer
2948 to the value directly in mode MODE, otherwise we must
2949 start with the register in word_mode and explicitly
2950 convert it. */
2951 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2952 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2953 else
2954 {
2955 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2956 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2957 }
2958 emit_move_insn (change_address (mem, mode, 0), reg);
2959 }
2960
2961 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2962 machine must be aligned to the left before storing
2963 to memory. Note that the previous test doesn't
2964 handle all cases (e.g. SIZE == 3). */
2965 else if (size != UNITS_PER_WORD
2966 #ifdef BLOCK_REG_PADDING
2967 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2968 == downward)
2969 #else
2970 && BYTES_BIG_ENDIAN
2971 #endif
2972 )
2973 {
2974 rtx tem, x;
2975 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2976 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2977
2978 x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
2979 tem = change_address (mem, word_mode, 0);
2980 emit_move_insn (tem, x);
2981 }
2982 else
2983 move_block_from_reg (REGNO (entry_parm), mem,
2984 size_stored / UNITS_PER_WORD);
2985 }
2986 else
2987 move_block_from_reg (REGNO (entry_parm), mem,
2988 size_stored / UNITS_PER_WORD);
2989 }
2990 else if (data->stack_parm == 0)
2991 {
2992 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2993 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2994 BLOCK_OP_NORMAL);
2995 all->first_conversion_insn = get_insns ();
2996 all->last_conversion_insn = get_last_insn ();
2997 end_sequence ();
2998 }
2999
3000 data->stack_parm = stack_parm;
3001 SET_DECL_RTL (parm, stack_parm);
3002 }
3003
3004 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
3005 parameter. Get it there. Perform all ABI specified conversions. */
3006
3007 static void
3008 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
3009 struct assign_parm_data_one *data)
3010 {
3011 rtx parmreg, validated_mem;
3012 rtx equiv_stack_parm;
3013 machine_mode promoted_nominal_mode;
3014 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
3015 bool did_conversion = false;
3016 bool need_conversion, moved;
3017
3018 /* Store the parm in a pseudoregister during the function, but we may
3019 need to do it in a wider mode. Using 2 here makes the result
3020 consistent with promote_decl_mode and thus expand_expr_real_1. */
3021 promoted_nominal_mode
3022 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
3023 TREE_TYPE (current_function_decl), 2);
3024
3025 parmreg = gen_reg_rtx (promoted_nominal_mode);
3026
3027 if (!DECL_ARTIFICIAL (parm))
3028 mark_user_reg (parmreg);
3029
3030 /* If this was an item that we received a pointer to,
3031 set DECL_RTL appropriately. */
3032 if (data->passed_pointer)
3033 {
3034 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
3035 set_mem_attributes (x, parm, 1);
3036 SET_DECL_RTL (parm, x);
3037 }
3038 else
3039 SET_DECL_RTL (parm, parmreg);
3040
3041 assign_parm_remove_parallels (data);
3042
3043 /* Copy the value into the register, thus bridging between
3044 assign_parm_find_data_types and expand_expr_real_1. */
3045
3046 equiv_stack_parm = data->stack_parm;
3047 validated_mem = validize_mem (copy_rtx (data->entry_parm));
3048
3049 need_conversion = (data->nominal_mode != data->passed_mode
3050 || promoted_nominal_mode != data->promoted_mode);
3051 moved = false;
3052
3053 if (need_conversion
3054 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
3055 && data->nominal_mode == data->passed_mode
3056 && data->nominal_mode == GET_MODE (data->entry_parm))
3057 {
3058 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3059 mode, by the caller. We now have to convert it to
3060 NOMINAL_MODE, if different. However, PARMREG may be in
3061 a different mode than NOMINAL_MODE if it is being stored
3062 promoted.
3063
3064 If ENTRY_PARM is a hard register, it might be in a register
3065 not valid for operating in its mode (e.g., an odd-numbered
3066 register for a DFmode). In that case, moves are the only
3067 thing valid, so we can't do a convert from there. This
3068 occurs when the calling sequence allows such misaligned
3069 usages.
3070
3071 In addition, the conversion may involve a call, which could
3072 clobber parameters which haven't been copied to pseudo
3073 registers yet.
3074
3075 First, we try to emit an insn which performs the necessary
3076 conversion. We verify that this insn does not clobber any
3077 hard registers. */
3078
3079 enum insn_code icode;
3080 rtx op0, op1;
3081
3082 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3083 unsignedp);
3084
3085 op0 = parmreg;
3086 op1 = validated_mem;
3087 if (icode != CODE_FOR_nothing
3088 && insn_operand_matches (icode, 0, op0)
3089 && insn_operand_matches (icode, 1, op1))
3090 {
3091 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3092 rtx_insn *insn, *insns;
3093 rtx t = op1;
3094 HARD_REG_SET hardregs;
3095
3096 start_sequence ();
3097 /* If op1 is a hard register that is likely spilled, first
3098 force it into a pseudo; otherwise the combiner might extend
3099 its lifetime too much. */
3100 if (GET_CODE (t) == SUBREG)
3101 t = SUBREG_REG (t);
3102 if (REG_P (t)
3103 && HARD_REGISTER_P (t)
3104 && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
3105 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
3106 {
3107 t = gen_reg_rtx (GET_MODE (op1));
3108 emit_move_insn (t, op1);
3109 }
3110 else
3111 t = op1;
3112 rtx pat = gen_extend_insn (op0, t, promoted_nominal_mode,
3113 data->passed_mode, unsignedp);
3114 emit_insn (pat);
3115 insns = get_insns ();
3116
3117 moved = true;
3118 CLEAR_HARD_REG_SET (hardregs);
3119 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3120 {
3121 if (INSN_P (insn))
3122 note_stores (PATTERN (insn), record_hard_reg_sets,
3123 &hardregs);
3124 if (!hard_reg_set_empty_p (hardregs))
3125 moved = false;
3126 }
3127
3128 end_sequence ();
3129
3130 if (moved)
3131 {
3132 emit_insn (insns);
3133 if (equiv_stack_parm != NULL_RTX)
3134 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3135 equiv_stack_parm);
3136 }
3137 }
3138 }
3139
3140 if (moved)
3141 /* Nothing to do. */
3142 ;
3143 else if (need_conversion)
3144 {
3145 /* We did not have an insn to convert directly, or the sequence
3146 generated appeared unsafe. We must first copy the parm to a
3147 pseudo reg, and save the conversion until after all
3148 parameters have been moved. */
3149
3150 int save_tree_used;
3151 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3152
3153 emit_move_insn (tempreg, validated_mem);
3154
3155 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3156 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3157
3158 if (GET_CODE (tempreg) == SUBREG
3159 && GET_MODE (tempreg) == data->nominal_mode
3160 && REG_P (SUBREG_REG (tempreg))
3161 && data->nominal_mode == data->passed_mode
3162 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3163 && GET_MODE_SIZE (GET_MODE (tempreg))
3164 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3165 {
3166 /* The argument is already sign/zero extended, so note it
3167 into the subreg. */
3168 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3169 SUBREG_PROMOTED_SET (tempreg, unsignedp);
3170 }
3171
3172 /* TREE_USED gets set erroneously during expand_assignment. */
3173 save_tree_used = TREE_USED (parm);
3174 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3175 TREE_USED (parm) = save_tree_used;
3176 all->first_conversion_insn = get_insns ();
3177 all->last_conversion_insn = get_last_insn ();
3178 end_sequence ();
3179
3180 did_conversion = true;
3181 }
3182 else
3183 emit_move_insn (parmreg, validated_mem);
3184
3185 /* If we were passed a pointer but the actual value can safely live
3186 in a register, retrieve it and use it directly. */
3187 if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
3188 {
3189 /* We can't use nominal_mode, because it will have been set to
3190 Pmode above. We must use the actual mode of the parm. */
3191 if (use_register_for_decl (parm))
3192 {
3193 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3194 mark_user_reg (parmreg);
3195 }
3196 else
3197 {
3198 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3199 TYPE_MODE (TREE_TYPE (parm)),
3200 TYPE_ALIGN (TREE_TYPE (parm)));
3201 parmreg
3202 = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
3203 GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
3204 align);
3205 set_mem_attributes (parmreg, parm, 1);
3206 }
3207
3208 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3209 {
3210 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3211 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3212
3213 push_to_sequence2 (all->first_conversion_insn,
3214 all->last_conversion_insn);
3215 emit_move_insn (tempreg, DECL_RTL (parm));
3216 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3217 emit_move_insn (parmreg, tempreg);
3218 all->first_conversion_insn = get_insns ();
3219 all->last_conversion_insn = get_last_insn ();
3220 end_sequence ();
3221
3222 did_conversion = true;
3223 }
3224 else
3225 emit_move_insn (parmreg, DECL_RTL (parm));
3226
3227 SET_DECL_RTL (parm, parmreg);
3228
3229 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3230 now the parm. */
3231 data->stack_parm = NULL;
3232 }
3233
3234 /* Mark the register as eliminable if we did no conversion and it was
3235 copied from memory at a fixed offset, and the arg pointer was not
3236 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3237 offset formed an invalid address, such memory-equivalences as we
3238 make here would screw up life analysis for it. */
3239 if (data->nominal_mode == data->passed_mode
3240 && !did_conversion
3241 && data->stack_parm != 0
3242 && MEM_P (data->stack_parm)
3243 && data->locate.offset.var == 0
3244 && reg_mentioned_p (virtual_incoming_args_rtx,
3245 XEXP (data->stack_parm, 0)))
3246 {
3247 rtx_insn *linsn = get_last_insn ();
3248 rtx_insn *sinsn;
3249 rtx set;
3250
3251 /* Mark complex types separately. */
3252 if (GET_CODE (parmreg) == CONCAT)
3253 {
3254 machine_mode submode
3255 = GET_MODE_INNER (GET_MODE (parmreg));
3256 int regnor = REGNO (XEXP (parmreg, 0));
3257 int regnoi = REGNO (XEXP (parmreg, 1));
3258 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3259 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3260 GET_MODE_SIZE (submode));
3261
3262 /* Scan backwards for the set of the real and
3263 imaginary parts. */
3264 for (sinsn = linsn; sinsn != 0;
3265 sinsn = prev_nonnote_insn (sinsn))
3266 {
3267 set = single_set (sinsn);
3268 if (set == 0)
3269 continue;
3270
3271 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3272 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3273 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3274 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3275 }
3276 }
3277 else
3278 set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
3279 }
3280
3281 /* For pointer data type, suggest pointer register. */
3282 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3283 mark_reg_pointer (parmreg,
3284 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3285 }
3286
3287 /* A subroutine of assign_parms. Allocate stack space to hold the current
3288 parameter. Get it there. Perform all ABI specified conversions. */
3289
3290 static void
3291 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3292 struct assign_parm_data_one *data)
3293 {
3294 /* Value must be stored in the stack slot STACK_PARM during function
3295 execution. */
3296 bool to_conversion = false;
3297
3298 assign_parm_remove_parallels (data);
3299
3300 if (data->promoted_mode != data->nominal_mode)
3301 {
3302 /* Conversion is required. */
3303 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3304
3305 emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));
3306
3307 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3308 to_conversion = true;
3309
3310 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3311 TYPE_UNSIGNED (TREE_TYPE (parm)));
3312
3313 if (data->stack_parm)
3314 {
3315 int offset = subreg_lowpart_offset (data->nominal_mode,
3316 GET_MODE (data->stack_parm));
3317 /* ??? This may need a big-endian conversion on sparc64. */
3318 data->stack_parm
3319 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3320 if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
3321 set_mem_offset (data->stack_parm,
3322 MEM_OFFSET (data->stack_parm) + offset);
3323 }
3324 }
3325
3326 if (data->entry_parm != data->stack_parm)
3327 {
3328 rtx src, dest;
3329
3330 if (data->stack_parm == 0)
3331 {
3332 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3333 GET_MODE (data->entry_parm),
3334 TYPE_ALIGN (data->passed_type));
3335 data->stack_parm
3336 = assign_stack_local (GET_MODE (data->entry_parm),
3337 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3338 align);
3339 set_mem_attributes (data->stack_parm, parm, 1);
3340 }
3341
3342 dest = validize_mem (copy_rtx (data->stack_parm));
3343 src = validize_mem (copy_rtx (data->entry_parm));
3344
3345 if (MEM_P (src))
3346 {
3347 /* Use a block move to handle potentially misaligned entry_parm. */
3348 if (!to_conversion)
3349 push_to_sequence2 (all->first_conversion_insn,
3350 all->last_conversion_insn);
3351 to_conversion = true;
3352
3353 emit_block_move (dest, src,
3354 GEN_INT (int_size_in_bytes (data->passed_type)),
3355 BLOCK_OP_NORMAL);
3356 }
3357 else
3358 emit_move_insn (dest, src);
3359 }
3360
3361 if (to_conversion)
3362 {
3363 all->first_conversion_insn = get_insns ();
3364 all->last_conversion_insn = get_last_insn ();
3365 end_sequence ();
3366 }
3367
3368 SET_DECL_RTL (parm, data->stack_parm);
3369 }
3370
3371 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3372 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3373
3374 static void
3375 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3376 vec<tree> fnargs)
3377 {
3378 tree parm;
3379 tree orig_fnargs = all->orig_fnargs;
3380 unsigned i = 0;
3381
3382 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3383 {
3384 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3385 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3386 {
3387 rtx tmp, real, imag;
3388 machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3389
3390 real = DECL_RTL (fnargs[i]);
3391 imag = DECL_RTL (fnargs[i + 1]);
3392 if (inner != GET_MODE (real))
3393 {
3394 real = gen_lowpart_SUBREG (inner, real);
3395 imag = gen_lowpart_SUBREG (inner, imag);
3396 }
3397
3398 if (TREE_ADDRESSABLE (parm))
3399 {
3400 rtx rmem, imem;
3401 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3402 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3403 DECL_MODE (parm),
3404 TYPE_ALIGN (TREE_TYPE (parm)));
3405
3406 /* split_complex_arg put the real and imag parts in
3407 pseudos. Move them to memory. */
3408 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3409 set_mem_attributes (tmp, parm, 1);
3410 rmem = adjust_address_nv (tmp, inner, 0);
3411 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3412 push_to_sequence2 (all->first_conversion_insn,
3413 all->last_conversion_insn);
3414 emit_move_insn (rmem, real);
3415 emit_move_insn (imem, imag);
3416 all->first_conversion_insn = get_insns ();
3417 all->last_conversion_insn = get_last_insn ();
3418 end_sequence ();
3419 }
3420 else
3421 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3422 SET_DECL_RTL (parm, tmp);
3423
3424 real = DECL_INCOMING_RTL (fnargs[i]);
3425 imag = DECL_INCOMING_RTL (fnargs[i + 1]);
3426 if (inner != GET_MODE (real))
3427 {
3428 real = gen_lowpart_SUBREG (inner, real);
3429 imag = gen_lowpart_SUBREG (inner, imag);
3430 }
3431 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3432 set_decl_incoming_rtl (parm, tmp, false);
3433 i++;
3434 }
3435 }
3436 }
3437
3438 /* Load bounds of PARM from bounds table. */
3439 static void
3440 assign_parm_load_bounds (struct assign_parm_data_one *data,
3441 tree parm,
3442 rtx entry,
3443 unsigned bound_no)
3444 {
3445 bitmap_iterator bi;
3446 unsigned i, offs = 0;
3447 int bnd_no = -1;
3448 rtx slot = NULL, ptr = NULL;
3449
3450 if (parm)
3451 {
3452 bitmap slots;
3453 bitmap_obstack_initialize (NULL);
3454 slots = BITMAP_ALLOC (NULL);
3455 chkp_find_bound_slots (TREE_TYPE (parm), slots);
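/* Walk the bound slots of PARM's type and translate BOUND_NO into
the bit index of the corresponding slot. */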
3456 EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
3457 {
3458 if (bound_no)
3459 bound_no--;
3460 else
3461 {
3462 bnd_no = i;
3463 break;
3464 }
3465 }
3466 BITMAP_FREE (slots);
3467 bitmap_obstack_release (NULL);
3468 }
3469
3470 /* We may have bounds not associated with any pointer. */
3471 if (bnd_no != -1)
3472 offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
3473
3474 /* Find associated pointer. */
3475 if (bnd_no == -1)
3476 {
3477 /* If the bounds are not associated with any pointer,
3478 then they are passed in a register or a special slot. */
3479 gcc_assert (data->entry_parm);
3480 ptr = const0_rtx;
3481 }
3482 else if (MEM_P (entry))
3483 slot = adjust_address (entry, Pmode, offs);
3484 else if (REG_P (entry))
3485 ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
3486 else if (GET_CODE (entry) == PARALLEL)
3487 ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
3488 else
3489 gcc_unreachable ();
3490 data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
3491 data->entry_parm);
3492 }
3493
3494 /* Assign RTL expressions to the function's bounds parameters BNDARGS. */
3495
3496 static void
3497 assign_bounds (vec<bounds_parm_data> &bndargs,
3498 struct assign_parm_data_all &all)
3499 {
3500 unsigned i, pass, handled = 0;
3501 bounds_parm_data *pbdata;
3502
3503 if (!bndargs.exists ())
3504 return;
3505
3506 /* We make several passes to store input bounds. First we handle
3507 bounds passed in registers. After that we load bounds passed in
3508 special slots. Finally we load bounds from the Bounds Table. */
3509 for (pass = 0; pass < 3; pass++)
3510 FOR_EACH_VEC_ELT (bndargs, i, pbdata)
3511 {
3512 /* Pass 0 => regs only. */
3513 if (pass == 0
3514 && (!pbdata->parm_data.entry_parm
3515 || GET_CODE (pbdata->parm_data.entry_parm) != REG))
3516 continue;
3517 /* Pass 1 => slots only. */
3518 else if (pass == 1
3519 && (!pbdata->parm_data.entry_parm
3520 || GET_CODE (pbdata->parm_data.entry_parm) == REG))
3521 continue;
3522 /* Pass 2 => BT only. */
3523 else if (pass == 2
3524 && pbdata->parm_data.entry_parm)
3525 continue;
3526
3527 if (!pbdata->parm_data.entry_parm
3528 || GET_CODE (pbdata->parm_data.entry_parm) != REG)
3529 assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
3530 pbdata->ptr_entry, pbdata->bound_no);
3531
3532 set_decl_incoming_rtl (pbdata->bounds_parm,
3533 pbdata->parm_data.entry_parm, false);
3534
3535 if (assign_parm_setup_block_p (&pbdata->parm_data))
3536 assign_parm_setup_block (&all, pbdata->bounds_parm,
3537 &pbdata->parm_data);
3538 else if (pbdata->parm_data.passed_pointer
3539 || use_register_for_decl (pbdata->bounds_parm))
3540 assign_parm_setup_reg (&all, pbdata->bounds_parm,
3541 &pbdata->parm_data);
3542 else
3543 assign_parm_setup_stack (&all, pbdata->bounds_parm,
3544 &pbdata->parm_data);
3545
3546 /* Count handled bounds to make sure we miss nothing. */
3547 handled++;
3548 }
3549
3550 gcc_assert (handled == bndargs.length ());
3551
3552 bndargs.release ();
3553 }
3554
3555 /* Assign RTL expressions to the function's parameters. This may involve
3556 copying them into registers and using those registers as the DECL_RTL. */
3557
3558 static void
3559 assign_parms (tree fndecl)
3560 {
3561 struct assign_parm_data_all all;
3562 tree parm;
3563 vec<tree> fnargs;
3564 unsigned i, bound_no = 0;
3565 tree last_arg = NULL;
3566 rtx last_arg_entry = NULL;
3567 vec<bounds_parm_data> bndargs = vNULL;
3568 bounds_parm_data bdata;
3569
3570 crtl->args.internal_arg_pointer
3571 = targetm.calls.internal_arg_pointer ();
3572
3573 assign_parms_initialize_all (&all);
3574 fnargs = assign_parms_augmented_arg_list (&all);
3575
3576 FOR_EACH_VEC_ELT (fnargs, i, parm)
3577 {
3578 struct assign_parm_data_one data;
3579
3580 /* Extract the type of PARM; adjust it according to ABI. */
3581 assign_parm_find_data_types (&all, parm, &data);
3582
3583 /* Early out for errors and void parameters. */
3584 if (data.passed_mode == VOIDmode)
3585 {
3586 SET_DECL_RTL (parm, const0_rtx);
3587 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3588 continue;
3589 }
3590
3591 /* Estimate stack alignment from parameter alignment. */
3592 if (SUPPORTS_STACK_ALIGNMENT)
3593 {
3594 unsigned int align
3595 = targetm.calls.function_arg_boundary (data.promoted_mode,
3596 data.passed_type);
3597 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3598 align);
3599 if (TYPE_ALIGN (data.nominal_type) > align)
3600 align = MINIMUM_ALIGNMENT (data.nominal_type,
3601 TYPE_MODE (data.nominal_type),
3602 TYPE_ALIGN (data.nominal_type));
3603 if (crtl->stack_alignment_estimated < align)
3604 {
3605 gcc_assert (!crtl->stack_realign_processed);
3606 crtl->stack_alignment_estimated = align;
3607 }
3608 }
3609
3610 /* Find out where the parameter arrives in this function. */
3611 assign_parm_find_entry_rtl (&all, &data);
3612
3613 /* Find out where stack space for this parameter might be. */
3614 if (assign_parm_is_stack_parm (&all, &data))
3615 {
3616 assign_parm_find_stack_rtl (parm, &data);
3617 assign_parm_adjust_entry_rtl (&data);
3618 }
3619 if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
3620 {
3621 /* Remember where the last non-bounds arg was passed in case
3622 we have to load its associated bounds from the Bounds
3623 Table. */
3624 last_arg = parm;
3625 last_arg_entry = data.entry_parm;
3626 bound_no = 0;
3627 }
3628 /* Record permanently how this parm was passed. */
3629 if (data.passed_pointer)
3630 {
3631 rtx incoming_rtl
3632 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
3633 data.entry_parm);
3634 set_decl_incoming_rtl (parm, incoming_rtl, true);
3635 }
3636 else
3637 set_decl_incoming_rtl (parm, data.entry_parm, false);
3638
3639 /* Bounds should be loaded in a particular order so that
3640 registers are allocated correctly. Collect info about
3641 input bounds and load them later. */
3642 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3643 {
3644 /* Expect bounds in instrumented functions only. */
3645 gcc_assert (chkp_function_instrumented_p (fndecl));
3646
3647 bdata.parm_data = data;
3648 bdata.bounds_parm = parm;
3649 bdata.ptr_parm = last_arg;
3650 bdata.ptr_entry = last_arg_entry;
3651 bdata.bound_no = bound_no;
3652 bndargs.safe_push (bdata);
3653 }
3654 else
3655 {
3656 assign_parm_adjust_stack_rtl (&data);
3657
3658 if (assign_parm_setup_block_p (&data))
3659 assign_parm_setup_block (&all, parm, &data);
3660 else if (data.passed_pointer || use_register_for_decl (parm))
3661 assign_parm_setup_reg (&all, parm, &data);
3662 else
3663 assign_parm_setup_stack (&all, parm, &data);
3664 }
3665
3666 if (cfun->stdarg && !DECL_CHAIN (parm))
3667 {
3668 int pretend_bytes = 0;
3669
3670 assign_parms_setup_varargs (&all, &data, false);
3671
3672 if (chkp_function_instrumented_p (fndecl))
3673 {
3674 /* We expect this to be the last parm. Otherwise it would be
3675 wrong to assign bounds right now. */
3676 gcc_assert (i == (fnargs.length () - 1));
3677 assign_bounds (bndargs, all);
3678 targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
3679 data.promoted_mode,
3680 data.passed_type,
3681 &pretend_bytes,
3682 false);
3683 }
3684 }
3685
3686 /* Update info on where next arg arrives in registers. */
3687 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3688 data.passed_type, data.named_arg);
3689
3690 if (POINTER_BOUNDS_TYPE_P (data.passed_type))
3691 bound_no++;
3692 }
3693
3694 assign_bounds (bndargs, all);
3695
3696 if (targetm.calls.split_complex_arg)
3697 assign_parms_unsplit_complex (&all, fnargs);
3698
3699 fnargs.release ();
3700
3701 /* Output all parameter conversion instructions (possibly including calls)
3702 now that all parameters have been copied out of hard registers. */
3703 emit_insn (all.first_conversion_insn);
3704
3705 /* Estimate reload stack alignment from scalar return mode. */
3706 if (SUPPORTS_STACK_ALIGNMENT)
3707 {
3708 if (DECL_RESULT (fndecl))
3709 {
3710 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3711 machine_mode mode = TYPE_MODE (type);
3712
3713 if (mode != BLKmode
3714 && mode != VOIDmode
3715 && !AGGREGATE_TYPE_P (type))
3716 {
3717 unsigned int align = GET_MODE_ALIGNMENT (mode);
3718 if (crtl->stack_alignment_estimated < align)
3719 {
3720 gcc_assert (!crtl->stack_realign_processed);
3721 crtl->stack_alignment_estimated = align;
3722 }
3723 }
3724 }
3725 }
3726
3727 /* If we are receiving a struct value address as the first argument, set up
3728 the RTL for the function result. As this might require code to convert
3729 the transmitted address to Pmode, we do this here to ensure that possible
3730 preliminary conversions of the address have been emitted already. */
3731 if (all.function_result_decl)
3732 {
3733 tree result = DECL_RESULT (current_function_decl);
3734 rtx addr = DECL_RTL (all.function_result_decl);
3735 rtx x;
3736
3737 if (DECL_BY_REFERENCE (result))
3738 {
3739 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3740 x = addr;
3741 }
3742 else
3743 {
3744 SET_DECL_VALUE_EXPR (result,
3745 build1 (INDIRECT_REF, TREE_TYPE (result),
3746 all.function_result_decl));
3747 addr = convert_memory_address (Pmode, addr);
3748 x = gen_rtx_MEM (DECL_MODE (result), addr);
3749 set_mem_attributes (x, result, 1);
3750 }
3751
3752 DECL_HAS_VALUE_EXPR_P (result) = 1;
3753
3754 SET_DECL_RTL (result, x);
3755 }
3756
3757 /* We have aligned all the args, so add space for the pretend args. */
3758 crtl->args.pretend_args_size = all.pretend_args_size;
3759 all.stack_args_size.constant += all.extra_pretend_bytes;
3760 crtl->args.size = all.stack_args_size.constant;
3761
3762 /* Adjust function incoming argument size for alignment and
3763 minimum length. */
3764
3765 crtl->args.size = MAX (crtl->args.size, all.reg_parm_stack_space);
3766 crtl->args.size = CEIL_ROUND (crtl->args.size,
3767 PARM_BOUNDARY / BITS_PER_UNIT);
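/* A worked example (assuming PARM_BOUNDARY == 32 and BITS_PER_UNIT == 8):
   an incoming argument block of 18 bytes is rounded up to
   CEIL_ROUND (18, 32 / 8) == (18 + 4 - 1) & ~(4 - 1) == 20 bytes.  */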
3768
3769 if (ARGS_GROW_DOWNWARD)
3770 {
3771 crtl->args.arg_offset_rtx
3772 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3773 : expand_expr (size_diffop (all.stack_args_size.var,
3774 size_int (-all.stack_args_size.constant)),
3775 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3776 }
3777 else
3778 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3779
3780 /* See how many bytes, if any, of its args a function should try to pop
3781 on return. */
3782
3783 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3784 TREE_TYPE (fndecl),
3785 crtl->args.size);
3786
3787 /* For a stdarg.h function, save info about the
3788 regs and stack space used by the named args. */
3789
3790 crtl->args.info = all.args_so_far_v;
3791
3792 /* Set the rtx used for the function return value. Put this in its
3793 own variable so any optimizers that need this information don't have
3794 to include tree.h. Do this here so it gets done when an inlined
3795 function gets output. */
3796
3797 crtl->return_rtx
3798 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3799 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3800
3801 /* If scalar return value was computed in a pseudo-reg, or was a named
3802 return value that got dumped to the stack, copy that to the hard
3803 return register. */
3804 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3805 {
3806 tree decl_result = DECL_RESULT (fndecl);
3807 rtx decl_rtl = DECL_RTL (decl_result);
3808
3809 if (REG_P (decl_rtl)
3810 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3811 : DECL_REGISTER (decl_result))
3812 {
3813 rtx real_decl_rtl;
3814
3815 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3816 fndecl, true);
3817 if (chkp_function_instrumented_p (fndecl))
3818 crtl->return_bnd
3819 = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
3820 fndecl, true);
3821 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3822 /* The delay slot scheduler assumes that crtl->return_rtx
3823 holds the hard register containing the return value, not a
3824 temporary pseudo. */
3825 crtl->return_rtx = real_decl_rtl;
3826 }
3827 }
3828 }
3829
3830 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3831 For all seen types, gimplify their sizes. */
3832
3833 static tree
3834 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3835 {
3836 tree t = *tp;
3837
3838 *walk_subtrees = 0;
3839 if (TYPE_P (t))
3840 {
3841 if (POINTER_TYPE_P (t))
3842 *walk_subtrees = 1;
3843 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3844 && !TYPE_SIZES_GIMPLIFIED (t))
3845 {
3846 gimplify_type_sizes (t, (gimple_seq *) data);
3847 *walk_subtrees = 1;
3848 }
3849 }
3850
3851 return NULL;
3852 }
3853
3854 /* Gimplify the parameter list for current_function_decl. This involves
3855 evaluating SAVE_EXPRs of variable sized parameters and generating code
3856 to implement callee-copies reference parameters. Returns a sequence of
3857 statements to add to the beginning of the function. */
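/* As an illustrative sketch (S_ADDR is a hypothetical name): for a
   callee-copied, variable-sized reference parameter S, the sequence
   built below is roughly equivalent to

     s_addr = __builtin_alloca_with_align (DECL_SIZE_UNIT (s),
                                           DECL_ALIGN (s));
     *s_addr = s;

   after which DECL_VALUE_EXPR (s) is set to *s_addr so that later uses
   of S refer to the local copy.  */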
3858
3859 gimple_seq
3860 gimplify_parameters (void)
3861 {
3862 struct assign_parm_data_all all;
3863 tree parm;
3864 gimple_seq stmts = NULL;
3865 vec<tree> fnargs;
3866 unsigned i;
3867
3868 assign_parms_initialize_all (&all);
3869 fnargs = assign_parms_augmented_arg_list (&all);
3870
3871 FOR_EACH_VEC_ELT (fnargs, i, parm)
3872 {
3873 struct assign_parm_data_one data;
3874
3875 /* Extract the type of PARM; adjust it according to ABI. */
3876 assign_parm_find_data_types (&all, parm, &data);
3877
3878 /* Early out for errors and void parameters. */
3879 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3880 continue;
3881
3882 /* Update info on where next arg arrives in registers. */
3883 targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
3884 data.passed_type, data.named_arg);
3885
3886 /* ??? Once upon a time variable_size stuffed parameter list
3887 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3888 turned out to be less than manageable in the gimple world.
3889 Now we have to hunt them down ourselves. */
3890 walk_tree_without_duplicates (&data.passed_type,
3891 gimplify_parm_type, &stmts);
3892
3893 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3894 {
3895 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3896 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3897 }
3898
3899 if (data.passed_pointer)
3900 {
3901 tree type = TREE_TYPE (data.passed_type);
3902 if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
3903 type, data.named_arg))
3904 {
3905 tree local, t;
3906
3907 /* For constant-sized objects, this is trivial; for
3908 variable-sized objects, we have to play games. */
3909 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3910 && !(flag_stack_check == GENERIC_STACK_CHECK
3911 && compare_tree_int (DECL_SIZE_UNIT (parm),
3912 STACK_CHECK_MAX_VAR_SIZE) > 0))
3913 {
3914 local = create_tmp_var (type, get_name (parm));
3915 DECL_IGNORED_P (local) = 0;
3916 /* If PARM was addressable, move that flag over
3917 to the local copy, as its address will be taken,
3918 not the PARM's. Keep the PARM marked addressable
3919 too, since we'll query that flag during gimplification. */
3920 if (TREE_ADDRESSABLE (parm))
3921 TREE_ADDRESSABLE (local) = 1;
3922 else if (TREE_CODE (type) == COMPLEX_TYPE
3923 || TREE_CODE (type) == VECTOR_TYPE)
3924 DECL_GIMPLE_REG_P (local) = 1;
3925 }
3926 else
3927 {
3928 tree ptr_type, addr;
3929
3930 ptr_type = build_pointer_type (type);
3931 addr = create_tmp_reg (ptr_type, get_name (parm));
3932 DECL_IGNORED_P (addr) = 0;
3933 local = build_fold_indirect_ref (addr);
3934
3935 t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3936 t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
3937 size_int (DECL_ALIGN (parm)));
3938
3939 /* The call has been built for a variable-sized object. */
3940 CALL_ALLOCA_FOR_VAR_P (t) = 1;
3941 t = fold_convert (ptr_type, t);
3942 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3943 gimplify_and_add (t, &stmts);
3944 }
3945
3946 gimplify_assign (local, parm, &stmts);
3947
3948 SET_DECL_VALUE_EXPR (parm, local);
3949 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3950 }
3951 }
3952 }
3953
3954 fnargs.release ();
3955
3956 return stmts;
3957 }
3958 \f
3959 /* Compute the size and offset from the start of the stacked arguments for a
3960 parm passed in mode PASSED_MODE and with type TYPE.
3961
3962 INITIAL_OFFSET_PTR points to the current offset into the stacked
3963 arguments.
3964
3965 The starting offset and size for this parm are returned in
3966 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3967 nonzero, the offset is that of stack slot, which is returned in
3968 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3969 padding required from the initial offset ptr to the stack slot.
3970
3971 IN_REGS is nonzero if the argument will be passed in registers. It will
3972 never be set if REG_PARM_STACK_SPACE is not defined.
3973
3974 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
3975 for arguments which are passed in registers.
3976
3977 FNDECL is the function in which the argument was defined.
3978
3979 There are two types of rounding that are done. The first, controlled by
3980 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3981 argument list to be aligned to the specific boundary (in bits). This
3982 rounding affects the initial and starting offsets, but not the argument
3983 size.
3984
3985 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3986 optionally rounds the size of the parm to PARM_BOUNDARY. The
3987 initial offset is not affected by this rounding, while the size always
3988 is and the starting offset may be. */
3989
3990 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3991 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3992 callers pass in the total size of args so far as
3993 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
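/* A worked example with assumed values (args growing upward,
   STACK_POINTER_OFFSET == 0, PARM_BOUNDARY == 32, no partial registers,
   a 6-byte BLKmode argument padded upward, and target hooks returning a
   64-bit argument boundary and a 32-bit round boundary): an initial
   offset of 4 bytes is padded up to LOCATE->SLOT_OFFSET.CONSTANT
   == LOCATE->OFFSET.CONSTANT == 8 with LOCATE->ALIGNMENT_PAD.CONSTANT
   == 4, and the size is rounded to LOCATE->SIZE.CONSTANT == 8.  */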
3994
3995 void
3996 locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
3997 int reg_parm_stack_space, int partial,
3998 tree fndecl ATTRIBUTE_UNUSED,
3999 struct args_size *initial_offset_ptr,
4000 struct locate_and_pad_arg_data *locate)
4001 {
4002 tree sizetree;
4003 enum direction where_pad;
4004 unsigned int boundary, round_boundary;
4005 int part_size_in_regs;
4006
4007 /* If we have found a stack parm before we reach the end of the
4008 area reserved for registers, skip that area. */
4009 if (! in_regs)
4010 {
4011 if (reg_parm_stack_space > 0)
4012 {
4013 if (initial_offset_ptr->var)
4014 {
4015 initial_offset_ptr->var
4016 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4017 ssize_int (reg_parm_stack_space));
4018 initial_offset_ptr->constant = 0;
4019 }
4020 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4021 initial_offset_ptr->constant = reg_parm_stack_space;
4022 }
4023 }
4024
4025 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
4026
4027 sizetree
4028 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4029 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4030 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
4031 round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
4032 type);
4033 locate->where_pad = where_pad;
4034
4035 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
4036 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4037 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4038
4039 locate->boundary = boundary;
4040
4041 if (SUPPORTS_STACK_ALIGNMENT)
4042 {
4043 /* stack_alignment_estimated can't change after stack has been
4044 realigned. */
4045 if (crtl->stack_alignment_estimated < boundary)
4046 {
4047 if (!crtl->stack_realign_processed)
4048 crtl->stack_alignment_estimated = boundary;
4049 else
4050 {
4051 /* If stack is realigned and stack alignment value
4052 hasn't been finalized, it is OK not to increase
4053 stack_alignment_estimated. The bigger alignment
4054 requirement is recorded in stack_alignment_needed
4055 below. */
4056 gcc_assert (!crtl->stack_realign_finalized
4057 && crtl->stack_realign_needed);
4058 }
4059 }
4060 }
4061
4062 /* Remember if the outgoing parameter requires extra alignment on the
4063 calling function side. */
4064 if (crtl->stack_alignment_needed < boundary)
4065 crtl->stack_alignment_needed = boundary;
4066 if (crtl->preferred_stack_boundary < boundary)
4067 crtl->preferred_stack_boundary = boundary;
4068
4069 if (ARGS_GROW_DOWNWARD)
4070 {
4071 locate->slot_offset.constant = -initial_offset_ptr->constant;
4072 if (initial_offset_ptr->var)
4073 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
4074 initial_offset_ptr->var);
4075
4076 {
4077 tree s2 = sizetree;
4078 if (where_pad != none
4079 && (!tree_fits_uhwi_p (sizetree)
4080 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4081 s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
4082 SUB_PARM_SIZE (locate->slot_offset, s2);
4083 }
4084
4085 locate->slot_offset.constant += part_size_in_regs;
4086
4087 if (!in_regs || reg_parm_stack_space > 0)
4088 pad_to_arg_alignment (&locate->slot_offset, boundary,
4089 &locate->alignment_pad);
4090
4091 locate->size.constant = (-initial_offset_ptr->constant
4092 - locate->slot_offset.constant);
4093 if (initial_offset_ptr->var)
4094 locate->size.var = size_binop (MINUS_EXPR,
4095 size_binop (MINUS_EXPR,
4096 ssize_int (0),
4097 initial_offset_ptr->var),
4098 locate->slot_offset.var);
4099
4100 /* Pad_below needs the pre-rounded size to know how much to pad
4101 below. */
4102 locate->offset = locate->slot_offset;
4103 if (where_pad == downward)
4104 pad_below (&locate->offset, passed_mode, sizetree);
4105
4106 }
4107 else
4108 {
4109 if (!in_regs || reg_parm_stack_space > 0)
4110 pad_to_arg_alignment (initial_offset_ptr, boundary,
4111 &locate->alignment_pad);
4112 locate->slot_offset = *initial_offset_ptr;
4113
4114 #ifdef PUSH_ROUNDING
4115 if (passed_mode != BLKmode)
4116 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4117 #endif
4118
4119 /* Pad_below needs the pre-rounded size to know how much to pad below
4120 so this must be done before rounding up. */
4121 locate->offset = locate->slot_offset;
4122 if (where_pad == downward)
4123 pad_below (&locate->offset, passed_mode, sizetree);
4124
4125 if (where_pad != none
4126 && (!tree_fits_uhwi_p (sizetree)
4127 || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
4128 sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);
4129
4130 ADD_PARM_SIZE (locate->size, sizetree);
4131
4132 locate->size.constant -= part_size_in_regs;
4133 }
4134
4135 #ifdef FUNCTION_ARG_OFFSET
4136 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
4137 #endif
4138 }
4139
4140 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4141 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
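/* For example (assuming STACK_POINTER_OFFSET == 0, args growing upward
   and a constant offset): with BOUNDARY == 64 bits, boundary_in_bytes
   is 8, so an offset of 20 becomes CEIL_ROUND (20, 8) == 24; if
   BOUNDARY > PARM_BOUNDARY, the 4 bytes of padding are also recorded
   in *ALIGNMENT_PAD.  */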
4142
4143 static void
4144 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
4145 struct args_size *alignment_pad)
4146 {
4147 tree save_var = NULL_TREE;
4148 HOST_WIDE_INT save_constant = 0;
4149 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4150 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
4151
4152 #ifdef SPARC_STACK_BOUNDARY_HACK
4153 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
4154 the real alignment of %sp. However, when it does this, the
4155 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
4156 if (SPARC_STACK_BOUNDARY_HACK)
4157 sp_offset = 0;
4158 #endif
4159
4160 if (boundary > PARM_BOUNDARY)
4161 {
4162 save_var = offset_ptr->var;
4163 save_constant = offset_ptr->constant;
4164 }
4165
4166 alignment_pad->var = NULL_TREE;
4167 alignment_pad->constant = 0;
4168
4169 if (boundary > BITS_PER_UNIT)
4170 {
4171 if (offset_ptr->var)
4172 {
4173 tree sp_offset_tree = ssize_int (sp_offset);
4174 tree offset = size_binop (PLUS_EXPR,
4175 ARGS_SIZE_TREE (*offset_ptr),
4176 sp_offset_tree);
4177 tree rounded;
4178 if (ARGS_GROW_DOWNWARD)
4179 rounded = round_down (offset, boundary / BITS_PER_UNIT);
4180 else
4181 rounded = round_up (offset, boundary / BITS_PER_UNIT);
4182
4183 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
4184 /* ARGS_SIZE_TREE includes constant term. */
4185 offset_ptr->constant = 0;
4186 if (boundary > PARM_BOUNDARY)
4187 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
4188 save_var);
4189 }
4190 else
4191 {
4192 offset_ptr->constant = -sp_offset +
4193 (ARGS_GROW_DOWNWARD
4194 ? FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes)
4195 : CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes));
4196
4197 if (boundary > PARM_BOUNDARY)
4198 alignment_pad->constant = offset_ptr->constant - save_constant;
4199 }
4200 }
4201 }
4202
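/* Advance *OFFSET_PTR past the padding placed below an argument of mode
   PASSED_MODE and size SIZETREE.  For example (assuming
   PARM_BOUNDARY == 32 and BITS_PER_UNIT == 8), an HImode argument
   occupies 2 bytes, so padding it downward advances *OFFSET_PTR by
   32 / 8 - 2 == 2 bytes.  */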
4203 static void
4204 pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
4205 {
4206 if (passed_mode != BLKmode)
4207 {
4208 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4209 offset_ptr->constant
4210 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4211 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4212 - GET_MODE_SIZE (passed_mode));
4213 }
4214 else
4215 {
4216 if (TREE_CODE (sizetree) != INTEGER_CST
4217 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4218 {
4219 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4220 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4221 /* Add it in. */
4222 ADD_PARM_SIZE (*offset_ptr, s2);
4223 SUB_PARM_SIZE (*offset_ptr, sizetree);
4224 }
4225 }
4226 }
4227 \f
4228
4229 /* True if register REGNO was alive at a place where `setjmp' was
4230 called and was set more than once or is an argument. Such regs may
4231 be clobbered by `longjmp'. */
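/* An illustrative example of the user code this targets (DO_WORK is a
   hypothetical function that may call longjmp):

     #include <setjmp.h>
     jmp_buf buf;
     int g (void)
     {
       int i = 1;
       if (setjmp (buf))
         return i;
       i = 2;
       do_work ();
       return i;
     }

   If I lives in a register, it is set more than once and is live across
   the setjmp, so -Wclobbered may warn that it might be clobbered by
   longjmp.  */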
4232
4233 static bool
4234 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
4235 {
4236 /* There appear to be cases where some local vars never reach the
4237 backend but have bogus regnos. */
4238 if (regno >= max_reg_num ())
4239 return false;
4240
4241 return ((REG_N_SETS (regno) > 1
4242 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
4243 regno))
4244 && REGNO_REG_SET_P (setjmp_crosses, regno));
4245 }
4246
4247 /* Walk the tree of blocks describing the binding levels within a
4248 function and warn about variables that might be killed by setjmp or
4249 vfork. This is done after flow analysis and before register
4250 allocation, since register allocation will map the pseudo-regs to
4251 hard regs. */
4252
4253 static void
4254 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
4255 {
4256 tree decl, sub;
4257
4258 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
4259 {
4260 if (TREE_CODE (decl) == VAR_DECL
4261 && DECL_RTL_SET_P (decl)
4262 && REG_P (DECL_RTL (decl))
4263 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4264 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
4265 " %<longjmp%> or %<vfork%>", decl);
4266 }
4267
4268 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
4269 setjmp_vars_warning (setjmp_crosses, sub);
4270 }
4271
4272 /* Do the appropriate part of setjmp_vars_warning
4273 but for arguments instead of local variables. */
4274
4275 static void
4276 setjmp_args_warning (bitmap setjmp_crosses)
4277 {
4278 tree decl;
4279 for (decl = DECL_ARGUMENTS (current_function_decl);
4280 decl; decl = DECL_CHAIN (decl))
4281 if (DECL_RTL (decl) != 0
4282 && REG_P (DECL_RTL (decl))
4283 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4284 warning (OPT_Wclobbered,
4285 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4286 decl);
4287 }
4288
4289 /* Generate warning messages for variables live across setjmp. */
4290
4291 void
4292 generate_setjmp_warnings (void)
4293 {
4294 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4295
4296 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
4297 || bitmap_empty_p (setjmp_crosses))
4298 return;
4299
4300 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4301 setjmp_args_warning (setjmp_crosses);
4302 }
4303
4304 \f
4305 /* Reverse the order of elements in the fragment chain T of blocks,
4306 and return the new head of the chain (old last element).
4307 In addition, clear BLOCK_SAME_RANGE flags when needed and adjust
4308 BLOCK_SUPERCONTEXT so that it points to the super fragment's
4309 origin rather than to the super fragment itself. */
4310
4311 static tree
4312 block_fragments_nreverse (tree t)
4313 {
4314 tree prev = 0, block, next, prev_super = 0;
4315 tree super = BLOCK_SUPERCONTEXT (t);
4316 if (BLOCK_FRAGMENT_ORIGIN (super))
4317 super = BLOCK_FRAGMENT_ORIGIN (super);
4318 for (block = t; block; block = next)
4319 {
4320 next = BLOCK_FRAGMENT_CHAIN (block);
4321 BLOCK_FRAGMENT_CHAIN (block) = prev;
4322 if ((prev && !BLOCK_SAME_RANGE (prev))
4323 || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
4324 != prev_super))
4325 BLOCK_SAME_RANGE (block) = 0;
4326 prev_super = BLOCK_SUPERCONTEXT (block);
4327 BLOCK_SUPERCONTEXT (block) = super;
4328 prev = block;
4329 }
4330 t = BLOCK_FRAGMENT_ORIGIN (t);
4331 if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
4332 != prev_super)
4333 BLOCK_SAME_RANGE (t) = 0;
4334 BLOCK_SUPERCONTEXT (t) = super;
4335 return prev;
4336 }
4337
4338 /* Reverse the order of elements in the chain T of blocks,
4339 and return the new head of the chain (old last element).
4340 Also do the same on subblocks and reverse the order of elements
4341 in BLOCK_FRAGMENT_CHAIN as well. */
4342
4343 static tree
4344 blocks_nreverse_all (tree t)
4345 {
4346 tree prev = 0, block, next;
4347 for (block = t; block; block = next)
4348 {
4349 next = BLOCK_CHAIN (block);
4350 BLOCK_CHAIN (block) = prev;
4351 if (BLOCK_FRAGMENT_CHAIN (block)
4352 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4353 {
4354 BLOCK_FRAGMENT_CHAIN (block)
4355 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4356 if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
4357 BLOCK_SAME_RANGE (block) = 0;
4358 }
4359 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4360 prev = block;
4361 }
4362 return prev;
4363 }
4364
4365
4366 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4367 and create duplicate blocks. */
4368 /* ??? Need an option to either create block fragments or to create
4369 abstract origin duplicates of a source block. It really depends
4370 on what optimization has been performed. */
4371
4372 void
4373 reorder_blocks (void)
4374 {
4375 tree block = DECL_INITIAL (current_function_decl);
4376
4377 if (block == NULL_TREE)
4378 return;
4379
4380 auto_vec<tree, 10> block_stack;
4381
4382 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4383 clear_block_marks (block);
4384
4385 /* Prune the old trees away, so that they don't get in the way. */
4386 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4387 BLOCK_CHAIN (block) = NULL_TREE;
4388
4389 /* Recreate the block tree from the note nesting. */
4390 reorder_blocks_1 (get_insns (), block, &block_stack);
4391 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4392 }
4393
4394 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4395
4396 void
4397 clear_block_marks (tree block)
4398 {
4399 while (block)
4400 {
4401 TREE_ASM_WRITTEN (block) = 0;
4402 clear_block_marks (BLOCK_SUBBLOCKS (block));
4403 block = BLOCK_CHAIN (block);
4404 }
4405 }
4406
4407 static void
4408 reorder_blocks_1 (rtx_insn *insns, tree current_block,
4409 vec<tree> *p_block_stack)
4410 {
4411 rtx_insn *insn;
4412 tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
4413
4414 for (insn = insns; insn; insn = NEXT_INSN (insn))
4415 {
4416 if (NOTE_P (insn))
4417 {
4418 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4419 {
4420 tree block = NOTE_BLOCK (insn);
4421 tree origin;
4422
4423 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4424 origin = block;
4425
4426 if (prev_end)
4427 BLOCK_SAME_RANGE (prev_end) = 0;
4428 prev_end = NULL_TREE;
4429
4430 /* If we have seen this block before, that means it now
4431 spans multiple address regions. Create a new fragment. */
4432 if (TREE_ASM_WRITTEN (block))
4433 {
4434 tree new_block = copy_node (block);
4435
4436 BLOCK_SAME_RANGE (new_block) = 0;
4437 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4438 BLOCK_FRAGMENT_CHAIN (new_block)
4439 = BLOCK_FRAGMENT_CHAIN (origin);
4440 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4441
4442 NOTE_BLOCK (insn) = new_block;
4443 block = new_block;
4444 }
4445
4446 if (prev_beg == current_block && prev_beg)
4447 BLOCK_SAME_RANGE (block) = 1;
4448
4449 prev_beg = origin;
4450
4451 BLOCK_SUBBLOCKS (block) = 0;
4452 TREE_ASM_WRITTEN (block) = 1;
4453 /* When there's only one block for the entire function,
4454 current_block == block and we mustn't do this; it
4455 would cause infinite recursion. */
4456 if (block != current_block)
4457 {
4458 tree super;
4459 if (block != origin)
4460 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
4461 || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
4462 (origin))
4463 == current_block);
4464 if (p_block_stack->is_empty ())
4465 super = current_block;
4466 else
4467 {
4468 super = p_block_stack->last ();
4469 gcc_assert (super == current_block
4470 || BLOCK_FRAGMENT_ORIGIN (super)
4471 == current_block);
4472 }
4473 BLOCK_SUPERCONTEXT (block) = super;
4474 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4475 BLOCK_SUBBLOCKS (current_block) = block;
4476 current_block = origin;
4477 }
4478 p_block_stack->safe_push (block);
4479 }
4480 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4481 {
4482 NOTE_BLOCK (insn) = p_block_stack->pop ();
4483 current_block = BLOCK_SUPERCONTEXT (current_block);
4484 if (BLOCK_FRAGMENT_ORIGIN (current_block))
4485 current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
4486 prev_beg = NULL_TREE;
4487 prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
4488 ? NOTE_BLOCK (insn) : NULL_TREE;
4489 }
4490 }
4491 else
4492 {
4493 prev_beg = NULL_TREE;
4494 if (prev_end)
4495 BLOCK_SAME_RANGE (prev_end) = 0;
4496 prev_end = NULL_TREE;
4497 }
4498 }
4499 }
4500
4501 /* Reverse the order of elements in the chain T of blocks,
4502 and return the new head of the chain (old last element). */
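/* For example, applied to the chain B1 -> B2 -> B3 (linked through
   BLOCK_CHAIN) this yields B3 -> B2 -> B1 and returns B3, the old last
   element.  */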
4503
4504 tree
4505 blocks_nreverse (tree t)
4506 {
4507 tree prev = 0, block, next;
4508 for (block = t; block; block = next)
4509 {
4510 next = BLOCK_CHAIN (block);
4511 BLOCK_CHAIN (block) = prev;
4512 prev = block;
4513 }
4514 return prev;
4515 }
4516
4517 /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
4518 by modifying the last node in chain 1 to point to chain 2. */
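/* For example, block_chainon (A -> B, C -> D) sets BLOCK_CHAIN (B) to C,
   producing A -> B -> C -> D, and returns A.  */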
4519
4520 tree
4521 block_chainon (tree op1, tree op2)
4522 {
4523 tree t1;
4524
4525 if (!op1)
4526 return op2;
4527 if (!op2)
4528 return op1;
4529
4530 for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
4531 continue;
4532 BLOCK_CHAIN (t1) = op2;
4533
4534 #ifdef ENABLE_TREE_CHECKING
4535 {
4536 tree t2;
4537 for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
4538 gcc_assert (t2 != t1);
4539 }
4540 #endif
4541
4542 return op1;
4543 }
4544
4545 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4546 non-NULL, list them all into VECTOR, in a depth-first preorder
4547 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4548 blocks. */
4549
4550 static int
4551 all_blocks (tree block, tree *vector)
4552 {
4553 int n_blocks = 0;
4554
4555 while (block)
4556 {
4557 TREE_ASM_WRITTEN (block) = 0;
4558
4559 /* Record this block. */
4560 if (vector)
4561 vector[n_blocks] = block;
4562
4563 ++n_blocks;
4564
4565 /* Record the subblocks, and their subblocks... */
4566 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4567 vector ? vector + n_blocks : 0);
4568 block = BLOCK_CHAIN (block);
4569 }
4570
4571 return n_blocks;
4572 }
4573
4574 /* Return a vector containing all the blocks rooted at BLOCK. The
4575 number of elements in the vector is stored in N_BLOCKS_P. The
4576 vector is dynamically allocated; it is the caller's responsibility
4577 to call `free' on the pointer returned. */
4578
4579 static tree *
4580 get_block_vector (tree block, int *n_blocks_p)
4581 {
4582 tree *block_vector;
4583
4584 *n_blocks_p = all_blocks (block, NULL);
4585 block_vector = XNEWVEC (tree, *n_blocks_p);
4586 all_blocks (block, block_vector);
4587
4588 return block_vector;
4589 }
4590
4591 static GTY(()) int next_block_index = 2;
4592
4593 /* Set BLOCK_NUMBER for all the blocks in FN. */
4594
4595 void
4596 number_blocks (tree fn)
4597 {
4598 int i;
4599 int n_blocks;
4600 tree *block_vector;
4601
4602 /* For SDB and XCOFF debugging output, we start numbering the blocks
4603 from 1 within each function, rather than keeping a running
4604 count. */
4605 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4606 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4607 next_block_index = 1;
4608 #endif
4609
4610 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4611
4612 /* The top-level BLOCK isn't numbered at all. */
4613 for (i = 1; i < n_blocks; ++i)
4614 /* We number the blocks from two. */
4615 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4616
4617 free (block_vector);
4618
4619 return;
4620 }
4621
4622 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4623
4624 DEBUG_FUNCTION tree
4625 debug_find_var_in_block_tree (tree var, tree block)
4626 {
4627 tree t;
4628
4629 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4630 if (t == var)
4631 return block;
4632
4633 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4634 {
4635 tree ret = debug_find_var_in_block_tree (var, t);
4636 if (ret)
4637 return ret;
4638 }
4639
4640 return NULL_TREE;
4641 }
4642 \f
4643 /* Keep track of whether we're in a dummy function context. If we are,
4644 we don't want to invoke the set_current_function hook, because we'll
4645 get into trouble if the hook calls target_reinit () recursively or
4646 when the initial initialization is not yet complete. */
4647
4648 static bool in_dummy_function;
4649
4650 /* Invoke the target hook when setting cfun. Update the optimization options
4651 if the function uses different options than the default. */
4652
4653 static void
4654 invoke_set_current_function_hook (tree fndecl)
4655 {
4656 if (!in_dummy_function)
4657 {
4658 tree opts = ((fndecl)
4659 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4660 : optimization_default_node);
4661
4662 if (!opts)
4663 opts = optimization_default_node;
4664
4665 /* Change optimization options if needed. */
4666 if (optimization_current_node != opts)
4667 {
4668 optimization_current_node = opts;
4669 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4670 }
4671
4672 targetm.set_current_function (fndecl);
4673 this_fn_optabs = this_target_optabs;
4674
4675 if (opts != optimization_default_node)
4676 {
4677 init_tree_optimization_optabs (opts);
4678 if (TREE_OPTIMIZATION_OPTABS (opts))
4679 this_fn_optabs = (struct target_optabs *)
4680 TREE_OPTIMIZATION_OPTABS (opts);
4681 }
4682 }
4683 }
4684
4685 /* cfun should never be set directly; use this function. */
4686
4687 void
4688 set_cfun (struct function *new_cfun)
4689 {
4690 if (cfun != new_cfun)
4691 {
4692 cfun = new_cfun;
4693 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4694 }
4695 }
4696
4697 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4698
4699 static vec<function_p> cfun_stack;
4700
4701 /* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
4702 current_function_decl accordingly. */
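/* A typical usage sketch (FN is any function with a struct function):

     push_cfun (DECL_STRUCT_FUNCTION (fn));
     ... work on FN through cfun ...
     pop_cfun ();  */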
4703
4704 void
4705 push_cfun (struct function *new_cfun)
4706 {
4707 gcc_assert ((!cfun && !current_function_decl)
4708 || (cfun && current_function_decl == cfun->decl));
4709 cfun_stack.safe_push (cfun);
4710 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4711 set_cfun (new_cfun);
4712 }
4713
4714 /* Pop cfun from the stack. Also set current_function_decl accordingly. */
4715
4716 void
4717 pop_cfun (void)
4718 {
4719 struct function *new_cfun = cfun_stack.pop ();
4720 /* When in_dummy_function, we do have a cfun but current_function_decl is
4721 NULL. We also allow pushing NULL cfun and subsequently changing
4722 current_function_decl to something else and have both restored by
4723 pop_cfun. */
4724 gcc_checking_assert (in_dummy_function
4725 || !cfun
4726 || current_function_decl == cfun->decl);
4727 set_cfun (new_cfun);
4728 current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
4729 }
4730
4731 /* Return the current value of funcdef_no and increment it. */
4732 int
4733 get_next_funcdef_no (void)
4734 {
4735 return funcdef_no++;
4736 }
4737
4738 /* Return the current value of funcdef_no. */
4739 int
4740 get_last_funcdef_no (void)
4741 {
4742 return funcdef_no;
4743 }
4744
4745 /* Allocate a function structure for FNDECL and set its contents
4746 to the defaults. Set cfun to the newly-allocated object.
4747 Some of the helper functions invoked during initialization assume
4748 that cfun has already been set. Therefore, assign the new object
4749 directly into cfun and invoke the back end hook explicitly at the
4750 very end, rather than initializing a temporary and calling set_cfun
4751 on it.
4752
4753 ABSTRACT_P is true if this is a function that will never be seen by
4754 the middle-end. Such functions are front-end concepts (like C++
4755 function templates) that do not correspond directly to functions
4756 placed in object files. */
4757
4758 void
4759 allocate_struct_function (tree fndecl, bool abstract_p)
4760 {
4761 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4762
4763 cfun = ggc_cleared_alloc<function> ();
4764
4765 init_eh_for_function ();
4766
4767 if (init_machine_status)
4768 cfun->machine = (*init_machine_status) ();
4769
4770 #ifdef OVERRIDE_ABI_FORMAT
4771 OVERRIDE_ABI_FORMAT (fndecl);
4772 #endif
4773
4774 if (fndecl != NULL_TREE)
4775 {
4776 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4777 cfun->decl = fndecl;
4778 current_function_funcdef_no = get_next_funcdef_no ();
4779 }
4780
4781 invoke_set_current_function_hook (fndecl);
4782
4783 if (fndecl != NULL_TREE)
4784 {
4785 tree result = DECL_RESULT (fndecl);
4786 if (!abstract_p && aggregate_value_p (result, fndecl))
4787 {
4788 #ifdef PCC_STATIC_STRUCT_RETURN
4789 cfun->returns_pcc_struct = 1;
4790 #endif
4791 cfun->returns_struct = 1;
4792 }
4793
4794 cfun->stdarg = stdarg_p (fntype);
4795
4796 /* Assume all registers in stdarg functions need to be saved. */
4797 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4798 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4799
4800 /* ??? This could be set on a per-function basis by the front-end
4801 but is this worth the hassle? */
4802 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4803 cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
4804
4805 if (!profile_flag && !flag_instrument_function_entry_exit)
4806 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
4807 }
4808 }
4809
4810 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4811 instead of just setting it. */
4812
4813 void
4814 push_struct_function (tree fndecl)
4815 {
4816 /* When in_dummy_function we might be in the middle of a pop_cfun and
4817 current_function_decl and cfun may not match. */
4818 gcc_assert (in_dummy_function
4819 || (!cfun && !current_function_decl)
4820 || (cfun && current_function_decl == cfun->decl));
4821 cfun_stack.safe_push (cfun);
4822 current_function_decl = fndecl;
4823 allocate_struct_function (fndecl, false);
4824 }
4825
4826 /* Reset crtl and other non-struct-function variables to defaults as
4827 appropriate for emitting rtl at the start of a function. */
4828
4829 static void
4830 prepare_function_start (void)
4831 {
4832 gcc_assert (!get_last_insn ());
4833 init_temp_slots ();
4834 init_emit ();
4835 init_varasm_status ();
4836 init_expr ();
4837 default_rtl_profile ();
4838
4839 if (flag_stack_usage_info)
4840 {
4841 cfun->su = ggc_cleared_alloc<stack_usage> ();
4842 cfun->su->static_stack_size = -1;
4843 }
4844
4845 cse_not_expected = ! optimize;
4846
4847 /* Caller save not needed yet. */
4848 caller_save_needed = 0;
4849
4850 /* We haven't done register allocation yet. */
4851 reg_renumber = 0;
4852
4853 /* Indicate that we have not instantiated virtual registers yet. */
4854 virtuals_instantiated = 0;
4855
4856 /* Indicate that we want CONCATs now. */
4857 generating_concat_p = 1;
4858
4859 /* Indicate we have no need of a frame pointer yet. */
4860 frame_pointer_needed = 0;
4861 }
4862
4863 void
4864 push_dummy_function (bool with_decl)
4865 {
4866 tree fn_decl, fn_type, fn_result_decl;
4867
4868 gcc_assert (!in_dummy_function);
4869 in_dummy_function = true;
4870
4871 if (with_decl)
4872 {
4873 fn_type = build_function_type_list (void_type_node, NULL_TREE);
4874 fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
4875 fn_type);
4876 fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
4877 NULL_TREE, void_type_node);
4878 DECL_RESULT (fn_decl) = fn_result_decl;
4879 }
4880 else
4881 fn_decl = NULL_TREE;
4882
4883 push_struct_function (fn_decl);
4884 }
4885
4886 /* Initialize the rtl expansion mechanism so that we can do simple things
4887 like generate sequences. This is used to provide a context during global
4888 initialization of some passes. You must call expand_dummy_function_end
4889 to exit this context. */
4890
4891 void
4892 init_dummy_function_start (void)
4893 {
4894 push_dummy_function (false);
4895 prepare_function_start ();
4896 }
4897
4898 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4899 and initialize static variables for generating RTL for the statements
4900 of the function. */
4901
4902 void
4903 init_function_start (tree subr)
4904 {
4905 if (subr && DECL_STRUCT_FUNCTION (subr))
4906 set_cfun (DECL_STRUCT_FUNCTION (subr));
4907 else
4908 allocate_struct_function (subr, false);
4909
4910 /* Initialize backend, if needed. */
4911 initialize_rtl ();
4912
4913 prepare_function_start ();
4914 decide_function_section (subr);
4915
4916 /* Warn if the return value is an aggregate type,
4917 regardless of which calling convention we are using for it. */
4918 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4919 warning (OPT_Waggregate_return, "function returns an aggregate");
4920 }
4921
4922 /* Expand code to verify the stack_protect_guard. This is invoked at
4923 the end of a function to be protected. */
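/* The emitted sequence is roughly equivalent to the C fragment

     if (frame_guard != __stack_chk_guard)
       __stack_chk_fail ();

   where frame_guard stands for crtl->stack_protect_guard and the guard
   symbol and failure routine are whatever the target hooks return; the
   glibc names above are only illustrative.  */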
4924
4925 #ifndef HAVE_stack_protect_test
4926 # define HAVE_stack_protect_test 0
4927 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4928 #endif
4929
4930 void
4931 stack_protect_epilogue (void)
4932 {
4933 tree guard_decl = targetm.stack_protect_guard ();
4934 rtx_code_label *label = gen_label_rtx ();
4935 rtx x, y, tmp;
4936
4937 x = expand_normal (crtl->stack_protect_guard);
4938 y = expand_normal (guard_decl);
4939
4940 /* Allow the target to compare Y with X without leaking either into
4941 a register. */
4942 switch ((int) (HAVE_stack_protect_test != 0))
4943 {
4944 case 1:
4945 tmp = gen_stack_protect_test (x, y, label);
4946 if (tmp)
4947 {
4948 emit_insn (tmp);
4949 break;
4950 }
4951 /* FALLTHRU */
4952
4953 default:
4954 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4955 break;
4956 }
4957
4958 /* The noreturn predictor has been moved to the tree level. The rtl-level
4959 predictors estimate this branch at about 20%, which isn't enough to get
4960 things moved out of line. Since this is the only extant case of adding
4961 a noreturn function at the rtl level, it doesn't seem worth doing anything
4962 except adding the prediction by hand. */
4963 tmp = get_last_insn ();
4964 if (JUMP_P (tmp))
4965 predict_insn_def (as_a <rtx_insn *> (tmp), PRED_NORETURN, TAKEN);
4966
4967 expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
4968 free_temp_slots ();
4969 emit_label (label);
4970 }
4971 \f
4972 /* Start the RTL for a new function, and set variables used for
4973 emitting RTL.
4974 SUBR is the FUNCTION_DECL node. */
4977
4978 void
4979 expand_function_start (tree subr)
4980 {
4981 /* Make sure volatile mem refs aren't considered
4982 valid operands of arithmetic insns. */
4983 init_recog_no_volatile ();
4984
4985 crtl->profile
4986 = (profile_flag
4987 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4988
4989 crtl->limit_stack
4990 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4991
4992 /* Make the label for return statements to jump to. Do not special
4993 case machines with special return instructions -- they will be
4994 handled later during jump, ifcvt, or epilogue creation. */
4995 return_label = gen_label_rtx ();
4996
4997 /* Initialize rtx used to return the value. */
4998 /* Do this before assign_parms so that we copy the struct value address
4999 before any library calls that assign parms might generate. */
5000
5001 /* Decide whether to return the value in memory or in a register. */
5002 if (aggregate_value_p (DECL_RESULT (subr), subr))
5003 {
5004 /* Returning something that won't go in a register. */
5005 rtx value_address = 0;
5006
5007 #ifdef PCC_STATIC_STRUCT_RETURN
5008 if (cfun->returns_pcc_struct)
5009 {
5010 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5011 value_address = assemble_static_space (size);
5012 }
5013 else
5014 #endif
5015 {
5016 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
5017 /* Expect to be passed the address of a place to store the value.
5018 If it is passed as an argument, assign_parms will take care of
5019 it. */
5020 if (sv)
5021 {
5022 value_address = gen_reg_rtx (Pmode);
5023 emit_move_insn (value_address, sv);
5024 }
5025 }
5026 if (value_address)
5027 {
5028 rtx x = value_address;
5029 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
5030 {
5031 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
5032 set_mem_attributes (x, DECL_RESULT (subr), 1);
5033 }
5034 SET_DECL_RTL (DECL_RESULT (subr), x);
5035 }
5036 }
5037 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5038 /* If return mode is void, this decl rtl should not be used. */
5039 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
5040 else
5041 {
5042 /* Compute the return values into a pseudo reg, which we will copy
5043 into the true return register after the cleanups are done. */
5044 tree return_type = TREE_TYPE (DECL_RESULT (subr));
5045 if (TYPE_MODE (return_type) != BLKmode
5046 && targetm.calls.return_in_msb (return_type))
5047 /* expand_function_end will insert the appropriate padding in
5048 this case. Use the return value's natural (unpadded) mode
5049 within the function proper. */
5050 SET_DECL_RTL (DECL_RESULT (subr),
5051 gen_reg_rtx (TYPE_MODE (return_type)));
5052 else
5053 {
5054 /* In order to figure out what mode to use for the pseudo, we
5055 figure out what the mode of the eventual return register will
5056 actually be, and use that. */
5057 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
5058
5059 /* Structures that are returned in registers are not
5060 aggregate_value_p, so we may see a PARALLEL or a REG. */
5061 if (REG_P (hard_reg))
5062 SET_DECL_RTL (DECL_RESULT (subr),
5063 gen_reg_rtx (GET_MODE (hard_reg)));
5064 else
5065 {
5066 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
5067 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
5068 }
5069 }
5070
5071 /* Set DECL_REGISTER flag so that expand_function_end will copy the
5072 result to the real return register(s). */
5073 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5074
5075 if (chkp_function_instrumented_p (current_function_decl))
5076 {
5077 tree return_type = TREE_TYPE (DECL_RESULT (subr));
5078 rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
5079 subr, 1);
5080 SET_DECL_BOUNDS_RTL (DECL_RESULT (subr), bounds);
5081 }
5082 }
5083
5084 /* Initialize rtx for parameters and local variables.
5085 In some cases this requires emitting insns. */
5086 assign_parms (subr);
5087
5088 /* If function gets a static chain arg, store it. */
5089 if (cfun->static_chain_decl)
5090 {
5091 tree parm = cfun->static_chain_decl;
5092 rtx local, chain;
5093 rtx_insn *insn;
5094
5095 local = gen_reg_rtx (Pmode);
5096 chain = targetm.calls.static_chain (current_function_decl, true);
5097
5098 set_decl_incoming_rtl (parm, chain, false);
5099 SET_DECL_RTL (parm, local);
5100 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5101
5102 insn = emit_move_insn (local, chain);
5103
5104 /* Mark the register as eliminable, similar to parameters. */
5105 if (MEM_P (chain)
5106 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
5107 set_dst_reg_note (insn, REG_EQUIV, chain, local);
5108
5109 /* If we aren't optimizing, save the static chain onto the stack. */
5110 if (!optimize)
5111 {
5112 tree saved_static_chain_decl
5113 = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
5114 DECL_NAME (parm), TREE_TYPE (parm));
5115 rtx saved_static_chain_rtx
5116 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5117 SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
5118 emit_move_insn (saved_static_chain_rtx, chain);
5119 SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
5120 DECL_HAS_VALUE_EXPR_P (parm) = 1;
5121 }
5122 }
5123
5124 /* If the function receives a non-local goto, then store the
5125 bits we need to restore the frame pointer. */
5126 if (cfun->nonlocal_goto_save_area)
5127 {
5128 tree t_save;
5129 rtx r_save;
5130
5131 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
5132 gcc_assert (DECL_RTL_SET_P (var));
5133
5134 t_save = build4 (ARRAY_REF,
5135 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
5136 cfun->nonlocal_goto_save_area,
5137 integer_zero_node, NULL_TREE, NULL_TREE);
5138 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
5139 gcc_assert (GET_MODE (r_save) == Pmode);
5140
5141 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
5142 update_nonlocal_goto_save_area ();
5143 }
5144
5145 /* The following was moved from init_function_start.
5146 The move is supposed to make sdb output more accurate. */
5147 /* Indicate the beginning of the function body,
5148 as opposed to parm setup. */
5149 emit_note (NOTE_INSN_FUNCTION_BEG);
5150
5151 gcc_assert (NOTE_P (get_last_insn ()));
5152
5153 parm_birth_insn = get_last_insn ();
5154
5155 if (crtl->profile)
5156 {
5157 #ifdef PROFILE_HOOK
5158 PROFILE_HOOK (current_function_funcdef_no);
5159 #endif
5160 }
5161
5162 /* If we are doing generic stack checking, the probe should go here. */
5163 if (flag_stack_check == GENERIC_STACK_CHECK)
5164 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
5165 }
5166 \f
5167 void
5168 pop_dummy_function (void)
5169 {
5170 pop_cfun ();
5171 in_dummy_function = false;
5172 }
5173
5174 /* Undo the effects of init_dummy_function_start. */
5175 void
5176 expand_dummy_function_end (void)
5177 {
5178 gcc_assert (in_dummy_function);
5179
5180 /* End any sequences that failed to be closed due to syntax errors. */
5181 while (in_sequence_p ())
5182 end_sequence ();
5183
5184 /* Outside function body, can't compute type's actual size
5185 until next function's body starts. */
5186
5187 free_after_parsing (cfun);
5188 free_after_compilation (cfun);
5189 pop_dummy_function ();
5190 }
5191
5192 /* Helper for diddle_return_value. */
5193
5194 void
5195 diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
5196 {
5197 if (! outgoing)
5198 return;
5199
5200 if (REG_P (outgoing))
5201 (*doit) (outgoing, arg);
5202 else if (GET_CODE (outgoing) == PARALLEL)
5203 {
5204 int i;
5205
5206 for (i = 0; i < XVECLEN (outgoing, 0); i++)
5207 {
5208 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
5209
5210 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5211 (*doit) (x, arg);
5212 }
5213 }
5214 }
5215
5216 /* Call DOIT for each hard register used as a return value from
5217 the current function. */
5218
5219 void
5220 diddle_return_value (void (*doit) (rtx, void *), void *arg)
5221 {
5222 diddle_return_value_1 (doit, arg, crtl->return_bnd);
5223 diddle_return_value_1 (doit, arg, crtl->return_rtx);
5224 }
5225
5226 static void
5227 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5228 {
5229 emit_clobber (reg);
5230 }
5231
5232 void
5233 clobber_return_register (void)
5234 {
5235 diddle_return_value (do_clobber_return_reg, NULL);
5236
5237 /* In case we use a pseudo for the return value, clobber it too. */
5238 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5239 {
5240 tree decl_result = DECL_RESULT (current_function_decl);
5241 rtx decl_rtl = DECL_RTL (decl_result);
5242 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
5243 {
5244 do_clobber_return_reg (decl_rtl, NULL);
5245 }
5246 }
5247 }
5248
5249 static void
5250 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
5251 {
5252 emit_use (reg);
5253 }
5254
5255 static void
5256 use_return_register (void)
5257 {
5258 diddle_return_value (do_use_return_reg, NULL);
5259 }
5260
5261 /* Possibly warn about unused parameters. */
5262 void
5263 do_warn_unused_parameter (tree fn)
5264 {
5265 tree decl;
5266
5267 for (decl = DECL_ARGUMENTS (fn);
5268 decl; decl = DECL_CHAIN (decl))
5269 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5270 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
5271 && !TREE_NO_WARNING (decl))
5272 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
5273 }
5274
5275 /* Set the location of the insn chain starting at INSN to LOC. */
5276
5277 static void
5278 set_insn_locations (rtx_insn *insn, int loc)
5279 {
5280 while (insn != NULL)
5281 {
5282 if (INSN_P (insn))
5283 INSN_LOCATION (insn) = loc;
5284 insn = NEXT_INSN (insn);
5285 }
5286 }
5287
5288 /* Generate RTL for the end of the current function. */
5289
5290 void
5291 expand_function_end (void)
5292 {
5293 rtx clobber_after;
5294
5295 /* If arg_pointer_save_area was referenced only from a nested
5296 function, we will not have initialized it yet. Do that now. */
5297 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
5298 get_arg_pointer_save_area ();
5299
5300 /* If we are doing generic stack checking and this function makes calls,
5301 do a stack probe at the start of the function to ensure we have enough
5302 space for another stack frame. */
5303 if (flag_stack_check == GENERIC_STACK_CHECK)
5304 {
5305 rtx_insn *insn, *seq;
5306
5307 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5308 if (CALL_P (insn))
5309 {
5310 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
5311 start_sequence ();
5312 if (STACK_CHECK_MOVING_SP)
5313 anti_adjust_stack_and_probe (max_frame_size, true);
5314 else
5315 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
5316 seq = get_insns ();
5317 end_sequence ();
5318 set_insn_locations (seq, prologue_location);
5319 emit_insn_before (seq, stack_check_probe_note);
5320 break;
5321 }
5322 }
5323
5324 /* End any sequences that failed to be closed due to syntax errors. */
5325 while (in_sequence_p ())
5326 end_sequence ();
5327
5328 clear_pending_stack_adjust ();
5329 do_pending_stack_adjust ();
5330
5331 /* Output a line number for the end of the function.
5332 SDB depends on this. */
5333 set_curr_insn_location (input_location);
5334
5335 /* Before the return label (if any), clobber the return
5336 registers so that they are not propagated live to the rest of
5337 the function. This can only happen with functions that drop
5338 through; if there had been a return statement, there would
5339 have either been a return rtx, or a jump to the return label.
5340
5341 We delay the actual code generation until after the
5342 current_function_value_rtx is computed. */
5343 clobber_after = get_last_insn ();
5344
5345 /* Output the label for the actual return from the function. */
5346 emit_label (return_label);
5347
5348 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
5349 {
5350 /* Let except.c know where it should emit the call to unregister
5351 the function context for sjlj exceptions. */
5352 if (flag_exceptions)
5353 sjlj_emit_function_exit_after (get_last_insn ());
5354 }
5355 else
5356 {
5357 /* We want to ensure that instructions that may trap are not
5358 moved into the epilogue by scheduling, because we don't
5359 always emit unwind information for the epilogue. */
5360 if (cfun->can_throw_non_call_exceptions)
5361 emit_insn (gen_blockage ());
5362 }
5363
5364 /* If this is an implementation of throw, do what's necessary to
5365 communicate between __builtin_eh_return and the epilogue. */
5366 expand_eh_return ();
5367
5368 /* If scalar return value was computed in a pseudo-reg, or was a named
5369 return value that got dumped to the stack, copy that to the hard
5370 return register. */
5371 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
5372 {
5373 tree decl_result = DECL_RESULT (current_function_decl);
5374 rtx decl_rtl = DECL_RTL (decl_result);
5375
5376 if (REG_P (decl_rtl)
5377 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5378 : DECL_REGISTER (decl_result))
5379 {
5380 rtx real_decl_rtl = crtl->return_rtx;
5381
5382 /* This should be set in assign_parms. */
5383 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
5384
5385 /* If this is a BLKmode structure being returned in registers,
5386 then use the mode computed in expand_return. Note that if
5387 decl_rtl is memory, then its mode may have been changed,
5388 but that of crtl->return_rtx has not. */
5389 if (GET_MODE (real_decl_rtl) == BLKmode)
5390 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
5391
5392 /* If a non-BLKmode return value should be padded at the least
5393 significant end of the register, shift it left by the appropriate
5394 amount. BLKmode results are handled using the group load/store
5395 machinery. */
5396 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5397 && REG_P (real_decl_rtl)
5398 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5399 {
5400 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5401 REGNO (real_decl_rtl)),
5402 decl_rtl);
5403 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5404 }
5405 /* If a named return value caused decl_rtl to be dumped to memory,
5406 then we may need to re-do the PROMOTE_MODE signed/unsigned
5407 extension. */
5408 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5409 {
5410 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5411 promote_function_mode (TREE_TYPE (decl_result),
5412 GET_MODE (decl_rtl), &unsignedp,
5413 TREE_TYPE (current_function_decl), 1);
5414
5415 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5416 }
5417 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5418 {
5419 /* If expand_function_start has created a PARALLEL for decl_rtl,
5420 move the result to the real return registers. Otherwise, do
5421 a group load from decl_rtl for a named return. */
5422 if (GET_CODE (decl_rtl) == PARALLEL)
5423 emit_group_move (real_decl_rtl, decl_rtl);
5424 else
5425 emit_group_load (real_decl_rtl, decl_rtl,
5426 TREE_TYPE (decl_result),
5427 int_size_in_bytes (TREE_TYPE (decl_result)));
5428 }
5429 /* In the case of complex integer modes smaller than a word, we'll
5430 need to generate some non-trivial bitfield insertions. Do that
5431 on a pseudo and not the hard register. */
5432 else if (GET_CODE (decl_rtl) == CONCAT
5433 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5434 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5435 {
5436 int old_generating_concat_p;
5437 rtx tmp;
5438
5439 old_generating_concat_p = generating_concat_p;
5440 generating_concat_p = 0;
5441 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5442 generating_concat_p = old_generating_concat_p;
5443
5444 emit_move_insn (tmp, decl_rtl);
5445 emit_move_insn (real_decl_rtl, tmp);
5446 }
5447 else
5448 emit_move_insn (real_decl_rtl, decl_rtl);
5449 }
5450 }
5451
5452 /* If returning a structure, arrange to return the address of the value
5453 in a place where debuggers expect to find it.
5454
5455 If returning a structure PCC style,
5456 the caller also depends on this value.
5457 And cfun->returns_pcc_struct is not necessarily set. */
5458 if ((cfun->returns_struct || cfun->returns_pcc_struct)
5459 && !targetm.calls.omit_struct_return_reg)
5460 {
5461 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5462 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5463 rtx outgoing;
5464
5465 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5466 type = TREE_TYPE (type);
5467 else
5468 value_address = XEXP (value_address, 0);
5469
5470 outgoing = targetm.calls.function_value (build_pointer_type (type),
5471 current_function_decl, true);
5472
5473 /* Mark this as a function return value so integrate will delete the
5474 assignment and USE below when inlining this function. */
5475 REG_FUNCTION_VALUE_P (outgoing) = 1;
5476
5477 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5478 value_address = convert_memory_address (GET_MODE (outgoing),
5479 value_address);
5480
5481 emit_move_insn (outgoing, value_address);
5482
5483 /* Show the return register used to hold the result (in this case the
5484 address of the result). */
5485 crtl->return_rtx = outgoing;
5486 }
5487
5488 /* Emit the actual code to clobber the return register. Don't emit
5489 it if clobber_after is a barrier, in which case the previous basic
5490 block certainly doesn't fall through into the exit block. */
5491 if (!BARRIER_P (clobber_after))
5492 {
5493 rtx seq;
5494
5495 start_sequence ();
5496 clobber_return_register ();
5497 seq = get_insns ();
5498 end_sequence ();
5499
5500 emit_insn_after (seq, clobber_after);
5501 }
5502
5503 /* Output the label for the naked return from the function. */
5504 if (naked_return_label)
5505 emit_label (naked_return_label);
5506
5507 /* @@@ This is a kludge. We want to ensure that instructions that
5508 may trap are not moved into the epilogue by scheduling, because
5509 we don't always emit unwind information for the epilogue. */
5510 if (cfun->can_throw_non_call_exceptions
5511 && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
5512 emit_insn (gen_blockage ());
5513
5514 /* If stack protection is enabled for this function, check the guard. */
5515 if (crtl->stack_protect_guard)
5516 stack_protect_epilogue ();
5517
5518 /* If we had calls to alloca, and this machine needs
5519 an accurate stack pointer to exit the function,
5520 insert some code to save and restore the stack pointer. */
5521 if (! EXIT_IGNORE_STACK
5522 && cfun->calls_alloca)
5523 {
5524 rtx tem = 0, seq;
5525
5526 start_sequence ();
5527 emit_stack_save (SAVE_FUNCTION, &tem);
5528 seq = get_insns ();
5529 end_sequence ();
5530 emit_insn_before (seq, parm_birth_insn);
5531
5532 emit_stack_restore (SAVE_FUNCTION, tem);
5533 }
5534
5535 /* ??? This should no longer be necessary now that the old "stupid"
5536 register allocator is gone, but some parts of the compiler (e.g.
5537 reload_combine and the sh mach_dep_reorg) still try to compute their
5538 own lifetime info instead of using the general framework. */
5539 use_return_register ();
5540 }
5541
5542 rtx
5543 get_arg_pointer_save_area (void)
5544 {
5545 rtx ret = arg_pointer_save_area;
5546
5547 if (! ret)
5548 {
5549 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5550 arg_pointer_save_area = ret;
5551 }
5552
5553 if (! crtl->arg_pointer_save_area_init)
5554 {
5555 rtx seq;
5556
5557 /* Save the arg pointer at the beginning of the function. The
5558 generated stack slot may not be a valid memory address, so we
5559 have to check it and fix it if necessary. */
5560 start_sequence ();
5561 emit_move_insn (validize_mem (copy_rtx (ret)),
5562 crtl->args.internal_arg_pointer);
5563 seq = get_insns ();
5564 end_sequence ();
5565
5566 push_topmost_sequence ();
5567 emit_insn_after (seq, entry_of_function ());
5568 pop_topmost_sequence ();
5569
5570 crtl->arg_pointer_save_area_init = true;
5571 }
5572
5573 return ret;
5574 }
5575 \f
5576 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5577 for the first time. */
5578
5579 static void
5580 record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
5581 {
5582 rtx_insn *tmp;
5583 hash_table<insn_cache_hasher> *hash = *hashp;
5584
5585 if (hash == NULL)
5586 *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
5587
5588 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5589 {
5590 rtx *slot = hash->find_slot (tmp, INSERT);
5591 gcc_assert (*slot == NULL);
5592 *slot = tmp;
5593 }
5594 }
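
/* Usage sketch, as the prologue/epilogue threading code below does:

     record_insns (seq, NULL, &prologue_insn_hash);

   Passing NULL for END records every insn from INSNS to the end of the
   chain.  */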
5595
5596 /* INSN has been duplicated or replaced as COPY, perhaps by duplicating a
5597 basic block, by splitting, or by peepholes. If INSN is a prologue or
5598 epilogue insn, then record COPY as well. */
5599
5600 void
5601 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5602 {
5603 hash_table<insn_cache_hasher> *hash;
5604 rtx *slot;
5605
5606 hash = epilogue_insn_hash;
5607 if (!hash || !hash->find (insn))
5608 {
5609 hash = prologue_insn_hash;
5610 if (!hash || !hash->find (insn))
5611 return;
5612 }
5613
5614 slot = hash->find_slot (copy, INSERT);
5615 gcc_assert (*slot == NULL);
5616 *slot = copy;
5617 }
5618
5619 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5620 we can be running after reorg, SEQUENCE rtl is possible. */
5621
5622 static bool
5623 contains (const_rtx insn, hash_table<insn_cache_hasher> *hash)
5624 {
5625 if (hash == NULL)
5626 return false;
5627
5628 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5629 {
5630 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
5631 int i;
5632 for (i = seq->len () - 1; i >= 0; i--)
5633 if (hash->find (seq->element (i)))
5634 return true;
5635 return false;
5636 }
5637
5638 return hash->find (const_cast<rtx> (insn)) != NULL;
5639 }
5640
5641 int
5642 prologue_epilogue_contains (const_rtx insn)
5643 {
5644 if (contains (insn, prologue_insn_hash))
5645 return 1;
5646 if (contains (insn, epilogue_insn_hash))
5647 return 1;
5648 return 0;
5649 }
5650
5651 /* Insert use of return register before the end of BB. */
5652
5653 static void
5654 emit_use_return_register_into_block (basic_block bb)
5655 {
5656 rtx seq;
5657 rtx_insn *insn;
5658 start_sequence ();
5659 use_return_register ();
5660 seq = get_insns ();
5661 end_sequence ();
5662 insn = BB_END (bb);
5663 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
5664 insn = prev_cc0_setter (insn);
5665
5666 emit_insn_before (seq, insn);
5667 }
5668
5669
5670 /* Create a return pattern, either simple_return or return, depending on
5671 simple_p. */
5672
5673 static rtx
5674 gen_return_pattern (bool simple_p)
5675 {
5676 if (!HAVE_simple_return)
5677 gcc_assert (!simple_p);
5678
5679 return simple_p ? gen_simple_return () : gen_return ();
5680 }
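
/* gen_return and gen_simple_return correspond to the target's named
   "return" and "simple_return" patterns in its machine description;
   HAVE_return and HAVE_simple_return say whether those patterns exist.  */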
5681
5682 /* Insert an appropriate return pattern at the end of block BB. This
5683 also means updating block_for_insn appropriately. SIMPLE_P is
5684 the same as in gen_return_pattern and passed to it. */
5685
5686 void
5687 emit_return_into_block (bool simple_p, basic_block bb)
5688 {
5689 rtx jump, pat;
5690 jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
5691 pat = PATTERN (jump);
5692 if (GET_CODE (pat) == PARALLEL)
5693 pat = XVECEXP (pat, 0, 0);
5694 gcc_assert (ANY_RETURN_P (pat));
5695 JUMP_LABEL (jump) = pat;
5696 }
5697
5698 /* Set JUMP_LABEL for a return insn. */
5699
5700 void
5701 set_return_jump_label (rtx_insn *returnjump)
5702 {
5703 rtx pat = PATTERN (returnjump);
5704 if (GET_CODE (pat) == PARALLEL)
5705 pat = XVECEXP (pat, 0, 0);
5706 if (ANY_RETURN_P (pat))
5707 JUMP_LABEL (returnjump) = pat;
5708 else
5709 JUMP_LABEL (returnjump) = ret_rtx;
5710 }
5711
5712 /* Return true if there are any active insns between HEAD and TAIL. */
5713 bool
5714 active_insn_between (rtx_insn *head, rtx_insn *tail)
5715 {
5716 while (tail)
5717 {
5718 if (active_insn_p (tail))
5719 return true;
5720 if (tail == head)
5721 return false;
5722 tail = PREV_INSN (tail);
5723 }
5724 return false;
5725 }
5726
5727 /* LAST_BB is a block that exits and is empty of active instructions.
5728 Examine its predecessors for jumps that can be converted to
5729 (conditional) returns. */
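
/* Illustrative sketch of the conversion (the label name is made up):
   a predecessor ending in an unconditional jump to LAST_BB, e.g.

     (jump_insn (set (pc) (label_ref L42)))

   is rewritten to

     (jump_insn (return))

   (or (simple_return) when SIMPLE_P), and its edge is redirected straight
   to the exit block.  Conditional jumps to LAST_BB are handled by
   redirect_jump in the same spirit.  */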
5730 vec<edge>
5731 convert_jumps_to_returns (basic_block last_bb, bool simple_p,
5732 vec<edge> unconverted ATTRIBUTE_UNUSED)
5733 {
5734 int i;
5735 basic_block bb;
5736 rtx label;
5737 edge_iterator ei;
5738 edge e;
5739 auto_vec<basic_block> src_bbs (EDGE_COUNT (last_bb->preds));
5740
5741 FOR_EACH_EDGE (e, ei, last_bb->preds)
5742 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5743 src_bbs.quick_push (e->src);
5744
5745 label = BB_HEAD (last_bb);
5746
5747 FOR_EACH_VEC_ELT (src_bbs, i, bb)
5748 {
5749 rtx_insn *jump = BB_END (bb);
5750
5751 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5752 continue;
5753
5754 e = find_edge (bb, last_bb);
5755
5756 /* If we have an unconditional jump, we can replace that
5757 with a simple return instruction. */
5758 if (simplejump_p (jump))
5759 {
5760 /* The use of the return register might be present in the exit
5761 fallthru block. Either:
5762 - removing the use is safe, and we should remove the use in
5763 the exit fallthru block, or
5764 - removing the use is not safe, and we should add it here.
5765 For now, we conservatively choose the latter. Either choice
5766 helps with crossjumping. */
5767 emit_use_return_register_into_block (bb);
5768
5769 emit_return_into_block (simple_p, bb);
5770 delete_insn (jump);
5771 }
5772
5773 /* If we have a conditional jump branching to the last
5774 block, we can try to replace that with a conditional
5775 return instruction. */
5776 else if (condjump_p (jump))
5777 {
5778 rtx dest;
5779
5780 if (simple_p)
5781 dest = simple_return_rtx;
5782 else
5783 dest = ret_rtx;
5784 if (!redirect_jump (as_a <rtx_jump_insn *> (jump), dest, 0))
5785 {
5786 if (HAVE_simple_return && simple_p)
5787 {
5788 if (dump_file)
5789 fprintf (dump_file,
5790 "Failed to redirect bb %d branch.\n", bb->index);
5791 unconverted.safe_push (e);
5792 }
5793 continue;
5794 }
5795
5796 /* See comment in simplejump_p case above. */
5797 emit_use_return_register_into_block (bb);
5798
5799 /* If this block has only one successor, it both jumps
5800 and falls through to the fallthru block, so we can't
5801 delete the edge. */
5802 if (single_succ_p (bb))
5803 continue;
5804 }
5805 else
5806 {
5807 if (HAVE_simple_return && simple_p)
5808 {
5809 if (dump_file)
5810 fprintf (dump_file,
5811 "Failed to redirect bb %d branch.\n", bb->index);
5812 unconverted.safe_push (e);
5813 }
5814 continue;
5815 }
5816
5817 /* Fix up the CFG for the successful change we just made. */
5818 redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
5819 e->flags &= ~EDGE_CROSSING;
5820 }
5821 src_bbs.release ();
5822 return unconverted;
5823 }
5824
5825 /* Emit a return insn for the exit fallthru block. */
5826 basic_block
5827 emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
5828 {
5829 basic_block last_bb = exit_fallthru_edge->src;
5830
5831 if (JUMP_P (BB_END (last_bb)))
5832 {
5833 last_bb = split_edge (exit_fallthru_edge);
5834 exit_fallthru_edge = single_succ_edge (last_bb);
5835 }
5836 emit_barrier_after (BB_END (last_bb));
5837 emit_return_into_block (simple_p, last_bb);
5838 exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
5839 return last_bb;
5840 }
5841
5842
5843 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5844 this into place with notes indicating where the prologue ends and where
5845 the epilogue begins. Update the basic block information when possible.
5846
5847 Notes on epilogue placement:
5848 There are several kinds of edges to the exit block:
5849 * a single fallthru edge from LAST_BB
5850 * possibly, edges from blocks containing sibcalls
5851 * possibly, fake edges from infinite loops
5852
5853 The epilogue is always emitted on the fallthru edge from the last basic
5854 block in the function, LAST_BB, into the exit block.
5855
5856 If LAST_BB is empty except for a label, it is the target of every
5857 other basic block in the function that ends in a return. If a
5858 target has a return or simple_return pattern (possibly with
5859 conditional variants), these basic blocks can be changed so that a
5860 return insn is emitted into them, and their target is adjusted to
5861 the real exit block.
5862
5863 Notes on shrink wrapping: We implement a fairly conservative
5864 version of shrink-wrapping rather than the textbook one. We only
5865 generate a single prologue and a single epilogue. This is
5866 sufficient to catch a number of interesting cases involving early
5867 exits.
5868
5869 First, we identify the blocks that require the prologue to occur before
5870 them. These are the ones that modify a call-saved register, or reference
5871 any of the stack or frame pointer registers. To simplify things, we then
5872 mark everything reachable from these blocks as also requiring a prologue.
5873 This takes care of loops automatically, and avoids the need to examine
5874 whether MEMs reference the frame, since it is sufficient to check for
5875 occurrences of the stack or frame pointer.
5876
5877 We then compute the set of blocks for which the need for a prologue
5878 is anticipatable (borrowing terminology from the shrink-wrapping
5879 description in Muchnick's book). These are the blocks which either
5880 require a prologue themselves, or those that have only successors
5881 where the prologue is anticipatable. The prologue needs to be
5882 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5883 is not. For the moment, we ensure that only one such edge exists.
5884
5885 The epilogue is placed as described above, but we make a
5886 distinction between inserting return and simple_return patterns
5887 when modifying other blocks that end in a return. Blocks that end
5888 in a sibcall omit the sibcall_epilogue if the block is not in
5889 ANTIC. */
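
/* For illustration, the kind of function that benefits from this
   conservative shrink-wrapping (all names are made up):

     void
     f (int *p)
     {
       if (p == 0)
         return;      // fast path: no stack frame needed
       g (p);         // the call is what requires the prologue
     }

   Only the block containing the call to g requires the prologue, so the
   prologue is emitted on the edge entering that block and the early
   return can use a simple_return that skips the epilogue.  */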
5890
5891 void
5892 thread_prologue_and_epilogue_insns (void)
5893 {
5894 bool inserted;
5895 vec<edge> unconverted_simple_returns = vNULL;
5896 bitmap_head bb_flags;
5897 rtx_insn *returnjump;
5898 rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
5899 rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
5900 edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
5901 edge_iterator ei;
5902
5903 df_analyze ();
5904
5905 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5906
5907 inserted = false;
5908 epilogue_end = NULL;
5909 returnjump = NULL;
5910
5911 /* Can't deal with multiple successors of the entry block at the
5912 moment. Function should always have at least one entry
5913 point. */
5914 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5915 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5916 orig_entry_edge = entry_edge;
5917
5918 split_prologue_seq = NULL;
5919 if (flag_split_stack
5920 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5921 == NULL))
5922 {
5923 #ifndef HAVE_split_stack_prologue
5924 gcc_unreachable ();
5925 #else
5926 gcc_assert (HAVE_split_stack_prologue);
5927
5928 start_sequence ();
5929 emit_insn (gen_split_stack_prologue ());
5930 split_prologue_seq = get_insns ();
5931 end_sequence ();
5932
5933 record_insns (split_prologue_seq, NULL, &prologue_insn_hash);
5934 set_insn_locations (split_prologue_seq, prologue_location);
5935 #endif
5936 }
5937
5938 prologue_seq = NULL;
5939 #ifdef HAVE_prologue
5940 if (HAVE_prologue)
5941 {
5942 start_sequence ();
5943 rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
5944 emit_insn (seq);
5945
5946 /* Insert an explicit USE for the frame pointer
5947 if profiling is on and the frame pointer is required. */
5948 if (crtl->profile && frame_pointer_needed)
5949 emit_use (hard_frame_pointer_rtx);
5950
5951 /* Retain a map of the prologue insns. */
5952 record_insns (seq, NULL, &prologue_insn_hash);
5953 emit_note (NOTE_INSN_PROLOGUE_END);
5954
5955 /* Ensure that instructions are not moved into the prologue when
5956 profiling is on. The call to the profiling routine can be
5957 emitted within the live range of a call-clobbered register. */
5958 if (!targetm.profile_before_prologue () && crtl->profile)
5959 emit_insn (gen_blockage ());
5960
5961 prologue_seq = get_insns ();
5962 end_sequence ();
5963 set_insn_locations (prologue_seq, prologue_location);
5964 }
5965 #endif
5966
5967 bitmap_initialize (&bb_flags, &bitmap_default_obstack);
5968
5969 /* Try to perform a kind of shrink-wrapping, making sure the
5970 prologue/epilogue is emitted only around those parts of the
5971 function that require it. */
5972
5973 try_shrink_wrapping (&entry_edge, orig_entry_edge, &bb_flags, prologue_seq);
5974
5975 if (split_prologue_seq != NULL_RTX)
5976 {
5977 insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
5978 inserted = true;
5979 }
5980 if (prologue_seq != NULL_RTX)
5981 {
5982 insert_insn_on_edge (prologue_seq, entry_edge);
5983 inserted = true;
5984 }
5985
5986 /* If the exit block has no non-fake predecessors, we don't need
5987 an epilogue. */
5988 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5989 if ((e->flags & EDGE_FAKE) == 0)
5990 break;
5991 if (e == NULL)
5992 goto epilogue_done;
5993
5994 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5995
5996 exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
5997
5998 if (HAVE_simple_return && entry_edge != orig_entry_edge)
5999 exit_fallthru_edge
6000 = get_unconverted_simple_return (exit_fallthru_edge, bb_flags,
6001 &unconverted_simple_returns,
6002 &returnjump);
6003 if (HAVE_return)
6004 {
6005 if (exit_fallthru_edge == NULL)
6006 goto epilogue_done;
6007
6008 if (optimize)
6009 {
6010 basic_block last_bb = exit_fallthru_edge->src;
6011
6012 if (LABEL_P (BB_HEAD (last_bb))
6013 && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
6014 convert_jumps_to_returns (last_bb, false, vNULL);
6015
6016 if (EDGE_COUNT (last_bb->preds) != 0
6017 && single_succ_p (last_bb))
6018 {
6019 last_bb = emit_return_for_exit (exit_fallthru_edge, false);
6020 epilogue_end = returnjump = BB_END (last_bb);
6021
6022 /* Emitting the return may add a basic block.
6023 Fix bb_flags for the added block. */
6024 if (HAVE_simple_return && last_bb != exit_fallthru_edge->src)
6025 bitmap_set_bit (&bb_flags, last_bb->index);
6026
6027 goto epilogue_done;
6028 }
6029 }
6030 }
6031
6032 /* A small fib: the epilogue is not yet completed, but we wish to re-use
6033 this flag for the splits of EH_RETURN patterns, and nothing else
6034 uses it in the meantime. */
6035 epilogue_completed = 1;
6036
6037 #ifdef HAVE_eh_return
6038 /* Find non-fallthru edges that end with EH_RETURN instructions. On
6039 some targets, these get split to a special version of the epilogue
6040 code. In order to be able to properly annotate these with unwind
6041 info, try to split them now. If we get a valid split, drop an
6042 EPILOGUE_BEG note and mark the insns as epilogue insns. */
6043 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6044 {
6045 rtx_insn *prev, *last, *trial;
6046
6047 if (e->flags & EDGE_FALLTHRU)
6048 continue;
6049 last = BB_END (e->src);
6050 if (!eh_returnjump_p (last))
6051 continue;
6052
6053 prev = PREV_INSN (last);
6054 trial = try_split (PATTERN (last), last, 1);
6055 if (trial == last)
6056 continue;
6057
6058 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
6059 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
6060 }
6061 #endif
6062
6063 /* If nothing falls through into the exit block, we don't need an
6064 epilogue. */
6065
6066 if (exit_fallthru_edge == NULL)
6067 goto epilogue_done;
6068
6069 if (HAVE_epilogue)
6070 {
6071 start_sequence ();
6072 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
6073 rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
6074 if (seq)
6075 emit_jump_insn (seq);
6076
6077 /* Retain a map of the epilogue insns. */
6078 record_insns (seq, NULL, &epilogue_insn_hash);
6079 set_insn_locations (seq, epilogue_location);
6080
6081 seq = get_insns ();
6082 returnjump = get_last_insn ();
6083 end_sequence ();
6084
6085 insert_insn_on_edge (seq, exit_fallthru_edge);
6086 inserted = true;
6087
6088 if (JUMP_P (returnjump))
6089 set_return_jump_label (returnjump);
6090 }
6091 else
6092 {
6093 basic_block cur_bb;
6094
6095 if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
6096 goto epilogue_done;
6097 /* We have a fall-through edge to the exit block, the source is not
6098 at the end of the function, and there will be an assembler epilogue
6099 at the end of the function.
6100 We can't use force_nonfallthru here, because that would try to
6101 use return. Inserting a jump 'by hand' is extremely messy, so
6102 we take advantage of cfg_layout_finalize using
6103 fixup_fallthru_exit_predecessor. */
6104 cfg_layout_initialize (0);
6105 FOR_EACH_BB_FN (cur_bb, cfun)
6106 if (cur_bb->index >= NUM_FIXED_BLOCKS
6107 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
6108 cur_bb->aux = cur_bb->next_bb;
6109 cfg_layout_finalize ();
6110 }
6111
6112 epilogue_done:
6113
6114 default_rtl_profile ();
6115
6116 if (inserted)
6117 {
6118 sbitmap blocks;
6119
6120 commit_edge_insertions ();
6121
6122 /* Look for basic blocks within the prologue insns. */
6123 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
6124 bitmap_clear (blocks);
6125 bitmap_set_bit (blocks, entry_edge->dest->index);
6126 bitmap_set_bit (blocks, orig_entry_edge->dest->index);
6127 find_many_sub_basic_blocks (blocks);
6128 sbitmap_free (blocks);
6129
6130 /* The epilogue insns we inserted may cause the exit edge to no longer
6131 be fallthru. */
6132 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6133 {
6134 if (((e->flags & EDGE_FALLTHRU) != 0)
6135 && returnjump_p (BB_END (e->src)))
6136 e->flags &= ~EDGE_FALLTHRU;
6137 }
6138 }
6139
6140 if (HAVE_simple_return)
6141 convert_to_simple_return (entry_edge, orig_entry_edge, bb_flags,
6142 returnjump, unconverted_simple_returns);
6143
6144 #ifdef HAVE_sibcall_epilogue
6145 /* Emit sibling epilogues before any sibling call sites. */
6146 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
6147 (e = ei_safe_edge (ei));
6148 )
6149 {
6150 basic_block bb = e->src;
6151 rtx_insn *insn = BB_END (bb);
6152 rtx ep_seq;
6153
6154 if (!CALL_P (insn)
6155 || ! SIBLING_CALL_P (insn)
6156 || (HAVE_simple_return && (entry_edge != orig_entry_edge
6157 && !bitmap_bit_p (&bb_flags, bb->index))))
6158 {
6159 ei_next (&ei);
6160 continue;
6161 }
6162
6163 ep_seq = gen_sibcall_epilogue ();
6164 if (ep_seq)
6165 {
6166 start_sequence ();
6167 emit_note (NOTE_INSN_EPILOGUE_BEG);
6168 emit_insn (ep_seq);
6169 rtx_insn *seq = get_insns ();
6170 end_sequence ();
6171
6172 /* Retain a map of the epilogue insns. Used in life analysis to
6173 avoid getting rid of sibcall epilogue insns. Do this before we
6174 actually emit the sequence. */
6175 record_insns (seq, NULL, &epilogue_insn_hash);
6176 set_insn_locations (seq, epilogue_location);
6177
6178 emit_insn_before (seq, insn);
6179 }
6180 ei_next (&ei);
6181 }
6182 #endif
6183
6184 if (epilogue_end)
6185 {
6186 rtx_insn *insn, *next;
6187
6188 /* Move any NOTE_INSN_FUNCTION_BEG notes that appear after the
6189 epilogue back before it, as those can be relevant for debug info
6190 generation. There is no need, however, to be too particular
6191 about the existence of such a note. */
6193 for (insn = epilogue_end; insn; insn = next)
6194 {
6195 next = NEXT_INSN (insn);
6196 if (NOTE_P (insn)
6197 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
6198 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
6199 }
6200 }
6201
6202 bitmap_clear (&bb_flags);
6203
6204 /* Threading the prologue and epilogue changes the artificial refs
6205 in the entry and exit blocks. */
6206 epilogue_completed = 1;
6207 df_update_entry_exit_and_calls ();
6208 }
6209
6210 /* Reposition the prologue-end and epilogue-begin notes after
6211 instruction scheduling. */
6212
6213 void
6214 reposition_prologue_and_epilogue_notes (void)
6215 {
6216 #if ! defined (HAVE_prologue) && ! defined (HAVE_sibcall_epilogue)
6217 if (!HAVE_epilogue)
6218 return;
6219 #endif
6220
6221 /* Since the hash table is created on demand, the fact that it is
6222 non-null is a signal that it is non-empty. */
6223 if (prologue_insn_hash != NULL)
6224 {
6225 size_t len = prologue_insn_hash->elements ();
6226 rtx_insn *insn, *last = NULL, *note = NULL;
6227
6228 /* Scan from the beginning until we reach the last prologue insn. */
6229 /* ??? While we do have the CFG intact, there are two problems:
6230 (1) The prologue can contain loops (typically probing the stack),
6231 which means that the end of the prologue isn't in the first bb.
6232 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
6233 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6234 {
6235 if (NOTE_P (insn))
6236 {
6237 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
6238 note = insn;
6239 }
6240 else if (contains (insn, prologue_insn_hash))
6241 {
6242 last = insn;
6243 if (--len == 0)
6244 break;
6245 }
6246 }
6247
6248 if (last)
6249 {
6250 if (note == NULL)
6251 {
6252 /* Scan forward looking for the PROLOGUE_END note. It should
6253 be right at the beginning of the block, possibly with other
6254 insn notes that got moved there. */
6255 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
6256 {
6257 if (NOTE_P (note)
6258 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
6259 break;
6260 }
6261 }
6262
6263 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
6264 if (LABEL_P (last))
6265 last = NEXT_INSN (last);
6266 reorder_insns (note, note, last);
6267 }
6268 }
6269
6270 if (epilogue_insn_hash != NULL)
6271 {
6272 edge_iterator ei;
6273 edge e;
6274
6275 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
6276 {
6277 rtx_insn *insn, *first = NULL, *note = NULL;
6278 basic_block bb = e->src;
6279
6280 /* Scan from the beginning until we reach the first epilogue insn. */
6281 FOR_BB_INSNS (bb, insn)
6282 {
6283 if (NOTE_P (insn))
6284 {
6285 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
6286 {
6287 note = insn;
6288 if (first != NULL)
6289 break;
6290 }
6291 }
6292 else if (first == NULL && contains (insn, epilogue_insn_hash))
6293 {
6294 first = insn;
6295 if (note != NULL)
6296 break;
6297 }
6298 }
6299
6300 if (note)
6301 {
6302 /* If the function has a single basic block, and no real
6303 epilogue insns (e.g. sibcall with no cleanup), the
6304 epilogue note can get scheduled before the prologue
6305 note. If we have frame related prologue insns, having
6306 them scanned during the epilogue will result in a crash.
6307 In this case re-order the epilogue note to just before
6308 the last insn in the block. */
6309 if (first == NULL)
6310 first = BB_END (bb);
6311
6312 if (PREV_INSN (first) != note)
6313 reorder_insns (note, note, PREV_INSN (first));
6314 }
6315 }
6316 }
6317 }
6318
6319 /* Returns the name of the function declared by FNDECL. */
6320 const char *
6321 fndecl_name (tree fndecl)
6322 {
6323 if (fndecl == NULL)
6324 return "(nofn)";
6325 return lang_hooks.decl_printable_name (fndecl, 2);
6326 }
6327
6328 /* Returns the name of function FN. */
6329 const char *
6330 function_name (struct function *fn)
6331 {
6332 tree fndecl = (fn == NULL) ? NULL : fn->decl;
6333 return fndecl_name (fndecl);
6334 }
6335
6336 /* Returns the name of the current function. */
6337 const char *
6338 current_function_name (void)
6339 {
6340 return function_name (cfun);
6341 }
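
/* These helpers are mostly used in diagnostics and dump output, e.g.
   (sketch):

     if (dump_file)
       fprintf (dump_file, "%s: ...\n", current_function_name ());
*/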
6342 \f
6343
6344 static unsigned int
6345 rest_of_handle_check_leaf_regs (void)
6346 {
6347 #ifdef LEAF_REGISTERS
6348 crtl->uses_only_leaf_regs
6349 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
6350 #endif
6351 return 0;
6352 }
6353
6354 /* Insert TYPE into the used-types hash table of FUNC. */
6355
6356 static void
6357 used_types_insert_helper (tree type, struct function *func)
6358 {
6359 if (type != NULL && func != NULL)
6360 {
6361 if (func->used_types_hash == NULL)
6362 func->used_types_hash = hash_set<tree>::create_ggc (37);
6363
6364 func->used_types_hash->add (type);
6365 }
6366 }
6367
6368 /* Given a type, insert it into the used hash table in cfun. */
6369 void
6370 used_types_insert (tree t)
6371 {
6372 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
6373 if (TYPE_NAME (t))
6374 break;
6375 else
6376 t = TREE_TYPE (t);
6377 if (TREE_CODE (t) == ERROR_MARK)
6378 return;
6379 if (TYPE_NAME (t) == NULL_TREE
6380 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
6381 t = TYPE_MAIN_VARIANT (t);
6382 if (debug_info_level > DINFO_LEVEL_NONE)
6383 {
6384 if (cfun)
6385 used_types_insert_helper (t, cfun);
6386 else
6387 {
6388 /* So this might be a type referenced by a global variable.
6389 Record that type so that we can later decide to emit its
6390 debug information. */
6391 vec_safe_push (types_used_by_cur_var_decl, t);
6392 }
6393 }
6394 }
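
/* For example, a use of a variable whose type is 'struct S **' (with no
   typedef name on the pointer types) ends up recording 'struct S'
   itself: the loop above strips unnamed pointer and array types down to
   the named type, which is then canonicalized to its main variant.  */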
6395
6396 /* Helper to hash a struct types_used_by_vars_entry. */
6397
6398 static hashval_t
6399 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
6400 {
6401 gcc_assert (entry && entry->var_decl && entry->type);
6402
6403 return iterative_hash_object (entry->type,
6404 iterative_hash_object (entry->var_decl, 0));
6405 }
6406
6407 /* Hash function of the types_used_by_vars_entry hash table. */
6408
6409 hashval_t
6410 used_type_hasher::hash (types_used_by_vars_entry *entry)
6411 {
6412 return hash_types_used_by_vars_entry (entry);
6413 }
6414
6415 /* Equality function of the types_used_by_vars_entry hash table. */
6416
6417 bool
6418 used_type_hasher::equal (types_used_by_vars_entry *e1,
6419 types_used_by_vars_entry *e2)
6420 {
6421 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
6422 }
6423
6424 /* Inserts an entry into the types_used_by_vars_hash hash table. */
6425
6426 void
6427 types_used_by_var_decl_insert (tree type, tree var_decl)
6428 {
6429 if (type != NULL && var_decl != NULL)
6430 {
6431 types_used_by_vars_entry **slot;
6432 struct types_used_by_vars_entry e;
6433 e.var_decl = var_decl;
6434 e.type = type;
6435 if (types_used_by_vars_hash == NULL)
6436 types_used_by_vars_hash
6437 = hash_table<used_type_hasher>::create_ggc (37);
6438
6439 slot = types_used_by_vars_hash->find_slot (&e, INSERT);
6440 if (*slot == NULL)
6441 {
6442 struct types_used_by_vars_entry *entry;
6443 entry = ggc_alloc<types_used_by_vars_entry> ();
6444 entry->type = type;
6445 entry->var_decl = var_decl;
6446 *slot = entry;
6447 }
6448 }
6449 }
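
/* Usage sketch (the caller is hypothetical): when a global variable V of
   type T is finalized, a front end or varasm can call

     types_used_by_var_decl_insert (T, V);

   pairing the type with the variable so that debug information for T can
   later be emitted even if nothing else in the translation unit uses T.  */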
6450
6451 namespace {
6452
6453 const pass_data pass_data_leaf_regs =
6454 {
6455 RTL_PASS, /* type */
6456 "*leaf_regs", /* name */
6457 OPTGROUP_NONE, /* optinfo_flags */
6458 TV_NONE, /* tv_id */
6459 0, /* properties_required */
6460 0, /* properties_provided */
6461 0, /* properties_destroyed */
6462 0, /* todo_flags_start */
6463 0, /* todo_flags_finish */
6464 };
6465
6466 class pass_leaf_regs : public rtl_opt_pass
6467 {
6468 public:
6469 pass_leaf_regs (gcc::context *ctxt)
6470 : rtl_opt_pass (pass_data_leaf_regs, ctxt)
6471 {}
6472
6473 /* opt_pass methods: */
6474 virtual unsigned int execute (function *)
6475 {
6476 return rest_of_handle_check_leaf_regs ();
6477 }
6478
6479 }; // class pass_leaf_regs
6480
6481 } // anon namespace
6482
6483 rtl_opt_pass *
6484 make_pass_leaf_regs (gcc::context *ctxt)
6485 {
6486 return new pass_leaf_regs (ctxt);
6487 }
6488
6489 static unsigned int
6490 rest_of_handle_thread_prologue_and_epilogue (void)
6491 {
6492 if (optimize)
6493 cleanup_cfg (CLEANUP_EXPENSIVE);
6494
6495 /* On some machines, the prologue and epilogue code, or parts thereof,
6496 can be represented as RTL. Doing so lets us schedule insns between
6497 it and the rest of the code and also allows delayed branch
6498 scheduling to operate in the epilogue. */
6499 thread_prologue_and_epilogue_insns ();
6500
6501 /* Shrink-wrapping can result in unreachable edges in the epilogue,
6502 see PR57320. */
6503 cleanup_cfg (0);
6504
6505 /* The stack usage info is finalized during prologue expansion. */
6506 if (flag_stack_usage_info)
6507 output_stack_usage ();
6508
6509 return 0;
6510 }
6511
6512 namespace {
6513
6514 const pass_data pass_data_thread_prologue_and_epilogue =
6515 {
6516 RTL_PASS, /* type */
6517 "pro_and_epilogue", /* name */
6518 OPTGROUP_NONE, /* optinfo_flags */
6519 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
6520 0, /* properties_required */
6521 0, /* properties_provided */
6522 0, /* properties_destroyed */
6523 0, /* todo_flags_start */
6524 ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
6525 };
6526
6527 class pass_thread_prologue_and_epilogue : public rtl_opt_pass
6528 {
6529 public:
6530 pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6531 : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
6532 {}
6533
6534 /* opt_pass methods: */
6535 virtual unsigned int execute (function *)
6536 {
6537 return rest_of_handle_thread_prologue_and_epilogue ();
6538 }
6539
6540 }; // class pass_thread_prologue_and_epilogue
6541
6542 } // anon namespace
6543
6544 rtl_opt_pass *
6545 make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
6546 {
6547 return new pass_thread_prologue_and_epilogue (ctxt);
6548 }
6549 \f
6550
6551 /* This mini-pass fixes fall-out from SSA in asm statements that have
6552 in-out constraints. Say you start with
6553
6554 orig = inout;
6555 asm ("": "+mr" (inout));
6556 use (orig);
6557
6558 which is transformed very early to use explicit output and match operands:
6559
6560 orig = inout;
6561 asm ("": "=mr" (inout) : "0" (inout));
6562 use (orig);
6563
6564 Or, after SSA and copyprop,
6565
6566 asm ("": "=mr" (inout_2) : "0" (inout_1));
6567 use (inout_1);
6568
6569 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
6570 they represent two separate values, so they will get different pseudo
6571 registers during expansion. Then, since the two operands need to match
6572 per the constraints, but use different pseudo registers, reload can
6573 only register a reload for these operands. But reloads can only be
6574 satisfied by hardregs, not by memory, so we need a register for this
6575 reload, just because we are presented with non-matching operands.
6576 So, even though we allow memory for this operand, no memory can be
6577 used for it, just because the two operands don't match. This can
6578 cause reload failures on register-starved targets.
6579
6580 So it's a symptom of reload not being able to use memory for reloads
6581 or, alternatively it's also a symptom of both operands not coming into
6582 reload as matching (in which case the pseudo could go to memory just
6583 fine, as the alternative allows it, and no reload would be necessary).
6584 We fix the latter problem here, by transforming
6585
6586 asm ("": "=mr" (inout_2) : "0" (inout_1));
6587
6588 back to
6589
6590 inout_2 = inout_1;
6591 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
6592
6593 static void
6594 match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
6595 {
6596 int i;
6597 bool changed = false;
6598 rtx op = SET_SRC (p_sets[0]);
6599 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
6600 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
6601 bool *output_matched = XALLOCAVEC (bool, noutputs);
6602
6603 memset (output_matched, 0, noutputs * sizeof (bool));
6604 for (i = 0; i < ninputs; i++)
6605 {
6606 rtx input, output;
6607 rtx_insn *insns;
6608 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
6609 char *end;
6610 int match, j;
6611
6612 if (*constraint == '%')
6613 constraint++;
6614
6615 match = strtoul (constraint, &end, 10);
6616 if (end == constraint)
6617 continue;
6618
6619 gcc_assert (match < noutputs);
6620 output = SET_DEST (p_sets[match]);
6621 input = RTVEC_ELT (inputs, i);
6622 /* Only do the transformation for pseudos. */
6623 if (! REG_P (output)
6624 || rtx_equal_p (output, input)
6625 || (GET_MODE (input) != VOIDmode
6626 && GET_MODE (input) != GET_MODE (output)))
6627 continue;
6628
6629 /* We can't do anything if the output is also used as input,
6630 as we're going to overwrite it. */
6631 for (j = 0; j < ninputs; j++)
6632 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
6633 break;
6634 if (j != ninputs)
6635 continue;
6636
6637 /* Avoid changing the same input several times. For
6638 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
6639 only change in once (to out1), rather than changing it
6640 first to out1 and afterwards to out2. */
6641 if (i > 0)
6642 {
6643 for (j = 0; j < noutputs; j++)
6644 if (output_matched[j] && input == SET_DEST (p_sets[j]))
6645 break;
6646 if (j != noutputs)
6647 continue;
6648 }
6649 output_matched[match] = true;
6650
6651 start_sequence ();
6652 emit_move_insn (output, input);
6653 insns = get_insns ();
6654 end_sequence ();
6655 emit_insn_before (insns, insn);
6656
6657 /* Now replace all mentions of the input with output. We can't
6658 just replace the occurrence in inputs[i], as the register might
6659 also be used in some other input (or even in an address of an
6660 output), which would mean possibly increasing the number of
6661 inputs by one (namely 'output' in addition), which might pose
6662 too complicated a problem for reload to solve. E.g. this situation:
6663
6664 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6665
6666 Here 'input' is used in two occurrences as input (once for the
6667 input operand, once for the address in the second output operand).
6668 If we would replace only the occurrence of the input operand (to
6669 make the matching) we would be left with this:
6670
6671 output = input
6672 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6673
6674 Now we suddenly have two different input values (containing the same
6675 value, but different pseudos) where we formerly had only one.
6676 With more complicated asms this might lead to reload failures
6677 which wouldn't have happened without this pass. So, iterate over
6678 all operands and replace all occurrences of the register used. */
6679 for (j = 0; j < noutputs; j++)
6680 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6681 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6682 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6683 input, output);
6684 for (j = 0; j < ninputs; j++)
6685 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6686 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6687 input, output);
6688
6689 changed = true;
6690 }
6691
6692 if (changed)
6693 df_insn_rescan (insn);
6694 }
6695
6696 /* Add the decl D to the local_decls list of FUN. */
6697
6698 void
6699 add_local_decl (struct function *fun, tree d)
6700 {
6701 gcc_assert (TREE_CODE (d) == VAR_DECL);
6702 vec_safe_push (fun->local_decls, d);
6703 }
6704
6705 namespace {
6706
6707 const pass_data pass_data_match_asm_constraints =
6708 {
6709 RTL_PASS, /* type */
6710 "asmcons", /* name */
6711 OPTGROUP_NONE, /* optinfo_flags */
6712 TV_NONE, /* tv_id */
6713 0, /* properties_required */
6714 0, /* properties_provided */
6715 0, /* properties_destroyed */
6716 0, /* todo_flags_start */
6717 0, /* todo_flags_finish */
6718 };
6719
6720 class pass_match_asm_constraints : public rtl_opt_pass
6721 {
6722 public:
6723 pass_match_asm_constraints (gcc::context *ctxt)
6724 : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
6725 {}
6726
6727 /* opt_pass methods: */
6728 virtual unsigned int execute (function *);
6729
6730 }; // class pass_match_asm_constraints
6731
6732 unsigned
6733 pass_match_asm_constraints::execute (function *fun)
6734 {
6735 basic_block bb;
6736 rtx_insn *insn;
6737 rtx pat, *p_sets;
6738 int noutputs;
6739
6740 if (!crtl->has_asm_statement)
6741 return 0;
6742
6743 df_set_flags (DF_DEFER_INSN_RESCAN);
6744 FOR_EACH_BB_FN (bb, fun)
6745 {
6746 FOR_BB_INSNS (bb, insn)
6747 {
6748 if (!INSN_P (insn))
6749 continue;
6750
6751 pat = PATTERN (insn);
6752 if (GET_CODE (pat) == PARALLEL)
6753 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6754 else if (GET_CODE (pat) == SET)
6755 p_sets = &PATTERN (insn), noutputs = 1;
6756 else
6757 continue;
6758
6759 if (GET_CODE (*p_sets) == SET
6760 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6761 match_asm_constraints_1 (insn, p_sets, noutputs);
6762 }
6763 }
6764
6765 return TODO_df_finish;
6766 }
6767
6768 } // anon namespace
6769
6770 rtl_opt_pass *
6771 make_pass_match_asm_constraints (gcc::context *ctxt)
6772 {
6773 return new pass_match_asm_constraints (ctxt);
6774 }
6775
6776
6777 #include "gt-function.h"