1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010, 2011 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "rtl-error.h"
41 #include "tree.h"
42 #include "flags.h"
43 #include "except.h"
44 #include "function.h"
45 #include "expr.h"
46 #include "optabs.h"
47 #include "libfuncs.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "insn-config.h"
51 #include "recog.h"
52 #include "output.h"
53 #include "basic-block.h"
54 #include "hashtab.h"
55 #include "ggc.h"
56 #include "tm_p.h"
57 #include "integrate.h"
58 #include "langhooks.h"
59 #include "target.h"
60 #include "cfglayout.h"
61 #include "gimple.h"
62 #include "tree-pass.h"
63 #include "predict.h"
64 #include "df.h"
65 #include "timevar.h"
66 #include "vecprim.h"
67
68 /* So we can assign to cfun in this file. */
69 #undef cfun
70
71 #ifndef STACK_ALIGNMENT_NEEDED
72 #define STACK_ALIGNMENT_NEEDED 1
73 #endif
74
75 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
76
77 /* Some systems use __main in a way incompatible with its use in gcc; in these
78 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79 give the same symbol without quotes for an alternative entry point. You
80 must define both, or neither. */
81 #ifndef NAME__MAIN
82 #define NAME__MAIN "__main"
83 #endif
84
85 /* Round a value down to the largest multiple of the required alignment
86    that is less than or equal to it.  Avoid using division in case the
87    value is negative.  Assume the alignment is a power of two. */
88 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
89
90 /* Similar, but round up to the next multiple of the required
91    alignment. */
92 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
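/* Worked examples (assuming ALIGN is a power of two, as required):
   FLOOR_ROUND (13, 8) == 8, FLOOR_ROUND (-13, 8) == -16,
   CEIL_ROUND (13, 8) == 16 and CEIL_ROUND (16, 8) == 16.  */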
93
94 /* Nonzero if function being compiled doesn't contain any calls
95 (ignoring the prologue and epilogue). This is set prior to
96 local register allocation and is valid for the remaining
97 compiler passes. */
98 int current_function_is_leaf;
99
100 /* Nonzero if function being compiled doesn't modify the stack pointer
101 (ignoring the prologue and epilogue). This is only valid after
102 pass_stack_ptr_mod has run. */
103 int current_function_sp_is_unchanging;
104
105 /* Nonzero if the function being compiled is a leaf function which only
106 uses leaf registers. This is valid after reload (specifically after
107 sched2) and is useful only if the port defines LEAF_REGISTERS. */
108 int current_function_uses_only_leaf_regs;
109
110 /* Nonzero once virtual register instantiation has been done.
111 assign_stack_local uses frame_pointer_rtx when this is nonzero.
112 calls.c:emit_library_call_value_1 uses it to set up
113 post-instantiation libcalls. */
114 int virtuals_instantiated;
115
116 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
117 static GTY(()) int funcdef_no;
118
119 /* These variables hold pointers to functions to create and destroy
120 target specific, per-function data structures. */
121 struct machine_function * (*init_machine_status) (void);
122
123 /* The currently compiled function. */
124 struct function *cfun = 0;
125
126 /* These hashes record the prologue and epilogue insns. */
127 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
128 htab_t prologue_insn_hash;
129 static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
130 htab_t epilogue_insn_hash;
131 \f
132
133 htab_t types_used_by_vars_hash = NULL;
134 VEC(tree,gc) *types_used_by_cur_var_decl;
135
136 /* Forward declarations. */
137
138 static struct temp_slot *find_temp_slot_from_address (rtx);
139 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
140 static void pad_below (struct args_size *, enum machine_mode, tree);
141 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
142 static int all_blocks (tree, tree *);
143 static tree *get_block_vector (tree, int *);
144 extern tree debug_find_var_in_block_tree (tree, tree);
145 /* We always define `record_insns' even if it's not used so that we
146 can always export `prologue_epilogue_contains'. */
147 static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
148 static bool contains (const_rtx, htab_t);
149 #ifdef HAVE_return
150 static void emit_return_into_block (basic_block);
151 #endif
152 static void prepare_function_start (void);
153 static void do_clobber_return_reg (rtx, void *);
154 static void do_use_return_reg (rtx, void *);
155 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
156 \f
157 /* Stack of nested functions.  Keeps track of the cfun stack. */
158
159
160 typedef struct function *function_p;
161
162 DEF_VEC_P(function_p);
163 DEF_VEC_ALLOC_P(function_p,heap);
164 static VEC(function_p,heap) *function_context_stack;
165
166 /* Save the current context for compilation of a nested function.
167 This is called from language-specific code. */
168
169 void
170 push_function_context (void)
171 {
172 if (cfun == 0)
173 allocate_struct_function (NULL, false);
174
175 VEC_safe_push (function_p, heap, function_context_stack, cfun);
176 set_cfun (NULL);
177 }
178
179 /* Restore the last saved context, at the end of a nested function.
180 This function is called from language-specific code. */
181
182 void
183 pop_function_context (void)
184 {
185 struct function *p = VEC_pop (function_p, function_context_stack);
186 set_cfun (p);
187 current_function_decl = p->decl;
188
189 /* Reset variables that have known state during rtx generation. */
190 virtuals_instantiated = 0;
191 generating_concat_p = 1;
192 }
193
194 /* Clear out all parts of the state in F that can safely be discarded
195 after the function has been parsed, but not compiled, to let
196 garbage collection reclaim the memory. */
197
198 void
199 free_after_parsing (struct function *f)
200 {
201 f->language = 0;
202 }
203
204 /* Clear out all parts of the state in F that can safely be discarded
205 after the function has been compiled, to let garbage collection
206 reclaim the memory. */
207
208 void
209 free_after_compilation (struct function *f)
210 {
211 prologue_insn_hash = NULL;
212 epilogue_insn_hash = NULL;
213
214 if (crtl->emit.regno_pointer_align)
215 free (crtl->emit.regno_pointer_align);
216
217 memset (crtl, 0, sizeof (struct rtl_data));
218 f->eh = NULL;
219 f->machine = NULL;
220 f->cfg = NULL;
221
222 regno_reg_rtx = NULL;
223 insn_locators_free ();
224 }
225 \f
226 /* Return size needed for stack frame based on slots so far allocated.
227 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
228 the caller may have to do that. */
229
230 HOST_WIDE_INT
231 get_frame_size (void)
232 {
233 if (FRAME_GROWS_DOWNWARD)
234 return -frame_offset;
235 else
236 return frame_offset;
237 }
238
239 /* Issue an error message and return TRUE if frame OFFSET overflows in
240    the signed target pointer arithmetic for function FUNC.  Otherwise
241    return FALSE. */
242
243 bool
244 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
245 {
246 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
247
248 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
249 /* Leave room for the fixed part of the frame. */
250 - 64 * UNITS_PER_WORD)
251 {
252 error_at (DECL_SOURCE_LOCATION (func),
253 "total size of local objects too large");
254 return TRUE;
255 }
256
257 return FALSE;
258 }
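/* As an illustration: on a target where Pmode is 32 bits wide and
   UNITS_PER_WORD is 4, the check above rejects any frame whose size
   exceeds 0x80000000 - 256 bytes.  */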
259
260 /* Return stack slot alignment in bits for TYPE and MODE. */
261
262 static unsigned int
263 get_stack_local_alignment (tree type, enum machine_mode mode)
264 {
265 unsigned int alignment;
266
267 if (mode == BLKmode)
268 alignment = BIGGEST_ALIGNMENT;
269 else
270 alignment = GET_MODE_ALIGNMENT (mode);
271
272 /* Allow the front-end to (possibly) increase the alignment of this
273    stack slot. */
274 if (! type)
275 type = lang_hooks.types.type_for_mode (mode, 0);
276
277 return STACK_SLOT_ALIGNMENT (type, mode, alignment);
278 }
279
280 /* Determine whether it is possible to fit a stack slot of size SIZE and
281 alignment ALIGNMENT into an area in the stack frame that starts at
282 frame offset START and has a length of LENGTH. If so, store the frame
283 offset to be used for the stack slot in *POFFSET and return true;
284 return false otherwise. This function will extend the frame size when
285 given a start/length pair that lies at the end of the frame. */
286
287 static bool
288 try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
289 HOST_WIDE_INT size, unsigned int alignment,
290 HOST_WIDE_INT *poffset)
291 {
292 HOST_WIDE_INT this_frame_offset;
293 int frame_off, frame_alignment, frame_phase;
294
295 /* Calculate how many bytes the start of local variables is off from
296 stack alignment. */
297 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
298 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
299 frame_phase = frame_off ? frame_alignment - frame_off : 0;
300
301 /* Round the frame offset to the specified alignment. */
302
303 /* We must be careful here, since FRAME_OFFSET might be negative and
304 division with a negative dividend isn't as well defined as we might
305 like. So we instead assume that ALIGNMENT is a power of two and
306 use logical operations which are unambiguous. */
307 if (FRAME_GROWS_DOWNWARD)
308 this_frame_offset
309 = (FLOOR_ROUND (start + length - size - frame_phase,
310 (unsigned HOST_WIDE_INT) alignment)
311 + frame_phase);
312 else
313 this_frame_offset
314 = (CEIL_ROUND (start - frame_phase,
315 (unsigned HOST_WIDE_INT) alignment)
316 + frame_phase);
317
318 /* See if it fits. If this space is at the edge of the frame,
319 consider extending the frame to make it fit. Our caller relies on
320 this when allocating a new slot. */
321 if (frame_offset == start && this_frame_offset < frame_offset)
322 frame_offset = this_frame_offset;
323 else if (this_frame_offset < start)
324 return false;
325 else if (start + length == frame_offset
326 && this_frame_offset + size > start + length)
327 frame_offset = this_frame_offset + size;
328 else if (this_frame_offset + size > start + length)
329 return false;
330
331 *poffset = this_frame_offset;
332 return true;
333 }
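/* A small worked example, assuming FRAME_GROWS_DOWNWARD and a frame
   phase of 0: with START == -24, LENGTH == 16, SIZE == 8 and
   ALIGNMENT == 8, the slot lands at FLOOR_ROUND (-24 + 16 - 8, 8) == -16,
   occupying [-16, -8) inside the free area [-24, -8), so the function
   stores -16 in *POFFSET and returns true.  */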
334
335 /* Create a new frame_space structure describing free space in the stack
336 frame beginning at START and ending at END, and chain it into the
337 function's frame_space_list. */
338
339 static void
340 add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
341 {
342 struct frame_space *space = ggc_alloc_frame_space ();
343 space->next = crtl->frame_space_list;
344 crtl->frame_space_list = space;
345 space->start = start;
346 space->length = end - start;
347 }
348
349 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
350 with machine mode MODE.
351
352 ALIGN controls the amount of alignment for the address of the slot:
353 0 means according to MODE,
354 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
355 -2 means use BITS_PER_UNIT,
356 positive specifies alignment boundary in bits.
357
358 KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
359 alignment and ASLK_RECORD_PAD bit set if we should remember
360 extra space we allocated for alignment purposes. When we are
361 called from assign_stack_temp_for_type, it is not set so we don't
362 track the same stack slot in two independent lists.
363
364 We do not round to stack_boundary here. */
365
366 rtx
367 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
368 int align, int kind)
369 {
370 rtx x, addr;
371 int bigend_correction = 0;
372 HOST_WIDE_INT slot_offset = 0, old_frame_offset;
373 unsigned int alignment, alignment_in_bits;
374
375 if (align == 0)
376 {
377 alignment = get_stack_local_alignment (NULL, mode);
378 alignment /= BITS_PER_UNIT;
379 }
380 else if (align == -1)
381 {
382 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
383 size = CEIL_ROUND (size, alignment);
384 }
385 else if (align == -2)
386 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
387 else
388 alignment = align / BITS_PER_UNIT;
389
390 alignment_in_bits = alignment * BITS_PER_UNIT;
391
392 /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */
393 if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
394 {
395 alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
396 alignment = alignment_in_bits / BITS_PER_UNIT;
397 }
398
399 if (SUPPORTS_STACK_ALIGNMENT)
400 {
401 if (crtl->stack_alignment_estimated < alignment_in_bits)
402 {
403 if (!crtl->stack_realign_processed)
404 crtl->stack_alignment_estimated = alignment_in_bits;
405 else
406 {
407 /* If stack is realigned and stack alignment value
408 hasn't been finalized, it is OK not to increase
409 stack_alignment_estimated. The bigger alignment
410 requirement is recorded in stack_alignment_needed
411 below. */
412 gcc_assert (!crtl->stack_realign_finalized);
413 if (!crtl->stack_realign_needed)
414 {
415 /* It is OK to reduce the alignment as long as the
416 requested size is 0 or the estimated stack
417 alignment >= mode alignment. */
418 gcc_assert ((kind & ASLK_REDUCE_ALIGN)
419 || size == 0
420 || (crtl->stack_alignment_estimated
421 >= GET_MODE_ALIGNMENT (mode)));
422 alignment_in_bits = crtl->stack_alignment_estimated;
423 alignment = alignment_in_bits / BITS_PER_UNIT;
424 }
425 }
426 }
427 }
428
429 if (crtl->stack_alignment_needed < alignment_in_bits)
430 crtl->stack_alignment_needed = alignment_in_bits;
431 if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
432 crtl->max_used_stack_slot_alignment = alignment_in_bits;
433
434 if (mode != BLKmode || size != 0)
435 {
436 if (kind & ASLK_RECORD_PAD)
437 {
438 struct frame_space **psp;
439
440 for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
441 {
442 struct frame_space *space = *psp;
443 if (!try_fit_stack_local (space->start, space->length, size,
444 alignment, &slot_offset))
445 continue;
446 *psp = space->next;
447 if (slot_offset > space->start)
448 add_frame_space (space->start, slot_offset);
449 if (slot_offset + size < space->start + space->length)
450 add_frame_space (slot_offset + size,
451 space->start + space->length);
452 goto found_space;
453 }
454 }
455 }
456 else if (!STACK_ALIGNMENT_NEEDED)
457 {
458 slot_offset = frame_offset;
459 goto found_space;
460 }
461
462 old_frame_offset = frame_offset;
463
464 if (FRAME_GROWS_DOWNWARD)
465 {
466 frame_offset -= size;
467 try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);
468
469 if (kind & ASLK_RECORD_PAD)
470 {
471 if (slot_offset > frame_offset)
472 add_frame_space (frame_offset, slot_offset);
473 if (slot_offset + size < old_frame_offset)
474 add_frame_space (slot_offset + size, old_frame_offset);
475 }
476 }
477 else
478 {
479 frame_offset += size;
480 try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);
481
482 if (kind & ASLK_RECORD_PAD)
483 {
484 if (slot_offset > old_frame_offset)
485 add_frame_space (old_frame_offset, slot_offset);
486 if (slot_offset + size < frame_offset)
487 add_frame_space (slot_offset + size, frame_offset);
488 }
489 }
490
491 found_space:
492 /* On a big-endian machine, if we are allocating more space than we will use,
493 use the least significant bytes of those that are allocated. */
494 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
495 bigend_correction = size - GET_MODE_SIZE (mode);
496
497 /* If we have already instantiated virtual registers, return the actual
498 address relative to the frame pointer. */
499 if (virtuals_instantiated)
500 addr = plus_constant (frame_pointer_rtx,
501 trunc_int_for_mode
502 (slot_offset + bigend_correction
503 + STARTING_FRAME_OFFSET, Pmode));
504 else
505 addr = plus_constant (virtual_stack_vars_rtx,
506 trunc_int_for_mode
507 (slot_offset + bigend_correction,
508 Pmode));
509
510 x = gen_rtx_MEM (mode, addr);
511 set_mem_align (x, alignment_in_bits);
512 MEM_NOTRAP_P (x) = 1;
513
514 stack_slot_list
515 = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
516
517 if (frame_offset_overflow (frame_offset, current_function_decl))
518 frame_offset = 0;
519
520 return x;
521 }
522
523 /* Wrap up assign_stack_local_1 with KIND set to ASLK_RECORD_PAD. */
524
525 rtx
526 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
527 {
528 return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
529 }
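/* Illustrative backend-style usage: assign_stack_local (DImode,
   GET_MODE_SIZE (DImode), 0) allocates an 8-byte frame slot aligned
   according to DImode and returns a MEM referencing it.  */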
530 \f
531 \f
532 /* In order to evaluate some expressions, such as function calls returning
533 structures in memory, we need to temporarily allocate stack locations.
534 We record each allocated temporary in the following structure.
535
536 Associated with each temporary slot is a nesting level. When we pop up
537 one level, all temporaries associated with the previous level are freed.
538 Normally, all temporaries are freed after the execution of the statement
539 in which they were created. However, if we are inside a ({...}) grouping,
540 the result may be in a temporary and hence must be preserved. If the
541 result could be in a temporary, we preserve it if we can determine which
542 one it is in. If we cannot determine which temporary may contain the
543 result, all temporaries are preserved. A temporary is preserved by
544 pretending it was allocated at the previous nesting level.
545
546 Automatic variables are also assigned temporary slots, at the nesting
547    level where they are defined.  They are marked as "kept" so that
548 free_temp_slots will not free them. */
549
550 struct GTY(()) temp_slot {
551 /* Points to next temporary slot. */
552 struct temp_slot *next;
553 /* Points to previous temporary slot. */
554 struct temp_slot *prev;
555   /* The rtx used to reference the slot. */
556 rtx slot;
557 /* The size, in units, of the slot. */
558 HOST_WIDE_INT size;
559 /* The type of the object in the slot, or zero if it doesn't correspond
560 to a type. We use this to determine whether a slot can be reused.
561 It can be reused if objects of the type of the new slot will always
562 conflict with objects of the type of the old slot. */
563 tree type;
564 /* The alignment (in bits) of the slot. */
565 unsigned int align;
566 /* Nonzero if this temporary is currently in use. */
567 char in_use;
568 /* Nonzero if this temporary has its address taken. */
569 char addr_taken;
570 /* Nesting level at which this slot is being used. */
571 int level;
572 /* Nonzero if this should survive a call to free_temp_slots. */
573 int keep;
574 /* The offset of the slot from the frame_pointer, including extra space
575 for alignment. This info is for combine_temp_slots. */
576 HOST_WIDE_INT base_offset;
577 /* The size of the slot, including extra space for alignment. This
578 info is for combine_temp_slots. */
579 HOST_WIDE_INT full_size;
580 };
581
582 /* A table of addresses that represent a stack slot. The table is a mapping
583 from address RTXen to a temp slot. */
584 static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
585
586 /* Entry for the above hash table. */
587 struct GTY(()) temp_slot_address_entry {
588 hashval_t hash;
589 rtx address;
590 struct temp_slot *temp_slot;
591 };
592
593 /* Removes temporary slot TEMP from LIST. */
594
595 static void
596 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
597 {
598 if (temp->next)
599 temp->next->prev = temp->prev;
600 if (temp->prev)
601 temp->prev->next = temp->next;
602 else
603 *list = temp->next;
604
605 temp->prev = temp->next = NULL;
606 }
607
608 /* Inserts temporary slot TEMP into LIST. */
609
610 static void
611 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
612 {
613 temp->next = *list;
614 if (*list)
615 (*list)->prev = temp;
616 temp->prev = NULL;
617 *list = temp;
618 }
619
620 /* Returns the list of used temp slots at LEVEL. */
621
622 static struct temp_slot **
623 temp_slots_at_level (int level)
624 {
625 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
626 VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
627
628 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
629 }
630
631 /* Returns the maximal temporary slot level. */
632
633 static int
634 max_slot_level (void)
635 {
636 if (!used_temp_slots)
637 return -1;
638
639 return VEC_length (temp_slot_p, used_temp_slots) - 1;
640 }
641
642 /* Moves temporary slot TEMP to LEVEL. */
643
644 static void
645 move_slot_to_level (struct temp_slot *temp, int level)
646 {
647 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
648 insert_slot_to_list (temp, temp_slots_at_level (level));
649 temp->level = level;
650 }
651
652 /* Make temporary slot TEMP available. */
653
654 static void
655 make_slot_available (struct temp_slot *temp)
656 {
657 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
658 insert_slot_to_list (temp, &avail_temp_slots);
659 temp->in_use = 0;
660 temp->level = -1;
661 }
662
663 /* Compute the hash value for an address -> temp slot mapping.
664 The value is cached on the mapping entry. */
665 static hashval_t
666 temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
667 {
668 int do_not_record = 0;
669 return hash_rtx (t->address, GET_MODE (t->address),
670 &do_not_record, NULL, false);
671 }
672
673 /* Return the hash value for an address -> temp slot mapping. */
674 static hashval_t
675 temp_slot_address_hash (const void *p)
676 {
677 const struct temp_slot_address_entry *t;
678 t = (const struct temp_slot_address_entry *) p;
679 return t->hash;
680 }
681
682 /* Compare two address -> temp slot mapping entries. */
683 static int
684 temp_slot_address_eq (const void *p1, const void *p2)
685 {
686 const struct temp_slot_address_entry *t1, *t2;
687 t1 = (const struct temp_slot_address_entry *) p1;
688 t2 = (const struct temp_slot_address_entry *) p2;
689 return exp_equiv_p (t1->address, t2->address, 0, true);
690 }
691
692 /* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping. */
693 static void
694 insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
695 {
696 void **slot;
697 struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
698 t->address = address;
699 t->temp_slot = temp_slot;
700 t->hash = temp_slot_address_compute_hash (t);
701 slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
702 *slot = t;
703 }
704
705 /* Remove an address -> temp slot mapping entry if the temp slot is
706 not in use anymore. Callback for remove_unused_temp_slot_addresses. */
707 static int
708 remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
709 {
710 const struct temp_slot_address_entry *t;
711 t = (const struct temp_slot_address_entry *) *slot;
712 if (! t->temp_slot->in_use)
713 *slot = NULL;
714 return 1;
715 }
716
717 /* Remove all mappings of addresses to unused temp slots. */
718 static void
719 remove_unused_temp_slot_addresses (void)
720 {
721 htab_traverse (temp_slot_address_table,
722 remove_unused_temp_slot_addresses_1,
723 NULL);
724 }
725
726 /* Find the temp slot corresponding to the object at address X. */
727
728 static struct temp_slot *
729 find_temp_slot_from_address (rtx x)
730 {
731 struct temp_slot *p;
732 struct temp_slot_address_entry tmp, *t;
733
734 /* First try the easy way:
735 See if X exists in the address -> temp slot mapping. */
736 tmp.address = x;
737 tmp.temp_slot = NULL;
738 tmp.hash = temp_slot_address_compute_hash (&tmp);
739 t = (struct temp_slot_address_entry *)
740 htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
741 if (t)
742 return t->temp_slot;
743
744 /* If we have a sum involving a register, see if it points to a temp
745 slot. */
746 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
747 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
748 return p;
749 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
750 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
751 return p;
752
753 /* Last resort: Address is a virtual stack var address. */
754 if (GET_CODE (x) == PLUS
755 && XEXP (x, 0) == virtual_stack_vars_rtx
756 && CONST_INT_P (XEXP (x, 1)))
757 {
758 int i;
759 for (i = max_slot_level (); i >= 0; i--)
760 for (p = *temp_slots_at_level (i); p; p = p->next)
761 {
762 if (INTVAL (XEXP (x, 1)) >= p->base_offset
763 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
764 return p;
765 }
766 }
767
768 return NULL;
769 }
770 \f
771 /* Allocate a temporary stack slot and record it for possible later
772 reuse.
773
774 MODE is the machine mode to be given to the returned rtx.
775
776 SIZE is the size in units of the space required. We do no rounding here
777 since assign_stack_local will do any required rounding.
778
779 KEEP is 1 if this slot is to be retained after a call to
780 free_temp_slots. Automatic variables for a block are allocated
781 with this flag. KEEP values of 2 or 3 were needed respectively
782 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
783 or for SAVE_EXPRs, but they are now unused.
784
785 TYPE is the type that will be used for the stack slot. */
786
787 rtx
788 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
789 int keep, tree type)
790 {
791 unsigned int align;
792 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
793 rtx slot;
794
795 /* If SIZE is -1 it means that somebody tried to allocate a temporary
796 of a variable size. */
797 gcc_assert (size != -1);
798
799 /* These are now unused. */
800 gcc_assert (keep <= 1);
801
802 align = get_stack_local_alignment (type, mode);
803
804 /* Try to find an available, already-allocated temporary of the proper
805 mode which meets the size and alignment requirements. Choose the
806 smallest one with the closest alignment.
807
808 If assign_stack_temp is called outside of the tree->rtl expansion,
809 we cannot reuse the stack slots (that may still refer to
810 VIRTUAL_STACK_VARS_REGNUM). */
811 if (!virtuals_instantiated)
812 {
813 for (p = avail_temp_slots; p; p = p->next)
814 {
815 if (p->align >= align && p->size >= size
816 && GET_MODE (p->slot) == mode
817 && objects_must_conflict_p (p->type, type)
818 && (best_p == 0 || best_p->size > p->size
819 || (best_p->size == p->size && best_p->align > p->align)))
820 {
821 if (p->align == align && p->size == size)
822 {
823 selected = p;
824 cut_slot_from_list (selected, &avail_temp_slots);
825 best_p = 0;
826 break;
827 }
828 best_p = p;
829 }
830 }
831 }
832
833 /* Make our best, if any, the one to use. */
834 if (best_p)
835 {
836 selected = best_p;
837 cut_slot_from_list (selected, &avail_temp_slots);
838
839 /* If there are enough aligned bytes left over, make them into a new
840 temp_slot so that the extra bytes don't get wasted. Do this only
841 for BLKmode slots, so that we can be sure of the alignment. */
842 if (GET_MODE (best_p->slot) == BLKmode)
843 {
844 int alignment = best_p->align / BITS_PER_UNIT;
845 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
846
847 if (best_p->size - rounded_size >= alignment)
848 {
849 p = ggc_alloc_temp_slot ();
850 p->in_use = p->addr_taken = 0;
851 p->size = best_p->size - rounded_size;
852 p->base_offset = best_p->base_offset + rounded_size;
853 p->full_size = best_p->full_size - rounded_size;
854 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
855 p->align = best_p->align;
856 p->type = best_p->type;
857 insert_slot_to_list (p, &avail_temp_slots);
858
859 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
860 stack_slot_list);
861
862 best_p->size = rounded_size;
863 best_p->full_size = rounded_size;
864 }
865 }
866 }
867
868 /* If we still didn't find one, make a new temporary. */
869 if (selected == 0)
870 {
871 HOST_WIDE_INT frame_offset_old = frame_offset;
872
873 p = ggc_alloc_temp_slot ();
874
875 /* We are passing an explicit alignment request to assign_stack_local.
876 One side effect of that is assign_stack_local will not round SIZE
877 to ensure the frame offset remains suitably aligned.
878
879 So for requests which depended on the rounding of SIZE, we go ahead
880 and round it now. We also make sure ALIGNMENT is at least
881 BIGGEST_ALIGNMENT. */
882 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
883 p->slot = assign_stack_local_1 (mode,
884 (mode == BLKmode
885 ? CEIL_ROUND (size,
886 (int) align
887 / BITS_PER_UNIT)
888 : size),
889 align, 0);
890
891 p->align = align;
892
893 /* The following slot size computation is necessary because we don't
894 know the actual size of the temporary slot until assign_stack_local
895 has performed all the frame alignment and size rounding for the
896 requested temporary. Note that extra space added for alignment
897 can be either above or below this stack slot depending on which
898 way the frame grows. We include the extra space if and only if it
899 is above this slot. */
900 if (FRAME_GROWS_DOWNWARD)
901 p->size = frame_offset_old - frame_offset;
902 else
903 p->size = size;
904
905 /* Now define the fields used by combine_temp_slots. */
906 if (FRAME_GROWS_DOWNWARD)
907 {
908 p->base_offset = frame_offset;
909 p->full_size = frame_offset_old - frame_offset;
910 }
911 else
912 {
913 p->base_offset = frame_offset_old;
914 p->full_size = frame_offset - frame_offset_old;
915 }
916
917 selected = p;
918 }
919
920 p = selected;
921 p->in_use = 1;
922 p->addr_taken = 0;
923 p->type = type;
924 p->level = temp_slot_level;
925 p->keep = keep;
926
927 pp = temp_slots_at_level (p->level);
928 insert_slot_to_list (p, pp);
929 insert_temp_slot_address (XEXP (p->slot, 0), p);
930
931 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
932 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
933 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
934
935 /* If we know the alias set for the memory that will be used, use
936 it. If there's no TYPE, then we don't know anything about the
937 alias set for the memory. */
938 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
939 set_mem_align (slot, align);
940
941 /* If a type is specified, set the relevant flags. */
942 if (type != 0)
943 {
944 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
945 MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
946 || TREE_CODE (type) == COMPLEX_TYPE));
947 }
948 MEM_NOTRAP_P (slot) = 1;
949
950 return slot;
951 }
952
953 /* Allocate a temporary stack slot and record it for possible later
954 reuse. First three arguments are same as in preceding function. */
955
956 rtx
957 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
958 {
959 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
960 }
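/* For example, assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0)
   returns a 4-byte scratch slot that becomes reusable again after the
   next call to free_temp_slots.  */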
961 \f
962 /* Assign a temporary.
963    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl,
964    and its name should be used in error messages.  In either case, we
965    allocate space of the given type.
966 KEEP is as for assign_stack_temp.
967 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
968 it is 0 if a register is OK.
969 DONT_PROMOTE is 1 if we should not promote values in register
970 to wider modes. */
971
972 rtx
973 assign_temp (tree type_or_decl, int keep, int memory_required,
974 int dont_promote ATTRIBUTE_UNUSED)
975 {
976 tree type, decl;
977 enum machine_mode mode;
978 #ifdef PROMOTE_MODE
979 int unsignedp;
980 #endif
981
982 if (DECL_P (type_or_decl))
983 decl = type_or_decl, type = TREE_TYPE (decl);
984 else
985 decl = NULL, type = type_or_decl;
986
987 mode = TYPE_MODE (type);
988 #ifdef PROMOTE_MODE
989 unsignedp = TYPE_UNSIGNED (type);
990 #endif
991
992 if (mode == BLKmode || memory_required)
993 {
994 HOST_WIDE_INT size = int_size_in_bytes (type);
995 rtx tmp;
996
997       /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
998 problems with allocating the stack space. */
999 if (size == 0)
1000 size = 1;
1001
1002 /* Unfortunately, we don't yet know how to allocate variable-sized
1003 temporaries. However, sometimes we can find a fixed upper limit on
1004 the size, so try that instead. */
1005 else if (size == -1)
1006 size = max_int_size_in_bytes (type);
1007
1008 /* The size of the temporary may be too large to fit into an integer. */
1009 /* ??? Not sure this should happen except for user silliness, so limit
1010 this to things that aren't compiler-generated temporaries. The
1011 rest of the time we'll die in assign_stack_temp_for_type. */
1012 if (decl && size == -1
1013 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
1014 {
1015 error ("size of variable %q+D is too large", decl);
1016 size = 1;
1017 }
1018
1019 tmp = assign_stack_temp_for_type (mode, size, keep, type);
1020 return tmp;
1021 }
1022
1023 #ifdef PROMOTE_MODE
1024 if (! dont_promote)
1025 mode = promote_mode (type, mode, &unsignedp);
1026 #endif
1027
1028 return gen_reg_rtx (mode);
1029 }
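/* For example, assign_temp (type, 0, 1, 0) always yields addressable
   stack memory, while with MEMORY_REQUIRED == 0 a scalar type may
   instead get a fresh (possibly promoted) pseudo register.  */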
1030 \f
1031 /* Combine temporary stack slots which are adjacent on the stack.
1032
1033 This allows for better use of already allocated stack space. This is only
1034 done for BLKmode slots because we can be sure that we won't have alignment
1035 problems in this case. */
1036
1037 static void
1038 combine_temp_slots (void)
1039 {
1040 struct temp_slot *p, *q, *next, *next_q;
1041 int num_slots;
1042
1043 /* We can't combine slots, because the information about which slot
1044 is in which alias set will be lost. */
1045 if (flag_strict_aliasing)
1046 return;
1047
1048   /* If there are a lot of temp slots, don't do anything unless
1049      high levels of optimization are enabled. */
1050 if (! flag_expensive_optimizations)
1051 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
1052 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
1053 return;
1054
1055 for (p = avail_temp_slots; p; p = next)
1056 {
1057 int delete_p = 0;
1058
1059 next = p->next;
1060
1061 if (GET_MODE (p->slot) != BLKmode)
1062 continue;
1063
1064 for (q = p->next; q; q = next_q)
1065 {
1066 int delete_q = 0;
1067
1068 next_q = q->next;
1069
1070 if (GET_MODE (q->slot) != BLKmode)
1071 continue;
1072
1073 if (p->base_offset + p->full_size == q->base_offset)
1074 {
1075 /* Q comes after P; combine Q into P. */
1076 p->size += q->size;
1077 p->full_size += q->full_size;
1078 delete_q = 1;
1079 }
1080 else if (q->base_offset + q->full_size == p->base_offset)
1081 {
1082 /* P comes after Q; combine P into Q. */
1083 q->size += p->size;
1084 q->full_size += p->full_size;
1085 delete_p = 1;
1086 break;
1087 }
1088 if (delete_q)
1089 cut_slot_from_list (q, &avail_temp_slots);
1090 }
1091
1092 /* Either delete P or advance past it. */
1093 if (delete_p)
1094 cut_slot_from_list (p, &avail_temp_slots);
1095 }
1096 }
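/* Illustration: two available BLKmode slots whose (base_offset,
   full_size) pairs are (0, 16) and (16, 8) satisfy the adjacency test
   p->base_offset + p->full_size == q->base_offset, so they are merged
   into a single slot with full_size 24.  */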
1097 \f
1098 /* Indicate that NEW_RTX is an alternate way of referring to the temp
1099 slot that previously was known by OLD_RTX. */
1100
1101 void
1102 update_temp_slot_address (rtx old_rtx, rtx new_rtx)
1103 {
1104 struct temp_slot *p;
1105
1106 if (rtx_equal_p (old_rtx, new_rtx))
1107 return;
1108
1109 p = find_temp_slot_from_address (old_rtx);
1110
1111   /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
1112      NEW_RTX is a register, see if one operand of the PLUS is a
1113      temporary location.  If so, NEW_RTX points into it.  Otherwise,
1114      see if both OLD_RTX and NEW_RTX are a PLUS with a register
1115      in common between them; if so, try a recursive call on the
1116      other operands. */
1117 if (p == 0)
1118 {
1119 if (GET_CODE (old_rtx) != PLUS)
1120 return;
1121
1122 if (REG_P (new_rtx))
1123 {
1124 update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
1125 update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
1126 return;
1127 }
1128 else if (GET_CODE (new_rtx) != PLUS)
1129 return;
1130
1131 if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
1132 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
1133 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
1134 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
1135 else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
1136 update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
1137 else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
1138 update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));
1139
1140 return;
1141 }
1142
1143 /* Otherwise add an alias for the temp's address. */
1144 insert_temp_slot_address (new_rtx, p);
1145 }
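/* For example, after emitting (set (reg R) (plus (reg virtual-stack-vars)
   (const_int -16))), calling this with OLD_RTX == that PLUS and
   NEW_RTX == (reg R) records R as another address of the same slot,
   assuming the offset falls inside a live temp slot.  */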
1146
1147 /* If X could be a reference to a temporary slot, mark the fact that its
1148 address was taken. */
1149
1150 void
1151 mark_temp_addr_taken (rtx x)
1152 {
1153 struct temp_slot *p;
1154
1155 if (x == 0)
1156 return;
1157
1158 /* If X is not in memory or is at a constant address, it cannot be in
1159 a temporary slot. */
1160 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1161 return;
1162
1163 p = find_temp_slot_from_address (XEXP (x, 0));
1164 if (p != 0)
1165 p->addr_taken = 1;
1166 }
1167
1168 /* If X could be a reference to a temporary slot, mark that slot as
1169    belonging to the level one higher than the current level.  If X
1170 matched one of our slots, just mark that one. Otherwise, we can't
1171 easily predict which it is, so upgrade all of them. Kept slots
1172 need not be touched.
1173
1174 This is called when an ({...}) construct occurs and a statement
1175 returns a value in memory. */
1176
1177 void
1178 preserve_temp_slots (rtx x)
1179 {
1180 struct temp_slot *p = 0, *next;
1181
1182 /* If there is no result, we still might have some objects whose address
1183 were taken, so we need to make sure they stay around. */
1184 if (x == 0)
1185 {
1186 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1187 {
1188 next = p->next;
1189
1190 if (p->addr_taken)
1191 move_slot_to_level (p, temp_slot_level - 1);
1192 }
1193
1194 return;
1195 }
1196
1197 /* If X is a register that is being used as a pointer, see if we have
1198 a temporary slot we know it points to. To be consistent with
1199 the code below, we really should preserve all non-kept slots
1200 if we can't find a match, but that seems to be much too costly. */
1201 if (REG_P (x) && REG_POINTER (x))
1202 p = find_temp_slot_from_address (x);
1203
1204 /* If X is not in memory or is at a constant address, it cannot be in
1205 a temporary slot, but it can contain something whose address was
1206 taken. */
1207 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1208 {
1209 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1210 {
1211 next = p->next;
1212
1213 if (p->addr_taken)
1214 move_slot_to_level (p, temp_slot_level - 1);
1215 }
1216
1217 return;
1218 }
1219
1220 /* First see if we can find a match. */
1221 if (p == 0)
1222 p = find_temp_slot_from_address (XEXP (x, 0));
1223
1224 if (p != 0)
1225 {
1226 /* Move everything at our level whose address was taken to our new
1227 level in case we used its address. */
1228 struct temp_slot *q;
1229
1230 if (p->level == temp_slot_level)
1231 {
1232 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1233 {
1234 next = q->next;
1235
1236 if (p != q && q->addr_taken)
1237 move_slot_to_level (q, temp_slot_level - 1);
1238 }
1239
1240 move_slot_to_level (p, temp_slot_level - 1);
1241 p->addr_taken = 0;
1242 }
1243 return;
1244 }
1245
1246 /* Otherwise, preserve all non-kept slots at this level. */
1247 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1248 {
1249 next = p->next;
1250
1251 if (!p->keep)
1252 move_slot_to_level (p, temp_slot_level - 1);
1253 }
1254 }
1255
1256 /* Free all temporaries used so far. This is normally called at the
1257 end of generating code for a statement. */
1258
1259 void
1260 free_temp_slots (void)
1261 {
1262 struct temp_slot *p, *next;
1263 bool some_available = false;
1264
1265 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1266 {
1267 next = p->next;
1268
1269 if (!p->keep)
1270 {
1271 make_slot_available (p);
1272 some_available = true;
1273 }
1274 }
1275
1276 if (some_available)
1277 {
1278 remove_unused_temp_slot_addresses ();
1279 combine_temp_slots ();
1280 }
1281 }
1282
1283 /* Push deeper into the nesting level for stack temporaries. */
1284
1285 void
1286 push_temp_slots (void)
1287 {
1288 temp_slot_level++;
1289 }
1290
1291 /* Pop a temporary nesting level. All slots in use in the current level
1292 are freed. */
1293
1294 void
1295 pop_temp_slots (void)
1296 {
1297 struct temp_slot *p, *next;
1298 bool some_available = false;
1299
1300 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1301 {
1302 next = p->next;
1303 make_slot_available (p);
1304 some_available = true;
1305 }
1306
1307 if (some_available)
1308 {
1309 remove_unused_temp_slot_addresses ();
1310 combine_temp_slots ();
1311 }
1312
1313 temp_slot_level--;
1314 }
1315
1316 /* Initialize temporary slots. */
1317
1318 void
1319 init_temp_slots (void)
1320 {
1321 /* We have not allocated any temporaries yet. */
1322 avail_temp_slots = 0;
1323 used_temp_slots = 0;
1324 temp_slot_level = 0;
1325
1326 /* Set up the table to map addresses to temp slots. */
1327 if (! temp_slot_address_table)
1328 temp_slot_address_table = htab_create_ggc (32,
1329 temp_slot_address_hash,
1330 temp_slot_address_eq,
1331 NULL);
1332 else
1333 htab_empty (temp_slot_address_table);
1334 }
1335 \f
1336 /* These routines are responsible for converting virtual register references
1337 to the actual hard register references once RTL generation is complete.
1338
1339 The following four variables are used for communication between the
1340 routines. They contain the offsets of the virtual registers from their
1341 respective hard registers. */
1342
1343 static int in_arg_offset;
1344 static int var_offset;
1345 static int dynamic_offset;
1346 static int out_arg_offset;
1347 static int cfa_offset;
1348
1349 /* In most machines, the stack pointer register is equivalent to the bottom
1350 of the stack. */
1351
1352 #ifndef STACK_POINTER_OFFSET
1353 #define STACK_POINTER_OFFSET 0
1354 #endif
1355
1356 /* If not defined, pick an appropriate default for the offset of dynamically
1357 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1358 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1359
1360 #ifndef STACK_DYNAMIC_OFFSET
1361
1362 /* The bottom of the stack points to the actual arguments. If
1363 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1364    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
1365 stack space for register parameters is not pushed by the caller, but
1366 rather part of the fixed stack areas and hence not included in
1367 `crtl->outgoing_args_size'. Nevertheless, we must allow
1368 for it when allocating stack dynamic objects. */
1369
1370 #if defined(REG_PARM_STACK_SPACE)
1371 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1372 ((ACCUMULATE_OUTGOING_ARGS \
1373 ? (crtl->outgoing_args_size \
1374 + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
1375 : REG_PARM_STACK_SPACE (FNDECL))) \
1376 : 0) + (STACK_POINTER_OFFSET))
1377 #else
1378 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1379 ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
1380 + (STACK_POINTER_OFFSET))
1381 #endif
1382 #endif
1383
1384 \f
1385 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1386 is a virtual register, return the equivalent hard register and set the
1387 offset indirectly through the pointer. Otherwise, return 0. */
1388
1389 static rtx
1390 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1391 {
1392 rtx new_rtx;
1393 HOST_WIDE_INT offset;
1394
1395 if (x == virtual_incoming_args_rtx)
1396 {
1397 if (stack_realign_drap)
1398 {
1399 /* Replace virtual_incoming_args_rtx with internal arg
1400 pointer if DRAP is used to realign stack. */
1401 new_rtx = crtl->args.internal_arg_pointer;
1402 offset = 0;
1403 }
1404 else
1405 new_rtx = arg_pointer_rtx, offset = in_arg_offset;
1406 }
1407 else if (x == virtual_stack_vars_rtx)
1408 new_rtx = frame_pointer_rtx, offset = var_offset;
1409 else if (x == virtual_stack_dynamic_rtx)
1410 new_rtx = stack_pointer_rtx, offset = dynamic_offset;
1411 else if (x == virtual_outgoing_args_rtx)
1412 new_rtx = stack_pointer_rtx, offset = out_arg_offset;
1413 else if (x == virtual_cfa_rtx)
1414 {
1415 #ifdef FRAME_POINTER_CFA_OFFSET
1416 new_rtx = frame_pointer_rtx;
1417 #else
1418 new_rtx = arg_pointer_rtx;
1419 #endif
1420 offset = cfa_offset;
1421 }
1422 else if (x == virtual_preferred_stack_boundary_rtx)
1423 {
1424 new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
1425 offset = 0;
1426 }
1427 else
1428 return NULL_RTX;
1429
1430 *poffset = offset;
1431 return new_rtx;
1432 }
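/* This is the mapping applied during instantiate_virtual_regs below;
   e.g. virtual-stack-vars maps to frame-pointer plus var_offset, and
   virtual-outgoing-args maps to stack-pointer plus out_arg_offset.  */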
1433
1434 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1435 Instantiate any virtual registers present inside of *LOC. The expression
1436 is simplified, as much as possible, but is not to be considered "valid"
1437 in any sense implied by the target. If any change is made, set CHANGED
1438 to true. */
1439
1440 static int
1441 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1442 {
1443 HOST_WIDE_INT offset;
1444 bool *changed = (bool *) data;
1445 rtx x, new_rtx;
1446
1447 x = *loc;
1448 if (x == 0)
1449 return 0;
1450
1451 switch (GET_CODE (x))
1452 {
1453 case REG:
1454 new_rtx = instantiate_new_reg (x, &offset);
1455 if (new_rtx)
1456 {
1457 *loc = plus_constant (new_rtx, offset);
1458 if (changed)
1459 *changed = true;
1460 }
1461 return -1;
1462
1463 case PLUS:
1464 new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
1465 if (new_rtx)
1466 {
1467 new_rtx = plus_constant (new_rtx, offset);
1468 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
1469 if (changed)
1470 *changed = true;
1471 return -1;
1472 }
1473
1474 /* FIXME -- from old code */
1475 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1476 we can commute the PLUS and SUBREG because pointers into the
1477 frame are well-behaved. */
1478 break;
1479
1480 default:
1481 break;
1482 }
1483
1484 return 0;
1485 }
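/* For instance, (mem (plus (reg virtual-stack-vars) (const_int 4)))
   is rewritten to (mem (plus (reg frame-pointer) (const_int var_offset
   + 4))), with the constants folded by plus_constant and
   simplify_gen_binary.  */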
1486
1487 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1488 matches the predicate for insn CODE operand OPERAND. */
1489
1490 static int
1491 safe_insn_predicate (int code, int operand, rtx x)
1492 {
1493 const struct insn_operand_data *op_data;
1494
1495 if (code < 0)
1496 return true;
1497
1498 op_data = &insn_data[code].operand[operand];
1499 if (op_data->predicate == NULL)
1500 return true;
1501
1502 return op_data->predicate (x, op_data->mode);
1503 }
1504
1505 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1506 registers present inside of insn. The result will be a valid insn. */
1507
1508 static void
1509 instantiate_virtual_regs_in_insn (rtx insn)
1510 {
1511 HOST_WIDE_INT offset;
1512 int insn_code, i;
1513 bool any_change = false;
1514 rtx set, new_rtx, x, seq;
1515
1516 /* There are some special cases to be handled first. */
1517 set = single_set (insn);
1518 if (set)
1519 {
1520 /* We're allowed to assign to a virtual register. This is interpreted
1521 to mean that the underlying register gets assigned the inverse
1522 transformation. This is used, for example, in the handling of
1523 non-local gotos. */
1524 new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
1525 if (new_rtx)
1526 {
1527 start_sequence ();
1528
1529 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1530 x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
1531 GEN_INT (-offset));
1532 x = force_operand (x, new_rtx);
1533 if (x != new_rtx)
1534 emit_move_insn (new_rtx, x);
1535
1536 seq = get_insns ();
1537 end_sequence ();
1538
1539 emit_insn_before (seq, insn);
1540 delete_insn (insn);
1541 return;
1542 }
1543
1544 /* Handle a straight copy from a virtual register by generating a
1545 new add insn. The difference between this and falling through
1546 to the generic case is avoiding a new pseudo and eliminating a
1547 move insn in the initial rtl stream. */
1548 new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
1549 if (new_rtx && offset != 0
1550 && REG_P (SET_DEST (set))
1551 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1552 {
1553 start_sequence ();
1554
1555 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1556 new_rtx, GEN_INT (offset), SET_DEST (set),
1557 1, OPTAB_LIB_WIDEN);
1558 if (x != SET_DEST (set))
1559 emit_move_insn (SET_DEST (set), x);
1560
1561 seq = get_insns ();
1562 end_sequence ();
1563
1564 emit_insn_before (seq, insn);
1565 delete_insn (insn);
1566 return;
1567 }
1568
1569 extract_insn (insn);
1570 insn_code = INSN_CODE (insn);
1571
1572 /* Handle a plus involving a virtual register by determining if the
1573 operands remain valid if they're modified in place. */
1574 if (GET_CODE (SET_SRC (set)) == PLUS
1575 && recog_data.n_operands >= 3
1576 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1577 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1578 && CONST_INT_P (recog_data.operand[2])
1579 && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
1580 {
1581 offset += INTVAL (recog_data.operand[2]);
1582
1583 /* If the sum is zero, then replace with a plain move. */
1584 if (offset == 0
1585 && REG_P (SET_DEST (set))
1586 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1587 {
1588 start_sequence ();
1589 emit_move_insn (SET_DEST (set), new_rtx);
1590 seq = get_insns ();
1591 end_sequence ();
1592
1593 emit_insn_before (seq, insn);
1594 delete_insn (insn);
1595 return;
1596 }
1597
1598 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1599
1600 /* Using validate_change and apply_change_group here leaves
1601 recog_data in an invalid state. Since we know exactly what
1602 we want to check, do those two by hand. */
1603 if (safe_insn_predicate (insn_code, 1, new_rtx)
1604 && safe_insn_predicate (insn_code, 2, x))
1605 {
1606 *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
1607 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1608 any_change = true;
1609
1610 /* Fall through into the regular operand fixup loop in
1611 order to take care of operands other than 1 and 2. */
1612 }
1613 }
1614 }
1615 else
1616 {
1617 extract_insn (insn);
1618 insn_code = INSN_CODE (insn);
1619 }
1620
1621 /* In the general case, we expect virtual registers to appear only in
1622 operands, and then only as either bare registers or inside memories. */
1623 for (i = 0; i < recog_data.n_operands; ++i)
1624 {
1625 x = recog_data.operand[i];
1626 switch (GET_CODE (x))
1627 {
1628 case MEM:
1629 {
1630 rtx addr = XEXP (x, 0);
1631 bool changed = false;
1632
1633 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1634 if (!changed)
1635 continue;
1636
1637 start_sequence ();
1638 x = replace_equiv_address (x, addr);
1639 /* It may happen that the address with the virtual reg
1640 was valid (e.g. based on the virtual stack reg, which might
1641 be acceptable to the predicates with all offsets), whereas
1642 the address now isn't anymore, for instance when the address
1643 	       still carries an offset, but the base reg isn't virtual-stack-reg
1644 anymore. Below we would do a force_reg on the whole operand,
1645 but this insn might actually only accept memory. Hence,
1646 before doing that last resort, try to reload the address into
1647 a register, so this operand stays a MEM. */
1648 if (!safe_insn_predicate (insn_code, i, x))
1649 {
1650 addr = force_reg (GET_MODE (addr), addr);
1651 x = replace_equiv_address (x, addr);
1652 }
1653 seq = get_insns ();
1654 end_sequence ();
1655 if (seq)
1656 emit_insn_before (seq, insn);
1657 }
1658 break;
1659
1660 case REG:
1661 new_rtx = instantiate_new_reg (x, &offset);
1662 if (new_rtx == NULL)
1663 continue;
1664 if (offset == 0)
1665 x = new_rtx;
1666 else
1667 {
1668 start_sequence ();
1669
1670 /* Careful, special mode predicates may have stuff in
1671 insn_data[insn_code].operand[i].mode that isn't useful
1672 to us for computing a new value. */
1673 /* ??? Recognize address_operand and/or "p" constraints
1674 	         to see if (plus new offset) is valid before we put
1675 this through expand_simple_binop. */
1676 x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
1677 GEN_INT (offset), NULL_RTX,
1678 1, OPTAB_LIB_WIDEN);
1679 seq = get_insns ();
1680 end_sequence ();
1681 emit_insn_before (seq, insn);
1682 }
1683 break;
1684
1685 case SUBREG:
1686 new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
1687 if (new_rtx == NULL)
1688 continue;
1689 if (offset != 0)
1690 {
1691 start_sequence ();
1692 new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
1693 GEN_INT (offset), NULL_RTX,
1694 1, OPTAB_LIB_WIDEN);
1695 seq = get_insns ();
1696 end_sequence ();
1697 emit_insn_before (seq, insn);
1698 }
1699 x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
1700 GET_MODE (new_rtx), SUBREG_BYTE (x));
1701 gcc_assert (x);
1702 break;
1703
1704 default:
1705 continue;
1706 }
1707
1708 /* At this point, X contains the new value for the operand.
1709 Validate the new value vs the insn predicate. Note that
1710 asm insns will have insn_code -1 here. */
1711 if (!safe_insn_predicate (insn_code, i, x))
1712 {
1713 start_sequence ();
1714 if (REG_P (x))
1715 {
1716 gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
1717 x = copy_to_reg (x);
1718 }
1719 else
1720 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1721 seq = get_insns ();
1722 end_sequence ();
1723 if (seq)
1724 emit_insn_before (seq, insn);
1725 }
1726
1727 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1728 any_change = true;
1729 }
1730
1731 if (any_change)
1732 {
1733 /* Propagate operand changes into the duplicates. */
1734 for (i = 0; i < recog_data.n_dups; ++i)
1735 *recog_data.dup_loc[i]
1736 = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);
1737
1738 /* Force re-recognition of the instruction for validation. */
1739 INSN_CODE (insn) = -1;
1740 }
1741
1742 if (asm_noperands (PATTERN (insn)) >= 0)
1743 {
1744 if (!check_asm_operands (PATTERN (insn)))
1745 {
1746 error_for_asm (insn, "impossible constraint in %<asm%>");
1747 delete_insn (insn);
1748 }
1749 }
1750 else
1751 {
1752 if (recog_memoized (insn) < 0)
1753 fatal_insn_not_found (insn);
1754 }
1755 }
1756
1757 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1758 do any instantiation required. */
1759
1760 void
1761 instantiate_decl_rtl (rtx x)
1762 {
1763 rtx addr;
1764
1765 if (x == 0)
1766 return;
1767
1768 /* If this is a CONCAT, recurse for the pieces. */
1769 if (GET_CODE (x) == CONCAT)
1770 {
1771 instantiate_decl_rtl (XEXP (x, 0));
1772 instantiate_decl_rtl (XEXP (x, 1));
1773 return;
1774 }
1775
1776 /* If this is not a MEM, no need to do anything. Similarly if the
1777 address is a constant or a register that is not a virtual register. */
1778 if (!MEM_P (x))
1779 return;
1780
1781 addr = XEXP (x, 0);
1782 if (CONSTANT_P (addr)
1783 || (REG_P (addr)
1784 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1785 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
1786 return;
1787
1788 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1789 }
1790
1791 /* Helper for instantiate_decls called via walk_tree: Process all decls
1792 in the given DECL_VALUE_EXPR. */
1793
1794 static tree
1795 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1796 {
1797 tree t = *tp;
1798 if (! EXPR_P (t))
1799 {
1800 *walk_subtrees = 0;
1801 if (DECL_P (t))
1802 {
1803 if (DECL_RTL_SET_P (t))
1804 instantiate_decl_rtl (DECL_RTL (t));
1805 if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
1806 && DECL_INCOMING_RTL (t))
1807 instantiate_decl_rtl (DECL_INCOMING_RTL (t));
1808 if ((TREE_CODE (t) == VAR_DECL
1809 || TREE_CODE (t) == RESULT_DECL)
1810 && DECL_HAS_VALUE_EXPR_P (t))
1811 {
1812 tree v = DECL_VALUE_EXPR (t);
1813 walk_tree (&v, instantiate_expr, NULL, NULL);
1814 }
1815 }
1816 }
1817 return NULL;
1818 }
1819
1820 /* Subroutine of instantiate_decls: Process all decls in the given
1821 BLOCK node and all its subblocks. */
1822
1823 static void
1824 instantiate_decls_1 (tree let)
1825 {
1826 tree t;
1827
1828 for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
1829 {
1830 if (DECL_RTL_SET_P (t))
1831 instantiate_decl_rtl (DECL_RTL (t));
1832 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1833 {
1834 tree v = DECL_VALUE_EXPR (t);
1835 walk_tree (&v, instantiate_expr, NULL, NULL);
1836 }
1837 }
1838
1839 /* Process all subblocks. */
1840 for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
1841 instantiate_decls_1 (t);
1842 }
1843
1844 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1845 all virtual registers in their DECL_RTL's. */
1846
1847 static void
1848 instantiate_decls (tree fndecl)
1849 {
1850 tree decl;
1851 unsigned ix;
1852
1853 /* Process all parameters of the function. */
1854 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
1855 {
1856 instantiate_decl_rtl (DECL_RTL (decl));
1857 instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
1858 if (DECL_HAS_VALUE_EXPR_P (decl))
1859 {
1860 tree v = DECL_VALUE_EXPR (decl);
1861 walk_tree (&v, instantiate_expr, NULL, NULL);
1862 }
1863 }
1864
1865 if ((decl = DECL_RESULT (fndecl))
1866 && TREE_CODE (decl) == RESULT_DECL)
1867 {
1868 if (DECL_RTL_SET_P (decl))
1869 instantiate_decl_rtl (DECL_RTL (decl));
1870 if (DECL_HAS_VALUE_EXPR_P (decl))
1871 {
1872 tree v = DECL_VALUE_EXPR (decl);
1873 walk_tree (&v, instantiate_expr, NULL, NULL);
1874 }
1875 }
1876
1877 /* Now process all variables defined in the function or its subblocks. */
1878 instantiate_decls_1 (DECL_INITIAL (fndecl));
1879
1880 FOR_EACH_LOCAL_DECL (cfun, ix, decl)
1881 if (DECL_RTL_SET_P (decl))
1882 instantiate_decl_rtl (DECL_RTL (decl));
1883 VEC_free (tree, gc, cfun->local_decls);
1884 }
1885
1886 /* Pass through the INSNS of function FNDECL and convert virtual register
1887 references to hard register references. */
1888
1889 static unsigned int
1890 instantiate_virtual_regs (void)
1891 {
1892 rtx insn;
1893
1894 /* Compute the offsets to use for this function. */
1895 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1896 var_offset = STARTING_FRAME_OFFSET;
1897 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1898 out_arg_offset = STACK_POINTER_OFFSET;
1899 #ifdef FRAME_POINTER_CFA_OFFSET
1900 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1901 #else
1902 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1903 #endif
1904
1905 /* Initialize recognition, indicating that volatile is OK. */
1906 init_recog ();
1907
1908 /* Scan through all the insns, instantiating every virtual register still
1909 present. */
1910 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1911 if (INSN_P (insn))
1912 {
1913 /* These patterns in the instruction stream can never be recognized.
1914 Fortunately, they shouldn't contain virtual registers either. */
1915 if (GET_CODE (PATTERN (insn)) == USE
1916 || GET_CODE (PATTERN (insn)) == CLOBBER
1917 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1918 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1919 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1920 continue;
1921 else if (DEBUG_INSN_P (insn))
1922 for_each_rtx (&INSN_VAR_LOCATION (insn),
1923 instantiate_virtual_regs_in_rtx, NULL);
1924 else
1925 instantiate_virtual_regs_in_insn (insn);
1926
1927 if (INSN_DELETED_P (insn))
1928 continue;
1929
1930 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1931
1932 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1933 if (CALL_P (insn))
1934 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1935 instantiate_virtual_regs_in_rtx, NULL);
1936 }
1937
1938 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1939 instantiate_decls (current_function_decl);
1940
1941 targetm.instantiate_decls ();
1942
1943 /* Indicate that, from now on, assign_stack_local should use
1944 frame_pointer_rtx. */
1945 virtuals_instantiated = 1;
1946
1947 /* See allocate_dynamic_stack_space for the rationale. */
1948 #ifdef SETJMP_VIA_SAVE_AREA
1949 if (flag_stack_usage && cfun->calls_setjmp)
1950 {
1951 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1952 dynamic_offset = (dynamic_offset + align - 1) / align * align;
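/* A worked example, purely illustrative: with align == 16 and
   dynamic_offset == 20, the expression above yields 32, the next
   multiple of the preferred stack boundary. */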
1953 current_function_dynamic_stack_size
1954 += current_function_dynamic_alloc_count * dynamic_offset;
1955 }
1956 #endif
1957
1958 return 0;
1959 }
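/* Illustrative sketch of what instantiation does (the offsets here are
   hypothetical, not those of any particular target): assuming
   STARTING_FRAME_OFFSET is -16, an insn such as

     (set (reg:SI 60) (plus:SI (reg:SI virtual-stack-vars) (const_int 8)))

   is rewritten as

     (set (reg:SI 60) (plus:SI (reg:SI frame) (const_int -8)))

   i.e. the virtual register is replaced by its hard counterpart and
   its offset is folded into the constant. */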
1960
1961 struct rtl_opt_pass pass_instantiate_virtual_regs =
1962 {
1963 {
1964 RTL_PASS,
1965 "vregs", /* name */
1966 NULL, /* gate */
1967 instantiate_virtual_regs, /* execute */
1968 NULL, /* sub */
1969 NULL, /* next */
1970 0, /* static_pass_number */
1971 TV_NONE, /* tv_id */
1972 0, /* properties_required */
1973 0, /* properties_provided */
1974 0, /* properties_destroyed */
1975 0, /* todo_flags_start */
1976 TODO_dump_func /* todo_flags_finish */
1977 }
1978 };
1979
1980 \f
1981 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1982 This means a type for which function calls must pass an address to the
1983 function or get an address back from the function.
1984 EXP may be a type node or an expression (whose type is tested). */
1985
1986 int
1987 aggregate_value_p (const_tree exp, const_tree fntype)
1988 {
1989 const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1990 int i, regno, nregs;
1991 rtx reg;
1992
1993 if (fntype)
1994 switch (TREE_CODE (fntype))
1995 {
1996 case CALL_EXPR:
1997 {
1998 tree fndecl = get_callee_fndecl (fntype);
1999 fntype = (fndecl
2000 ? TREE_TYPE (fndecl)
2001 : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
2002 }
2003 break;
2004 case FUNCTION_DECL:
2005 fntype = TREE_TYPE (fntype);
2006 break;
2007 case FUNCTION_TYPE:
2008 case METHOD_TYPE:
2009 break;
2010 case IDENTIFIER_NODE:
2011 fntype = NULL_TREE;
2012 break;
2013 default:
2014 /* We don't expect other tree types here. */
2015 gcc_unreachable ();
2016 }
2017
2018 if (VOID_TYPE_P (type))
2019 return 0;
2020
2021 /* If a record should be passed the same as its first (and only) member,
2022 don't pass it as an aggregate. */
2023 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2024 return aggregate_value_p (first_field (type), fntype);
2025
2026 /* If the front end has decided that this needs to be passed by
2027 reference, do so. */
2028 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
2029 && DECL_BY_REFERENCE (exp))
2030 return 1;
2031
2032 /* Function types that are TREE_ADDRESSABLE force return in memory. */
2033 if (fntype && TREE_ADDRESSABLE (fntype))
2034 return 1;
2035
2036 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
2037 and thus can't be returned in registers. */
2038 if (TREE_ADDRESSABLE (type))
2039 return 1;
2040
2041 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
2042 return 1;
2043
2044 if (targetm.calls.return_in_memory (type, fntype))
2045 return 1;
2046
2047 /* Make sure we have suitable call-clobbered regs to return
2048 the value in; if not, we must return it in memory. */
2049 reg = hard_function_value (type, 0, fntype, 0);
2050
2051 /* If we have something other than a REG (e.g. a PARALLEL), then assume
2052 it is OK. */
2053 if (!REG_P (reg))
2054 return 0;
2055
2056 regno = REGNO (reg);
2057 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
2058 for (i = 0; i < nregs; i++)
2059 if (! call_used_regs[regno + i])
2060 return 1;
2061
2062 return 0;
2063 }
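/* An illustrative example of the check above, assuming a target that
   returns small scalar-like structures in one call-clobbered register:

     struct pair { char a, b; };

   yields 0 here and is returned in a register, whereas a structure too
   large for the return registers, or one whose return register is not
   call-clobbered, yields 1 and is returned in memory. */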
2064 \f
2065 /* Return true if we should assign DECL a pseudo register; false if it
2066 should live on the local stack. */
2067
2068 bool
2069 use_register_for_decl (const_tree decl)
2070 {
2071 if (!targetm.calls.allocate_stack_slots_for_args())
2072 return true;
2073
2074 /* Honor volatile. */
2075 if (TREE_SIDE_EFFECTS (decl))
2076 return false;
2077
2078 /* Honor addressability. */
2079 if (TREE_ADDRESSABLE (decl))
2080 return false;
2081
2082 /* Only register-like things go in registers. */
2083 if (DECL_MODE (decl) == BLKmode)
2084 return false;
2085
2086 /* If -ffloat-store specified, don't put explicit float variables
2087 into registers. */
2088 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
2089 propagates values across these stores, and it probably shouldn't. */
2090 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
2091 return false;
2092
2093 /* If we're not interested in tracking debugging information for
2094 this decl, then we can certainly put it in a register. */
2095 if (DECL_IGNORED_P (decl))
2096 return true;
2097
2098 if (optimize)
2099 return true;
2100
2101 if (!DECL_REGISTER (decl))
2102 return false;
2103
2104 switch (TREE_CODE (TREE_TYPE (decl)))
2105 {
2106 case RECORD_TYPE:
2107 case UNION_TYPE:
2108 case QUAL_UNION_TYPE:
2109 /* When not optimizing, disregard register keyword for variables with
2110 types containing methods, otherwise the methods won't be callable
2111 from the debugger. */
2112 if (TYPE_METHODS (TREE_TYPE (decl)))
2113 return false;
2114 break;
2115 default:
2116 break;
2117 }
2118
2119 return true;
2120 }
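/* Illustration only: a local declared "volatile int v" fails the
   TREE_SIDE_EFFECTS test, and a local "int x" whose address is taken
   fails the TREE_ADDRESSABLE test, so both get stack slots; an
   ordinary scalar local in an optimized function gets a pseudo. */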
2121
2122 /* Return true if TYPE should be passed by invisible reference. */
2123
2124 bool
2125 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2126 tree type, bool named_arg)
2127 {
2128 if (type)
2129 {
2130 /* If this type contains non-trivial constructors, then it is
2131 forbidden for the middle-end to create any new copies. */
2132 if (TREE_ADDRESSABLE (type))
2133 return true;
2134
2135 /* GCC post 3.4 passes *all* variable sized types by reference. */
2136 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2137 return true;
2138
2139 /* If a record type should be passed the same as its first (and only)
2140 member, use the type and mode of that member. */
2141 if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
2142 {
2143 type = TREE_TYPE (first_field (type));
2144 mode = TYPE_MODE (type);
2145 }
2146 }
2147
2148 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
2149 }
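/* A sketch of the variable-size rule above (illustrative; such types
   come from GNU extensions and non-C front ends rather than ISO C):
   whenever TYPE_SIZE of the argument's type is not an INTEGER_CST,
   e.g. a variable-length array type, the argument is passed by
   invisible reference without consulting the target hook at all. */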
2150
2151 /* Return true if TYPE, which is passed by reference, should be callee
2152 copied instead of caller copied. */
2153
2154 bool
2155 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
2156 tree type, bool named_arg)
2157 {
2158 if (type && TREE_ADDRESSABLE (type))
2159 return false;
2160 return targetm.calls.callee_copies (ca, mode, type, named_arg);
2161 }
2162
2163 /* Structures to communicate between the subroutines of assign_parms.
2164 The first holds data persistent across all parameters, the second
2165 is cleared out for each parameter. */
2166
2167 struct assign_parm_data_all
2168 {
2169 CUMULATIVE_ARGS args_so_far;
2170 struct args_size stack_args_size;
2171 tree function_result_decl;
2172 tree orig_fnargs;
2173 rtx first_conversion_insn;
2174 rtx last_conversion_insn;
2175 HOST_WIDE_INT pretend_args_size;
2176 HOST_WIDE_INT extra_pretend_bytes;
2177 int reg_parm_stack_space;
2178 };
2179
2180 struct assign_parm_data_one
2181 {
2182 tree nominal_type;
2183 tree passed_type;
2184 rtx entry_parm;
2185 rtx stack_parm;
2186 enum machine_mode nominal_mode;
2187 enum machine_mode passed_mode;
2188 enum machine_mode promoted_mode;
2189 struct locate_and_pad_arg_data locate;
2190 int partial;
2191 BOOL_BITFIELD named_arg : 1;
2192 BOOL_BITFIELD passed_pointer : 1;
2193 BOOL_BITFIELD on_stack : 1;
2194 BOOL_BITFIELD loaded_in_reg : 1;
2195 };
2196
2197 /* A subroutine of assign_parms. Initialize ALL. */
2198
2199 static void
2200 assign_parms_initialize_all (struct assign_parm_data_all *all)
2201 {
2202 tree fntype ATTRIBUTE_UNUSED;
2203
2204 memset (all, 0, sizeof (*all));
2205
2206 fntype = TREE_TYPE (current_function_decl);
2207
2208 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
2209 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
2210 #else
2211 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
2212 current_function_decl, -1);
2213 #endif
2214
2215 #ifdef REG_PARM_STACK_SPACE
2216 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
2217 #endif
2218 }
2219
2220 /* If ARGS contains entries with complex types, split each entry into
2221 two entries of the component type. The vector *ARGS is modified in
2222 place. */
2223
2224 static void
2225 split_complex_args (VEC(tree, heap) **args)
2226 {
2227 unsigned i;
2228 tree p;
2229
2230 FOR_EACH_VEC_ELT (tree, *args, i, p)
2231 {
2232 tree type = TREE_TYPE (p);
2233 if (TREE_CODE (type) == COMPLEX_TYPE
2234 && targetm.calls.split_complex_arg (type))
2235 {
2236 tree decl;
2237 tree subtype = TREE_TYPE (type);
2238 bool addressable = TREE_ADDRESSABLE (p);
2239
2240 /* Rewrite the PARM_DECL's type with its component. */
2241 p = copy_node (p);
2242 TREE_TYPE (p) = subtype;
2243 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
2244 DECL_MODE (p) = VOIDmode;
2245 DECL_SIZE (p) = NULL;
2246 DECL_SIZE_UNIT (p) = NULL;
2247 /* If this arg must go in memory, put it in a pseudo here.
2248 We can't allow it to go in memory as per normal parms,
2249 because the usual place might not have the imag part
2250 adjacent to the real part. */
2251 DECL_ARTIFICIAL (p) = addressable;
2252 DECL_IGNORED_P (p) = addressable;
2253 TREE_ADDRESSABLE (p) = 0;
2254 layout_decl (p, 0);
2255 VEC_replace (tree, *args, i, p);
2256
2257 /* Build a second synthetic decl. */
2258 decl = build_decl (EXPR_LOCATION (p),
2259 PARM_DECL, NULL_TREE, subtype);
2260 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2261 DECL_ARTIFICIAL (decl) = addressable;
2262 DECL_IGNORED_P (decl) = addressable;
2263 layout_decl (decl, 0);
2264 VEC_safe_insert (tree, heap, *args, ++i, decl);
2265 }
2266 }
2267 }
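/* For example (illustration only): on a target whose split_complex_arg
   hook returns true for complex doubles, a declaration such as

     void f (_Complex double z);

   is handled here as if it had been written

     void f (double z_real, double z_imag);

   with the second PARM_DECL synthesized above. The halves are glued
   back together later by assign_parms_unsplit_complex. */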
2268
2269 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2270 the hidden struct return argument, and (abi willing) complex args.
2271 Return the new parameter list. */
2272
2273 static VEC(tree, heap) *
2274 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2275 {
2276 tree fndecl = current_function_decl;
2277 tree fntype = TREE_TYPE (fndecl);
2278 VEC(tree, heap) *fnargs = NULL;
2279 tree arg;
2280
2281 for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
2282 VEC_safe_push (tree, heap, fnargs, arg);
2283
2284 all->orig_fnargs = DECL_ARGUMENTS (fndecl);
2285
2286 /* If struct value address is treated as the first argument, make it so. */
2287 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2288 && ! cfun->returns_pcc_struct
2289 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2290 {
2291 tree type = build_pointer_type (TREE_TYPE (fntype));
2292 tree decl;
2293
2294 decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
2295 PARM_DECL, get_identifier (".result_ptr"), type);
2296 DECL_ARG_TYPE (decl) = type;
2297 DECL_ARTIFICIAL (decl) = 1;
2298 DECL_NAMELESS (decl) = 1;
2299 TREE_CONSTANT (decl) = 1;
2300
2301 DECL_CHAIN (decl) = all->orig_fnargs;
2302 all->orig_fnargs = decl;
2303 VEC_safe_insert (tree, heap, fnargs, 0, decl);
2304
2305 all->function_result_decl = decl;
2306 }
2307
2308 /* If the target wants to split complex arguments into scalars, do so. */
2309 if (targetm.calls.split_complex_arg)
2310 split_complex_args (&fnargs);
2311
2312 return fnargs;
2313 }
2314
2315 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2316 data for the parameter. Incorporate ABI specifics such as pass-by-
2317 reference and type promotion. */
2318
2319 static void
2320 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2321 struct assign_parm_data_one *data)
2322 {
2323 tree nominal_type, passed_type;
2324 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2325 int unsignedp;
2326
2327 memset (data, 0, sizeof (*data));
2328
2329 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2330 if (!cfun->stdarg)
2331 data->named_arg = 1; /* No variadic parms. */
2332 else if (DECL_CHAIN (parm))
2333 data->named_arg = 1; /* Not the last non-variadic parm. */
2334 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2335 data->named_arg = 1; /* Only variadic ones are unnamed. */
2336 else
2337 data->named_arg = 0; /* Treat as variadic. */
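/* Illustration: in "int f (int a, int b, ...)", A has a DECL_CHAIN and
   so is always named; B, the last declared parm of a stdarg function,
   is named only if the target's strict_argument_naming hook says so. */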
2338
2339 nominal_type = TREE_TYPE (parm);
2340 passed_type = DECL_ARG_TYPE (parm);
2341
2342 /* Look out for errors propagating this far. Also, if the parameter's
2343 type is void then its value doesn't matter. */
2344 if (TREE_TYPE (parm) == error_mark_node
2345 /* This can happen after weird syntax errors
2346 or if an enum type is defined among the parms. */
2347 || TREE_CODE (parm) != PARM_DECL
2348 || passed_type == NULL
2349 || VOID_TYPE_P (nominal_type))
2350 {
2351 nominal_type = passed_type = void_type_node;
2352 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2353 goto egress;
2354 }
2355
2356 /* Find mode of arg as it is passed, and mode of arg as it should be
2357 during execution of this function. */
2358 passed_mode = TYPE_MODE (passed_type);
2359 nominal_mode = TYPE_MODE (nominal_type);
2360
2361 /* If the parm is to be passed as a transparent union or record, use the
2362 type of the first field for the tests below. We have already verified
2363 that the modes are the same. */
2364 if ((TREE_CODE (passed_type) == UNION_TYPE
2365 || TREE_CODE (passed_type) == RECORD_TYPE)
2366 && TYPE_TRANSPARENT_AGGR (passed_type))
2367 passed_type = TREE_TYPE (first_field (passed_type));
2368
2369 /* See if this arg was passed by invisible reference. */
2370 if (pass_by_reference (&all->args_so_far, passed_mode,
2371 passed_type, data->named_arg))
2372 {
2373 passed_type = nominal_type = build_pointer_type (passed_type);
2374 data->passed_pointer = true;
2375 passed_mode = nominal_mode = Pmode;
2376 }
2377
2378 /* Find mode as it is passed by the ABI. */
2379 unsignedp = TYPE_UNSIGNED (passed_type);
2380 promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
2381 TREE_TYPE (current_function_decl), 0);
2382
2383 egress:
2384 data->nominal_type = nominal_type;
2385 data->passed_type = passed_type;
2386 data->nominal_mode = nominal_mode;
2387 data->passed_mode = passed_mode;
2388 data->promoted_mode = promoted_mode;
2389 }
2390
2391 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2392
2393 static void
2394 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2395 struct assign_parm_data_one *data, bool no_rtl)
2396 {
2397 int varargs_pretend_bytes = 0;
2398
2399 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2400 data->promoted_mode,
2401 data->passed_type,
2402 &varargs_pretend_bytes, no_rtl);
2403
2404 /* If the back-end has requested extra stack space, record how much is
2405 needed. Do not change pretend_args_size otherwise since it may be
2406 nonzero from an earlier partial argument. */
2407 if (varargs_pretend_bytes > 0)
2408 all->pretend_args_size = varargs_pretend_bytes;
2409 }
2410
2411 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2412 the incoming location of the current parameter. */
2413
2414 static void
2415 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2416 struct assign_parm_data_one *data)
2417 {
2418 HOST_WIDE_INT pretend_bytes = 0;
2419 rtx entry_parm;
2420 bool in_regs;
2421
2422 if (data->promoted_mode == VOIDmode)
2423 {
2424 data->entry_parm = data->stack_parm = const0_rtx;
2425 return;
2426 }
2427
2428 entry_parm = targetm.calls.function_incoming_arg (&all->args_so_far,
2429 data->promoted_mode,
2430 data->passed_type,
2431 data->named_arg);
2432
2433 if (entry_parm == 0)
2434 data->promoted_mode = data->passed_mode;
2435
2436 /* Determine parm's home in the stack, in case it arrives in the stack
2437 or we should pretend it did. Compute the stack position and rtx where
2438 the argument arrives and its size.
2439
2440 There is one complexity here: If this was a parameter that would
2441 have been passed in registers, but wasn't only because it is
2442 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2443 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2444 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2445 as it was the previous time. */
2446 in_regs = entry_parm != 0;
2447 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2448 in_regs = true;
2449 #endif
2450 if (!in_regs && !data->named_arg)
2451 {
2452 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2453 {
2454 rtx tem;
2455 tem = targetm.calls.function_incoming_arg (&all->args_so_far,
2456 data->promoted_mode,
2457 data->passed_type, true);
2458 in_regs = tem != NULL;
2459 }
2460 }
2461
2462 /* If this parameter was passed both in registers and in the stack, use
2463 the copy on the stack. */
2464 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2465 data->passed_type))
2466 entry_parm = 0;
2467
2468 if (entry_parm)
2469 {
2470 int partial;
2471
2472 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2473 data->promoted_mode,
2474 data->passed_type,
2475 data->named_arg);
2476 data->partial = partial;
2477
2478 /* The caller might already have allocated stack space for the
2479 register parameters. */
2480 if (partial != 0 && all->reg_parm_stack_space == 0)
2481 {
2482 /* Part of this argument is passed in registers and part
2483 is passed on the stack. Ask the prologue code to extend
2484 the stack part so that we can recreate the full value.
2485
2486 PRETEND_BYTES is the size of the registers we need to store.
2487 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2488 stack space that the prologue should allocate.
2489
2490 Internally, gcc assumes that the argument pointer is aligned
2491 to STACK_BOUNDARY bits. This is used both for alignment
2492 optimizations (see init_emit) and to locate arguments that are
2493 aligned to more than PARM_BOUNDARY bits. We must preserve this
2494 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2495 a stack boundary. */
2496
2497 /* We assume at most one partial arg, and it must be the first
2498 argument on the stack. */
2499 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2500
2501 pretend_bytes = partial;
2502 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
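/* Worked example, purely illustrative: if 4 bytes of the argument
   arrived in registers and STACK_BYTES is 16, PRETEND_BYTES is 4 and
   CEIL_ROUND yields a pretend_args_size of 16, preserving the
   STACK_BOUNDARY invariant described above. */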
2503
2504 /* We want to align relative to the actual stack pointer, so
2505 don't include this in the stack size until later. */
2506 all->extra_pretend_bytes = all->pretend_args_size;
2507 }
2508 }
2509
2510 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2511 entry_parm ? data->partial : 0, current_function_decl,
2512 &all->stack_args_size, &data->locate);
2513
2514 /* Update parm_stack_boundary if this parameter is passed in the
2515 stack. */
2516 if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
2517 crtl->parm_stack_boundary = data->locate.boundary;
2518
2519 /* Adjust offsets to include the pretend args. */
2520 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2521 data->locate.slot_offset.constant += pretend_bytes;
2522 data->locate.offset.constant += pretend_bytes;
2523
2524 data->entry_parm = entry_parm;
2525 }
2526
2527 /* A subroutine of assign_parms. If there is actually space on the stack
2528 for this parm, count it in stack_args_size and return true. */
2529
2530 static bool
2531 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2532 struct assign_parm_data_one *data)
2533 {
2534 /* Trivially true if we've no incoming register. */
2535 if (data->entry_parm == NULL)
2536 ;
2537 /* Also true if we're partially in registers and partially not,
2538 since we've arranged to drop the entire argument on the stack. */
2539 else if (data->partial != 0)
2540 ;
2541 /* Also true if the target says that it's passed in both registers
2542 and on the stack. */
2543 else if (GET_CODE (data->entry_parm) == PARALLEL
2544 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2545 ;
2546 /* Also true if the target says that there's stack allocated for
2547 all register parameters. */
2548 else if (all->reg_parm_stack_space > 0)
2549 ;
2550 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2551 else
2552 return false;
2553
2554 all->stack_args_size.constant += data->locate.size.constant;
2555 if (data->locate.size.var)
2556 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2557
2558 return true;
2559 }
2560
2561 /* A subroutine of assign_parms. Given that this parameter is allocated
2562 stack space by the ABI, find it. */
2563
2564 static void
2565 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2566 {
2567 rtx offset_rtx, stack_parm;
2568 unsigned int align, boundary;
2569
2570 /* If we're passing this arg using a reg, make its stack home the
2571 aligned stack slot. */
2572 if (data->entry_parm)
2573 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2574 else
2575 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2576
2577 stack_parm = crtl->args.internal_arg_pointer;
2578 if (offset_rtx != const0_rtx)
2579 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2580 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2581
2582 if (!data->passed_pointer)
2583 {
2584 set_mem_attributes (stack_parm, parm, 1);
2585 /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
2586 while the promoted mode's size is needed. */
2587 if (data->promoted_mode != BLKmode
2588 && data->promoted_mode != DECL_MODE (parm))
2589 {
2590 set_mem_size (stack_parm,
2591 GEN_INT (GET_MODE_SIZE (data->promoted_mode)));
2592 if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm))
2593 {
2594 int offset = subreg_lowpart_offset (DECL_MODE (parm),
2595 data->promoted_mode);
2596 if (offset)
2597 set_mem_offset (stack_parm,
2598 plus_constant (MEM_OFFSET (stack_parm),
2599 -offset));
2600 }
2601 }
2602 }
2603
2604 boundary = data->locate.boundary;
2605 align = BITS_PER_UNIT;
2606
2607 /* If we're padding upward, we know that the alignment of the slot
2608 is TARGET_FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2609 intentionally forcing upward padding. Otherwise we have to come
2610 up with a guess at the alignment based on OFFSET_RTX. */
2611 if (data->locate.where_pad != downward || data->entry_parm)
2612 align = boundary;
2613 else if (CONST_INT_P (offset_rtx))
2614 {
2615 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2616 align = align & -align;
2617 }
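/* ANDing with the negation keeps only the lowest set bit. A worked
   example, for illustration: a 4-byte offset with a 64-bit boundary
   gives (4 * 8) | 64 == 96, and 96 & -96 == 32, so we guess the slot
   is 32-bit aligned. */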
2618 set_mem_align (stack_parm, align);
2619
2620 if (data->entry_parm)
2621 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2622
2623 data->stack_parm = stack_parm;
2624 }
2625
2626 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2627 always valid and contiguous. */
2628
2629 static void
2630 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2631 {
2632 rtx entry_parm = data->entry_parm;
2633 rtx stack_parm = data->stack_parm;
2634
2635 /* If this parm was passed part in regs and part in memory, pretend it
2636 arrived entirely in memory by pushing the register-part onto the stack.
2637 In the special case of a DImode or DFmode that is split, we could put
2638 it together in a pseudoreg directly, but for now that's not worth
2639 bothering with. */
2640 if (data->partial != 0)
2641 {
2642 /* Handle calls that pass values in multiple non-contiguous
2643 locations. The Irix 6 ABI has examples of this. */
2644 if (GET_CODE (entry_parm) == PARALLEL)
2645 emit_group_store (validize_mem (stack_parm), entry_parm,
2646 data->passed_type,
2647 int_size_in_bytes (data->passed_type));
2648 else
2649 {
2650 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2651 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2652 data->partial / UNITS_PER_WORD);
2653 }
2654
2655 entry_parm = stack_parm;
2656 }
2657
2658 /* If we didn't decide this parm came in a register, by default it came
2659 on the stack. */
2660 else if (entry_parm == NULL)
2661 entry_parm = stack_parm;
2662
2663 /* When an argument is passed in multiple locations, we can't make use
2664 of this information, but we can save some copying if the whole argument
2665 is passed in a single register. */
2666 else if (GET_CODE (entry_parm) == PARALLEL
2667 && data->nominal_mode != BLKmode
2668 && data->passed_mode != BLKmode)
2669 {
2670 size_t i, len = XVECLEN (entry_parm, 0);
2671
2672 for (i = 0; i < len; i++)
2673 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2674 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2675 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2676 == data->passed_mode)
2677 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2678 {
2679 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2680 break;
2681 }
2682 }
2683
2684 data->entry_parm = entry_parm;
2685 }
2686
2687 /* A subroutine of assign_parms. Reconstitute any values which were
2688 passed in multiple registers and would fit in a single register. */
2689
2690 static void
2691 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2692 {
2693 rtx entry_parm = data->entry_parm;
2694
2695 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2696 This can be done with register operations rather than on the
2697 stack, even if we will store the reconstituted parameter on the
2698 stack later. */
2699 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2700 {
2701 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2702 emit_group_store (parmreg, entry_parm, data->passed_type,
2703 GET_MODE_SIZE (GET_MODE (entry_parm)));
2704 entry_parm = parmreg;
2705 }
2706
2707 data->entry_parm = entry_parm;
2708 }
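/* Sketch, with a hypothetical 32-bit target: an argument arriving as

     (parallel:DI [(expr_list (reg:SI 3) (const_int 0))
                   (expr_list (reg:SI 4) (const_int 4))])

   carries DImode on the PARALLEL itself, so it is reassembled above
   into a single DImode pseudo via emit_group_store. */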
2709
2710 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2711 always valid and properly aligned. */
2712
2713 static void
2714 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2715 {
2716 rtx stack_parm = data->stack_parm;
2717
2718 /* If we can't trust the parm stack slot to be aligned enough for its
2719 ultimate type, don't use that slot after entry. We'll make another
2720 stack slot, if we need one. */
2721 if (stack_parm
2722 && ((STRICT_ALIGNMENT
2723 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2724 || (data->nominal_type
2725 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2726 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2727 stack_parm = NULL;
2728
2729 /* If parm was passed in memory, and we need to convert it on entry,
2730 don't store it back in that same slot. */
2731 else if (data->entry_parm == stack_parm
2732 && data->nominal_mode != BLKmode
2733 && data->nominal_mode != data->passed_mode)
2734 stack_parm = NULL;
2735
2736 /* If stack protection is in effect for this function, don't leave any
2737 pointers in their passed stack slots. */
2738 else if (crtl->stack_protect_guard
2739 && (flag_stack_protect == 2
2740 || data->passed_pointer
2741 || POINTER_TYPE_P (data->nominal_type)))
2742 stack_parm = NULL;
2743
2744 data->stack_parm = stack_parm;
2745 }
2746
2747 /* A subroutine of assign_parms. Return true if the current parameter
2748 should be stored in BLKmode in the current frame. */
2749
2750 static bool
2751 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2752 {
2753 if (data->nominal_mode == BLKmode)
2754 return true;
2755 if (GET_MODE (data->entry_parm) == BLKmode)
2756 return true;
2757
2758 #ifdef BLOCK_REG_PADDING
2759 /* Only assign_parm_setup_block knows how to deal with register arguments
2760 that are padded at the least significant end. */
2761 if (REG_P (data->entry_parm)
2762 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2763 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2764 == (BYTES_BIG_ENDIAN ? upward : downward)))
2765 return true;
2766 #endif
2767
2768 return false;
2769 }
2770
2771 /* A subroutine of assign_parms. Arrange for the parameter to be
2772 present and valid in DATA->STACK_PARM. */
2773
2774 static void
2775 assign_parm_setup_block (struct assign_parm_data_all *all,
2776 tree parm, struct assign_parm_data_one *data)
2777 {
2778 rtx entry_parm = data->entry_parm;
2779 rtx stack_parm = data->stack_parm;
2780 HOST_WIDE_INT size;
2781 HOST_WIDE_INT size_stored;
2782
2783 if (GET_CODE (entry_parm) == PARALLEL)
2784 entry_parm = emit_group_move_into_temps (entry_parm);
2785
2786 size = int_size_in_bytes (data->passed_type);
2787 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2788 if (stack_parm == 0)
2789 {
2790 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2791 stack_parm = assign_stack_local (BLKmode, size_stored,
2792 DECL_ALIGN (parm));
2793 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2794 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2795 set_mem_attributes (stack_parm, parm, 1);
2796 }
2797
2798 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2799 calls that pass values in multiple non-contiguous locations. */
2800 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2801 {
2802 rtx mem;
2803
2804 /* Note that we will be storing an integral number of words.
2805 So we have to be careful to ensure that we allocate an
2806 integral number of words. We do this above when we call
2807 assign_stack_local if space was not allocated in the argument
2808 list. If it was, this will not work if PARM_BOUNDARY is not
2809 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2810 if it becomes a problem. The exception is when BLKmode arrives
2811 with arguments not conforming to word_mode. */
2812
2813 if (data->stack_parm == 0)
2814 ;
2815 else if (GET_CODE (entry_parm) == PARALLEL)
2816 ;
2817 else
2818 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2819
2820 mem = validize_mem (stack_parm);
2821
2822 /* Handle values in multiple non-contiguous locations. */
2823 if (GET_CODE (entry_parm) == PARALLEL)
2824 {
2825 push_to_sequence2 (all->first_conversion_insn,
2826 all->last_conversion_insn);
2827 emit_group_store (mem, entry_parm, data->passed_type, size);
2828 all->first_conversion_insn = get_insns ();
2829 all->last_conversion_insn = get_last_insn ();
2830 end_sequence ();
2831 }
2832
2833 else if (size == 0)
2834 ;
2835
2836 /* If SIZE is that of a mode no bigger than a word, just use
2837 that mode's store operation. */
2838 else if (size <= UNITS_PER_WORD)
2839 {
2840 enum machine_mode mode
2841 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2842
2843 if (mode != BLKmode
2844 #ifdef BLOCK_REG_PADDING
2845 && (size == UNITS_PER_WORD
2846 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2847 != (BYTES_BIG_ENDIAN ? upward : downward)))
2848 #endif
2849 )
2850 {
2851 rtx reg;
2852
2853 /* We are really truncating a word_mode value containing
2854 SIZE bytes into a value of mode MODE. If such an
2855 operation requires no actual instructions, we can refer
2856 to the value directly in mode MODE, otherwise we must
2857 start with the register in word_mode and explicitly
2858 convert it. */
2859 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2860 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2861 else
2862 {
2863 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2864 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2865 }
2866 emit_move_insn (change_address (mem, mode, 0), reg);
2867 }
2868
2869 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2870 machine must be aligned to the left before storing
2871 to memory. Note that the previous test doesn't
2872 handle all cases (e.g. SIZE == 3). */
2873 else if (size != UNITS_PER_WORD
2874 #ifdef BLOCK_REG_PADDING
2875 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2876 == downward)
2877 #else
2878 && BYTES_BIG_ENDIAN
2879 #endif
2880 )
2881 {
2882 rtx tem, x;
2883 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2884 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2885
2886 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2887 build_int_cst (NULL_TREE, by),
2888 NULL_RTX, 1);
2889 tem = change_address (mem, word_mode, 0);
2890 emit_move_insn (tem, x);
2891 }
2892 else
2893 move_block_from_reg (REGNO (entry_parm), mem,
2894 size_stored / UNITS_PER_WORD);
2895 }
2896 else
2897 move_block_from_reg (REGNO (entry_parm), mem,
2898 size_stored / UNITS_PER_WORD);
2899 }
2900 else if (data->stack_parm == 0)
2901 {
2902 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2903 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2904 BLOCK_OP_NORMAL);
2905 all->first_conversion_insn = get_insns ();
2906 all->last_conversion_insn = get_last_insn ();
2907 end_sequence ();
2908 }
2909
2910 data->stack_parm = stack_parm;
2911 SET_DECL_RTL (parm, stack_parm);
2912 }
2913
2914 /* A subroutine of assign_parm_setup_reg, called through note_stores.
2915 This collects sets and clobbers of hard registers in a HARD_REG_SET,
2916 which is pointed to by DATA. */
2917 static void
2918 record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
2919 {
2920 HARD_REG_SET *pset = (HARD_REG_SET *)data;
2921 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
2922 {
2923 int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
2924 while (nregs-- > 0)
2925 SET_HARD_REG_BIT (*pset, REGNO (x) + nregs);
2926 }
2927 }
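/* For instance (illustrative, assuming a 32-bit target): a set of a
   DImode value in hard register 3 spans two hard registers, so both
   bits 3 and 4 end up set in the HARD_REG_SET. */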
2928
2929 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2930 parameter. Get it there. Perform all ABI specified conversions. */
2931
2932 static void
2933 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2934 struct assign_parm_data_one *data)
2935 {
2936 rtx parmreg, validated_mem;
2937 rtx equiv_stack_parm;
2938 enum machine_mode promoted_nominal_mode;
2939 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2940 bool did_conversion = false;
2941 bool need_conversion, moved;
2942
2943 /* Store the parm in a pseudoregister during the function, but we may
2944 need to do it in a wider mode. Using 2 here makes the result
2945 consistent with promote_decl_mode and thus expand_expr_real_1. */
2946 promoted_nominal_mode
2947 = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
2948 TREE_TYPE (current_function_decl), 2);
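/* E.g., on a hypothetical target whose ABI promotes sub-word integers,
   a "short" parm has nominal_mode HImode but is stored in an SImode
   pseudo here, matching what expand_expr_real_1 will later assume via
   promote_decl_mode. */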
2949
2950 parmreg = gen_reg_rtx (promoted_nominal_mode);
2951
2952 if (!DECL_ARTIFICIAL (parm))
2953 mark_user_reg (parmreg);
2954
2955 /* If this was an item that we received a pointer to,
2956 set DECL_RTL appropriately. */
2957 if (data->passed_pointer)
2958 {
2959 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2960 set_mem_attributes (x, parm, 1);
2961 SET_DECL_RTL (parm, x);
2962 }
2963 else
2964 SET_DECL_RTL (parm, parmreg);
2965
2966 assign_parm_remove_parallels (data);
2967
2968 /* Copy the value into the register, thus bridging between
2969 assign_parm_find_data_types and expand_expr_real_1. */
2970
2971 equiv_stack_parm = data->stack_parm;
2972 validated_mem = validize_mem (data->entry_parm);
2973
2974 need_conversion = (data->nominal_mode != data->passed_mode
2975 || promoted_nominal_mode != data->promoted_mode);
2976 moved = false;
2977
2978 if (need_conversion
2979 && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
2980 && data->nominal_mode == data->passed_mode
2981 && data->nominal_mode == GET_MODE (data->entry_parm))
2982 {
2983 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2984 mode, by the caller. We now have to convert it to
2985 NOMINAL_MODE, if different. However, PARMREG may be in
2986 a different mode than NOMINAL_MODE if it is being stored
2987 promoted.
2988
2989 If ENTRY_PARM is a hard register, it might be in a register
2990 not valid for operating in its mode (e.g., an odd-numbered
2991 register for a DFmode). In that case, moves are the only
2992 thing valid, so we can't do a convert from there. This
2993 occurs when the calling sequence allows such misaligned
2994 usages.
2995
2996 In addition, the conversion may involve a call, which could
2997 clobber parameters which haven't been copied to pseudo
2998 registers yet.
2999
3000 First, we try to emit an insn which performs the necessary
3001 conversion. We verify that this insn does not clobber any
3002 hard registers. */
3003
3004 enum insn_code icode;
3005 rtx op0, op1;
3006
3007 icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
3008 unsignedp);
3009
3010 op0 = parmreg;
3011 op1 = validated_mem;
3012 if (icode != CODE_FOR_nothing
3013 && insn_data[icode].operand[0].predicate (op0, promoted_nominal_mode)
3014 && insn_data[icode].operand[1].predicate (op1, data->passed_mode))
3015 {
3016 enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
3017 rtx insn, insns;
3018 HARD_REG_SET hardregs;
3019
3020 start_sequence ();
3021 insn = gen_extend_insn (op0, op1, promoted_nominal_mode,
3022 data->passed_mode, unsignedp);
3023 emit_insn (insn);
3024 insns = get_insns ();
3025
3026 moved = true;
3027 CLEAR_HARD_REG_SET (hardregs);
3028 for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
3029 {
3030 if (INSN_P (insn))
3031 note_stores (PATTERN (insn), record_hard_reg_sets,
3032 &hardregs);
3033 if (!hard_reg_set_empty_p (hardregs))
3034 moved = false;
3035 }
3036
3037 end_sequence ();
3038
3039 if (moved)
3040 {
3041 emit_insn (insns);
3042 if (equiv_stack_parm != NULL_RTX)
3043 equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
3044 equiv_stack_parm);
3045 }
3046 }
3047 }
3048
3049 if (moved)
3050 /* Nothing to do. */
3051 ;
3052 else if (need_conversion)
3053 {
3054 /* We did not have an insn to convert directly, or the sequence
3055 generated appeared unsafe. We must first copy the parm to a
3056 pseudo reg, and save the conversion until after all
3057 parameters have been moved. */
3058
3059 int save_tree_used;
3060 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3061
3062 emit_move_insn (tempreg, validated_mem);
3063
3064 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3065 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
3066
3067 if (GET_CODE (tempreg) == SUBREG
3068 && GET_MODE (tempreg) == data->nominal_mode
3069 && REG_P (SUBREG_REG (tempreg))
3070 && data->nominal_mode == data->passed_mode
3071 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
3072 && GET_MODE_SIZE (GET_MODE (tempreg))
3073 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
3074 {
3075 /* The argument is already sign/zero extended, so note it
3076 into the subreg. */
3077 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
3078 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
3079 }
3080
3081 /* TREE_USED gets set erroneously during expand_assignment. */
3082 save_tree_used = TREE_USED (parm);
3083 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
3084 TREE_USED (parm) = save_tree_used;
3085 all->first_conversion_insn = get_insns ();
3086 all->last_conversion_insn = get_last_insn ();
3087 end_sequence ();
3088
3089 did_conversion = true;
3090 }
3091 else
3092 emit_move_insn (parmreg, validated_mem);
3093
3094 /* If we were passed a pointer but the actual value can safely live
3095 in a register, put it in one. */
3096 if (data->passed_pointer
3097 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3098 /* If by-reference argument was promoted, demote it. */
3099 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
3100 || use_register_for_decl (parm)))
3101 {
3102 /* We can't use nominal_mode, because it will have been set to
3103 Pmode above. We must use the actual mode of the parm. */
3104 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3105 mark_user_reg (parmreg);
3106
3107 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
3108 {
3109 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
3110 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
3111
3112 push_to_sequence2 (all->first_conversion_insn,
3113 all->last_conversion_insn);
3114 emit_move_insn (tempreg, DECL_RTL (parm));
3115 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
3116 emit_move_insn (parmreg, tempreg);
3117 all->first_conversion_insn = get_insns ();
3118 all->last_conversion_insn = get_last_insn ();
3119 end_sequence ();
3120
3121 did_conversion = true;
3122 }
3123 else
3124 emit_move_insn (parmreg, DECL_RTL (parm));
3125
3126 SET_DECL_RTL (parm, parmreg);
3127
3128 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3129 now the parm. */
3130 data->stack_parm = NULL;
3131 }
3132
3133 /* Mark the register as eliminable if we did no conversion and it was
3134 copied from memory at a fixed offset, and the arg pointer was not
3135 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
3136 offset formed an invalid address, such memory-equivalences as we
3137 make here would screw up life analysis for it. */
3138 if (data->nominal_mode == data->passed_mode
3139 && !did_conversion
3140 && data->stack_parm != 0
3141 && MEM_P (data->stack_parm)
3142 && data->locate.offset.var == 0
3143 && reg_mentioned_p (virtual_incoming_args_rtx,
3144 XEXP (data->stack_parm, 0)))
3145 {
3146 rtx linsn = get_last_insn ();
3147 rtx sinsn, set;
3148
3149 /* Mark complex types separately. */
3150 if (GET_CODE (parmreg) == CONCAT)
3151 {
3152 enum machine_mode submode
3153 = GET_MODE_INNER (GET_MODE (parmreg));
3154 int regnor = REGNO (XEXP (parmreg, 0));
3155 int regnoi = REGNO (XEXP (parmreg, 1));
3156 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
3157 rtx stacki = adjust_address_nv (data->stack_parm, submode,
3158 GET_MODE_SIZE (submode));
3159
3160 /* Scan backwards for the set of the real and
3161 imaginary parts. */
3162 for (sinsn = linsn; sinsn != 0;
3163 sinsn = prev_nonnote_insn (sinsn))
3164 {
3165 set = single_set (sinsn);
3166 if (set == 0)
3167 continue;
3168
3169 if (SET_DEST (set) == regno_reg_rtx [regnoi])
3170 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
3171 else if (SET_DEST (set) == regno_reg_rtx [regnor])
3172 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
3173 }
3174 }
3175 else if ((set = single_set (linsn)) != 0
3176 && SET_DEST (set) == parmreg)
3177 set_unique_reg_note (linsn, REG_EQUIV, equiv_stack_parm);
3178 }
3179
3180 /* For pointer data type, suggest pointer register. */
3181 if (POINTER_TYPE_P (TREE_TYPE (parm)))
3182 mark_reg_pointer (parmreg,
3183 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
3184 }
3185
3186 /* A subroutine of assign_parms. Allocate stack space to hold the current
3187 parameter. Get it there. Perform all ABI specified conversions. */
3188
3189 static void
3190 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
3191 struct assign_parm_data_one *data)
3192 {
3193 /* Value must be stored in the stack slot STACK_PARM during function
3194 execution. */
3195 bool to_conversion = false;
3196
3197 assign_parm_remove_parallels (data);
3198
3199 if (data->promoted_mode != data->nominal_mode)
3200 {
3201 /* Conversion is required. */
3202 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
3203
3204 emit_move_insn (tempreg, validize_mem (data->entry_parm));
3205
3206 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
3207 to_conversion = true;
3208
3209 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
3210 TYPE_UNSIGNED (TREE_TYPE (parm)));
3211
3212 if (data->stack_parm)
3213 {
3214 int offset = subreg_lowpart_offset (data->nominal_mode,
3215 GET_MODE (data->stack_parm));
3216 /* ??? This may need a big-endian conversion on sparc64. */
3217 data->stack_parm
3218 = adjust_address (data->stack_parm, data->nominal_mode, 0);
3219 if (offset && MEM_OFFSET (data->stack_parm))
3220 set_mem_offset (data->stack_parm,
3221 plus_constant (MEM_OFFSET (data->stack_parm),
3222 offset));
3223 }
3224 }
3225
3226 if (data->entry_parm != data->stack_parm)
3227 {
3228 rtx src, dest;
3229
3230 if (data->stack_parm == 0)
3231 {
3232 int align = STACK_SLOT_ALIGNMENT (data->passed_type,
3233 GET_MODE (data->entry_parm),
3234 TYPE_ALIGN (data->passed_type));
3235 data->stack_parm
3236 = assign_stack_local (GET_MODE (data->entry_parm),
3237 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
3238 align);
3239 set_mem_attributes (data->stack_parm, parm, 1);
3240 }
3241
3242 dest = validize_mem (data->stack_parm);
3243 src = validize_mem (data->entry_parm);
3244
3245 if (MEM_P (src))
3246 {
3247 /* Use a block move to handle potentially misaligned entry_parm. */
3248 if (!to_conversion)
3249 push_to_sequence2 (all->first_conversion_insn,
3250 all->last_conversion_insn);
3251 to_conversion = true;
3252
3253 emit_block_move (dest, src,
3254 GEN_INT (int_size_in_bytes (data->passed_type)),
3255 BLOCK_OP_NORMAL);
3256 }
3257 else
3258 emit_move_insn (dest, src);
3259 }
3260
3261 if (to_conversion)
3262 {
3263 all->first_conversion_insn = get_insns ();
3264 all->last_conversion_insn = get_last_insn ();
3265 end_sequence ();
3266 }
3267
3268 SET_DECL_RTL (parm, data->stack_parm);
3269 }
3270
3271 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
3272 undo the frobbing that we did in assign_parms_augmented_arg_list. */
3273
3274 static void
3275 assign_parms_unsplit_complex (struct assign_parm_data_all *all,
3276 VEC(tree, heap) *fnargs)
3277 {
3278 tree parm;
3279 tree orig_fnargs = all->orig_fnargs;
3280 unsigned i = 0;
3281
3282 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
3283 {
3284 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
3285 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
3286 {
3287 rtx tmp, real, imag;
3288 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
3289
3290 real = DECL_RTL (VEC_index (tree, fnargs, i));
3291 imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
3292 if (inner != GET_MODE (real))
3293 {
3294 real = gen_lowpart_SUBREG (inner, real);
3295 imag = gen_lowpart_SUBREG (inner, imag);
3296 }
3297
3298 if (TREE_ADDRESSABLE (parm))
3299 {
3300 rtx rmem, imem;
3301 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
3302 int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
3303 DECL_MODE (parm),
3304 TYPE_ALIGN (TREE_TYPE (parm)));
3305
3306 /* split_complex_arg put the real and imag parts in
3307 pseudos. Move them to memory. */
3308 tmp = assign_stack_local (DECL_MODE (parm), size, align);
3309 set_mem_attributes (tmp, parm, 1);
3310 rmem = adjust_address_nv (tmp, inner, 0);
3311 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
3312 push_to_sequence2 (all->first_conversion_insn,
3313 all->last_conversion_insn);
3314 emit_move_insn (rmem, real);
3315 emit_move_insn (imem, imag);
3316 all->first_conversion_insn = get_insns ();
3317 all->last_conversion_insn = get_last_insn ();
3318 end_sequence ();
3319 }
3320 else
3321 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3322 SET_DECL_RTL (parm, tmp);
3323
3324 real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
3325 imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
3326 if (inner != GET_MODE (real))
3327 {
3328 real = gen_lowpart_SUBREG (inner, real);
3329 imag = gen_lowpart_SUBREG (inner, imag);
3330 }
3331 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
3332 set_decl_incoming_rtl (parm, tmp, false);
3333 i++;
3334 }
3335 }
3336 }
3337
3338 /* Assign RTL expressions to the function's parameters. This may involve
3339 copying them into registers and using those registers as the DECL_RTL. */
3340
3341 static void
3342 assign_parms (tree fndecl)
3343 {
3344 struct assign_parm_data_all all;
3345 tree parm;
3346 VEC(tree, heap) *fnargs;
3347 unsigned i;
3348
3349 crtl->args.internal_arg_pointer
3350 = targetm.calls.internal_arg_pointer ();
3351
3352 assign_parms_initialize_all (&all);
3353 fnargs = assign_parms_augmented_arg_list (&all);
3354
3355 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3356 {
3357 struct assign_parm_data_one data;
3358
3359 /* Extract the type of PARM; adjust it according to ABI. */
3360 assign_parm_find_data_types (&all, parm, &data);
3361
3362 /* Early out for errors and void parameters. */
3363 if (data.passed_mode == VOIDmode)
3364 {
3365 SET_DECL_RTL (parm, const0_rtx);
3366 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3367 continue;
3368 }
3369
3370 /* Estimate stack alignment from parameter alignment. */
3371 if (SUPPORTS_STACK_ALIGNMENT)
3372 {
3373 unsigned int align
3374 = targetm.calls.function_arg_boundary (data.promoted_mode,
3375 data.passed_type);
3376 align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
3377 align);
3378 if (TYPE_ALIGN (data.nominal_type) > align)
3379 align = MINIMUM_ALIGNMENT (data.nominal_type,
3380 TYPE_MODE (data.nominal_type),
3381 TYPE_ALIGN (data.nominal_type));
3382 if (crtl->stack_alignment_estimated < align)
3383 {
3384 gcc_assert (!crtl->stack_realign_processed);
3385 crtl->stack_alignment_estimated = align;
3386 }
3387 }
3388
3389 if (cfun->stdarg && !DECL_CHAIN (parm))
3390 assign_parms_setup_varargs (&all, &data, false);
3391
3392 /* Find out where the parameter arrives in this function. */
3393 assign_parm_find_entry_rtl (&all, &data);
3394
3395 /* Find out where stack space for this parameter might be. */
3396 if (assign_parm_is_stack_parm (&all, &data))
3397 {
3398 assign_parm_find_stack_rtl (parm, &data);
3399 assign_parm_adjust_entry_rtl (&data);
3400 }
3401
3402 /* Record permanently how this parm was passed. */
3403 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
3404
3405 /* Update info on where next arg arrives in registers. */
3406 targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
3407 data.passed_type, data.named_arg);
3408
3409 assign_parm_adjust_stack_rtl (&data);
3410
3411 if (assign_parm_setup_block_p (&data))
3412 assign_parm_setup_block (&all, parm, &data);
3413 else if (data.passed_pointer || use_register_for_decl (parm))
3414 assign_parm_setup_reg (&all, parm, &data);
3415 else
3416 assign_parm_setup_stack (&all, parm, &data);
3417 }
3418
3419 if (targetm.calls.split_complex_arg)
3420 assign_parms_unsplit_complex (&all, fnargs);
3421
3422 VEC_free (tree, heap, fnargs);
3423
3424 /* Output all parameter conversion instructions (possibly including calls)
3425 now that all parameters have been copied out of hard registers. */
3426 emit_insn (all.first_conversion_insn);
3427
3428 /* Estimate reload stack alignment from scalar return mode. */
3429 if (SUPPORTS_STACK_ALIGNMENT)
3430 {
3431 if (DECL_RESULT (fndecl))
3432 {
3433 tree type = TREE_TYPE (DECL_RESULT (fndecl));
3434 enum machine_mode mode = TYPE_MODE (type);
3435
3436 if (mode != BLKmode
3437 && mode != VOIDmode
3438 && !AGGREGATE_TYPE_P (type))
3439 {
3440 unsigned int align = GET_MODE_ALIGNMENT (mode);
3441 if (crtl->stack_alignment_estimated < align)
3442 {
3443 gcc_assert (!crtl->stack_realign_processed);
3444 crtl->stack_alignment_estimated = align;
3445 }
3446 }
3447 }
3448 }
3449
3450 /* If we are receiving a struct value address as the first argument, set up
3451 the RTL for the function result. As this might require code to convert
3452 the transmitted address to Pmode, we do this here to ensure that possible
3453 preliminary conversions of the address have been emitted already. */
3454 if (all.function_result_decl)
3455 {
3456 tree result = DECL_RESULT (current_function_decl);
3457 rtx addr = DECL_RTL (all.function_result_decl);
3458 rtx x;
3459
3460 if (DECL_BY_REFERENCE (result))
3461 {
3462 SET_DECL_VALUE_EXPR (result, all.function_result_decl);
3463 x = addr;
3464 }
3465 else
3466 {
3467 SET_DECL_VALUE_EXPR (result,
3468 build1 (INDIRECT_REF, TREE_TYPE (result),
3469 all.function_result_decl));
3470 addr = convert_memory_address (Pmode, addr);
3471 x = gen_rtx_MEM (DECL_MODE (result), addr);
3472 set_mem_attributes (x, result, 1);
3473 }
3474
3475 DECL_HAS_VALUE_EXPR_P (result) = 1;
3476
3477 SET_DECL_RTL (result, x);
3478 }
3479
3480 /* We have aligned all the args, so add space for the pretend args. */
3481 crtl->args.pretend_args_size = all.pretend_args_size;
3482 all.stack_args_size.constant += all.extra_pretend_bytes;
3483 crtl->args.size = all.stack_args_size.constant;
3484
3485 /* Adjust function incoming argument size for alignment and
3486 minimum length. */
3487
3488 #ifdef REG_PARM_STACK_SPACE
3489 crtl->args.size = MAX (crtl->args.size,
3490 REG_PARM_STACK_SPACE (fndecl));
3491 #endif
3492
3493 crtl->args.size = CEIL_ROUND (crtl->args.size,
3494 PARM_BOUNDARY / BITS_PER_UNIT);
3495
3496 #ifdef ARGS_GROW_DOWNWARD
3497 crtl->args.arg_offset_rtx
3498 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3499 : expand_expr (size_diffop (all.stack_args_size.var,
3500 size_int (-all.stack_args_size.constant)),
3501 NULL_RTX, VOIDmode, EXPAND_NORMAL));
3502 #else
3503 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3504 #endif
3505
3506 /* See how many bytes, if any, of its args a function should try to pop
3507 on return. */
3508
3509 crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
3510 TREE_TYPE (fndecl),
3511 crtl->args.size);
3512
3513 /* For a stdarg.h function, save info about
3514 the regs and stack space used by the named args. */
3515
3516 crtl->args.info = all.args_so_far;
3517
3518 /* Set the rtx used for the function return value. Put this in its
3519 own variable so any optimizers that need this information don't have
3520 to include tree.h. Do this here so it gets done when an inlined
3521 function gets output. */
3522
3523 crtl->return_rtx
3524 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3525 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3526
3527 /* If scalar return value was computed in a pseudo-reg, or was a named
3528 return value that got dumped to the stack, copy that to the hard
3529 return register. */
3530 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3531 {
3532 tree decl_result = DECL_RESULT (fndecl);
3533 rtx decl_rtl = DECL_RTL (decl_result);
3534
3535 if (REG_P (decl_rtl)
3536 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3537 : DECL_REGISTER (decl_result))
3538 {
3539 rtx real_decl_rtl;
3540
3541 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3542 fndecl, true);
3543 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3544 /* The delay slot scheduler assumes that crtl->return_rtx
3545 holds the hard register containing the return value, not a
3546 temporary pseudo. */
3547 crtl->return_rtx = real_decl_rtl;
3548 }
3549 }
3550 }
3551
3552 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3553 For all seen types, gimplify their sizes. */
3554
3555 static tree
3556 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3557 {
3558 tree t = *tp;
3559
3560 *walk_subtrees = 0;
3561 if (TYPE_P (t))
3562 {
3563 if (POINTER_TYPE_P (t))
3564 *walk_subtrees = 1;
3565 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3566 && !TYPE_SIZES_GIMPLIFIED (t))
3567 {
3568 gimplify_type_sizes (t, (gimple_seq *) data);
3569 *walk_subtrees = 1;
3570 }
3571 }
3572
3573 return NULL;
3574 }
3575
3576 /* Gimplify the parameter list for current_function_decl. This involves
3577 evaluating SAVE_EXPRs of variable sized parameters and generating code
3578 to implement callee-copies reference parameters. Returns a sequence of
3579 statements to add to the beginning of the function. */
3580
3581 gimple_seq
3582 gimplify_parameters (void)
3583 {
3584 struct assign_parm_data_all all;
3585 tree parm;
3586 gimple_seq stmts = NULL;
3587 VEC(tree, heap) *fnargs;
3588 unsigned i;
3589
3590 assign_parms_initialize_all (&all);
3591 fnargs = assign_parms_augmented_arg_list (&all);
3592
3593 FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
3594 {
3595 struct assign_parm_data_one data;
3596
3597 /* Extract the type of PARM; adjust it according to ABI. */
3598 assign_parm_find_data_types (&all, parm, &data);
3599
3600 /* Early out for errors and void parameters. */
3601 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3602 continue;
3603
3604 /* Update info on where next arg arrives in registers. */
3605 targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
3606 data.passed_type, data.named_arg);
3607
3608 /* ??? Once upon a time variable_size stuffed parameter list
3609 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3610 turned out to be less than manageable in the gimple world.
3611 Now we have to hunt them down ourselves. */
3612 walk_tree_without_duplicates (&data.passed_type,
3613 gimplify_parm_type, &stmts);
3614
3615 if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
3616 {
3617 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3618 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3619 }
3620
3621 if (data.passed_pointer)
3622 {
3623 tree type = TREE_TYPE (data.passed_type);
3624 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3625 type, data.named_arg))
3626 {
3627 tree local, t;
3628
3629 /* For constant-sized objects, this is trivial; for
3630 variable-sized objects, we have to play games. */
3631 if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
3632 && !(flag_stack_check == GENERIC_STACK_CHECK
3633 && compare_tree_int (DECL_SIZE_UNIT (parm),
3634 STACK_CHECK_MAX_VAR_SIZE) > 0))
3635 {
3636 local = create_tmp_reg (type, get_name (parm));
3637 DECL_IGNORED_P (local) = 0;
3638 /* If PARM was addressable, move that flag over
3639 to the local copy, as its address will be taken,
3640 not the PARM's. Keep the PARM's flag set too,
3641 as we'll query it during gimplification. */
3642 if (TREE_ADDRESSABLE (parm))
3643 TREE_ADDRESSABLE (local) = 1;
3644 }
3645 else
3646 {
3647 tree ptr_type, addr;
3648
3649 ptr_type = build_pointer_type (type);
3650 addr = create_tmp_reg (ptr_type, get_name (parm));
3651 DECL_IGNORED_P (addr) = 0;
3652 local = build_fold_indirect_ref (addr);
3653
3654 t = built_in_decls[BUILT_IN_ALLOCA];
3655 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3656 /* The call has been built for a variable-sized object. */
3657 ALLOCA_FOR_VAR_P (t) = 1;
3658 t = fold_convert (ptr_type, t);
3659 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
3660 gimplify_and_add (t, &stmts);
3661 }
3662
3663 gimplify_assign (local, parm, &stmts);
3664
3665 SET_DECL_VALUE_EXPR (parm, local);
3666 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3667 }
3668 }
3669 }
3670
3671 VEC_free (tree, heap, fnargs);
3672
3673 return stmts;
3674 }
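
/* Illustrative sketch, not part of the pass: for a callee-copied,
   variable-sized reference parameter, the net effect of the code above
   corresponds roughly to this hand-written GNU C, where `n' and `tmp'
   are hypothetical names for the temporaries created by create_tmp_reg:

       void f (int n, int (*parm)[n])   -- passed by invisible reference
       {
         int (*tmp)[n] = __builtin_alloca (sizeof *parm);
         __builtin_memcpy (tmp, parm, sizeof *parm);
         -- later uses of the parameter resolve to *tmp through
         -- the DECL_VALUE_EXPR set just above
       }  */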
3675 \f
3676 /* Compute the size and offset from the start of the stacked arguments for a
3677 parm passed in mode PASSED_MODE and with type TYPE.
3678
3679 INITIAL_OFFSET_PTR points to the current offset into the stacked
3680 arguments.
3681
3682 The starting offset and size for this parm are returned in
3683 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3684 nonzero, the offset is that of the stack slot, which is returned in
3685 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3686 padding required from the initial offset ptr to the stack slot.
3687
3688 IN_REGS is nonzero if the argument will be passed in registers. It will
3689 never be set if REG_PARM_STACK_SPACE is not defined.
3690
3691 FNDECL is the function in which the argument was defined.
3692
3693 There are two types of rounding that are done. The first, controlled by
3694 TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
3695 argument list to be aligned to the specific boundary (in bits). This
3696 rounding affects the initial and starting offsets, but not the argument
3697 size.
3698
3699 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3700 optionally rounds the size of the parm to PARM_BOUNDARY. The
3701 initial offset is not affected by this rounding, while the size always
3702 is and the starting offset may be. */
3703
3704 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3705 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3706 callers pass in the total size of args so far as
3707 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
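
/* Worked example (illustrative, assuming BITS_PER_UNIT == 8, an
   upward-growing argument area and PARM_BOUNDARY == 32): a 6-byte
   BLKmode argument whose TARGET_FUNCTION_ARG_BOUNDARY is 64 bits and
   whose current offset is 4 is first moved up to offset 8 (the first
   rounding, of the starting offset), and its size is then rounded up
   to 8 bytes (the second rounding, to PARM_BOUNDARY), so the next
   argument starts at offset 16.  */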
3708
3709 void
3710 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3711 int partial, tree fndecl ATTRIBUTE_UNUSED,
3712 struct args_size *initial_offset_ptr,
3713 struct locate_and_pad_arg_data *locate)
3714 {
3715 tree sizetree;
3716 enum direction where_pad;
3717 unsigned int boundary;
3718 int reg_parm_stack_space = 0;
3719 int part_size_in_regs;
3720
3721 #ifdef REG_PARM_STACK_SPACE
3722 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3723
3724 /* If we have found a stack parm before we reach the end of the
3725 area reserved for registers, skip that area. */
3726 if (! in_regs)
3727 {
3728 if (reg_parm_stack_space > 0)
3729 {
3730 if (initial_offset_ptr->var)
3731 {
3732 initial_offset_ptr->var
3733 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3734 ssize_int (reg_parm_stack_space));
3735 initial_offset_ptr->constant = 0;
3736 }
3737 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3738 initial_offset_ptr->constant = reg_parm_stack_space;
3739 }
3740 }
3741 #endif /* REG_PARM_STACK_SPACE */
3742
3743 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3744
3745 sizetree
3746 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3747 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3748 boundary = targetm.calls.function_arg_boundary (passed_mode, type);
3749 locate->where_pad = where_pad;
3750
3751 /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT. */
3752 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
3753 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
3754
3755 locate->boundary = boundary;
3756
3757 if (SUPPORTS_STACK_ALIGNMENT)
3758 {
3759 /* stack_alignment_estimated can't change after stack has been
3760 realigned. */
3761 if (crtl->stack_alignment_estimated < boundary)
3762 {
3763 if (!crtl->stack_realign_processed)
3764 crtl->stack_alignment_estimated = boundary;
3765 else
3766 {
3767 /* If stack is realigned and stack alignment value
3768 hasn't been finalized, it is OK not to increase
3769 stack_alignment_estimated. The bigger alignment
3770 requirement is recorded in stack_alignment_needed
3771 below. */
3772 gcc_assert (!crtl->stack_realign_finalized
3773 && crtl->stack_realign_needed);
3774 }
3775 }
3776 }
3777
3778 /* Remember if the outgoing parameter requires extra alignment on the
3779 calling function side. */
3780 if (crtl->stack_alignment_needed < boundary)
3781 crtl->stack_alignment_needed = boundary;
3782 if (crtl->preferred_stack_boundary < boundary)
3783 crtl->preferred_stack_boundary = boundary;
3784
3785 #ifdef ARGS_GROW_DOWNWARD
3786 locate->slot_offset.constant = -initial_offset_ptr->constant;
3787 if (initial_offset_ptr->var)
3788 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3789 initial_offset_ptr->var);
3790
3791 {
3792 tree s2 = sizetree;
3793 if (where_pad != none
3794 && (!host_integerp (sizetree, 1)
3795 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3796 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3797 SUB_PARM_SIZE (locate->slot_offset, s2);
3798 }
3799
3800 locate->slot_offset.constant += part_size_in_regs;
3801
3802 if (!in_regs
3803 #ifdef REG_PARM_STACK_SPACE
3804 || REG_PARM_STACK_SPACE (fndecl) > 0
3805 #endif
3806 )
3807 pad_to_arg_alignment (&locate->slot_offset, boundary,
3808 &locate->alignment_pad);
3809
3810 locate->size.constant = (-initial_offset_ptr->constant
3811 - locate->slot_offset.constant);
3812 if (initial_offset_ptr->var)
3813 locate->size.var = size_binop (MINUS_EXPR,
3814 size_binop (MINUS_EXPR,
3815 ssize_int (0),
3816 initial_offset_ptr->var),
3817 locate->slot_offset.var);
3818
3819 /* Pad_below needs the pre-rounded size to know how much to pad
3820 below. */
3821 locate->offset = locate->slot_offset;
3822 if (where_pad == downward)
3823 pad_below (&locate->offset, passed_mode, sizetree);
3824
3825 #else /* !ARGS_GROW_DOWNWARD */
3826 if (!in_regs
3827 #ifdef REG_PARM_STACK_SPACE
3828 || REG_PARM_STACK_SPACE (fndecl) > 0
3829 #endif
3830 )
3831 pad_to_arg_alignment (initial_offset_ptr, boundary,
3832 &locate->alignment_pad);
3833 locate->slot_offset = *initial_offset_ptr;
3834
3835 #ifdef PUSH_ROUNDING
3836 if (passed_mode != BLKmode)
3837 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3838 #endif
3839
3840 /* Pad_below needs the pre-rounded size to know how much to pad below
3841 so this must be done before rounding up. */
3842 locate->offset = locate->slot_offset;
3843 if (where_pad == downward)
3844 pad_below (&locate->offset, passed_mode, sizetree);
3845
3846 if (where_pad != none
3847 && (!host_integerp (sizetree, 1)
3848 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3849 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3850
3851 ADD_PARM_SIZE (locate->size, sizetree);
3852
3853 locate->size.constant -= part_size_in_regs;
3854 #endif /* ARGS_GROW_DOWNWARD */
3855
3856 #ifdef FUNCTION_ARG_OFFSET
3857 locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
3858 #endif
3859 }
3860
3861 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3862 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3863
3864 static void
3865 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3866 struct args_size *alignment_pad)
3867 {
3868 tree save_var = NULL_TREE;
3869 HOST_WIDE_INT save_constant = 0;
3870 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3871 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3872
3873 #ifdef SPARC_STACK_BOUNDARY_HACK
3874 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3875 the real alignment of %sp. However, when it does this, the
3876 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3877 if (SPARC_STACK_BOUNDARY_HACK)
3878 sp_offset = 0;
3879 #endif
3880
3881 if (boundary > PARM_BOUNDARY)
3882 {
3883 save_var = offset_ptr->var;
3884 save_constant = offset_ptr->constant;
3885 }
3886
3887 alignment_pad->var = NULL_TREE;
3888 alignment_pad->constant = 0;
3889
3890 if (boundary > BITS_PER_UNIT)
3891 {
3892 if (offset_ptr->var)
3893 {
3894 tree sp_offset_tree = ssize_int (sp_offset);
3895 tree offset = size_binop (PLUS_EXPR,
3896 ARGS_SIZE_TREE (*offset_ptr),
3897 sp_offset_tree);
3898 #ifdef ARGS_GROW_DOWNWARD
3899 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3900 #else
3901 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3902 #endif
3903
3904 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3905 /* ARGS_SIZE_TREE includes constant term. */
3906 offset_ptr->constant = 0;
3907 if (boundary > PARM_BOUNDARY)
3908 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3909 save_var);
3910 }
3911 else
3912 {
3913 offset_ptr->constant = -sp_offset +
3914 #ifdef ARGS_GROW_DOWNWARD
3915 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3916 #else
3917 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3918 #endif
3919 if (boundary > PARM_BOUNDARY)
3920 alignment_pad->constant = offset_ptr->constant - save_constant;
3921 }
3922 }
3923 }
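
/* Worked example (illustrative): with BOUNDARY == 64 bits, i.e.
   boundary_in_bytes == 8, STACK_POINTER_OFFSET == 4 and a constant
   offset of 10, the constant branch above computes

     -4 + CEIL_ROUND (10 + 4, 8) == -4 + 16 == 12,

   so it is the address sp + STACK_POINTER_OFFSET (12 + 4 == 16), not
   the raw offset, that ends up 8-byte aligned.  */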
3924
3925 static void
3926 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3927 {
3928 if (passed_mode != BLKmode)
3929 {
3930 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3931 offset_ptr->constant
3932 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3933 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3934 - GET_MODE_SIZE (passed_mode));
3935 }
3936 else
3937 {
3938 if (TREE_CODE (sizetree) != INTEGER_CST
3939 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3940 {
3941 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3942 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3943 /* Add it in. */
3944 ADD_PARM_SIZE (*offset_ptr, s2);
3945 SUB_PARM_SIZE (*offset_ptr, sizetree);
3946 }
3947 }
3948 }
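
/* Worked example (illustrative, assuming PARM_BOUNDARY == 32 and
   BITS_PER_UNIT == 8): an HImode argument occupies 2 bytes of a slot
   that is rounded up to 4 bytes, so the expression above advances
   offset_ptr by 4 - 2 == 2 bytes of padding below the value.  */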
3949 \f
3950
3951 /* True if register REGNO was alive at a place where `setjmp' was
3952 called and was set more than once or is an argument. Such regs may
3953 be clobbered by `longjmp'. */
3954
3955 static bool
3956 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3957 {
3958 /* There appear to be cases where some local vars never reach the
3959 backend but have bogus regnos. */
3960 if (regno >= max_reg_num ())
3961 return false;
3962
3963 return ((REG_N_SETS (regno) > 1
3964 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3965 && REGNO_REG_SET_P (setjmp_crosses, regno));
3966 }
3967
3968 /* Walk the tree of blocks describing the binding levels within a
3969 function and warn about variables that might be clobbered by a
3970 longjmp or vfork. This is done after flow analysis and before
3971 register allocation, since register allocation will replace the
3972 pseudo-regs with hard regs. */
3973
3974 static void
3975 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3976 {
3977 tree decl, sub;
3978
3979 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
3980 {
3981 if (TREE_CODE (decl) == VAR_DECL
3982 && DECL_RTL_SET_P (decl)
3983 && REG_P (DECL_RTL (decl))
3984 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3985 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3986 " %<longjmp%> or %<vfork%>", decl);
3987 }
3988
3989 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3990 setjmp_vars_warning (setjmp_crosses, sub);
3991 }
3992
3993 /* Do the appropriate part of setjmp_vars_warning
3994 but for arguments instead of local variables. */
3995
3996 static void
3997 setjmp_args_warning (bitmap setjmp_crosses)
3998 {
3999 tree decl;
4000 for (decl = DECL_ARGUMENTS (current_function_decl);
4001 decl; decl = DECL_CHAIN (decl))
4002 if (DECL_RTL (decl) != 0
4003 && REG_P (DECL_RTL (decl))
4004 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
4005 warning (OPT_Wclobbered,
4006 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
4007 decl);
4008 }
4009
4010 /* Generate warning messages for variables live across setjmp. */
4011
4012 void
4013 generate_setjmp_warnings (void)
4014 {
4015 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
4016
4017 if (n_basic_blocks == NUM_FIXED_BLOCKS
4018 || bitmap_empty_p (setjmp_crosses))
4019 return;
4020
4021 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
4022 setjmp_args_warning (setjmp_crosses);
4023 }
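
/* Illustrative example, not part of GCC, of the kind of code these
   warnings target: X lives in a register, is assigned more than once,
   and is live across the setjmp, so after a longjmp it may hold a
   stale value unless declared volatile.  */
#if 0
#include <setjmp.h>
static jmp_buf buf;
static int
f (void)
{
  int x = 1;          /* "variable 'x' might be clobbered by 'longjmp'" */
  if (setjmp (buf))
    return x;         /* may observe either 1 or 2 */
  x = 2;
  longjmp (buf, 1);
}
#endif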
4024
4025 \f
4026 /* Reverse the order of elements in the fragment chain T of blocks,
4027 and return the new head of the chain (old last element). */
4028
4029 static tree
4030 block_fragments_nreverse (tree t)
4031 {
4032 tree prev = 0, block, next;
4033 for (block = t; block; block = next)
4034 {
4035 next = BLOCK_FRAGMENT_CHAIN (block);
4036 BLOCK_FRAGMENT_CHAIN (block) = prev;
4037 prev = block;
4038 }
4039 return prev;
4040 }
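
/* For instance (illustrative): given the fragment chain A -> B -> C,
   the loop above rewires one link per iteration and returns C, leaving
   C -> B -> A.  blocks_nreverse_all and blocks_nreverse below apply
   the same idiom to BLOCK_CHAIN.  */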
4041
4042 /* Reverse the order of elements in the chain T of blocks,
4043 and return the new head of the chain (old last element).
4044 Also do the same on subblocks, and reverse the order of elements
4045 in each BLOCK_FRAGMENT_CHAIN. */
4046
4047 static tree
4048 blocks_nreverse_all (tree t)
4049 {
4050 tree prev = 0, block, next;
4051 for (block = t; block; block = next)
4052 {
4053 next = BLOCK_CHAIN (block);
4054 BLOCK_CHAIN (block) = prev;
4055 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4056 if (BLOCK_FRAGMENT_CHAIN (block)
4057 && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
4058 BLOCK_FRAGMENT_CHAIN (block)
4059 = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
4060 prev = block;
4061 }
4062 return prev;
4063 }
4064
4065
4066 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
4067 and create duplicate blocks. */
4068 /* ??? Need an option to either create block fragments or to create
4069 abstract origin duplicates of a source block. It really depends
4070 on what optimization has been performed. */
4071
4072 void
4073 reorder_blocks (void)
4074 {
4075 tree block = DECL_INITIAL (current_function_decl);
4076 VEC(tree,heap) *block_stack;
4077
4078 if (block == NULL_TREE)
4079 return;
4080
4081 block_stack = VEC_alloc (tree, heap, 10);
4082
4083 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
4084 clear_block_marks (block);
4085
4086 /* Prune the old trees away, so that they don't get in the way. */
4087 BLOCK_SUBBLOCKS (block) = NULL_TREE;
4088 BLOCK_CHAIN (block) = NULL_TREE;
4089
4090 /* Recreate the block tree from the note nesting. */
4091 reorder_blocks_1 (get_insns (), block, &block_stack);
4092 BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
4093
4094 VEC_free (tree, heap, block_stack);
4095 }
4096
4097 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
4098
4099 void
4100 clear_block_marks (tree block)
4101 {
4102 while (block)
4103 {
4104 TREE_ASM_WRITTEN (block) = 0;
4105 clear_block_marks (BLOCK_SUBBLOCKS (block));
4106 block = BLOCK_CHAIN (block);
4107 }
4108 }
4109
4110 static void
4111 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
4112 {
4113 rtx insn;
4114
4115 for (insn = insns; insn; insn = NEXT_INSN (insn))
4116 {
4117 if (NOTE_P (insn))
4118 {
4119 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
4120 {
4121 tree block = NOTE_BLOCK (insn);
4122 tree origin;
4123
4124 gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
4125 origin = block;
4126
4127 /* If we have seen this block before, that means it now
4128 spans multiple address regions. Create a new fragment. */
4129 if (TREE_ASM_WRITTEN (block))
4130 {
4131 tree new_block = copy_node (block);
4132
4133 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
4134 BLOCK_FRAGMENT_CHAIN (new_block)
4135 = BLOCK_FRAGMENT_CHAIN (origin);
4136 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
4137
4138 NOTE_BLOCK (insn) = new_block;
4139 block = new_block;
4140 }
4141
4142 BLOCK_SUBBLOCKS (block) = 0;
4143 TREE_ASM_WRITTEN (block) = 1;
4144 /* When there's only one block for the entire function,
4145 current_block == block and we mustn't do this, as it
4146 would cause infinite recursion. */
4147 if (block != current_block)
4148 {
4149 if (block != origin)
4150 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
4151
4152 BLOCK_SUPERCONTEXT (block) = current_block;
4153 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4154 BLOCK_SUBBLOCKS (current_block) = block;
4155 current_block = origin;
4156 }
4157 VEC_safe_push (tree, heap, *p_block_stack, block);
4158 }
4159 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
4160 {
4161 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
4162 current_block = BLOCK_SUPERCONTEXT (current_block);
4163 }
4164 }
4165 }
4166 }
4167
4168 /* Reverse the order of elements in the chain T of blocks,
4169 and return the new head of the chain (old last element). */
4170
4171 tree
4172 blocks_nreverse (tree t)
4173 {
4174 tree prev = 0, block, next;
4175 for (block = t; block; block = next)
4176 {
4177 next = BLOCK_CHAIN (block);
4178 BLOCK_CHAIN (block) = prev;
4179 prev = block;
4180 }
4181 return prev;
4182 }
4183
4184 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
4185 non-NULL, list them all into VECTOR, in a depth-first preorder
4186 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
4187 blocks. */
4188
4189 static int
4190 all_blocks (tree block, tree *vector)
4191 {
4192 int n_blocks = 0;
4193
4194 while (block)
4195 {
4196 TREE_ASM_WRITTEN (block) = 0;
4197
4198 /* Record this block. */
4199 if (vector)
4200 vector[n_blocks] = block;
4201
4202 ++n_blocks;
4203
4204 /* Record the subblocks, and their subblocks... */
4205 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4206 vector ? vector + n_blocks : 0);
4207 block = BLOCK_CHAIN (block);
4208 }
4209
4210 return n_blocks;
4211 }
4212
4213 /* Return a vector containing all the blocks rooted at BLOCK. The
4214 number of elements in the vector is stored in N_BLOCKS_P. The
4215 vector is dynamically allocated; it is the caller's responsibility
4216 to call `free' on the pointer returned. */
4217
4218 static tree *
4219 get_block_vector (tree block, int *n_blocks_p)
4220 {
4221 tree *block_vector;
4222
4223 *n_blocks_p = all_blocks (block, NULL);
4224 block_vector = XNEWVEC (tree, *n_blocks_p);
4225 all_blocks (block, block_vector);
4226
4227 return block_vector;
4228 }
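
/* Illustrative sketch of the intended count-then-fill usage;
   `visit_all_blocks' and `process' are hypothetical.  number_blocks
   below is a real caller of this pattern.  */
#if 0
static void
visit_all_blocks (tree fndecl, void (*process) (tree))
{
  int i, n_blocks;
  tree *vec = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
  for (i = 0; i < n_blocks; i++)
    process (vec[i]);   /* blocks arrive in depth-first preorder */
  free (vec);
}
#endif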
4229
4230 static GTY(()) int next_block_index = 2;
4231
4232 /* Set BLOCK_NUMBER for all the blocks in FN. */
4233
4234 void
4235 number_blocks (tree fn)
4236 {
4237 int i;
4238 int n_blocks;
4239 tree *block_vector;
4240
4241 /* For SDB and XCOFF debugging output, we start numbering the blocks
4242 from 1 within each function, rather than keeping a running
4243 count. */
4244 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
4245 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
4246 next_block_index = 1;
4247 #endif
4248
4249 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
4250
4251 /* The top-level BLOCK isn't numbered at all. */
4252 for (i = 1; i < n_blocks; ++i)
4253 /* We number the blocks from two. */
4254 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
4255
4256 free (block_vector);
4257
4258 return;
4259 }
4260
4261 /* If VAR is present in a subblock of BLOCK, return the subblock. */
4262
4263 DEBUG_FUNCTION tree
4264 debug_find_var_in_block_tree (tree var, tree block)
4265 {
4266 tree t;
4267
4268 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
4269 if (t == var)
4270 return block;
4271
4272 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
4273 {
4274 tree ret = debug_find_var_in_block_tree (var, t);
4275 if (ret)
4276 return ret;
4277 }
4278
4279 return NULL_TREE;
4280 }
4281 \f
4282 /* Keep track of whether we're in a dummy function context. If we are,
4283 we don't want to invoke the set_current_function hook, because we'll
4284 get into trouble if the hook calls target_reinit () recursively or
4285 when the initial initialization is not yet complete. */
4286
4287 static bool in_dummy_function;
4288
4289 /* Invoke the target hook when setting cfun. Update the optimization options
4290 if the function uses different options than the default. */
4291
4292 static void
4293 invoke_set_current_function_hook (tree fndecl)
4294 {
4295 if (!in_dummy_function)
4296 {
4297 tree opts = ((fndecl)
4298 ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
4299 : optimization_default_node);
4300
4301 if (!opts)
4302 opts = optimization_default_node;
4303
4304 /* Change optimization options if needed. */
4305 if (optimization_current_node != opts)
4306 {
4307 optimization_current_node = opts;
4308 cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
4309 }
4310
4311 targetm.set_current_function (fndecl);
4312 }
4313 }
4314
4315 /* cfun should never be set directly; use this function. */
4316
4317 void
4318 set_cfun (struct function *new_cfun)
4319 {
4320 if (cfun != new_cfun)
4321 {
4322 cfun = new_cfun;
4323 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
4324 }
4325 }
4326
4327 /* Initialized with NOGC, making this poisonous to the garbage collector. */
4328
4329 static VEC(function_p,heap) *cfun_stack;
4330
4331 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
4332
4333 void
4334 push_cfun (struct function *new_cfun)
4335 {
4336 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4337 set_cfun (new_cfun);
4338 }
4339
4340 /* Pop cfun from the stack. */
4341
4342 void
4343 pop_cfun (void)
4344 {
4345 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
4346 set_cfun (new_cfun);
4347 }
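
/* Illustrative sketch: code that must temporarily operate on another
   function brackets the work with the pair above, so that cfun and the
   target's per-function state are restored on the way out.
   `other_fndecl' is a hypothetical FUNCTION_DECL:

       push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
       ... inspect or emit state for other_fndecl via cfun ...
       pop_cfun ();  */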
4348
4349 /* Return the current funcdef number and increment it. */
4350 int
4351 get_next_funcdef_no (void)
4352 {
4353 return funcdef_no++;
4354 }
4355
4356 /* Allocate a function structure for FNDECL and set its contents
4357 to the defaults. Set cfun to the newly-allocated object.
4358 Some of the helper functions invoked during initialization assume
4359 that cfun has already been set. Therefore, assign the new object
4360 directly into cfun and invoke the back end hook explicitly at the
4361 very end, rather than initializing a temporary and calling set_cfun
4362 on it.
4363
4364 ABSTRACT_P is true if this is a function that will never be seen by
4365 the middle-end. Such functions are front-end concepts (like C++
4366 function templates) that do not correspond directly to functions
4367 placed in object files. */
4368
4369 void
4370 allocate_struct_function (tree fndecl, bool abstract_p)
4371 {
4372 tree result;
4373 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
4374
4375 cfun = ggc_alloc_cleared_function ();
4376
4377 init_eh_for_function ();
4378
4379 if (init_machine_status)
4380 cfun->machine = (*init_machine_status) ();
4381
4382 #ifdef OVERRIDE_ABI_FORMAT
4383 OVERRIDE_ABI_FORMAT (fndecl);
4384 #endif
4385
4386 invoke_set_current_function_hook (fndecl);
4387
4388 if (fndecl != NULL_TREE)
4389 {
4390 DECL_STRUCT_FUNCTION (fndecl) = cfun;
4391 cfun->decl = fndecl;
4392 current_function_funcdef_no = get_next_funcdef_no ();
4393
4394 result = DECL_RESULT (fndecl);
4395 if (!abstract_p && aggregate_value_p (result, fndecl))
4396 {
4397 #ifdef PCC_STATIC_STRUCT_RETURN
4398 cfun->returns_pcc_struct = 1;
4399 #endif
4400 cfun->returns_struct = 1;
4401 }
4402
4403 cfun->stdarg = stdarg_p (fntype);
4404
4405 /* Assume all registers in stdarg functions need to be saved. */
4406 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
4407 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
4408
4409 /* ??? This could be set on a per-function basis by the front-end
4410 but is this worth the hassle? */
4411 cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
4412 }
4413 }
4414
4415 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
4416 instead of just setting it. */
4417
4418 void
4419 push_struct_function (tree fndecl)
4420 {
4421 VEC_safe_push (function_p, heap, cfun_stack, cfun);
4422 allocate_struct_function (fndecl, false);
4423 }
4424
4425 /* Reset crtl and other non-struct-function variables to defaults as
4426 appropriate for emitting rtl at the start of a function. */
4427
4428 static void
4429 prepare_function_start (void)
4430 {
4431 gcc_assert (!crtl->emit.x_last_insn);
4432 init_temp_slots ();
4433 init_emit ();
4434 init_varasm_status ();
4435 init_expr ();
4436 default_rtl_profile ();
4437
4438 if (flag_stack_usage)
4439 {
4440 cfun->su = ggc_alloc_cleared_stack_usage ();
4441 cfun->su->static_stack_size = -1;
4442 }
4443
4444 cse_not_expected = ! optimize;
4445
4446 /* Caller save not needed yet. */
4447 caller_save_needed = 0;
4448
4449 /* We haven't done register allocation yet. */
4450 reg_renumber = 0;
4451
4452 /* Indicate that we have not instantiated virtual registers yet. */
4453 virtuals_instantiated = 0;
4454
4455 /* Indicate that we want CONCATs now. */
4456 generating_concat_p = 1;
4457
4458 /* Indicate we have no need of a frame pointer yet. */
4459 frame_pointer_needed = 0;
4460 }
4461
4462 /* Initialize the rtl expansion mechanism so that we can do simple things
4463 like generate sequences. This is used to provide a context during global
4464 initialization of some passes. You must call expand_dummy_function_end
4465 to exit this context. */
4466
4467 void
4468 init_dummy_function_start (void)
4469 {
4470 gcc_assert (!in_dummy_function);
4471 in_dummy_function = true;
4472 push_struct_function (NULL_TREE);
4473 prepare_function_start ();
4474 }
4475
4476 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4477 and initialize static variables for generating RTL for the statements
4478 of the function. */
4479
4480 void
4481 init_function_start (tree subr)
4482 {
4483 if (subr && DECL_STRUCT_FUNCTION (subr))
4484 set_cfun (DECL_STRUCT_FUNCTION (subr));
4485 else
4486 allocate_struct_function (subr, false);
4487 prepare_function_start ();
4488
4489 /* Warn if this value is an aggregate type,
4490 regardless of which calling convention we are using for it. */
4491 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4492 warning (OPT_Waggregate_return, "function returns an aggregate");
4493 }
4494
4495 /* Make sure all values used by the optimization passes have sane defaults. */
4496 unsigned int
4497 init_function_for_compilation (void)
4498 {
4499 reg_renumber = 0;
4500 return 0;
4501 }
4502
4503 struct rtl_opt_pass pass_init_function =
4504 {
4505 {
4506 RTL_PASS,
4507 "*init_function", /* name */
4508 NULL, /* gate */
4509 init_function_for_compilation, /* execute */
4510 NULL, /* sub */
4511 NULL, /* next */
4512 0, /* static_pass_number */
4513 TV_NONE, /* tv_id */
4514 0, /* properties_required */
4515 0, /* properties_provided */
4516 0, /* properties_destroyed */
4517 0, /* todo_flags_start */
4518 0 /* todo_flags_finish */
4519 }
4520 };
4521
4522
4523 void
4524 expand_main_function (void)
4525 {
4526 #if (defined(INVOKE__main) \
4527 || (!defined(HAS_INIT_SECTION) \
4528 && !defined(INIT_SECTION_ASM_OP) \
4529 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
4530 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
4531 #endif
4532 }
4533 \f
4534 /* Expand code to initialize the stack_protect_guard. This is invoked at
4535 the beginning of a function to be protected. */
4536
4537 #ifndef HAVE_stack_protect_set
4538 # define HAVE_stack_protect_set 0
4539 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
4540 #endif
4541
4542 void
4543 stack_protect_prologue (void)
4544 {
4545 tree guard_decl = targetm.stack_protect_guard ();
4546 rtx x, y;
4547
4548 x = expand_normal (crtl->stack_protect_guard);
4549 y = expand_normal (guard_decl);
4550
4551 /* Allow the target to copy from Y to X without leaking Y into a
4552 register. */
4553 if (HAVE_stack_protect_set)
4554 {
4555 rtx insn = gen_stack_protect_set (x, y);
4556 if (insn)
4557 {
4558 emit_insn (insn);
4559 return;
4560 }
4561 }
4562
4563 /* Otherwise do a straight move. */
4564 emit_move_insn (x, y);
4565 }
4566
4567 /* Expand code to verify the stack_protect_guard. This is invoked at
4568 the end of a function to be protected. */
4569
4570 #ifndef HAVE_stack_protect_test
4571 # define HAVE_stack_protect_test 0
4572 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4573 #endif
4574
4575 void
4576 stack_protect_epilogue (void)
4577 {
4578 tree guard_decl = targetm.stack_protect_guard ();
4579 rtx label = gen_label_rtx ();
4580 rtx x, y, tmp;
4581
4582 x = expand_normal (crtl->stack_protect_guard);
4583 y = expand_normal (guard_decl);
4584
4585 /* Allow the target to compare Y with X without leaking either into
4586 a register. */
4587 switch (HAVE_stack_protect_test != 0)
4588 {
4589 case 1:
4590 tmp = gen_stack_protect_test (x, y, label);
4591 if (tmp)
4592 {
4593 emit_insn (tmp);
4594 break;
4595 }
4596 /* FALLTHRU */
4597
4598 default:
4599 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4600 break;
4601 }
4602
4603 /* The noreturn predictor has been moved to the tree level. The rtl-level
4604 predictors estimate this branch about 20%, which isn't enough to get
4605 things moved out of line. Since this is the only extant case of adding
4606 a noreturn function at the rtl level, it doesn't seem worth doing anything
4607 except adding the prediction by hand. */
4608 tmp = get_last_insn ();
4609 if (JUMP_P (tmp))
4610 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4611
4612 expand_expr_stmt (targetm.stack_protect_fail ());
4613 emit_label (label);
4614 }
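
/* Taken together, the two routines above implement the usual
   stack-guard scheme.  In hand-written C it corresponds roughly to the
   sketch below; the guard symbol and failure hook shown are
   illustrative stand-ins for crtl->stack_protect_guard and
   targetm.stack_protect_fail.  */
#if 0
extern unsigned long __guard;
extern void __stack_chk_fail (void);

void
f (void)
{
  unsigned long canary = __guard;   /* stack_protect_prologue */
  /* ... function body, local buffers, etc. ... */
  if (canary != __guard)            /* stack_protect_epilogue */
    __stack_chk_fail ();
}
#endif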
4615 \f
4616 /* Start the RTL for a new function, and set variables used for
4617 emitting RTL.
4618 SUBR is the FUNCTION_DECL node. */
4621
4622 void
4623 expand_function_start (tree subr)
4624 {
4625 /* Make sure volatile mem refs aren't considered
4626 valid operands of arithmetic insns. */
4627 init_recog_no_volatile ();
4628
4629 crtl->profile
4630 = (profile_flag
4631 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4632
4633 crtl->limit_stack
4634 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4635
4636 /* Make the label for return statements to jump to. Do not special
4637 case machines with special return instructions -- they will be
4638 handled later during jump, ifcvt, or epilogue creation. */
4639 return_label = gen_label_rtx ();
4640
4641 /* Initialize rtx used to return the value. */
4642 /* Do this before assign_parms so that we copy the struct value address
4643 before any library calls that assign parms might generate. */
4644
4645 /* Decide whether to return the value in memory or in a register. */
4646 if (aggregate_value_p (DECL_RESULT (subr), subr))
4647 {
4648 /* Returning something that won't go in a register. */
4649 rtx value_address = 0;
4650
4651 #ifdef PCC_STATIC_STRUCT_RETURN
4652 if (cfun->returns_pcc_struct)
4653 {
4654 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4655 value_address = assemble_static_space (size);
4656 }
4657 else
4658 #endif
4659 {
4660 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4661 /* Expect to be passed the address of a place to store the value.
4662 If it is passed as an argument, assign_parms will take care of
4663 it. */
4664 if (sv)
4665 {
4666 value_address = gen_reg_rtx (Pmode);
4667 emit_move_insn (value_address, sv);
4668 }
4669 }
4670 if (value_address)
4671 {
4672 rtx x = value_address;
4673 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4674 {
4675 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4676 set_mem_attributes (x, DECL_RESULT (subr), 1);
4677 }
4678 SET_DECL_RTL (DECL_RESULT (subr), x);
4679 }
4680 }
4681 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4682 /* If return mode is void, this decl rtl should not be used. */
4683 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4684 else
4685 {
4686 /* Compute the return values into a pseudo reg, which we will copy
4687 into the true return register after the cleanups are done. */
4688 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4689 if (TYPE_MODE (return_type) != BLKmode
4690 && targetm.calls.return_in_msb (return_type))
4691 /* expand_function_end will insert the appropriate padding in
4692 this case. Use the return value's natural (unpadded) mode
4693 within the function proper. */
4694 SET_DECL_RTL (DECL_RESULT (subr),
4695 gen_reg_rtx (TYPE_MODE (return_type)));
4696 else
4697 {
4698 /* In order to figure out what mode to use for the pseudo, we
4699 figure out what the mode of the eventual return register will
4700 actually be, and use that. */
4701 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4702
4703 /* Structures that are returned in registers are not
4704 aggregate_value_p, so we may see a PARALLEL or a REG. */
4705 if (REG_P (hard_reg))
4706 SET_DECL_RTL (DECL_RESULT (subr),
4707 gen_reg_rtx (GET_MODE (hard_reg)));
4708 else
4709 {
4710 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4711 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4712 }
4713 }
4714
4715 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4716 result to the real return register(s). */
4717 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4718 }
4719
4720 /* Initialize rtx for parameters and local variables.
4721 In some cases this requires emitting insns. */
4722 assign_parms (subr);
4723
4724 /* If function gets a static chain arg, store it. */
4725 if (cfun->static_chain_decl)
4726 {
4727 tree parm = cfun->static_chain_decl;
4728 rtx local, chain, insn;
4729
4730 local = gen_reg_rtx (Pmode);
4731 chain = targetm.calls.static_chain (current_function_decl, true);
4732
4733 set_decl_incoming_rtl (parm, chain, false);
4734 SET_DECL_RTL (parm, local);
4735 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4736
4737 insn = emit_move_insn (local, chain);
4738
4739 /* Mark the register as eliminable, similar to parameters. */
4740 if (MEM_P (chain)
4741 && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
4742 set_unique_reg_note (insn, REG_EQUIV, chain);
4743 }
4744
4745 /* If the function receives a non-local goto, then store the
4746 bits we need to restore the frame pointer. */
4747 if (cfun->nonlocal_goto_save_area)
4748 {
4749 tree t_save;
4750 rtx r_save;
4751
4752 /* ??? We need to do this save early, but unfortunately this
4753 point is before the frame variable gets declared. Help out... */
4754 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4755 if (!DECL_RTL_SET_P (var))
4756 expand_decl (var);
4757
4758 t_save = build4 (ARRAY_REF, ptr_type_node,
4759 cfun->nonlocal_goto_save_area,
4760 integer_zero_node, NULL_TREE, NULL_TREE);
4761 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4762 r_save = convert_memory_address (Pmode, r_save);
4763
4764 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4765 update_nonlocal_goto_save_area ();
4766 }
4767
4768 /* The following was moved from init_function_start.
4769 The move is supposed to make sdb output more accurate. */
4770 /* Indicate the beginning of the function body,
4771 as opposed to parm setup. */
4772 emit_note (NOTE_INSN_FUNCTION_BEG);
4773
4774 gcc_assert (NOTE_P (get_last_insn ()));
4775
4776 parm_birth_insn = get_last_insn ();
4777
4778 if (crtl->profile)
4779 {
4780 #ifdef PROFILE_HOOK
4781 PROFILE_HOOK (current_function_funcdef_no);
4782 #endif
4783 }
4784
4785 /* The stack checking probe should go right after the
4786 display initializations. */
4787 if (flag_stack_check)
4788 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4789
4790 /* Make sure there is a line number after the function entry setup code. */
4791 force_next_line_note ();
4792 }
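
/* Illustrative GNU C example, not part of GCC, whose compilation
   exercises both the static-chain and the nonlocal-goto handling
   above: `inner' reads X from the enclosing frame through the static
   chain, and the nonlocal `goto out' relies on the saved frame
   state.  */
#if 0
static int
outer (int x)
{
  __label__ out;
  int inner (int y)
  {
    if (y < 0)
      goto out;         /* nonlocal goto back into outer */
    return x + y;       /* x reached via the static chain */
  }
  x = inner (x - 1);
 out:
  return x;
}
#endif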
4793 \f
4794 /* Undo the effects of init_dummy_function_start. */
4795 void
4796 expand_dummy_function_end (void)
4797 {
4798 gcc_assert (in_dummy_function);
4799
4800 /* End any sequences that failed to be closed due to syntax errors. */
4801 while (in_sequence_p ())
4802 end_sequence ();
4803
4804 /* Outside function body, can't compute type's actual size
4805 until next function's body starts. */
4806
4807 free_after_parsing (cfun);
4808 free_after_compilation (cfun);
4809 pop_cfun ();
4810 in_dummy_function = false;
4811 }
4812
4813 /* Call DOIT for each hard register used as a return value from
4814 the current function. */
4815
4816 void
4817 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4818 {
4819 rtx outgoing = crtl->return_rtx;
4820
4821 if (! outgoing)
4822 return;
4823
4824 if (REG_P (outgoing))
4825 (*doit) (outgoing, arg);
4826 else if (GET_CODE (outgoing) == PARALLEL)
4827 {
4828 int i;
4829
4830 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4831 {
4832 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4833
4834 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4835 (*doit) (x, arg);
4836 }
4837 }
4838 }
4839
4840 static void
4841 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4842 {
4843 emit_clobber (reg);
4844 }
4845
4846 void
4847 clobber_return_register (void)
4848 {
4849 diddle_return_value (do_clobber_return_reg, NULL);
4850
4851 /* In case we do use pseudo to return value, clobber it too. */
4852 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4853 {
4854 tree decl_result = DECL_RESULT (current_function_decl);
4855 rtx decl_rtl = DECL_RTL (decl_result);
4856 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4857 {
4858 do_clobber_return_reg (decl_rtl, NULL);
4859 }
4860 }
4861 }
4862
4863 static void
4864 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4865 {
4866 emit_use (reg);
4867 }
4868
4869 static void
4870 use_return_register (void)
4871 {
4872 diddle_return_value (do_use_return_reg, NULL);
4873 }
4874
4875 /* Possibly warn about unused parameters. */
4876 void
4877 do_warn_unused_parameter (tree fn)
4878 {
4879 tree decl;
4880
4881 for (decl = DECL_ARGUMENTS (fn);
4882 decl; decl = DECL_CHAIN (decl))
4883 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4884 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4885 && !TREE_NO_WARNING (decl))
4886 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4887 }
4888
4889 static GTY(()) rtx initial_trampoline;
4890
4891 /* Generate RTL for the end of the current function. */
4892
4893 void
4894 expand_function_end (void)
4895 {
4896 rtx clobber_after;
4897
4898 /* If arg_pointer_save_area was referenced only from a nested
4899 function, we will not have initialized it yet. Do that now. */
4900 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4901 get_arg_pointer_save_area ();
4902
4903 /* If we are doing generic stack checking and this function makes calls,
4904 do a stack probe at the start of the function to ensure we have enough
4905 space for another stack frame. */
4906 if (flag_stack_check == GENERIC_STACK_CHECK)
4907 {
4908 rtx insn, seq;
4909
4910 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4911 if (CALL_P (insn))
4912 {
4913 rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
4914 start_sequence ();
4915 if (STACK_CHECK_MOVING_SP)
4916 anti_adjust_stack_and_probe (max_frame_size, true);
4917 else
4918 probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
4919 seq = get_insns ();
4920 end_sequence ();
4921 set_insn_locators (seq, prologue_locator);
4922 emit_insn_before (seq, stack_check_probe_note);
4923 break;
4924 }
4925 }
4926
4927 /* End any sequences that failed to be closed due to syntax errors. */
4928 while (in_sequence_p ())
4929 end_sequence ();
4930
4931 clear_pending_stack_adjust ();
4932 do_pending_stack_adjust ();
4933
4934 /* Output a line number for the end of the function.
4935 SDB depends on this. */
4936 force_next_line_note ();
4937 set_curr_insn_source_location (input_location);
4938
4939 /* Before the return label (if any), clobber the return
4940 registers so that they are not propagated live to the rest of
4941 the function. This can only happen with functions that drop
4942 through; if there had been a return statement, there would
4943 have either been a return rtx, or a jump to the return label.
4944
4945 We delay the actual code generation until current_function_value_rtx
4946 is computed. */
4947 clobber_after = get_last_insn ();
4948
4949 /* Output the label for the actual return from the function. */
4950 emit_label (return_label);
4951
4952 if (targetm.except_unwind_info (&global_options) == UI_SJLJ)
4953 {
4954 /* Let except.c know where it should emit the call to unregister
4955 the function context for sjlj exceptions. */
4956 if (flag_exceptions)
4957 sjlj_emit_function_exit_after (get_last_insn ());
4958 }
4959 else
4960 {
4961 /* We want to ensure that instructions that may trap are not
4962 moved into the epilogue by scheduling, because we don't
4963 always emit unwind information for the epilogue. */
4964 if (cfun->can_throw_non_call_exceptions)
4965 emit_insn (gen_blockage ());
4966 }
4967
4968 /* If this is an implementation of throw, do what's necessary to
4969 communicate between __builtin_eh_return and the epilogue. */
4970 expand_eh_return ();
4971
4972 /* If scalar return value was computed in a pseudo-reg, or was a named
4973 return value that got dumped to the stack, copy that to the hard
4974 return register. */
4975 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4976 {
4977 tree decl_result = DECL_RESULT (current_function_decl);
4978 rtx decl_rtl = DECL_RTL (decl_result);
4979
4980 if (REG_P (decl_rtl)
4981 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4982 : DECL_REGISTER (decl_result))
4983 {
4984 rtx real_decl_rtl = crtl->return_rtx;
4985
4986 /* This should be set in assign_parms. */
4987 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4988
4989 /* If this is a BLKmode structure being returned in registers,
4990 then use the mode computed in expand_return. Note that if
4991 decl_rtl is memory, then its mode may have been changed,
4992 but that crtl->return_rtx has not. */
4993 if (GET_MODE (real_decl_rtl) == BLKmode)
4994 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4995
4996 /* If a non-BLKmode return value should be padded at the least
4997 significant end of the register, shift it left by the appropriate
4998 amount. BLKmode results are handled using the group load/store
4999 machinery. */
5000 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
5001 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
5002 {
5003 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
5004 REGNO (real_decl_rtl)),
5005 decl_rtl);
5006 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
5007 }
5008 /* If a named return value was dumped to memory, then
5009 we may need to re-do the PROMOTE_MODE signed/unsigned
5010 extension. */
5011 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
5012 {
5013 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
5014 promote_function_mode (TREE_TYPE (decl_result),
5015 GET_MODE (decl_rtl), &unsignedp,
5016 TREE_TYPE (current_function_decl), 1);
5017
5018 convert_move (real_decl_rtl, decl_rtl, unsignedp);
5019 }
5020 else if (GET_CODE (real_decl_rtl) == PARALLEL)
5021 {
5022 /* If expand_function_start has created a PARALLEL for decl_rtl,
5023 move the result to the real return registers. Otherwise, do
5024 a group load from decl_rtl for a named return. */
5025 if (GET_CODE (decl_rtl) == PARALLEL)
5026 emit_group_move (real_decl_rtl, decl_rtl);
5027 else
5028 emit_group_load (real_decl_rtl, decl_rtl,
5029 TREE_TYPE (decl_result),
5030 int_size_in_bytes (TREE_TYPE (decl_result)));
5031 }
5032 /* In the case of complex integer modes smaller than a word, we'll
5033 need to generate some non-trivial bitfield insertions. Do that
5034 on a pseudo and not the hard register. */
5035 else if (GET_CODE (decl_rtl) == CONCAT
5036 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
5037 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
5038 {
5039 int old_generating_concat_p;
5040 rtx tmp;
5041
5042 old_generating_concat_p = generating_concat_p;
5043 generating_concat_p = 0;
5044 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
5045 generating_concat_p = old_generating_concat_p;
5046
5047 emit_move_insn (tmp, decl_rtl);
5048 emit_move_insn (real_decl_rtl, tmp);
5049 }
5050 else
5051 emit_move_insn (real_decl_rtl, decl_rtl);
5052 }
5053 }
5054
5055 /* If returning a structure, arrange to return the address of the value
5056 in a place where debuggers expect to find it.
5057
5058 If returning a structure PCC style,
5059 the caller also depends on this value.
5060 And cfun->returns_pcc_struct is not necessarily set. */
5061 if (cfun->returns_struct
5062 || cfun->returns_pcc_struct)
5063 {
5064 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
5065 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5066 rtx outgoing;
5067
5068 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
5069 type = TREE_TYPE (type);
5070 else
5071 value_address = XEXP (value_address, 0);
5072
5073 outgoing = targetm.calls.function_value (build_pointer_type (type),
5074 current_function_decl, true);
5075
5076 /* Mark this as a function return value so integrate will delete the
5077 assignment and USE below when inlining this function. */
5078 REG_FUNCTION_VALUE_P (outgoing) = 1;
5079
5080 /* The address may be ptr_mode and OUTGOING may be Pmode. */
5081 value_address = convert_memory_address (GET_MODE (outgoing),
5082 value_address);
5083
5084 emit_move_insn (outgoing, value_address);
5085
5086 /* Show return register used to hold result (in this case the address
5087 of the result). */
5088 crtl->return_rtx = outgoing;
5089 }
5090
5091 /* Emit the actual code to clobber return register. */
5092 {
5093 rtx seq;
5094
5095 start_sequence ();
5096 clobber_return_register ();
5097 seq = get_insns ();
5098 end_sequence ();
5099
5100 emit_insn_after (seq, clobber_after);
5101 }
5102
5103 /* Output the label for the naked return from the function. */
5104 if (naked_return_label)
5105 emit_label (naked_return_label);
5106
5107 /* @@@ This is a kludge. We want to ensure that instructions that
5108 may trap are not moved into the epilogue by scheduling, because
5109 we don't always emit unwind information for the epilogue. */
5110 if (cfun->can_throw_non_call_exceptions
5111 && targetm.except_unwind_info (&global_options) != UI_SJLJ)
5112 emit_insn (gen_blockage ());
5113
5114 /* If stack protection is enabled for this function, check the guard. */
5115 if (crtl->stack_protect_guard)
5116 stack_protect_epilogue ();
5117
5118 /* If we had calls to alloca, and this machine needs
5119 an accurate stack pointer to exit the function,
5120 insert some code to save and restore the stack pointer. */
5121 if (! EXIT_IGNORE_STACK
5122 && cfun->calls_alloca)
5123 {
5124 rtx tem = 0;
5125
5126 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5127 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5128 }
5129
5130 /* ??? This should no longer be necessary since the old stupid register
5131 allocator is no longer with us, but there are some parts of the compiler
5132 (e.g. reload_combine, and sh mach_dep_reorg) that still try to compute
5133 their own lifetime info instead of using the general framework. */
5134 use_return_register ();
5135 }
5136
5137 rtx
5138 get_arg_pointer_save_area (void)
5139 {
5140 rtx ret = arg_pointer_save_area;
5141
5142 if (! ret)
5143 {
5144 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5145 arg_pointer_save_area = ret;
5146 }
5147
5148 if (! crtl->arg_pointer_save_area_init)
5149 {
5150 rtx seq;
5151
5152 /* Save the arg pointer at the beginning of the function. The
5153 generated stack slot may not be a valid memory address, so we
5154 have to check it and fix it if necessary. */
5155 start_sequence ();
5156 emit_move_insn (validize_mem (ret),
5157 crtl->args.internal_arg_pointer);
5158 seq = get_insns ();
5159 end_sequence ();
5160
5161 push_topmost_sequence ();
5162 emit_insn_after (seq, entry_of_function ());
5163 pop_topmost_sequence ();
5164
5165 crtl->arg_pointer_save_area_init = true;
5166 }
5167
5168 return ret;
5169 }
5170 \f
5171 /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
5172 for the first time. */
5173
5174 static void
5175 record_insns (rtx insns, rtx end, htab_t *hashp)
5176 {
5177 rtx tmp;
5178 htab_t hash = *hashp;
5179
5180 if (hash == NULL)
5181 *hashp = hash
5182 = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);
5183
5184 for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
5185 {
5186 void **slot = htab_find_slot (hash, tmp, INSERT);
5187 gcc_assert (*slot == NULL);
5188 *slot = tmp;
5189 }
5190 }
5191
5192 /* INSN has been duplicated as, or replaced by, COPY, perhaps by duplicating
5193 a basic block, by splitting, or by peepholes. If INSN is a prologue or epilogue
5194 insn, then record COPY as well. */
5195
5196 void
5197 maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
5198 {
5199 htab_t hash;
5200 void **slot;
5201
5202 hash = epilogue_insn_hash;
5203 if (!hash || !htab_find (hash, insn))
5204 {
5205 hash = prologue_insn_hash;
5206 if (!hash || !htab_find (hash, insn))
5207 return;
5208 }
5209
5210 slot = htab_find_slot (hash, copy, INSERT);
5211 gcc_assert (*slot == NULL);
5212 *slot = copy;
5213 }
5214
5215 /* Set the locator of the insn chain starting at INSN to LOC. */
5216 static void
5217 set_insn_locators (rtx insn, int loc)
5218 {
5219 while (insn != NULL_RTX)
5220 {
5221 if (INSN_P (insn))
5222 INSN_LOCATOR (insn) = loc;
5223 insn = NEXT_INSN (insn);
5224 }
5225 }
5226
5227 /* Determine if any INSNs in HASH are, or are part of, INSN. Because
5228 we can be running after reorg, SEQUENCE rtl is possible. */
5229
5230 static bool
5231 contains (const_rtx insn, htab_t hash)
5232 {
5233 if (hash == NULL)
5234 return false;
5235
5236 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
5237 {
5238 int i;
5239 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5240 if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
5241 return true;
5242 return false;
5243 }
5244
5245 return htab_find (hash, insn) != NULL;
5246 }
5247
5248 int
5249 prologue_epilogue_contains (const_rtx insn)
5250 {
5251 if (contains (insn, prologue_insn_hash))
5252 return 1;
5253 if (contains (insn, epilogue_insn_hash))
5254 return 1;
5255 return 0;
5256 }
5257
5258 #ifdef HAVE_return
5259 /* Insert gen_return at the end of block BB. This also means updating
5260 block_for_insn appropriately. */
5261
5262 static void
5263 emit_return_into_block (basic_block bb)
5264 {
5265 emit_jump_insn_after (gen_return (), BB_END (bb));
5266 }
5267 #endif /* HAVE_return */
5268
5269 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5270 this into place with notes indicating where the prologue ends and where
5271 the epilogue begins. Update the basic block information when possible. */
5272
5273 static void
5274 thread_prologue_and_epilogue_insns (void)
5275 {
5276 bool inserted;
5277 rtx seq ATTRIBUTE_UNUSED, epilogue_end ATTRIBUTE_UNUSED;
5278 edge entry_edge ATTRIBUTE_UNUSED;
5279 edge e;
5280 edge_iterator ei;
5281
5282 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5283
5284 inserted = false;
5285 seq = NULL_RTX;
5286 epilogue_end = NULL_RTX;
5287
5288 /* Can't deal with multiple successors of the entry block at the
5289 moment. Function should always have at least one entry
5290 point. */
5291 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5292 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
5293
5294 if (flag_split_stack
5295 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
5296 == NULL))
5297 {
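/* -fsplit-stack is presumably validated against the target much
   earlier, so a target without this pattern should never get here;
   hence the gcc_unreachable in the #ifndef arm.  */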
5298 #ifndef HAVE_split_stack_prologue
5299 gcc_unreachable ();
5300 #else
5301 gcc_assert (HAVE_split_stack_prologue);
5302
5303 start_sequence ();
5304 emit_insn (gen_split_stack_prologue ());
5305 seq = get_insns ();
5306 end_sequence ();
5307
5308 record_insns (seq, NULL, &prologue_insn_hash);
5309 set_insn_locators (seq, prologue_locator);
5310
5311 /* This relies on the fact that committing the edge insertion
5312 will look for basic blocks within the inserted instructions,
5313 which in turn relies on the fact that we are not in CFG
5314 layout mode here. */
5315 insert_insn_on_edge (seq, entry_edge);
5316 inserted = true;
5317 #endif
5318 }
5319
5320 #ifdef HAVE_prologue
5321 if (HAVE_prologue)
5322 {
5323 start_sequence ();
5324 seq = gen_prologue ();
5325 emit_insn (seq);
5326
5327 /* Insert an explicit USE for the frame pointer
5328 if the profiling is on and the frame pointer is required. */
5329 if (crtl->profile && frame_pointer_needed)
5330 emit_use (hard_frame_pointer_rtx);
5331
5332 /* Retain a map of the prologue insns. */
5333 record_insns (seq, NULL, &prologue_insn_hash);
5334 emit_note (NOTE_INSN_PROLOGUE_END);
5335
5336 /* Ensure that instructions are not moved into the prologue when
5337 profiling is on. The call to the profiling routine can be
5338 emitted within the live range of a call-clobbered register. */
5339 if (!targetm.profile_before_prologue () && crtl->profile)
5340 emit_insn (gen_blockage ());
5341
5342 seq = get_insns ();
5343 end_sequence ();
5344 set_insn_locators (seq, prologue_locator);
5345
5346 insert_insn_on_edge (seq, entry_edge);
5347 inserted = true;
5348 }
5349 #endif
5350
5351 /* If the exit block has no non-fake predecessors, we don't need
5352 an epilogue. */
5353 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5354 if ((e->flags & EDGE_FAKE) == 0)
5355 break;
5356 if (e == NULL)
5357 goto epilogue_done;
5358
5359 rtl_profile_for_bb (EXIT_BLOCK_PTR);
5360 #ifdef HAVE_return
5361 if (optimize && HAVE_return)
5362 {
5363 /* If we're allowed to generate a simple return instruction,
5364 then by definition we don't need a full epilogue. Examine
5365 the block that falls through to EXIT. If it does not
5366 contain any code, examine its predecessors and try to
5367 emit (conditional) return instructions. */
5368
5369 basic_block last;
5370 rtx label;
5371
5372 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
5373 if (e == NULL)
5374 goto epilogue_done;
5375 last = e->src;
5376
5377 /* Verify that there are no active instructions in the last block. */
5378 label = BB_END (last);
5379 while (label && !LABEL_P (label))
5380 {
5381 if (active_insn_p (label))
5382 break;
5383 label = PREV_INSN (label);
5384 }
5385
5386 if (BB_HEAD (last) == label && LABEL_P (label))
5387 {
5388 edge_iterator ei2;
5389
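/* Iterate with ei_safe_edge and advance the iterator by hand: the
   redirect_edge_succ call at the bottom of the loop moves E off
   LAST's predecessor list, which would invalidate a plain
   FOR_EACH_EDGE walk.  */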
5390 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5391 {
5392 basic_block bb = e->src;
5393 rtx jump;
5394
5395 if (bb == ENTRY_BLOCK_PTR)
5396 {
5397 ei_next (&ei2);
5398 continue;
5399 }
5400
5401 jump = BB_END (bb);
5402 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5403 {
5404 ei_next (&ei2);
5405 continue;
5406 }
5407
5408 /* If we have an unconditional jump, we can replace that
5409 with a simple return instruction. */
5410 if (simplejump_p (jump))
5411 {
5412 emit_return_into_block (bb);
5413 delete_insn (jump);
5414 }
5415
5416 /* If we have a conditional jump, we can try to replace
5417 that with a conditional return instruction. */
5418 else if (condjump_p (jump))
5419 {
5420 if (! redirect_jump (jump, 0, 0))
5421 {
5422 ei_next (&ei2);
5423 continue;
5424 }
5425
5426 /* If this block has only one successor, it both jumps
5427 and falls through to the fallthru block, so we can't
5428 delete the edge. */
5429 if (single_succ_p (bb))
5430 {
5431 ei_next (&ei2);
5432 continue;
5433 }
5434 }
5435 else
5436 {
5437 ei_next (&ei2);
5438 continue;
5439 }
5440
5441 /* Fix up the CFG for the successful change we just made. */
5442 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5443 }
5444
5445 /* Emit a return insn for the exit fallthru block. Whether
5446 this is still reachable will be determined later. */
5447
5448 emit_barrier_after (BB_END (last));
5449 emit_return_into_block (last);
5450 epilogue_end = BB_END (last);
5451 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
5452 goto epilogue_done;
5453 }
5454 }
5455 #endif
5456
5457 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5458 this marker for the splits of EH_RETURN patterns, and nothing else
5459 uses the flag in the meantime. */
5460 epilogue_completed = 1;
5461
5462 #ifdef HAVE_eh_return
5463 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5464 some targets, these get split to a special version of the epilogue
5465 code. In order to be able to properly annotate these with unwind
5466 info, try to split them now. If we get a valid split, drop an
5467 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5468 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5469 {
5470 rtx prev, last, trial;
5471
5472 if (e->flags & EDGE_FALLTHRU)
5473 continue;
5474 last = BB_END (e->src);
5475 if (!eh_returnjump_p (last))
5476 continue;
5477
5478 prev = PREV_INSN (last);
5479 trial = try_split (PATTERN (last), last, 1);
5480 if (trial == last)
5481 continue;
5482
5483 record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
5484 emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
5485 }
5486 #endif
5487
5488 /* Find the edge that falls through to EXIT. Other edges may exist
5489 due to RETURN instructions, but those don't need epilogues.
5490 There really shouldn't be a mixture -- either all should have
5491 been converted or none, however... */
5492
5493 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
5494 if (e == NULL)
5495 goto epilogue_done;
5496
5497 #ifdef HAVE_epilogue
5498 if (HAVE_epilogue)
5499 {
5500 start_sequence ();
5501 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5502 seq = gen_epilogue ();
5503 if (seq)
5504 emit_jump_insn (seq);
5505
5506 /* Retain a map of the epilogue insns. */
5507 record_insns (seq, NULL, &epilogue_insn_hash);
5508 set_insn_locators (seq, epilogue_locator);
5509
5510 seq = get_insns ();
5511 end_sequence ();
5512
5513 insert_insn_on_edge (seq, e);
5514 inserted = true;
5515 }
5516 else
5517 #endif
5518 {
5519 basic_block cur_bb;
5520
5521 if (! next_active_insn (BB_END (e->src)))
5522 goto epilogue_done;
5523 /* We have a fall-through edge to the exit block, the source is not
5524 at the end of the function, and there will be an assembler epilogue
5525 at the end of the function.
5526 We can't use force_nonfallthru here, because that would try to
5527 use return. Inserting a jump 'by hand' is extremely messy, so
5528 we take advantage of cfg_layout_finalize using
5529 fixup_fallthru_exit_predecessor. */
5530 cfg_layout_initialize (0);
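/* Chain each real block to its layout successor through the aux
   field; cfg_layout_finalize uses these links to relink the insn
   chain, calling fixup_fallthru_exit_predecessor along the way.  */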
5531 FOR_EACH_BB (cur_bb)
5532 if (cur_bb->index >= NUM_FIXED_BLOCKS
5533 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5534 cur_bb->aux = cur_bb->next_bb;
5535 cfg_layout_finalize ();
5536 }
5537 epilogue_done:
5538 default_rtl_profile ();
5539
5540 if (inserted)
5541 {
5542 commit_edge_insertions ();
5543
5544 /* The epilogue insns we inserted may cause the exit edge to no longer
5545 be fallthru. */
5546 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5547 {
5548 if (((e->flags & EDGE_FALLTHRU) != 0)
5549 && returnjump_p (BB_END (e->src)))
5550 e->flags &= ~EDGE_FALLTHRU;
5551 }
5552 }
5553
5554 #ifdef HAVE_sibcall_epilogue
5555 /* Emit sibling epilogues before any sibling call sites. */
5556 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5557 {
5558 basic_block bb = e->src;
5559 rtx insn = BB_END (bb);
5560
5561 if (!CALL_P (insn)
5562 || ! SIBLING_CALL_P (insn))
5563 {
5564 ei_next (&ei);
5565 continue;
5566 }
5567
5568 start_sequence ();
5569 emit_note (NOTE_INSN_EPILOGUE_BEG);
5570 emit_insn (gen_sibcall_epilogue ());
5571 seq = get_insns ();
5572 end_sequence ();
5573
5574 /* Retain a map of the epilogue insns. Used in life analysis to
5575 avoid getting rid of sibcall epilogue insns. Do this before we
5576 actually emit the sequence. */
5577 record_insns (seq, NULL, &epilogue_insn_hash);
5578 set_insn_locators (seq, epilogue_locator);
5579
5580 emit_insn_before (seq, insn);
5581 ei_next (&ei);
5582 }
5583 #endif
5584
5585 #ifdef HAVE_epilogue
5586 if (epilogue_end)
5587 {
5588 rtx insn, next;
5589
5590 /* Move any NOTE_INSN_FUNCTION_BEG notes that ended up after the
5591 start of the epilogue back before it, as those can be relevant
5592 for debug info generation. There is no need, however, to be
5593 quite so strict about whether such a note actually exists before
5594 moving it. */
5595 for (insn = epilogue_end; insn; insn = next)
5596 {
5597 next = NEXT_INSN (insn);
5598 if (NOTE_P (insn)
5599 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
5600 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5601 }
5602 }
5603 #endif
5604
5605 /* Threading the prologue and epilogue changes the artificial refs
5606 in the entry and exit blocks. */
5607 epilogue_completed = 1;
5608 df_update_entry_exit_and_calls ();
5609 }
5610
5611 /* Reposition the prologue-end and epilogue-begin notes after
5612 instruction scheduling. */
5613
5614 void
5615 reposition_prologue_and_epilogue_notes (void)
5616 {
5617 #if defined (HAVE_prologue) || defined (HAVE_epilogue) \
5618 || defined (HAVE_sibcall_epilogue)
5619 /* Since the hash table is created on demand, the fact that it is
5620 non-null is a signal that it is non-empty. */
5621 if (prologue_insn_hash != NULL)
5622 {
5623 size_t len = htab_elements (prologue_insn_hash);
5624 rtx insn, last = NULL, note = NULL;
5625
5626 /* Scan from the beginning until we reach the last prologue insn. */
5627 /* ??? While we do have the CFG intact, there are two problems:
5628 (1) The prologue can contain loops (typically probing the stack),
5629 which means that the end of the prologue isn't in the first bb.
5630 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb. */
5631 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5632 {
5633 if (NOTE_P (insn))
5634 {
5635 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
5636 note = insn;
5637 }
5638 else if (contains (insn, prologue_insn_hash))
5639 {
5640 last = insn;
5641 if (--len == 0)
5642 break;
5643 }
5644 }
5645
5646 if (last)
5647 {
5648 if (note == NULL)
5649 {
5650 /* Scan forward looking for the PROLOGUE_END note. It should
5651 be right at the beginning of the block, possibly with other
5652 insn notes that got moved there. */
5653 for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
5654 {
5655 if (NOTE_P (note)
5656 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5657 break;
5658 }
5659 }
5660
5661 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5662 if (LABEL_P (last))
5663 last = NEXT_INSN (last);
5664 reorder_insns (note, note, last);
5665 }
5666 }
5667
5668 if (epilogue_insn_hash != NULL)
5669 {
5670 edge_iterator ei;
5671 edge e;
5672
5673 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5674 {
5675 rtx insn, first = NULL, note = NULL;
5676 basic_block bb = e->src;
5677
5678 /* Scan from the beginning until we reach the first epilogue insn. */
5679 FOR_BB_INSNS (bb, insn)
5680 {
5681 if (NOTE_P (insn))
5682 {
5683 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5684 {
5685 note = insn;
5686 if (first != NULL)
5687 break;
5688 }
5689 }
5690 else if (first == NULL && contains (insn, epilogue_insn_hash))
5691 {
5692 first = insn;
5693 if (note != NULL)
5694 break;
5695 }
5696 }
5697
5698 if (note)
5699 {
5700 /* If the function has a single basic block, and no real
5701 epilogue insns (e.g. sibcall with no cleanup), the
5702 epilogue note can get scheduled before the prologue
5703 note. If we have frame-related prologue insns, having
5704 them scanned during the epilogue will result in a crash.
5705 In this case re-order the epilogue note to just before
5706 the last insn in the block. */
5707 if (first == NULL)
5708 first = BB_END (bb);
5709
5710 if (PREV_INSN (first) != note)
5711 reorder_insns (note, note, PREV_INSN (first));
5712 }
5713 }
5714 }
5715 #endif /* HAVE_prologue or HAVE_epilogue */
5716 }
5717
5718 /* Returns the name of the current function. */
5719 const char *
5720 current_function_name (void)
5721 {
5722 if (cfun == NULL)
5723 return "<none>";
5724 return lang_hooks.decl_printable_name (cfun->decl, 2);
5725 }
5726 \f
5727
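/* Record whether the current function uses only leaf registers,
   for targets that define LEAF_REGISTERS.  */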
5728 static unsigned int
5729 rest_of_handle_check_leaf_regs (void)
5730 {
5731 #ifdef LEAF_REGISTERS
5732 current_function_uses_only_leaf_regs
5733 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5734 #endif
5735 return 0;
5736 }
5737
5738 /* Insert a TYPE into the used types hash table of CFUN. */
5739
5740 static void
5741 used_types_insert_helper (tree type, struct function *func)
5742 {
5743 if (type != NULL && func != NULL)
5744 {
5745 void **slot;
5746
5747 if (func->used_types_hash == NULL)
5748 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5749 htab_eq_pointer, NULL);
5750 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5751 if (*slot == NULL)
5752 *slot = type;
5753 }
5754 }
5755
5756 /* Given a type, insert it into the used-types hash table of cfun. */
5757 void
5758 used_types_insert (tree t)
5759 {
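/* Peel off unnamed pointer and array wrappers so we record the
   innermost (or first named) type; e.g. for a plain (untypedef'ed)
   'struct S **' this records 'struct S' itself.  */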
5760 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5761 if (TYPE_NAME (t))
5762 break;
5763 else
5764 t = TREE_TYPE (t);
5765 if (TREE_CODE (t) == ERROR_MARK)
5766 return;
5767 if (TYPE_NAME (t) == NULL_TREE
5768 || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
5769 t = TYPE_MAIN_VARIANT (t);
5770 if (debug_info_level > DINFO_LEVEL_NONE)
5771 {
5772 if (cfun)
5773 used_types_insert_helper (t, cfun);
5774 else
5775 /* So this might be a type referenced by a global variable.
5776 Record that type so that we can later decide to emit its debug
5777 information. */
5778 VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
5779 }
5780 }
5781
5782 /* Helper to hash a struct types_used_by_vars_entry. */
5783
5784 static hashval_t
5785 hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
5786 {
5787 gcc_assert (entry && entry->var_decl && entry->type);
5788
5789 return iterative_hash_object (entry->type,
5790 iterative_hash_object (entry->var_decl, 0));
5791 }
5792
5793 /* Hash function of the types_used_by_vars_entry hash table. */
5794
5795 hashval_t
5796 types_used_by_vars_do_hash (const void *x)
5797 {
5798 const struct types_used_by_vars_entry *entry =
5799 (const struct types_used_by_vars_entry *) x;
5800
5801 return hash_types_used_by_vars_entry (entry);
5802 }
5803
5804 /* Equality function of the types_used_by_vars_entry hash table. */
5805
5806 int
5807 types_used_by_vars_eq (const void *x1, const void *x2)
5808 {
5809 const struct types_used_by_vars_entry *e1 =
5810 (const struct types_used_by_vars_entry *) x1;
5811 const struct types_used_by_vars_entry *e2 =
5812 (const struct types_used_by_vars_entry *)x2;
5813
5814 return (e1->var_decl == e2->var_decl && e1->type == e2->type);
5815 }
5816
5817 /* Inserts an entry into the types_used_by_vars_hash hash table. */
5818
5819 void
5820 types_used_by_var_decl_insert (tree type, tree var_decl)
5821 {
5822 if (type != NULL && var_decl != NULL)
5823 {
5824 void **slot;
5825 struct types_used_by_vars_entry e;
5826 e.var_decl = var_decl;
5827 e.type = type;
5828 if (types_used_by_vars_hash == NULL)
5829 types_used_by_vars_hash =
5830 htab_create_ggc (37, types_used_by_vars_do_hash,
5831 types_used_by_vars_eq, NULL);
5832 slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
5833 hash_types_used_by_vars_entry (&e), INSERT);
5834 if (*slot == NULL)
5835 {
5836 struct types_used_by_vars_entry *entry;
5837 entry = ggc_alloc_types_used_by_vars_entry ();
5838 entry->type = type;
5839 entry->var_decl = var_decl;
5840 *slot = entry;
5841 }
5842 }
5843 }
5844
5845 struct rtl_opt_pass pass_leaf_regs =
5846 {
5847 {
5848 RTL_PASS,
5849 "*leaf_regs", /* name */
5850 NULL, /* gate */
5851 rest_of_handle_check_leaf_regs, /* execute */
5852 NULL, /* sub */
5853 NULL, /* next */
5854 0, /* static_pass_number */
5855 TV_NONE, /* tv_id */
5856 0, /* properties_required */
5857 0, /* properties_provided */
5858 0, /* properties_destroyed */
5859 0, /* todo_flags_start */
5860 0 /* todo_flags_finish */
5861 }
5862 };
5863
5864 static unsigned int
5865 rest_of_handle_thread_prologue_and_epilogue (void)
5866 {
5867 if (optimize)
5868 cleanup_cfg (CLEANUP_EXPENSIVE);
5869
5870 /* On some machines, the prologue and epilogue code, or parts thereof,
5871 can be represented as RTL. Doing so lets us schedule insns between
5872 it and the rest of the code and also allows delayed branch
5873 scheduling to operate in the epilogue. */
5874 thread_prologue_and_epilogue_insns ();
5875
5876 /* The stack usage info is finalized during prologue expansion. */
5877 if (flag_stack_usage)
5878 output_stack_usage ();
5879
5880 return 0;
5881 }
5882
5883 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5884 {
5885 {
5886 RTL_PASS,
5887 "pro_and_epilogue", /* name */
5888 NULL, /* gate */
5889 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5890 NULL, /* sub */
5891 NULL, /* next */
5892 0, /* static_pass_number */
5893 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5894 0, /* properties_required */
5895 0, /* properties_provided */
5896 0, /* properties_destroyed */
5897 TODO_verify_flow, /* todo_flags_start */
5898 TODO_dump_func |
5899 TODO_df_verify |
5900 TODO_df_finish | TODO_verify_rtl_sharing |
5901 TODO_ggc_collect /* todo_flags_finish */
5902 }
5903 };
5904 \f
5905
5906 /* This mini-pass fixes fall-out from SSA in asm statements that have
5907 in-out constraints. Say you start with
5908
5909 orig = inout;
5910 asm ("": "+mr" (inout));
5911 use (orig);
5912
5913 which is transformed very early to use explicit output and match operands:
5914
5915 orig = inout;
5916 asm ("": "=mr" (inout) : "0" (inout));
5917 use (orig);
5918
5919 Or, after SSA and copyprop,
5920
5921 asm ("": "=mr" (inout_2) : "0" (inout_1));
5922 use (inout_1);
5923
5924 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5925 they represent two separate values, so they will get different pseudo
5926 registers during expansion. Then, since the two operands need to match
5927 per the constraints, but use different pseudo registers, reload
5928 can only resolve this by generating a reload, which can only be
5929 satisfied by hardregs, not by memory, so we need a register for this
5930 reload, just because we are presented with non-matching operands.
5931 So, even though we allow memory for this operand, no memory can be
5932 used for it, just because the two operands don't match. This can
5933 cause reload failures on register-starved targets.
5934
5935 So it's a symptom of reload not being able to use memory for reloads,
5936 or alternatively a symptom of the two operands not coming into
5937 reload as matching (in which case the pseudo could go to memory just
5938 fine, as the alternative allows it, and no reload would be necessary).
5939 We fix the latter problem here, by transforming
5940
5941 asm ("": "=mr" (inout_2) : "0" (inout_1));
5942
5943 back to
5944
5945 inout_2 = inout_1;
5946 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
5947
5948 static void
5949 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5950 {
5951 int i;
5952 bool changed = false;
5953 rtx op = SET_SRC (p_sets[0]);
5954 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5955 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
5956 bool *output_matched = XALLOCAVEC (bool, noutputs);
5957
5958 memset (output_matched, 0, noutputs * sizeof (bool));
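/* output_matched[J] notes that output J has already been chosen as
   the replacement for some input, so a later input with the same
   matching number is left alone (see the comment below).  */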
5959 for (i = 0; i < ninputs; i++)
5960 {
5961 rtx input, output, insns;
5962 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5963 char *end;
5964 int match, j;
5965
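/* A matching constraint is a decimal operand number, possibly
   preceded by the commutative marker '%'.  For non-numeric
   constraints strtoul leaves END == CONSTRAINT and we skip the
   input.  */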
5966 if (*constraint == '%')
5967 constraint++;
5968
5969 match = strtoul (constraint, &end, 10);
5970 if (end == constraint)
5971 continue;
5972
5973 gcc_assert (match < noutputs);
5974 output = SET_DEST (p_sets[match]);
5975 input = RTVEC_ELT (inputs, i);
5976 /* Only do the transformation for pseudos. */
5977 if (! REG_P (output)
5978 || rtx_equal_p (output, input)
5979 || (GET_MODE (input) != VOIDmode
5980 && GET_MODE (input) != GET_MODE (output)))
5981 continue;
5982
5983 /* We can't do anything if the output is also used as input,
5984 as we're going to overwrite it. */
5985 for (j = 0; j < ninputs; j++)
5986 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5987 break;
5988 if (j != ninputs)
5989 continue;
5990
5991 /* Avoid changing the same input several times. For
5992 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5993 change 'in' only once (to 'out1'), rather than changing it
5994 first to 'out1' and afterwards to 'out2'. */
5995 if (i > 0)
5996 {
5997 for (j = 0; j < noutputs; j++)
5998 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5999 break;
6000 if (j != noutputs)
6001 continue;
6002 }
6003 output_matched[match] = true;
6004
6005 start_sequence ();
6006 emit_move_insn (output, input);
6007 insns = get_insns ();
6008 end_sequence ();
6009 emit_insn_before (insns, insn);
6010
6011 /* Now replace all mentions of the input with output. We can't
6012 just replace the occurrence in inputs[i], as the register might
6013 also be used in some other input (or even in an address of an
6014 output), which would mean possibly increasing the number of
6015 inputs by one (namely 'output' in addition), which might pose
6016 too complicated a problem for reload to solve. E.g. this situation:
6017
6018 asm ("" : "=r" (output), "=m" (input) : "0" (input))
6019
6020 Here 'input' is used in two occurrences as input (once for the
6021 input operand, once for the address in the second output operand).
6022 If we replaced only the occurrence of the input operand (to
6023 make it match) we would be left with this:
6024
6025 output = input
6026 asm ("" : "=r" (output), "=m" (input) : "0" (output))
6027
6028 Now we suddenly have two different input values (containing the same
6029 value, but different pseudos) where we formerly had only one.
6030 With more complicated asms this might lead to reload failures
6031 which wouldn't have happened without this pass. So, iterate over
6032 all operands and replace all occurrences of the register used. */
6033 for (j = 0; j < noutputs; j++)
6034 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
6035 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
6036 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
6037 input, output);
6038 for (j = 0; j < ninputs; j++)
6039 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
6040 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
6041 input, output);
6042
6043 changed = true;
6044 }
6045
6046 if (changed)
6047 df_insn_rescan (insn);
6048 }
6049
6050 static unsigned
6051 rest_of_match_asm_constraints (void)
6052 {
6053 basic_block bb;
6054 rtx insn, pat, *p_sets;
6055 int noutputs;
6056
6057 if (!crtl->has_asm_statement)
6058 return 0;
6059
6060 df_set_flags (DF_DEFER_INSN_RESCAN);
6061 FOR_EACH_BB (bb)
6062 {
6063 FOR_BB_INSNS (bb, insn)
6064 {
6065 if (!INSN_P (insn))
6066 continue;
6067
6068 pat = PATTERN (insn);
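/* An asm with several outputs (or with clobbers) is wrapped in a
   PARALLEL whose leading elements are the output SETs; a
   single-output asm is a bare SET.  NOUTPUTS below may thus
   overcount (clobbers included), which is harmless since matching
   constraints only ever name real outputs.  */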
6069 if (GET_CODE (pat) == PARALLEL)
6070 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
6071 else if (GET_CODE (pat) == SET)
6072 p_sets = &PATTERN (insn), noutputs = 1;
6073 else
6074 continue;
6075
6076 if (GET_CODE (*p_sets) == SET
6077 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
6078 match_asm_constraints_1 (insn, p_sets, noutputs);
6079 }
6080 }
6081
6082 return TODO_df_finish;
6083 }
6084
6085 struct rtl_opt_pass pass_match_asm_constraints =
6086 {
6087 {
6088 RTL_PASS,
6089 "asmcons", /* name */
6090 NULL, /* gate */
6091 rest_of_match_asm_constraints, /* execute */
6092 NULL, /* sub */
6093 NULL, /* next */
6094 0, /* static_pass_number */
6095 TV_NONE, /* tv_id */
6096 0, /* properties_required */
6097 0, /* properties_provided */
6098 0, /* properties_destroyed */
6099 0, /* todo_flags_start */
6100 TODO_dump_func /* todo_flags_finish */
6101 }
6102 };
6103
6104
6105 #include "gt-function.h"