/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "tm_p.h"
#include "integrate.h"
#include "langhooks.h"
#include "target.h"
#include "cfglayout.h"
#include "tree-gimple.h"
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
#include "timevar.h"
#include "vecprim.h"

/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases use the macros NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
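
/* Editorial illustration, not part of the original source: with ALIGN a
   power of two and two's-complement arithmetic, the mask operations above
   round toward negative and positive infinity respectively, which is what
   makes them safe for negative frame offsets.  A minimal sketch:  */
#if 0
#include <assert.h>

static void
round_macros_example (void)
{
  assert (FLOOR_ROUND (37, 16) == 32);  /* Round down to a multiple of 16.  */
  assert (CEIL_ROUND (37, 16) == 48);   /* Round up to a multiple of 16.  */
  assert (FLOOR_ROUND (-5, 8) == -8);   /* Negative values round downward,  */
  assert (CEIL_ROUND (-5, 8) == 0);     /* and upward, toward zero here.  */
}
#endif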

/* Nonzero if function being compiled doesn't contain any calls
   (ignoring the prologue and epilogue).  This is set prior to
   local register allocation and is valid for the remaining
   compiler passes.  */
int current_function_is_leaf;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   pass_stack_ptr_mod has run.  */
int current_function_sp_is_unchanging;

/* Nonzero if the function being compiled is a leaf function which only
   uses leaf registers.  This is valid after reload (specifically after
   sched2) and is useful only if the port defines LEAF_REGISTERS.  */
int current_function_uses_only_leaf_regs;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
static VEC(int,heap) *prologue;
static VEC(int,heap) *epilogue;

/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
   in this function.  */
static VEC(int,heap) *sibcall_epilogue;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
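
/* Editorial walkthrough, not part of the original source, of the level
   discipline described above (hypothetical statement sequence):

     push_temp_slots ();              enter level N+1
     t1 = assign_stack_temp (...);    t1->level == N+1
     preserve_temp_slots (t1);        t1 moves to level N
     pop_temp_slots ();               frees the rest of level N+1; t1 survives
     free_temp_slots ();              frees t1 as well, unless t1->keep  */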

struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;

  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
\f
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
static int contains (const_rtx, VEC(int,heap) **);
#ifdef HAVE_return
static void emit_return_into_block (basic_block);
#endif
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (tree decl)
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->outer)
    if (p->decl == decl)
      return p;

  gcc_unreachable ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  cfun->outer = outer_function_chain;
  outer_function_chain = cfun;
  set_cfun (NULL);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = outer_function_chain;

  set_cfun (p);
  outer_function_chain = p->outer;
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  VEC_free (int, heap, prologue);
  VEC_free (int, heap, epilogue);
  VEC_free (int, heap, sibcall_epilogue);
  if (crtl->emit.regno_pointer_align)
    free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
}
\f
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
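
/* Editorial note, not part of the original source: frame_offset is the
   running allocation cursor used by assign_stack_local below.  When
   FRAME_GROWS_DOWNWARD it moves toward negative values, so after
   allocating 16 and then 8 bytes, frame_offset == -24 and
   get_frame_size () returns 24.  */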

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	     /* Leave room for the fixed part of the frame.  */
	     - 64 * UNITS_PER_WORD)
    {
      error ("%Jtotal size of local objects too large", func);
      return TRUE;
    }

  return FALSE;
}

/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  rtx x, addr;
  int bigend_correction = 0;
  unsigned int alignment, alignment_in_bits;
  int frame_off, frame_alignment, frame_phase;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  if (FRAME_GROWS_DOWNWARD)
    frame_offset -= size;

  /* If the requested alignment exceeds what the stack boundary is
     expected to provide, cap it at the preferred stack boundary.  */
  if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
    alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The default is
     to always honor requests to align the stack but a port may choose to
     do its own stack alignment by defining STACK_ALIGNMENT_NEEDED.  */
  if (STACK_ALIGNMENT_NEEDED
      || mode != BLKmode
      || size != 0)
    {
      /* We must be careful here, since FRAME_OFFSET might be negative and
	 division with a negative dividend isn't as well defined as we might
	 like.  So we instead assume that ALIGNMENT is a power of two and
	 use logical operations which are unambiguous.  */
      if (FRAME_GROWS_DOWNWARD)
	frame_offset
	  = (FLOOR_ROUND (frame_offset - frame_phase,
			  (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
      else
	frame_offset
	  = (CEIL_ROUND (frame_offset - frame_phase,
			 (unsigned HOST_WIDE_INT) alignment)
	     + frame_phase);
    }

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (frame_offset + bigend_correction,
			   Pmode));

  if (!FRAME_GROWS_DOWNWARD)
    frame_offset += size;

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
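
/* Editorial usage sketch, not part of the original source, of the ALIGN
   convention documented above; the function name is hypothetical.  */
#if 0
static void
assign_stack_local_example (void)
{
  /* ALIGN == 0: alignment determined by the mode.  */
  rtx a = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
  /* ALIGN == -1: BIGGEST_ALIGNMENT, with SIZE rounded up to match.  */
  rtx b = assign_stack_local (BLKmode, 100, -1);
  /* ALIGN == -2: byte alignment only.  */
  rtx c = assign_stack_local (QImode, 1, -2);
  /* Positive ALIGN: an explicit boundary, here 128 bits.  */
  rtx d = assign_stack_local (BLKmode, 32, 128);
}
#endif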
\f
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}

/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  if (*list)
    (*list)->prev = temp;
  temp->prev = NULL;
  *list = temp;
}

/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
    VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);

  return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
}

/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return VEC_length (temp_slot_p, used_temp_slots) - 1;
}

/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}

/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP values of 2 or 3 were needed respectively
   for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
   or for SAVE_EXPRs, but they are now unused.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    int keep, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  /* These are now unused.  */
  gcc_assert (keep <= 1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = GGC_NEW (struct temp_slot);
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->address = 0;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = GGC_NEW (struct temp_slot);

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}
      p->address = 0;

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->addr_taken = 0;
  p->type = type;
  p->level = temp_slot_level;
  p->keep = keep;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, (AGGREGATE_TYPE_P (type)
				  || TREE_CODE (type) == COMPLEX_TYPE));
    }
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}

/* Allocate a temporary stack slot and record it for possible later
   reuse.  The first three arguments are the same as in the preceding
   function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
{
  return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
}
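
/* Editorial usage sketch, not part of the original source: allocating a
   temporary for a value of TYPE and releasing it at end of statement;
   the function name is hypothetical.  */
#if 0
static rtx
temp_slot_example (tree type)
{
  /* KEEP == 0: the slot is recycled by the next free_temp_slots ().  */
  rtx t = assign_stack_temp_for_type (TYPE_MODE (type),
				      int_size_in_bytes (type), 0, type);
  /* ... emit code that stores into and reads back from T ...  */
  free_temp_slots ();
  return t;
}
#endif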
\f
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate an object of the given type.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int keep, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero-sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     optimizing at high levels.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  rtx next;
  int i;

  for (i = max_slot_level (); i >= 0; i--)
    for (p = *temp_slots_at_level (i); p; p = p->next)
      {
	if (XEXP (p->slot, 0) == x
	    || p->address == x
	    || (GET_CODE (x) == PLUS
		&& XEXP (x, 0) == virtual_stack_vars_rtx
		&& GET_CODE (XEXP (x, 1)) == CONST_INT
		&& INTVAL (XEXP (x, 1)) >= p->base_offset
		&& INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
	  return p;

	else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	  for (next = p->address; next; next = XEXP (next, 1))
	    if (XEXP (next, 0) == x)
	      return p;
      }

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (rtx old, rtx new)
{
  struct temp_slot *p;

  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW is a
     register, see if one operand of the PLUS is a temporary location;
     if so, NEW points into it.  Otherwise, if both OLD and NEW are
     PLUS expressions, look for a register in common between them and,
     if one is found, try a recursive call on the remaining operands.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (REG_P (new))
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (rtx x)
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
	{
	  next = p->next;

	  if (p->addr_taken)
	    move_slot_to_level (p, temp_slot_level - 1);
	}

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
	    {
	      next = q->next;

	      if (p != q && q->addr_taken)
		move_slot_to_level (q, temp_slot_level - 1);
	    }

	  move_slot_to_level (p, temp_slot_level - 1);
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	move_slot_to_level (p, temp_slot_level - 1);
    }
}

/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;

      if (!p->keep)
	make_slot_available (p);
    }

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  struct temp_slot *p, *next;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
    }

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  used_temp_slots = 0;
  temp_slot_level = 0;
}
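
/* Editorial usage sketch, not part of the original source, of the nesting
   protocol above, roughly as expansion drives it around a ({...})
   statement expression; the function name is hypothetical.  */
#if 0
static void
temp_level_example (tree type)
{
  push_temp_slots ();
  /* Expand the statement expression; its value lands in RES.  */
  rtx res = assign_stack_temp (TYPE_MODE (type), int_size_in_bytes (type), 0);
  /* Move the slot holding RES up one level so the enclosing pop
     does not free it.  */
  preserve_temp_slots (res);
  pop_temp_slots ();  /* Frees every other slot of this level.  */
}
#endif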
\f
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather is part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL) \
  ((ACCUMULATE_OUTGOING_ARGS \
    ? (crtl->outgoing_args_size \
       + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE \
					  : TREE_TYPE (FNDECL))) ? 0 \
	  : REG_PARM_STACK_SPACE (FNDECL))) \
    : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
  ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0) \
   + (STACK_POINTER_OFFSET))
#endif
#endif

\f
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    new = arg_pointer_rtx, offset = in_arg_offset;
  else if (x == virtual_stack_vars_rtx)
    new = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new = frame_pointer_rtx;
#else
      new = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new;
}
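
/* Editorial example, not part of the original source: given the mapping
   above, an address such as

     (plus (reg virtual-stack-vars) (const_int 8))

   is rewritten during instantiation into

     (plus (reg frame-pointer) (const_int var_offset + 8))

   where var_offset is STARTING_FRAME_OFFSET for the function.  */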

/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   to true.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new = instantiate_new_reg (x, &offset);
      if (new)
	{
	  *loc = plus_constant (new, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new)
	{
	  new = plus_constant (new, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}

/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static int
safe_insn_predicate (int code, int operand, rtx x)
{
  const struct insn_operand_data *op_data;

  if (code < 0)
    return true;

  op_data = &insn_data[code].operand[operand];
  if (op_data->predicate == NULL)
    return true;

  return op_data->predicate (x, op_data->mode);
}

/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new = instantiate_new_reg (SET_DEST (set), &offset);
      if (new)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
				   GEN_INT (-offset));
	  x = force_operand (x, new);
	  if (x != new)
	    emit_move_insn (new, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new = instantiate_new_reg (SET_SRC (set), &offset);
      if (new && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
				   new, GEN_INT (offset), SET_DEST (set),
				   1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && GET_CODE (recog_data.operand[2]) == CONST_INT
	  && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new = instantiate_new_reg (x, &offset);
	  if (new == NULL)
	    continue;
	  if (offset == 0)
	    x = new;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new,
				       GEN_INT (offset), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new = expand_simple_binop (GET_MODE (new), PLUS, new,
					 GEN_INT (offset), NULL_RTX,
					 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new,
				   GET_MODE (new), SUBREG_BYTE (x));
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}

/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}

/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t) && DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
    }
  return NULL;
}

/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}

/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));
}

/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
	if (GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ADDR_VEC
	    || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
	  continue;

	instantiate_virtual_regs_in_insn (insn);

	if (INSN_DELETED_P (insn))
	  continue;

	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (GET_CODE (insn) == CALL_INSN)
	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
			instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;
  return 0;
}

struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",				/* name */
  NULL,					/* gate */
  instantiate_virtual_regs,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};

\f
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  int i, regno, nregs;
  rtx reg;

  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);

  /* DECL node associated with FNTYPE when relevant, which we might need to
     check for by-invisible-reference returns, typically for CALL_EXPR input
     EXPressions.  */
  const_tree fndecl = NULL_TREE;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	fndecl = get_callee_fndecl (fntype);
	fntype = fndecl ? TREE_TYPE (fndecl) : 0;
	break;
      case FUNCTION_DECL:
	fndecl = fntype;
	fntype = TREE_TYPE (fndecl);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = 0;
	break;
      default:
	/* We don't expect other tree codes here.  */
	gcc_unreachable ();
      }

  if (TREE_CODE (type) == VOID_TYPE)
    return 0;

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
     called function RESULT_DECL, meaning the function returns in memory by
     invisible reference.  This check lets front-ends not set TREE_ADDRESSABLE
     on the function type, which used to be the way to request such a return
     mechanism but might now be causing troubles at gimplification time if
     temporaries with the function type need to be created.  */
  if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
      && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;
  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;
  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;
  return 0;
}
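
/* Editorial example, not part of the original source: on a typical
   target a small scalar struct may be returned in a register, while a
   TREE_ADDRESSABLE type (e.g. a C++ class with a nontrivial copy
   constructor) always makes aggregate_value_p return 1, forcing a
   memory return through a hidden pointer argument.  */
#if 0
struct small { int i; };	/* May be returned in a register.  */
struct big { char buf[64]; };	/* Usually forces a return in memory.  */
#endif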
1770 \f
1771 /* Return true if we should assign DECL a pseudo register; false if it
1772 should live on the local stack. */
1773
1774 bool
1775 use_register_for_decl (const_tree decl)
1776 {
1777 if (!targetm.calls.allocate_stack_slots_for_args())
1778 return true;
1779
1780 /* Honor volatile. */
1781 if (TREE_SIDE_EFFECTS (decl))
1782 return false;
1783
1784 /* Honor addressability. */
1785 if (TREE_ADDRESSABLE (decl))
1786 return false;
1787
1788 /* Only register-like things go in registers. */
1789 if (DECL_MODE (decl) == BLKmode)
1790 return false;
1791
1792 /* If -ffloat-store specified, don't put explicit float variables
1793 into registers. */
1794 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1795 propagates values across these stores, and it probably shouldn't. */
1796 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1797 return false;
1798
1799 /* If we're not interested in tracking debugging information for
1800 this decl, then we can certainly put it in a register. */
1801 if (DECL_IGNORED_P (decl))
1802 return true;
1803
1804 return (optimize || DECL_REGISTER (decl));
1805 }
1806
1807 /* Return true if TYPE should be passed by invisible reference. */
1808
1809 bool
1810 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1811 tree type, bool named_arg)
1812 {
1813 if (type)
1814 {
1815 /* If this type contains non-trivial constructors, then it is
1816 forbidden for the middle-end to create any new copies. */
1817 if (TREE_ADDRESSABLE (type))
1818 return true;
1819
1820 /* GCC post 3.4 passes *all* variable sized types by reference. */
1821 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1822 return true;
1823 }
1824
1825 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1826 }
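
/* A minimal sketch of the pass_by_reference target hook consulted
   above, assuming a hypothetical target that passes anything wider
   than two words by invisible reference (real implementations live in
   config/<target>/<target>.c and are ABI-specific):

       static bool
       example_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
                                  enum machine_mode mode, const_tree type,
                                  bool named ATTRIBUTE_UNUSED)
       {
         HOST_WIDE_INT size
           = type ? int_size_in_bytes (type)
                  : (HOST_WIDE_INT) GET_MODE_SIZE (mode);
         return size < 0 || size > 2 * UNITS_PER_WORD;
       }

   A negative size means a variable-sized type, which such a target
   would also pass by reference. */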
1827
1828 /* Return true if TYPE, which is passed by reference, should be callee
1829 copied instead of caller copied. */
1830
1831 bool
1832 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1833 tree type, bool named_arg)
1834 {
1835 if (type && TREE_ADDRESSABLE (type))
1836 return false;
1837 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1838 }
1839
1840 /* Structures to communicate between the subroutines of assign_parms.
1841 The first holds data persistent across all parameters, the second
1842 is cleared out for each parameter. */
1843
1844 struct assign_parm_data_all
1845 {
1846 CUMULATIVE_ARGS args_so_far;
1847 struct args_size stack_args_size;
1848 tree function_result_decl;
1849 tree orig_fnargs;
1850 rtx first_conversion_insn;
1851 rtx last_conversion_insn;
1852 HOST_WIDE_INT pretend_args_size;
1853 HOST_WIDE_INT extra_pretend_bytes;
1854 int reg_parm_stack_space;
1855 };
1856
1857 struct assign_parm_data_one
1858 {
1859 tree nominal_type;
1860 tree passed_type;
1861 rtx entry_parm;
1862 rtx stack_parm;
1863 enum machine_mode nominal_mode;
1864 enum machine_mode passed_mode;
1865 enum machine_mode promoted_mode;
1866 struct locate_and_pad_arg_data locate;
1867 int partial;
1868 BOOL_BITFIELD named_arg : 1;
1869 BOOL_BITFIELD passed_pointer : 1;
1870 BOOL_BITFIELD on_stack : 1;
1871 BOOL_BITFIELD loaded_in_reg : 1;
1872 };
1873
1874 /* A subroutine of assign_parms. Initialize ALL. */
1875
1876 static void
1877 assign_parms_initialize_all (struct assign_parm_data_all *all)
1878 {
1879 tree fntype;
1880
1881 memset (all, 0, sizeof (*all));
1882
1883 fntype = TREE_TYPE (current_function_decl);
1884
1885 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1886 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1887 #else
1888 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1889 current_function_decl, -1);
1890 #endif
1891
1892 #ifdef REG_PARM_STACK_SPACE
1893 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1894 #endif
1895 }
1896
1897 /* If ARGS contains entries with complex types, split the entry into two
1898 entries of the component type. Return a new list if substitutions are
1899 needed, else the old list. */
1900
1901 static tree
1902 split_complex_args (tree args)
1903 {
1904 tree p;
1905
1906 /* Before allocating memory, check for the common case of no complex. */
1907 for (p = args; p; p = TREE_CHAIN (p))
1908 {
1909 tree type = TREE_TYPE (p);
1910 if (TREE_CODE (type) == COMPLEX_TYPE
1911 && targetm.calls.split_complex_arg (type))
1912 goto found;
1913 }
1914 return args;
1915
1916 found:
1917 args = copy_list (args);
1918
1919 for (p = args; p; p = TREE_CHAIN (p))
1920 {
1921 tree type = TREE_TYPE (p);
1922 if (TREE_CODE (type) == COMPLEX_TYPE
1923 && targetm.calls.split_complex_arg (type))
1924 {
1925 tree decl;
1926 tree subtype = TREE_TYPE (type);
1927 bool addressable = TREE_ADDRESSABLE (p);
1928
1929 /* Rewrite the PARM_DECL's type with its component. */
1930 TREE_TYPE (p) = subtype;
1931 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1932 DECL_MODE (p) = VOIDmode;
1933 DECL_SIZE (p) = NULL;
1934 DECL_SIZE_UNIT (p) = NULL;
1935 /* If this arg must go in memory, put it in a pseudo here.
1936 We can't allow it to go in memory as per normal parms,
1937 because the usual place might not have the imag part
1938 adjacent to the real part. */
1939 DECL_ARTIFICIAL (p) = addressable;
1940 DECL_IGNORED_P (p) = addressable;
1941 TREE_ADDRESSABLE (p) = 0;
1942 layout_decl (p, 0);
1943
1944 /* Build a second synthetic decl. */
1945 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
1946 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
1947 DECL_ARTIFICIAL (decl) = addressable;
1948 DECL_IGNORED_P (decl) = addressable;
1949 layout_decl (decl, 0);
1950
1951 /* Splice it in; skip the new decl. */
1952 TREE_CHAIN (decl) = TREE_CHAIN (p);
1953 TREE_CHAIN (p) = decl;
1954 p = decl;
1955 }
1956 }
1957
1958 return args;
1959 }
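
/* For example (illustrative), with a target whose split_complex_arg
   hook accepts complex doubles, a parameter declared as

       void f (__complex__ double z);

   is rewritten by the loop above into two double PARM_DECLs: Z itself
   is narrowed to its component type, and a second synthetic decl for
   the imaginary part is chained right after it. */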
1960
1961 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
1962 the hidden struct return argument, and (ABI willing) complex args.
1963 Return the new parameter list. */
1964
1965 static tree
1966 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
1967 {
1968 tree fndecl = current_function_decl;
1969 tree fntype = TREE_TYPE (fndecl);
1970 tree fnargs = DECL_ARGUMENTS (fndecl);
1971
1972 /* If struct value address is treated as the first argument, make it so. */
1973 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
1974 && ! cfun->returns_pcc_struct
1975 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
1976 {
1977 tree type = build_pointer_type (TREE_TYPE (fntype));
1978 tree decl;
1979
1980 decl = build_decl (PARM_DECL, NULL_TREE, type);
1981 DECL_ARG_TYPE (decl) = type;
1982 DECL_ARTIFICIAL (decl) = 1;
1983 DECL_IGNORED_P (decl) = 1;
1984
1985 TREE_CHAIN (decl) = fnargs;
1986 fnargs = decl;
1987 all->function_result_decl = decl;
1988 }
1989
1990 all->orig_fnargs = fnargs;
1991
1992 /* If the target wants to split complex arguments into scalars, do so. */
1993 if (targetm.calls.split_complex_arg)
1994 fnargs = split_complex_args (fnargs);
1995
1996 return fnargs;
1997 }
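
/* Illustratively: on a target whose struct_value_rtx hook returns 0
   (no dedicated struct-value register), a function such as

       struct S f (void);     where aggregate_value_p (S) != 0

   is laid out here as if it had been declared

       void f (struct S *hidden);

   with HIDDEN being the synthetic PARM_DECL recorded in
   ALL->function_result_decl above. */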
1998
1999 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2000 data for the parameter. Incorporate ABI specifics such as pass-by-
2001 reference and type promotion. */
2002
2003 static void
2004 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2005 struct assign_parm_data_one *data)
2006 {
2007 tree nominal_type, passed_type;
2008 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2009
2010 memset (data, 0, sizeof (*data));
2011
2012 /* NAMED_ARG is a misnomer. We really mean 'non-variadic'. */
2013 if (!cfun->stdarg)
2014 data->named_arg = 1; /* No variadic parms. */
2015 else if (TREE_CHAIN (parm))
2016 data->named_arg = 1; /* Not the last non-variadic parm. */
2017 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2018 data->named_arg = 1; /* Only variadic ones are unnamed. */
2019 else
2020 data->named_arg = 0; /* Treat as variadic. */
2021
2022 nominal_type = TREE_TYPE (parm);
2023 passed_type = DECL_ARG_TYPE (parm);
2024
2025 /* Look out for errors propagating this far. Also, if the parameter's
2026 type is void then its value doesn't matter. */
2027 if (TREE_TYPE (parm) == error_mark_node
2028 /* This can happen after weird syntax errors
2029 or if an enum type is defined among the parms. */
2030 || TREE_CODE (parm) != PARM_DECL
2031 || passed_type == NULL
2032 || VOID_TYPE_P (nominal_type))
2033 {
2034 nominal_type = passed_type = void_type_node;
2035 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2036 goto egress;
2037 }
2038
2039 /* Find mode of arg as it is passed, and mode of arg as it should be
2040 during execution of this function. */
2041 passed_mode = TYPE_MODE (passed_type);
2042 nominal_mode = TYPE_MODE (nominal_type);
2043
2044 /* If the parm is to be passed as a transparent union, use the type of
2045 the first field for the tests below. We have already verified that
2046 the modes are the same. */
2047 if (TREE_CODE (passed_type) == UNION_TYPE
2048 && TYPE_TRANSPARENT_UNION (passed_type))
2049 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2050
2051 /* See if this arg was passed by invisible reference. */
2052 if (pass_by_reference (&all->args_so_far, passed_mode,
2053 passed_type, data->named_arg))
2054 {
2055 passed_type = nominal_type = build_pointer_type (passed_type);
2056 data->passed_pointer = true;
2057 passed_mode = nominal_mode = Pmode;
2058 }
2059
2060 /* Find mode as it is passed by the ABI. */
2061 promoted_mode = passed_mode;
2062 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2063 {
2064 int unsignedp = TYPE_UNSIGNED (passed_type);
2065 promoted_mode = promote_mode (passed_type, promoted_mode,
2066 &unsignedp, 1);
2067 }
2068
2069 egress:
2070 data->nominal_type = nominal_type;
2071 data->passed_type = passed_type;
2072 data->nominal_mode = nominal_mode;
2073 data->passed_mode = passed_mode;
2074 data->promoted_mode = promoted_mode;
2075 }
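
/* As an illustration, on a target that promotes function arguments
   and defines PROMOTE_MODE to widen sub-word integers, a "short"
   parameter ends up with nominal_mode == HImode but promoted_mode ==
   SImode, while a by-invisible-reference aggregate leaves this
   function with passed_mode == nominal_mode == Pmode and
   passed_pointer set. */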
2076
2077 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
2078
2079 static void
2080 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2081 struct assign_parm_data_one *data, bool no_rtl)
2082 {
2083 int varargs_pretend_bytes = 0;
2084
2085 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2086 data->promoted_mode,
2087 data->passed_type,
2088 &varargs_pretend_bytes, no_rtl);
2089
2090 /* If the back-end has requested extra stack space, record how much is
2091 needed. Do not change pretend_args_size otherwise since it may be
2092 nonzero from an earlier partial argument. */
2093 if (varargs_pretend_bytes > 0)
2094 all->pretend_args_size = varargs_pretend_bytes;
2095 }
2096
2097 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2098 the incoming location of the current parameter. */
2099
2100 static void
2101 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2102 struct assign_parm_data_one *data)
2103 {
2104 HOST_WIDE_INT pretend_bytes = 0;
2105 rtx entry_parm;
2106 bool in_regs;
2107
2108 if (data->promoted_mode == VOIDmode)
2109 {
2110 data->entry_parm = data->stack_parm = const0_rtx;
2111 return;
2112 }
2113
2114 #ifdef FUNCTION_INCOMING_ARG
2115 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2116 data->passed_type, data->named_arg);
2117 #else
2118 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2119 data->passed_type, data->named_arg);
2120 #endif
2121
2122 if (entry_parm == 0)
2123 data->promoted_mode = data->passed_mode;
2124
2125 /* Determine parm's home in the stack, in case it arrives in the stack
2126 or we should pretend it did. Compute the stack position and rtx where
2127 the argument arrives and its size.
2128
2129 There is one complexity here: If this was a parameter that would
2130 have been passed in registers, but wasn't only because it is
2131 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2132 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2133 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2134 as it was the previous time. */
2135 in_regs = entry_parm != 0;
2136 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2137 in_regs = true;
2138 #endif
2139 if (!in_regs && !data->named_arg)
2140 {
2141 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2142 {
2143 rtx tem;
2144 #ifdef FUNCTION_INCOMING_ARG
2145 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2146 data->passed_type, true);
2147 #else
2148 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2149 data->passed_type, true);
2150 #endif
2151 in_regs = tem != NULL;
2152 }
2153 }
2154
2155 /* If this parameter was passed both in registers and in the stack, use
2156 the copy on the stack. */
2157 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
2158 data->passed_type))
2159 entry_parm = 0;
2160
2161 if (entry_parm)
2162 {
2163 int partial;
2164
2165 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2166 data->promoted_mode,
2167 data->passed_type,
2168 data->named_arg);
2169 data->partial = partial;
2170
2171 /* The caller might already have allocated stack space for the
2172 register parameters. */
2173 if (partial != 0 && all->reg_parm_stack_space == 0)
2174 {
2175 /* Part of this argument is passed in registers and part
2176 is passed on the stack. Ask the prologue code to extend
2177 the stack part so that we can recreate the full value.
2178
2179 PRETEND_BYTES is the size of the registers we need to store.
2180 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2181 stack space that the prologue should allocate.
2182
2183 Internally, gcc assumes that the argument pointer is aligned
2184 to STACK_BOUNDARY bits. This is used both for alignment
2185 optimizations (see init_emit) and to locate arguments that are
2186 aligned to more than PARM_BOUNDARY bits. We must preserve this
2187 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2188 a stack boundary. */
2189
2190 /* We assume at most one partial arg, and it must be the first
2191 argument on the stack. */
2192 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2193
2194 pretend_bytes = partial;
2195 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
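/* E.g. 12 bytes of this argument in registers with STACK_BYTES == 16
   yields 16 pretend bytes, keeping the stacked args aligned to
   STACK_BOUNDARY as required by the invariant described above. */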
2196
2197 /* We want to align relative to the actual stack pointer, so
2198 don't include this in the stack size until later. */
2199 all->extra_pretend_bytes = all->pretend_args_size;
2200 }
2201 }
2202
2203 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2204 entry_parm ? data->partial : 0, current_function_decl,
2205 &all->stack_args_size, &data->locate);
2206
2207 /* Adjust offsets to include the pretend args. */
2208 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2209 data->locate.slot_offset.constant += pretend_bytes;
2210 data->locate.offset.constant += pretend_bytes;
2211
2212 data->entry_parm = entry_parm;
2213 }
2214
2215 /* A subroutine of assign_parms. If there is actually space on the stack
2216 for this parm, count it in stack_args_size and return true. */
2217
2218 static bool
2219 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2220 struct assign_parm_data_one *data)
2221 {
2222 /* Trivially true if we've no incoming register. */
2223 if (data->entry_parm == NULL)
2224 ;
2225 /* Also true if we're partially in registers and partially not,
2226 since we've arranged to drop the entire argument on the stack. */
2227 else if (data->partial != 0)
2228 ;
2229 /* Also true if the target says that it's passed in both registers
2230 and on the stack. */
2231 else if (GET_CODE (data->entry_parm) == PARALLEL
2232 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2233 ;
2234 /* Also true if the target says that there's stack allocated for
2235 all register parameters. */
2236 else if (all->reg_parm_stack_space > 0)
2237 ;
2238 /* Otherwise, no, this parameter has no ABI defined stack slot. */
2239 else
2240 return false;
2241
2242 all->stack_args_size.constant += data->locate.size.constant;
2243 if (data->locate.size.var)
2244 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2245
2246 return true;
2247 }
2248
2249 /* A subroutine of assign_parms. Given that this parameter is allocated
2250 stack space by the ABI, find it. */
2251
2252 static void
2253 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2254 {
2255 rtx offset_rtx, stack_parm;
2256 unsigned int align, boundary;
2257
2258 /* If we're passing this arg using a reg, make its stack home the
2259 aligned stack slot. */
2260 if (data->entry_parm)
2261 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2262 else
2263 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2264
2265 stack_parm = crtl->args.internal_arg_pointer;
2266 if (offset_rtx != const0_rtx)
2267 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2268 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2269
2270 set_mem_attributes (stack_parm, parm, 1);
2271
2272 boundary = data->locate.boundary;
2273 align = BITS_PER_UNIT;
2274
2275 /* If we're padding upward, we know that the alignment of the slot
2276 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2277 intentionally forcing upward padding. Otherwise we have to come
2278 up with a guess at the alignment based on OFFSET_RTX. */
2279 if (data->locate.where_pad != downward || data->entry_parm)
2280 align = boundary;
2281 else if (GET_CODE (offset_rtx) == CONST_INT)
2282 {
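/* The slot's provable alignment is the largest power of two dividing
   both the byte offset (scaled to bits) and the declared boundary;
   the OR plus negate-and-AND below extracts exactly that lowest set
   bit. */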
2283 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2284 align = align & -align;
2285 }
2286 set_mem_align (stack_parm, align);
2287
2288 if (data->entry_parm)
2289 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2290
2291 data->stack_parm = stack_parm;
2292 }
2293
2294 /* A subroutine of assign_parms. Adjust DATA->ENTRY_PARM such that it's
2295 always valid and contiguous. */
2296
2297 static void
2298 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2299 {
2300 rtx entry_parm = data->entry_parm;
2301 rtx stack_parm = data->stack_parm;
2302
2303 /* If this parm was passed part in regs and part in memory, pretend it
2304 arrived entirely in memory by pushing the register-part onto the stack.
2305 In the special case of a DImode or DFmode that is split, we could put
2306 it together in a pseudoreg directly, but for now that's not worth
2307 bothering with. */
2308 if (data->partial != 0)
2309 {
2310 /* Handle calls that pass values in multiple non-contiguous
2311 locations. The Irix 6 ABI has examples of this. */
2312 if (GET_CODE (entry_parm) == PARALLEL)
2313 emit_group_store (validize_mem (stack_parm), entry_parm,
2314 data->passed_type,
2315 int_size_in_bytes (data->passed_type));
2316 else
2317 {
2318 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2319 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2320 data->partial / UNITS_PER_WORD);
2321 }
2322
2323 entry_parm = stack_parm;
2324 }
2325
2326 /* If we didn't decide this parm came in a register, by default it came
2327 on the stack. */
2328 else if (entry_parm == NULL)
2329 entry_parm = stack_parm;
2330
2331 /* When an argument is passed in multiple locations, we can't make use
2332 of this information, but we can save some copying if the whole argument
2333 is passed in a single register. */
2334 else if (GET_CODE (entry_parm) == PARALLEL
2335 && data->nominal_mode != BLKmode
2336 && data->passed_mode != BLKmode)
2337 {
2338 size_t i, len = XVECLEN (entry_parm, 0);
2339
2340 for (i = 0; i < len; i++)
2341 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2342 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2343 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2344 == data->passed_mode)
2345 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2346 {
2347 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2348 break;
2349 }
2350 }
2351
2352 data->entry_parm = entry_parm;
2353 }
2354
2355 /* A subroutine of assign_parms. Reconstitute any values which were
2356 passed in multiple registers and would fit in a single register. */
2357
2358 static void
2359 assign_parm_remove_parallels (struct assign_parm_data_one *data)
2360 {
2361 rtx entry_parm = data->entry_parm;
2362
2363 /* Convert the PARALLEL to a REG of the same mode as the parallel.
2364 This can be done with register operations rather than on the
2365 stack, even if we will store the reconstituted parameter on the
2366 stack later. */
2367 if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
2368 {
2369 rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
2370 emit_group_store (parmreg, entry_parm, NULL_TREE,
2371 GET_MODE_SIZE (GET_MODE (entry_parm)));
2372 entry_parm = parmreg;
2373 }
2374
2375 data->entry_parm = entry_parm;
2376 }
2377
2378 /* A subroutine of assign_parms. Adjust DATA->STACK_PARM such that it's
2379 always valid and properly aligned. */
2380
2381 static void
2382 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2383 {
2384 rtx stack_parm = data->stack_parm;
2385
2386 /* If we can't trust the parm stack slot to be aligned enough for its
2387 ultimate type, don't use that slot after entry. We'll make another
2388 stack slot, if we need one. */
2389 if (stack_parm
2390 && ((STRICT_ALIGNMENT
2391 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2392 || (data->nominal_type
2393 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2394 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2395 stack_parm = NULL;
2396
2397 /* If parm was passed in memory, and we need to convert it on entry,
2398 don't store it back in that same slot. */
2399 else if (data->entry_parm == stack_parm
2400 && data->nominal_mode != BLKmode
2401 && data->nominal_mode != data->passed_mode)
2402 stack_parm = NULL;
2403
2404 /* If stack protection is in effect for this function, don't leave any
2405 pointers in their passed stack slots. */
2406 else if (crtl->stack_protect_guard
2407 && (flag_stack_protect == 2
2408 || data->passed_pointer
2409 || POINTER_TYPE_P (data->nominal_type)))
2410 stack_parm = NULL;
2411
2412 data->stack_parm = stack_parm;
2413 }
2414
2415 /* A subroutine of assign_parms. Return true if the current parameter
2416 should be stored as a BLKmode in the current frame. */
2417
2418 static bool
2419 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2420 {
2421 if (data->nominal_mode == BLKmode)
2422 return true;
2423 if (GET_MODE (data->entry_parm) == BLKmode)
2424 return true;
2425
2426 #ifdef BLOCK_REG_PADDING
2427 /* Only assign_parm_setup_block knows how to deal with register arguments
2428 that are padded at the least significant end. */
2429 if (REG_P (data->entry_parm)
2430 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2431 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2432 == (BYTES_BIG_ENDIAN ? upward : downward)))
2433 return true;
2434 #endif
2435
2436 return false;
2437 }
2438
2439 /* A subroutine of assign_parms. Arrange for the parameter to be
2440 present and valid in DATA->STACK_PARM. */
2441
2442 static void
2443 assign_parm_setup_block (struct assign_parm_data_all *all,
2444 tree parm, struct assign_parm_data_one *data)
2445 {
2446 rtx entry_parm = data->entry_parm;
2447 rtx stack_parm = data->stack_parm;
2448 HOST_WIDE_INT size;
2449 HOST_WIDE_INT size_stored;
2450
2451 if (GET_CODE (entry_parm) == PARALLEL)
2452 entry_parm = emit_group_move_into_temps (entry_parm);
2453
2454 size = int_size_in_bytes (data->passed_type);
2455 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2456 if (stack_parm == 0)
2457 {
2458 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2459 stack_parm = assign_stack_local (BLKmode, size_stored,
2460 DECL_ALIGN (parm));
2461 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2462 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2463 set_mem_attributes (stack_parm, parm, 1);
2464 }
2465
2466 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2467 calls that pass values in multiple non-contiguous locations. */
2468 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2469 {
2470 rtx mem;
2471
2472 /* Note that we will be storing an integral number of words.
2473 So we have to be careful to ensure that we allocate an
2474 integral number of words. We do this above when we call
2475 assign_stack_local if space was not allocated in the argument
2476 list. If it was, this will not work if PARM_BOUNDARY is not
2477 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2478 if it becomes a problem. The exception is when BLKmode arrives
2479 with arguments not conforming to word_mode. */
2480
2481 if (data->stack_parm == 0)
2482 ;
2483 else if (GET_CODE (entry_parm) == PARALLEL)
2484 ;
2485 else
2486 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2487
2488 mem = validize_mem (stack_parm);
2489
2490 /* Handle values in multiple non-contiguous locations. */
2491 if (GET_CODE (entry_parm) == PARALLEL)
2492 {
2493 push_to_sequence2 (all->first_conversion_insn,
2494 all->last_conversion_insn);
2495 emit_group_store (mem, entry_parm, data->passed_type, size);
2496 all->first_conversion_insn = get_insns ();
2497 all->last_conversion_insn = get_last_insn ();
2498 end_sequence ();
2499 }
2500
2501 else if (size == 0)
2502 ;
2503
2504 /* If SIZE is that of a mode no bigger than a word, just use
2505 that mode's store operation. */
2506 else if (size <= UNITS_PER_WORD)
2507 {
2508 enum machine_mode mode
2509 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2510
2511 if (mode != BLKmode
2512 #ifdef BLOCK_REG_PADDING
2513 && (size == UNITS_PER_WORD
2514 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2515 != (BYTES_BIG_ENDIAN ? upward : downward)))
2516 #endif
2517 )
2518 {
2519 rtx reg;
2520
2521 /* We are really truncating a word_mode value containing
2522 SIZE bytes into a value of mode MODE. If such an
2523 operation requires no actual instructions, we can refer
2524 to the value directly in mode MODE, otherwise we must
2525 start with the register in word_mode and explicitly
2526 convert it. */
2527 if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
2528 reg = gen_rtx_REG (mode, REGNO (entry_parm));
2529 else
2530 {
2531 reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2532 reg = convert_to_mode (mode, copy_to_reg (reg), 1);
2533 }
2534 emit_move_insn (change_address (mem, mode, 0), reg);
2535 }
2536
2537 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2538 machine must be aligned to the left before storing
2539 to memory. Note that the previous test doesn't
2540 handle all cases (e.g. SIZE == 3). */
2541 else if (size != UNITS_PER_WORD
2542 #ifdef BLOCK_REG_PADDING
2543 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2544 == downward)
2545 #else
2546 && BYTES_BIG_ENDIAN
2547 #endif
2548 )
2549 {
2550 rtx tem, x;
2551 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2552 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2553
2554 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2555 build_int_cst (NULL_TREE, by),
2556 NULL_RTX, 1);
2557 tem = change_address (mem, word_mode, 0);
2558 emit_move_insn (tem, x);
2559 }
2560 else
2561 move_block_from_reg (REGNO (entry_parm), mem,
2562 size_stored / UNITS_PER_WORD);
2563 }
2564 else
2565 move_block_from_reg (REGNO (entry_parm), mem,
2566 size_stored / UNITS_PER_WORD);
2567 }
2568 else if (data->stack_parm == 0)
2569 {
2570 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2571 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2572 BLOCK_OP_NORMAL);
2573 all->first_conversion_insn = get_insns ();
2574 all->last_conversion_insn = get_last_insn ();
2575 end_sequence ();
2576 }
2577
2578 data->stack_parm = stack_parm;
2579 SET_DECL_RTL (parm, stack_parm);
2580 }
2581
2582 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2583 parameter. Get it there. Perform all ABI specified conversions. */
2584
2585 static void
2586 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2587 struct assign_parm_data_one *data)
2588 {
2589 rtx parmreg;
2590 enum machine_mode promoted_nominal_mode;
2591 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2592 bool did_conversion = false;
2593
2594 /* Store the parm in a pseudoregister during the function, but we may
2595 need to do it in a wider mode. */
2596
2597 /* This is not really promoting for a call. However, we need to be
2598 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2599 promoted_nominal_mode
2600 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2601
2602 parmreg = gen_reg_rtx (promoted_nominal_mode);
2603
2604 if (!DECL_ARTIFICIAL (parm))
2605 mark_user_reg (parmreg);
2606
2607 /* If this was an item that we received a pointer to,
2608 set DECL_RTL appropriately. */
2609 if (data->passed_pointer)
2610 {
2611 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2612 set_mem_attributes (x, parm, 1);
2613 SET_DECL_RTL (parm, x);
2614 }
2615 else
2616 SET_DECL_RTL (parm, parmreg);
2617
2618 assign_parm_remove_parallels (data);
2619
2620 /* Copy the value into the register. */
2621 if (data->nominal_mode != data->passed_mode
2622 || promoted_nominal_mode != data->promoted_mode)
2623 {
2624 int save_tree_used;
2625
2626 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2627 mode, by the caller. We now have to convert it to
2628 NOMINAL_MODE, if different. However, PARMREG may be in
2629 a different mode than NOMINAL_MODE if it is being stored
2630 promoted.
2631
2632 If ENTRY_PARM is a hard register, it might be in a register
2633 not valid for operating in its mode (e.g., an odd-numbered
2634 register for a DFmode). In that case, moves are the only
2635 thing valid, so we can't do a convert from there. This
2636 occurs when the calling sequence allows such misaligned
2637 usages.
2638
2639 In addition, the conversion may involve a call, which could
2640 clobber parameters which haven't been copied to pseudo
2641 registers yet. Therefore, we must first copy the parm to
2642 a pseudo reg here, and save the conversion until after all
2643 parameters have been moved. */
2644
2645 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2646
2647 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2648
2649 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2650 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2651
2652 if (GET_CODE (tempreg) == SUBREG
2653 && GET_MODE (tempreg) == data->nominal_mode
2654 && REG_P (SUBREG_REG (tempreg))
2655 && data->nominal_mode == data->passed_mode
2656 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2657 && GET_MODE_SIZE (GET_MODE (tempreg))
2658 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2659 {
2660 /* The argument is already sign/zero extended, so note it
2661 into the subreg. */
2662 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2663 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2664 }
2665
2666 /* TREE_USED gets set erroneously during expand_assignment. */
2667 save_tree_used = TREE_USED (parm);
2668 expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
2669 TREE_USED (parm) = save_tree_used;
2670 all->first_conversion_insn = get_insns ();
2671 all->last_conversion_insn = get_last_insn ();
2672 end_sequence ();
2673
2674 did_conversion = true;
2675 }
2676 else
2677 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2678
2679 /* If we were passed a pointer but the actual value can safely live
2680 in a register, put it in one. */
2681 if (data->passed_pointer
2682 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2683 /* If by-reference argument was promoted, demote it. */
2684 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2685 || use_register_for_decl (parm)))
2686 {
2687 /* We can't use nominal_mode, because it will have been set to
2688 Pmode above. We must use the actual mode of the parm. */
2689 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2690 mark_user_reg (parmreg);
2691
2692 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2693 {
2694 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2695 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2696
2697 push_to_sequence2 (all->first_conversion_insn,
2698 all->last_conversion_insn);
2699 emit_move_insn (tempreg, DECL_RTL (parm));
2700 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2701 emit_move_insn (parmreg, tempreg);
2702 all->first_conversion_insn = get_insns ();
2703 all->last_conversion_insn = get_last_insn ();
2704 end_sequence ();
2705
2706 did_conversion = true;
2707 }
2708 else
2709 emit_move_insn (parmreg, DECL_RTL (parm));
2710
2711 SET_DECL_RTL (parm, parmreg);
2712
2713 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2714 now the parm. */
2715 data->stack_parm = NULL;
2716 }
2717
2718 /* Mark the register as eliminable if we did no conversion and it was
2719 copied from memory at a fixed offset, and the arg pointer was not
2720 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2721 offset formed an invalid address, such memory-equivalences as we
2722 make here would screw up life analysis for it. */
2723 if (data->nominal_mode == data->passed_mode
2724 && !did_conversion
2725 && data->stack_parm != 0
2726 && MEM_P (data->stack_parm)
2727 && data->locate.offset.var == 0
2728 && reg_mentioned_p (virtual_incoming_args_rtx,
2729 XEXP (data->stack_parm, 0)))
2730 {
2731 rtx linsn = get_last_insn ();
2732 rtx sinsn, set;
2733
2734 /* Mark complex types separately. */
2735 if (GET_CODE (parmreg) == CONCAT)
2736 {
2737 enum machine_mode submode
2738 = GET_MODE_INNER (GET_MODE (parmreg));
2739 int regnor = REGNO (XEXP (parmreg, 0));
2740 int regnoi = REGNO (XEXP (parmreg, 1));
2741 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2742 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2743 GET_MODE_SIZE (submode));
2744
2745 /* Scan backwards for the set of the real and
2746 imaginary parts. */
2747 for (sinsn = linsn; sinsn != 0;
2748 sinsn = prev_nonnote_insn (sinsn))
2749 {
2750 set = single_set (sinsn);
2751 if (set == 0)
2752 continue;
2753
2754 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2755 set_unique_reg_note (sinsn, REG_EQUIV, stacki);
2756 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2757 set_unique_reg_note (sinsn, REG_EQUIV, stackr);
2758 }
2759 }
2760 else if ((set = single_set (linsn)) != 0
2761 && SET_DEST (set) == parmreg)
2762 set_unique_reg_note (linsn, REG_EQUIV, data->stack_parm);
2763 }
2764
2765 /* For pointer data type, suggest pointer register. */
2766 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2767 mark_reg_pointer (parmreg,
2768 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2769 }
2770
2771 /* A subroutine of assign_parms. Allocate stack space to hold the current
2772 parameter. Get it there. Perform all ABI specified conversions. */
2773
2774 static void
2775 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2776 struct assign_parm_data_one *data)
2777 {
2778 /* Value must be stored in the stack slot STACK_PARM during function
2779 execution. */
2780 bool to_conversion = false;
2781
2782 assign_parm_remove_parallels (data);
2783
2784 if (data->promoted_mode != data->nominal_mode)
2785 {
2786 /* Conversion is required. */
2787 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2788
2789 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2790
2791 push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
2792 to_conversion = true;
2793
2794 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2795 TYPE_UNSIGNED (TREE_TYPE (parm)));
2796
2797 if (data->stack_parm)
2798 /* ??? This may need a big-endian conversion on sparc64. */
2799 data->stack_parm
2800 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2801 }
2802
2803 if (data->entry_parm != data->stack_parm)
2804 {
2805 rtx src, dest;
2806
2807 if (data->stack_parm == 0)
2808 {
2809 data->stack_parm
2810 = assign_stack_local (GET_MODE (data->entry_parm),
2811 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2812 TYPE_ALIGN (data->passed_type));
2813 set_mem_attributes (data->stack_parm, parm, 1);
2814 }
2815
2816 dest = validize_mem (data->stack_parm);
2817 src = validize_mem (data->entry_parm);
2818
2819 if (MEM_P (src))
2820 {
2821 /* Use a block move to handle potentially misaligned entry_parm. */
2822 if (!to_conversion)
2823 push_to_sequence2 (all->first_conversion_insn,
2824 all->last_conversion_insn);
2825 to_conversion = true;
2826
2827 emit_block_move (dest, src,
2828 GEN_INT (int_size_in_bytes (data->passed_type)),
2829 BLOCK_OP_NORMAL);
2830 }
2831 else
2832 emit_move_insn (dest, src);
2833 }
2834
2835 if (to_conversion)
2836 {
2837 all->first_conversion_insn = get_insns ();
2838 all->last_conversion_insn = get_last_insn ();
2839 end_sequence ();
2840 }
2841
2842 SET_DECL_RTL (parm, data->stack_parm);
2843 }
2844
2845 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2846 undo the frobbing that we did in assign_parms_augmented_arg_list. */
2847
2848 static void
2849 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2850 {
2851 tree parm;
2852 tree orig_fnargs = all->orig_fnargs;
2853
2854 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2855 {
2856 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2857 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2858 {
2859 rtx tmp, real, imag;
2860 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
2861
2862 real = DECL_RTL (fnargs);
2863 imag = DECL_RTL (TREE_CHAIN (fnargs));
2864 if (inner != GET_MODE (real))
2865 {
2866 real = gen_lowpart_SUBREG (inner, real);
2867 imag = gen_lowpart_SUBREG (inner, imag);
2868 }
2869
2870 if (TREE_ADDRESSABLE (parm))
2871 {
2872 rtx rmem, imem;
2873 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2874
2875 /* split_complex_arg put the real and imag parts in
2876 pseudos. Move them to memory. */
2877 tmp = assign_stack_local (DECL_MODE (parm), size,
2878 TYPE_ALIGN (TREE_TYPE (parm)));
2879 set_mem_attributes (tmp, parm, 1);
2880 rmem = adjust_address_nv (tmp, inner, 0);
2881 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2882 push_to_sequence2 (all->first_conversion_insn,
2883 all->last_conversion_insn);
2884 emit_move_insn (rmem, real);
2885 emit_move_insn (imem, imag);
2886 all->first_conversion_insn = get_insns ();
2887 all->last_conversion_insn = get_last_insn ();
2888 end_sequence ();
2889 }
2890 else
2891 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2892 SET_DECL_RTL (parm, tmp);
2893
2894 real = DECL_INCOMING_RTL (fnargs);
2895 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2896 if (inner != GET_MODE (real))
2897 {
2898 real = gen_lowpart_SUBREG (inner, real);
2899 imag = gen_lowpart_SUBREG (inner, imag);
2900 }
2901 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2902 set_decl_incoming_rtl (parm, tmp, false);
2903 fnargs = TREE_CHAIN (fnargs);
2904 }
2905 else
2906 {
2907 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2908 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false);
2909
2910 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2911 instead of the copy of decl, i.e. FNARGS. */
2912 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2913 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2914 }
2915
2916 fnargs = TREE_CHAIN (fnargs);
2917 }
2918 }
2919
2920 /* Assign RTL expressions to the function's parameters. This may involve
2921 copying them into registers and using those registers as the DECL_RTL. */
2922
2923 static void
2924 assign_parms (tree fndecl)
2925 {
2926 struct assign_parm_data_all all;
2927 tree fnargs, parm;
2928
2929 crtl->args.internal_arg_pointer
2930 = targetm.calls.internal_arg_pointer ();
2931
2932 assign_parms_initialize_all (&all);
2933 fnargs = assign_parms_augmented_arg_list (&all);
2934
2935 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
2936 {
2937 struct assign_parm_data_one data;
2938
2939 /* Extract the type of PARM; adjust it according to ABI. */
2940 assign_parm_find_data_types (&all, parm, &data);
2941
2942 /* Early out for errors and void parameters. */
2943 if (data.passed_mode == VOIDmode)
2944 {
2945 SET_DECL_RTL (parm, const0_rtx);
2946 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
2947 continue;
2948 }
2949
2950 if (cfun->stdarg && !TREE_CHAIN (parm))
2951 assign_parms_setup_varargs (&all, &data, false);
2952
2953 /* Find out where the parameter arrives in this function. */
2954 assign_parm_find_entry_rtl (&all, &data);
2955
2956 /* Find out where stack space for this parameter might be. */
2957 if (assign_parm_is_stack_parm (&all, &data))
2958 {
2959 assign_parm_find_stack_rtl (parm, &data);
2960 assign_parm_adjust_entry_rtl (&data);
2961 }
2962
2963 /* Record permanently how this parm was passed. */
2964 set_decl_incoming_rtl (parm, data.entry_parm, data.passed_pointer);
2965
2966 /* Update info on where next arg arrives in registers. */
2967 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
2968 data.passed_type, data.named_arg);
2969
2970 assign_parm_adjust_stack_rtl (&data);
2971
2972 if (assign_parm_setup_block_p (&data))
2973 assign_parm_setup_block (&all, parm, &data);
2974 else if (data.passed_pointer || use_register_for_decl (parm))
2975 assign_parm_setup_reg (&all, parm, &data);
2976 else
2977 assign_parm_setup_stack (&all, parm, &data);
2978 }
2979
2980 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
2981 assign_parms_unsplit_complex (&all, fnargs);
2982
2983 /* Output all parameter conversion instructions (possibly including calls)
2984 now that all parameters have been copied out of hard registers. */
2985 emit_insn (all.first_conversion_insn);
2986
2987 /* If we are receiving a struct value address as the first argument, set up
2988 the RTL for the function result. As this might require code to convert
2989 the transmitted address to Pmode, we do this here to ensure that possible
2990 preliminary conversions of the address have been emitted already. */
2991 if (all.function_result_decl)
2992 {
2993 tree result = DECL_RESULT (current_function_decl);
2994 rtx addr = DECL_RTL (all.function_result_decl);
2995 rtx x;
2996
2997 if (DECL_BY_REFERENCE (result))
2998 x = addr;
2999 else
3000 {
3001 addr = convert_memory_address (Pmode, addr);
3002 x = gen_rtx_MEM (DECL_MODE (result), addr);
3003 set_mem_attributes (x, result, 1);
3004 }
3005 SET_DECL_RTL (result, x);
3006 }
3007
3008 /* We have aligned all the args, so add space for the pretend args. */
3009 crtl->args.pretend_args_size = all.pretend_args_size;
3010 all.stack_args_size.constant += all.extra_pretend_bytes;
3011 crtl->args.size = all.stack_args_size.constant;
3012
3013 /* Adjust function incoming argument size for alignment and
3014 minimum length. */
3015
3016 #ifdef REG_PARM_STACK_SPACE
3017 crtl->args.size = MAX (crtl->args.size,
3018 REG_PARM_STACK_SPACE (fndecl));
3019 #endif
3020
3021 crtl->args.size = CEIL_ROUND (crtl->args.size,
3022 PARM_BOUNDARY / BITS_PER_UNIT);
3023
3024 #ifdef ARGS_GROW_DOWNWARD
3025 crtl->args.arg_offset_rtx
3026 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3027 : expand_expr (size_diffop (all.stack_args_size.var,
3028 size_int (-all.stack_args_size.constant)),
3029 NULL_RTX, VOIDmode, 0));
3030 #else
3031 crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3032 #endif
3033
3034 /* See how many bytes, if any, of its args a function should try to pop
3035 on return. */
3036
3037 crtl->args.pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3038 crtl->args.size);
3039
3040 /* For a stdarg.h function, save info about
3041 regs and stack space used by the named args. */
3042
3043 crtl->args.info = all.args_so_far;
3044
3045 /* Set the rtx used for the function return value. Put this in its
3046 own variable so any optimizers that need this information don't have
3047 to include tree.h. Do this here so it gets done when an inlined
3048 function gets output. */
3049
3050 crtl->return_rtx
3051 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3052 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3053
3054 /* If scalar return value was computed in a pseudo-reg, or was a named
3055 return value that got dumped to the stack, copy that to the hard
3056 return register. */
3057 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3058 {
3059 tree decl_result = DECL_RESULT (fndecl);
3060 rtx decl_rtl = DECL_RTL (decl_result);
3061
3062 if (REG_P (decl_rtl)
3063 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3064 : DECL_REGISTER (decl_result))
3065 {
3066 rtx real_decl_rtl;
3067
3068 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3069 fndecl, true);
3070 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3071 /* The delay slot scheduler assumes that crtl->return_rtx
3072 holds the hard register containing the return value, not a
3073 temporary pseudo. */
3074 crtl->return_rtx = real_decl_rtl;
3075 }
3076 }
3077 }
3078
3079 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3080 For all seen types, gimplify their sizes. */
3081
3082 static tree
3083 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3084 {
3085 tree t = *tp;
3086
3087 *walk_subtrees = 0;
3088 if (TYPE_P (t))
3089 {
3090 if (POINTER_TYPE_P (t))
3091 *walk_subtrees = 1;
3092 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3093 && !TYPE_SIZES_GIMPLIFIED (t))
3094 {
3095 gimplify_type_sizes (t, (tree *) data);
3096 *walk_subtrees = 1;
3097 }
3098 }
3099
3100 return NULL;
3101 }
3102
3103 /* Gimplify the parameter list for current_function_decl. This involves
3104 evaluating SAVE_EXPRs of variable-sized parameters and generating code
3105 to implement callee-copied reference parameters. Returns a list of
3106 statements to add to the beginning of the function, or NULL if nothing
3107 to do. */
3108
3109 tree
3110 gimplify_parameters (void)
3111 {
3112 struct assign_parm_data_all all;
3113 tree fnargs, parm, stmts = NULL;
3114
3115 assign_parms_initialize_all (&all);
3116 fnargs = assign_parms_augmented_arg_list (&all);
3117
3118 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3119 {
3120 struct assign_parm_data_one data;
3121
3122 /* Extract the type of PARM; adjust it according to ABI. */
3123 assign_parm_find_data_types (&all, parm, &data);
3124
3125 /* Early out for errors and void parameters. */
3126 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3127 continue;
3128
3129 /* Update info on where next arg arrives in registers. */
3130 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3131 data.passed_type, data.named_arg);
3132
3133 /* ??? Once upon a time variable_size stuffed parameter list
3134 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3135 turned out to be less than manageable in the gimple world.
3136 Now we have to hunt them down ourselves. */
3137 walk_tree_without_duplicates (&data.passed_type,
3138 gimplify_parm_type, &stmts);
3139
3140 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3141 {
3142 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3143 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3144 }
3145
3146 if (data.passed_pointer)
3147 {
3148 tree type = TREE_TYPE (data.passed_type);
3149 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3150 type, data.named_arg))
3151 {
3152 tree local, t;
3153
3154 /* For constant sized objects, this is trivial; for
3155 variable-sized objects, we have to play games. */
3156 if (TREE_CONSTANT (DECL_SIZE (parm)))
3157 {
3158 local = create_tmp_var (type, get_name (parm));
3159 DECL_IGNORED_P (local) = 0;
3160 }
3161 else
3162 {
3163 tree ptr_type, addr;
3164
3165 ptr_type = build_pointer_type (type);
3166 addr = create_tmp_var (ptr_type, get_name (parm));
3167 DECL_IGNORED_P (addr) = 0;
3168 local = build_fold_indirect_ref (addr);
3169
3170 t = built_in_decls[BUILT_IN_ALLOCA];
3171 t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
3172 t = fold_convert (ptr_type, t);
3173 t = build_gimple_modify_stmt (addr, t);
3174 gimplify_and_add (t, &stmts);
3175 }
3176
3177 t = build_gimple_modify_stmt (local, parm);
3178 gimplify_and_add (t, &stmts);
3179
3180 SET_DECL_VALUE_EXPR (parm, local);
3181 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3182 }
3183 }
3184 }
3185
3186 return stmts;
3187 }
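
/* For a callee-copied reference parameter, the statements generated
   above amount to (a sketch, with hypothetical names):

       local = parm;                               constant-size case

       addr = __builtin_alloca (size_of_parm);     variable-size case
       *addr = parm;

   after which DECL_VALUE_EXPR makes every later use of PARM refer to
   the fresh copy, so the callee can modify it freely. */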
3188 \f
3189 /* Compute the size and offset from the start of the stacked arguments for a
3190 parm passed in mode PASSED_MODE and with type TYPE.
3191
3192 INITIAL_OFFSET_PTR points to the current offset into the stacked
3193 arguments.
3194
3195 The starting offset and size for this parm are returned in
3196 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3197 nonzero, the offset is that of the stack slot, which is returned in
3198 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3199 padding required from the initial offset ptr to the stack slot.
3200
3201 IN_REGS is nonzero if the argument will be passed in registers. It will
3202 never be set if REG_PARM_STACK_SPACE is not defined.
3203
3204 FNDECL is the function in which the argument was defined.
3205
3206 There are two types of rounding that are done. The first, controlled by
3207 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3208 list to be aligned to the specific boundary (in bits). This rounding
3209 affects the initial and starting offsets, but not the argument size.
3210
3211 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3212 optionally rounds the size of the parm to PARM_BOUNDARY. The
3213 initial offset is not affected by this rounding, while the size always
3214 is and the starting offset may be. */
3215
3216 /* LOCATE->OFFSET will be negative in the ARGS_GROW_DOWNWARD case;
3217 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3218 callers pass in the total size of args so far as
3219 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
3220
3221 void
3222 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3223 int partial, tree fndecl ATTRIBUTE_UNUSED,
3224 struct args_size *initial_offset_ptr,
3225 struct locate_and_pad_arg_data *locate)
3226 {
3227 tree sizetree;
3228 enum direction where_pad;
3229 unsigned int boundary;
3230 int reg_parm_stack_space = 0;
3231 int part_size_in_regs;
3232
3233 #ifdef REG_PARM_STACK_SPACE
3234 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3235
3236 /* If we have found a stack parm before we reach the end of the
3237 area reserved for registers, skip that area. */
3238 if (! in_regs)
3239 {
3240 if (reg_parm_stack_space > 0)
3241 {
3242 if (initial_offset_ptr->var)
3243 {
3244 initial_offset_ptr->var
3245 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3246 ssize_int (reg_parm_stack_space));
3247 initial_offset_ptr->constant = 0;
3248 }
3249 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3250 initial_offset_ptr->constant = reg_parm_stack_space;
3251 }
3252 }
3253 #endif /* REG_PARM_STACK_SPACE */
3254
3255 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3256
3257 sizetree
3258 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3259 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3260 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3261 if (boundary > PREFERRED_STACK_BOUNDARY)
3262 boundary = PREFERRED_STACK_BOUNDARY;
3263 locate->where_pad = where_pad;
3264 locate->boundary = boundary;
3265
3266 /* Remember if the outgoing parameter requires extra alignment on the
3267 calling function side. */
3268 if (crtl->stack_alignment_needed < boundary)
3269 crtl->stack_alignment_needed = boundary;
3270
3271 #ifdef ARGS_GROW_DOWNWARD
3272 locate->slot_offset.constant = -initial_offset_ptr->constant;
3273 if (initial_offset_ptr->var)
3274 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3275 initial_offset_ptr->var);
3276
3277 {
3278 tree s2 = sizetree;
3279 if (where_pad != none
3280 && (!host_integerp (sizetree, 1)
3281 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3282 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3283 SUB_PARM_SIZE (locate->slot_offset, s2);
3284 }
3285
3286 locate->slot_offset.constant += part_size_in_regs;
3287
3288 if (!in_regs
3289 #ifdef REG_PARM_STACK_SPACE
3290 || REG_PARM_STACK_SPACE (fndecl) > 0
3291 #endif
3292 )
3293 pad_to_arg_alignment (&locate->slot_offset, boundary,
3294 &locate->alignment_pad);
3295
3296 locate->size.constant = (-initial_offset_ptr->constant
3297 - locate->slot_offset.constant);
3298 if (initial_offset_ptr->var)
3299 locate->size.var = size_binop (MINUS_EXPR,
3300 size_binop (MINUS_EXPR,
3301 ssize_int (0),
3302 initial_offset_ptr->var),
3303 locate->slot_offset.var);
3304
3305 /* Pad_below needs the pre-rounded size to know how much to pad
3306 below. */
3307 locate->offset = locate->slot_offset;
3308 if (where_pad == downward)
3309 pad_below (&locate->offset, passed_mode, sizetree);
3310
3311 #else /* !ARGS_GROW_DOWNWARD */
3312 if (!in_regs
3313 #ifdef REG_PARM_STACK_SPACE
3314 || REG_PARM_STACK_SPACE (fndecl) > 0
3315 #endif
3316 )
3317 pad_to_arg_alignment (initial_offset_ptr, boundary,
3318 &locate->alignment_pad);
3319 locate->slot_offset = *initial_offset_ptr;
3320
3321 #ifdef PUSH_ROUNDING
3322 if (passed_mode != BLKmode)
3323 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3324 #endif
3325
3326 /* Pad_below needs the pre-rounded size to know how much to pad below
3327 so this must be done before rounding up. */
3328 locate->offset = locate->slot_offset;
3329 if (where_pad == downward)
3330 pad_below (&locate->offset, passed_mode, sizetree);
3331
3332 if (where_pad != none
3333 && (!host_integerp (sizetree, 1)
3334 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3335 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3336
3337 ADD_PARM_SIZE (locate->size, sizetree);
3338
3339 locate->size.constant -= part_size_in_regs;
3340 #endif /* ARGS_GROW_DOWNWARD */
3341 }
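
/* A worked example (upward-growing args, no REG_PARM_STACK_SPACE):
   with INITIAL_OFFSET_PTR->constant == 20, a 64-bit
   FUNCTION_ARG_BOUNDARY, and a 10-byte BLKmode argument padded upward
   with PARM_BOUNDARY == 32, pad_to_arg_alignment bumps the offset to
   24, so LOCATE->SLOT_OFFSET and LOCATE->OFFSET are both 24, and the
   size is rounded from 10 up to 12 bytes. */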
3342
3343 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3344 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
3345
3346 static void
3347 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3348 struct args_size *alignment_pad)
3349 {
3350 tree save_var = NULL_TREE;
3351 HOST_WIDE_INT save_constant = 0;
3352 int boundary_in_bytes = boundary / BITS_PER_UNIT;
3353 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3354
3355 #ifdef SPARC_STACK_BOUNDARY_HACK
3356 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3357 the real alignment of %sp. However, when it does this, the
3358 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3359 if (SPARC_STACK_BOUNDARY_HACK)
3360 sp_offset = 0;
3361 #endif
3362
3363 if (boundary > PARM_BOUNDARY)
3364 {
3365 save_var = offset_ptr->var;
3366 save_constant = offset_ptr->constant;
3367 }
3368
3369 alignment_pad->var = NULL_TREE;
3370 alignment_pad->constant = 0;
3371
3372 if (boundary > BITS_PER_UNIT)
3373 {
3374 if (offset_ptr->var)
3375 {
3376 tree sp_offset_tree = ssize_int (sp_offset);
3377 tree offset = size_binop (PLUS_EXPR,
3378 ARGS_SIZE_TREE (*offset_ptr),
3379 sp_offset_tree);
3380 #ifdef ARGS_GROW_DOWNWARD
3381 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3382 #else
3383 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3384 #endif
3385
3386 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3387 /* ARGS_SIZE_TREE includes constant term. */
3388 offset_ptr->constant = 0;
3389 if (boundary > PARM_BOUNDARY)
3390 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
3391 save_var);
3392 }
3393 else
3394 {
3395 offset_ptr->constant = -sp_offset +
3396 #ifdef ARGS_GROW_DOWNWARD
3397 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3398 #else
3399 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3400 #endif
3401 if (boundary > PARM_BOUNDARY)
3402 alignment_pad->constant = offset_ptr->constant - save_constant;
3403 }
3404 }
3405 }
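
/* E.g. with BOUNDARY == 64 bits and STACK_POINTER_OFFSET == 4, an
   incoming constant offset of 18 becomes -4 + CEIL_ROUND (22, 8) == 20;
   it is sp + STACK_POINTER_OFFSET, not sp itself, that is assumed to be
   aligned to the boundary. */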
3406
3407 static void
3408 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3409 {
3410 if (passed_mode != BLKmode)
3411 {
3412 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3413 offset_ptr->constant
3414 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3415 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3416 - GET_MODE_SIZE (passed_mode));
3417 }
3418 else
3419 {
3420 if (TREE_CODE (sizetree) != INTEGER_CST
3421 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3422 {
3423 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
3424 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3425 /* Add it in. */
3426 ADD_PARM_SIZE (*offset_ptr, s2);
3427 SUB_PARM_SIZE (*offset_ptr, sizetree);
3428 }
3429 }
3430 }
3431 \f
3432
3433 /* True if register REGNO was alive at a place where `setjmp' was
3434 called and was set more than once or is an argument. Such regs may
3435 be clobbered by `longjmp'. */
3436
3437 static bool
3438 regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
3439 {
3440 /* There appear to be cases where some local vars never reach the
3441 backend but have bogus regnos. */
3442 if (regno >= max_reg_num ())
3443 return false;
3444
3445 return ((REG_N_SETS (regno) > 1
3446 || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
3447 && REGNO_REG_SET_P (setjmp_crosses, regno));
3448 }
3449
3450 /* Walk the tree of blocks describing the binding levels within a
3451 function and warn about variables that might be killed by setjmp or
3452 vfork. This is done after flow analysis and before register
3453 allocation, since register allocation replaces the pseudo-regs with
3454 hard regs. */
3455
3456 static void
3457 setjmp_vars_warning (bitmap setjmp_crosses, tree block)
3458 {
3459 tree decl, sub;
3460
3461 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3462 {
3463 if (TREE_CODE (decl) == VAR_DECL
3464 && DECL_RTL_SET_P (decl)
3465 && REG_P (DECL_RTL (decl))
3466 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3467 warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
3468 " %<longjmp%> or %<vfork%>", decl);
3469 }
3470
3471 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
3472 setjmp_vars_warning (setjmp_crosses, sub);
3473 }
3474
3475 /* Do the appropriate part of setjmp_vars_warning
3476 but for arguments instead of local variables. */
3477
3478 static void
3479 setjmp_args_warning (bitmap setjmp_crosses)
3480 {
3481 tree decl;
3482 for (decl = DECL_ARGUMENTS (current_function_decl);
3483 decl; decl = TREE_CHAIN (decl))
3484 if (DECL_RTL (decl) != 0
3485 && REG_P (DECL_RTL (decl))
3486 && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
3487 warning (OPT_Wclobbered,
3488 "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3489 decl);
3490 }
3491
3492 /* Generate warning messages for variables live across setjmp. */
3493
3494 void
3495 generate_setjmp_warnings (void)
3496 {
3497 bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
3498
3499 if (n_basic_blocks == NUM_FIXED_BLOCKS
3500 || bitmap_empty_p (setjmp_crosses))
3501 return;
3502
3503 setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
3504 setjmp_args_warning (setjmp_crosses);
3505 }
3506
3507 \f
3508 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3509 and create duplicate blocks. */
3510 /* ??? Need an option to either create block fragments or to create
3511 abstract origin duplicates of a source block. It really depends
3512 on what optimization has been performed. */
3513
3514 void
3515 reorder_blocks (void)
3516 {
3517 tree block = DECL_INITIAL (current_function_decl);
3518 VEC(tree,heap) *block_stack;
3519
3520 if (block == NULL_TREE)
3521 return;
3522
3523 block_stack = VEC_alloc (tree, heap, 10);
3524
3525 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3526 clear_block_marks (block);
3527
3528 /* Prune the old trees away, so that they don't get in the way. */
3529 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3530 BLOCK_CHAIN (block) = NULL_TREE;
3531
3532 /* Recreate the block tree from the note nesting. */
3533 reorder_blocks_1 (get_insns (), block, &block_stack);
3534 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3535
3536 VEC_free (tree, heap, block_stack);
3537 }
3538
3539 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3540
3541 void
3542 clear_block_marks (tree block)
3543 {
3544 while (block)
3545 {
3546 TREE_ASM_WRITTEN (block) = 0;
3547 clear_block_marks (BLOCK_SUBBLOCKS (block));
3548 block = BLOCK_CHAIN (block);
3549 }
3550 }
3551
3552 static void
3553 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3554 {
3555 rtx insn;
3556
3557 for (insn = insns; insn; insn = NEXT_INSN (insn))
3558 {
3559 if (NOTE_P (insn))
3560 {
3561 if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
3562 {
3563 tree block = NOTE_BLOCK (insn);
3564 tree origin;
3565
3566 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3567 ? BLOCK_FRAGMENT_ORIGIN (block)
3568 : block);
3569
3570 /* If we have seen this block before, that means it now
3571 spans multiple address regions. Create a new fragment. */
3572 if (TREE_ASM_WRITTEN (block))
3573 {
3574 tree new_block = copy_node (block);
3575
3576 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3577 BLOCK_FRAGMENT_CHAIN (new_block)
3578 = BLOCK_FRAGMENT_CHAIN (origin);
3579 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3580
3581 NOTE_BLOCK (insn) = new_block;
3582 block = new_block;
3583 }
3584
3585 BLOCK_SUBBLOCKS (block) = 0;
3586 TREE_ASM_WRITTEN (block) = 1;
3587 /* When there's only one block for the entire function,
3588 current_block == block and we mustn't do this, it
3589 will cause infinite recursion. */
3590 if (block != current_block)
3591 {
3592 if (block != origin)
3593 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
3594
3595 BLOCK_SUPERCONTEXT (block) = current_block;
3596 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3597 BLOCK_SUBBLOCKS (current_block) = block;
3598 current_block = origin;
3599 }
3600 VEC_safe_push (tree, heap, *p_block_stack, block);
3601 }
3602 else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
3603 {
3604 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3605 BLOCK_SUBBLOCKS (current_block)
3606 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3607 current_block = BLOCK_SUPERCONTEXT (current_block);
3608 }
3609 }
3610 }
3611 }
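/* Note the shape the fragment handling above produces: each new
   fragment is linked at the head of the origin's fragment chain, so
   after a block B has been seen three times the chain is
   B -> frag2 -> frag1, and each fragment's BLOCK_FRAGMENT_ORIGIN
   points back at B.  */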
3612
3613 /* Reverse the order of elements in the chain T of blocks,
3614 and return the new head of the chain (old last element). */
3615
3616 tree
3617 blocks_nreverse (tree t)
3618 {
3619 tree prev = 0, decl, next;
3620 for (decl = t; decl; decl = next)
3621 {
3622 next = BLOCK_CHAIN (decl);
3623 BLOCK_CHAIN (decl) = prev;
3624 prev = decl;
3625 }
3626 return prev;
3627 }
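/* E.g. a chain A -> B -> C comes back as C -> B -> A; the BLOCK
   nodes themselves are reused, only their BLOCK_CHAIN links are
   rewritten in place.  */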
3628
3629 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3630 non-NULL, list them all into VECTOR, in a depth-first preorder
3631 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
3632 blocks. */
3633
3634 static int
3635 all_blocks (tree block, tree *vector)
3636 {
3637 int n_blocks = 0;
3638
3639 while (block)
3640 {
3641 TREE_ASM_WRITTEN (block) = 0;
3642
3643 /* Record this block. */
3644 if (vector)
3645 vector[n_blocks] = block;
3646
3647 ++n_blocks;
3648
3649 /* Record the subblocks, and their subblocks... */
3650 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3651 vector ? vector + n_blocks : 0);
3652 block = BLOCK_CHAIN (block);
3653 }
3654
3655 return n_blocks;
3656 }
3657
3658 /* Return a vector containing all the blocks rooted at BLOCK. The
3659 number of elements in the vector is stored in N_BLOCKS_P. The
3660 vector is dynamically allocated; it is the caller's responsibility
3661 to call `free' on the pointer returned. */
3662
3663 static tree *
3664 get_block_vector (tree block, int *n_blocks_p)
3665 {
3666 tree *block_vector;
3667
3668 *n_blocks_p = all_blocks (block, NULL);
3669 block_vector = XNEWVEC (tree, *n_blocks_p);
3670 all_blocks (block, block_vector);
3671
3672 return block_vector;
3673 }
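/* A typical caller (a sketch, not taken verbatim from this file;
   FNDECL and examine_block are placeholders) walks the vector and
   releases it when done:

     int i, n_blocks;
     tree *blocks = get_block_vector (DECL_INITIAL (fndecl), &n_blocks);
     for (i = 0; i < n_blocks; i++)
       examine_block (blocks[i]);
     free (blocks);  */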
3674
3675 static GTY(()) int next_block_index = 2;
3676
3677 /* Set BLOCK_NUMBER for all the blocks in FN. */
3678
3679 void
3680 number_blocks (tree fn)
3681 {
3682 int i;
3683 int n_blocks;
3684 tree *block_vector;
3685
3686 /* For SDB and XCOFF debugging output, we start numbering the blocks
3687 from 1 within each function, rather than keeping a running
3688 count. */
3689 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3690 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3691 next_block_index = 1;
3692 #endif
3693
3694 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3695
3696 /* The top-level BLOCK isn't numbered at all. */
3697 for (i = 1; i < n_blocks; ++i)
3698 /* Numbering starts at next_block_index, normally two. */
3699 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3700
3701 free (block_vector);
3702
3704 }
3705
3706 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3707
3708 tree
3709 debug_find_var_in_block_tree (tree var, tree block)
3710 {
3711 tree t;
3712
3713 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3714 if (t == var)
3715 return block;
3716
3717 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3718 {
3719 tree ret = debug_find_var_in_block_tree (var, t);
3720 if (ret)
3721 return ret;
3722 }
3723
3724 return NULL_TREE;
3725 }
3726 \f
3727 /* Keep track of whether we're in a dummy function context. If we are,
3728 we don't want to invoke the set_current_function hook, because we'll
3729 get into trouble if the hook calls target_reinit () recursively or
3730 when the initial initialization is not yet complete. */
3731
3732 static bool in_dummy_function;
3733
3734 /* Invoke the target hook when setting cfun. */
3735
3736 static void
3737 invoke_set_current_function_hook (tree fndecl)
3738 {
3739 if (!in_dummy_function)
3740 targetm.set_current_function (fndecl);
3741 }
3742
3743 /* cfun should never be set directly; use this function. */
3744
3745 void
3746 set_cfun (struct function *new_cfun)
3747 {
3748 if (cfun != new_cfun)
3749 {
3750 cfun = new_cfun;
3751 invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
3752 }
3753 }
3754
3755 /* Keep track of the cfun stack. */
3756
3757 typedef struct function *function_p;
3758
3759 DEF_VEC_P(function_p);
3760 DEF_VEC_ALLOC_P(function_p,heap);
3761
3762 /* Initialized with NOGC, making this poisonous to the garbage collector. */
3763
3764 static VEC(function_p,heap) *cfun_stack;
3765
3766 /* Push the current cfun onto the stack, and set cfun to new_cfun. */
3767
3768 void
3769 push_cfun (struct function *new_cfun)
3770 {
3771 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3772 set_cfun (new_cfun);
3773 }
3774
3775 /* Pop cfun from the stack. */
3776
3777 void
3778 pop_cfun (void)
3779 {
3780 struct function *new_cfun = VEC_pop (function_p, cfun_stack);
3781 set_cfun (new_cfun);
3782 }
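/* The usual pattern for temporarily switching function context is a
   balanced push/pop pair (a sketch; OTHER_FNDECL is a placeholder):

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... emit or inspect RTL on behalf of OTHER_FNDECL ...
     pop_cfun ();

   This keeps the target's set_current_function hook informed via
   set_cfun.  */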
3783
3784 /* Return the current value of funcdef_no and increment it. */
3785 int
3786 get_next_funcdef_no (void)
3787 {
3788 return funcdef_no++;
3789 }
3790
3791 /* Allocate a function structure for FNDECL and set its contents
3792 to the defaults. Set cfun to the newly-allocated object.
3793 Some of the helper functions invoked during initialization assume
3794 that cfun has already been set. Therefore, assign the new object
3795 directly into cfun and invoke the back end hook explicitly at the
3796 very end, rather than initializing a temporary and calling set_cfun
3797 on it.
3798
3799 ABSTRACT_P is true if this is a function that will never be seen by
3800 the middle-end. Such functions are front-end concepts (like C++
3801 function templates) that do not correspond directly to functions
3802 placed in object files. */
3803
3804 void
3805 allocate_struct_function (tree fndecl, bool abstract_p)
3806 {
3807 tree result;
3808 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
3809
3810 cfun = GGC_CNEW (struct function);
3811
3812 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3813
3814 init_eh_for_function ();
3815
3816 if (init_machine_status)
3817 cfun->machine = (*init_machine_status) ();
3818
3819 #ifdef OVERRIDE_ABI_FORMAT
3820 OVERRIDE_ABI_FORMAT (fndecl);
3821 #endif
3822
3823 if (fndecl != NULL_TREE)
3824 {
3825 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3826 cfun->decl = fndecl;
3827 current_function_funcdef_no = get_next_funcdef_no ();
3828
3829 result = DECL_RESULT (fndecl);
3830 if (!abstract_p && aggregate_value_p (result, fndecl))
3831 {
3832 #ifdef PCC_STATIC_STRUCT_RETURN
3833 cfun->returns_pcc_struct = 1;
3834 #endif
3835 cfun->returns_struct = 1;
3836 }
3837
3838 cfun->stdarg
3839 = (fntype
3840 && TYPE_ARG_TYPES (fntype) != 0
3841 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3842 != void_type_node));
3843
3844 /* Assume all registers in stdarg functions need to be saved. */
3845 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3846 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3847 }
3848
3849 invoke_set_current_function_hook (fndecl);
3850 }
3851
3852 /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
3853 instead of just setting it. */
3854
3855 void
3856 push_struct_function (tree fndecl)
3857 {
3858 VEC_safe_push (function_p, heap, cfun_stack, cfun);
3859 allocate_struct_function (fndecl, false);
3860 }
3861
3862 /* Reset cfun, and other non-struct-function variables to defaults as
3863 appropriate for emitting rtl at the start of a function. */
3864
3865 static void
3866 prepare_function_start (void)
3867 {
3868 gcc_assert (!crtl->emit.x_last_insn);
3869 init_emit ();
3870 init_varasm_status ();
3871 init_expr ();
3872
3873 cse_not_expected = ! optimize;
3874
3875 /* Caller save not needed yet. */
3876 caller_save_needed = 0;
3877
3878 /* We haven't done register allocation yet. */
3879 reg_renumber = 0;
3880
3881 /* Indicate that we have not instantiated virtual registers yet. */
3882 virtuals_instantiated = 0;
3883
3884 /* Indicate that we want CONCATs now. */
3885 generating_concat_p = 1;
3886
3887 /* Indicate we have no need of a frame pointer yet. */
3888 frame_pointer_needed = 0;
3889 }
3890
3891 /* Initialize the rtl expansion mechanism so that we can do simple things
3892 like generate sequences. This is used to provide a context during global
3893 initialization of some passes. You must call expand_dummy_function_end
3894 to exit this context. */
3895
3896 void
3897 init_dummy_function_start (void)
3898 {
3899 gcc_assert (!in_dummy_function);
3900 in_dummy_function = true;
3901 push_struct_function (NULL_TREE);
3902 prepare_function_start ();
3903 }
3904
3905 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3906 and initialize static variables for generating RTL for the statements
3907 of the function. */
3908
3909 void
3910 init_function_start (tree subr)
3911 {
3912 if (subr && DECL_STRUCT_FUNCTION (subr))
3913 set_cfun (DECL_STRUCT_FUNCTION (subr));
3914 else
3915 allocate_struct_function (subr, false);
3916 prepare_function_start ();
3917
3918 /* Warn if this value is an aggregate type,
3919 regardless of which calling convention we are using for it. */
3920 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3921 warning (OPT_Waggregate_return, "function returns an aggregate");
3922 }
3923
3924 /* Make sure all values used by the optimization passes have sane
3925 defaults. */
3926 unsigned int
3927 init_function_for_compilation (void)
3928 {
3929 reg_renumber = 0;
3930
3931 /* No prologue/epilogue insns yet. Make sure that these vectors are
3932 empty. */
3933 gcc_assert (VEC_length (int, prologue) == 0);
3934 gcc_assert (VEC_length (int, epilogue) == 0);
3935 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
3936 return 0;
3937 }
3938
3939 struct rtl_opt_pass pass_init_function =
3940 {
3941 {
3942 RTL_PASS,
3943 NULL, /* name */
3944 NULL, /* gate */
3945 init_function_for_compilation, /* execute */
3946 NULL, /* sub */
3947 NULL, /* next */
3948 0, /* static_pass_number */
3949 0, /* tv_id */
3950 0, /* properties_required */
3951 0, /* properties_provided */
3952 0, /* properties_destroyed */
3953 0, /* todo_flags_start */
3954 0 /* todo_flags_finish */
3955 }
3956 };
3957
3958
3959 void
3960 expand_main_function (void)
3961 {
3962 #if (defined(INVOKE__main) \
3963 || (!defined(HAS_INIT_SECTION) \
3964 && !defined(INIT_SECTION_ASM_OP) \
3965 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3966 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3967 #endif
3968 }
3969 \f
3970 /* Expand code to initialize the stack_protect_guard. This is invoked at
3971 the beginning of a function to be protected. */
3972
3973 #ifndef HAVE_stack_protect_set
3974 # define HAVE_stack_protect_set 0
3975 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
3976 #endif
3977
3978 void
3979 stack_protect_prologue (void)
3980 {
3981 tree guard_decl = targetm.stack_protect_guard ();
3982 rtx x, y;
3983
3984 /* Avoid expand_expr here, because we don't want guard_decl pulled
3985 into registers unless absolutely necessary. And we know that
3986 crtl->stack_protect_guard is a local stack slot, so this skips
3987 all the fluff. */
3988 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
3989 y = validize_mem (DECL_RTL (guard_decl));
3990
3991 /* Allow the target to copy from Y to X without leaking Y into a
3992 register. */
3993 if (HAVE_stack_protect_set)
3994 {
3995 rtx insn = gen_stack_protect_set (x, y);
3996 if (insn)
3997 {
3998 emit_insn (insn);
3999 return;
4000 }
4001 }
4002
4003 /* Otherwise do a straight move. */
4004 emit_move_insn (x, y);
4005 }
4006
4007 /* Expand code to verify the stack_protect_guard. This is invoked at
4008 the end of a function to be protected. */
4009
4010 #ifndef HAVE_stack_protect_test
4011 # define HAVE_stack_protect_test 0
4012 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4013 #endif
4014
4015 void
4016 stack_protect_epilogue (void)
4017 {
4018 tree guard_decl = targetm.stack_protect_guard ();
4019 rtx label = gen_label_rtx ();
4020 rtx x, y, tmp;
4021
4022 /* Avoid expand_expr here, because we don't want guard_decl pulled
4023 into registers unless absolutely necessary. And we know that
4024 crtl->stack_protect_guard is a local stack slot, so this skips
4025 all the fluff. */
4026 x = validize_mem (DECL_RTL (crtl->stack_protect_guard));
4027 y = validize_mem (DECL_RTL (guard_decl));
4028
4029 /* Allow the target to compare Y with X without leaking either into
4030 a register. */
4031 switch (HAVE_stack_protect_test != 0)
4032 {
4033 case 1:
4034 tmp = gen_stack_protect_test (x, y, label);
4035 if (tmp)
4036 {
4037 emit_insn (tmp);
4038 break;
4039 }
4040 /* FALLTHRU */
4041
4042 default:
4043 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4044 break;
4045 }
4046
4047 /* The noreturn predictor has been moved to the tree level. The rtl-level
4048 predictors estimate this branch about 20%, which isn't enough to get
4049 things moved out of line. Since this is the only extant case of adding
4050 a noreturn function at the rtl level, it doesn't seem worth doing
4051 anything except adding the prediction by hand. */
4052 tmp = get_last_insn ();
4053 if (JUMP_P (tmp))
4054 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
4055
4056 expand_expr_stmt (targetm.stack_protect_fail ());
4057 emit_label (label);
4058 }
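/* The emitted sequence therefore has the shape:

     if (guard_slot == guard_value) goto label;
     <call the target's stack_protect_fail routine,
      typically __stack_chk_fail>
   label:

   with the branch predicted taken, keeping the failure call out of
   line.  */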
4059 \f
4060 /* Start the RTL for a new function, and set variables used for
4061 emitting RTL.
4062 SUBR is the FUNCTION_DECL node. */
4065
4066 void
4067 expand_function_start (tree subr)
4068 {
4069 /* Make sure volatile mem refs aren't considered
4070 valid operands of arithmetic insns. */
4071 init_recog_no_volatile ();
4072
4073 crtl->profile
4074 = (profile_flag
4075 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4076
4077 crtl->limit_stack
4078 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4079
4080 /* Make the label for return statements to jump to. Do not special
4081 case machines with special return instructions -- they will be
4082 handled later during jump, ifcvt, or epilogue creation. */
4083 return_label = gen_label_rtx ();
4084
4085 /* Initialize rtx used to return the value. */
4086 /* Do this before assign_parms so that we copy the struct value address
4087 before any library calls that assign parms might generate. */
4088
4089 /* Decide whether to return the value in memory or in a register. */
4090 if (aggregate_value_p (DECL_RESULT (subr), subr))
4091 {
4092 /* Returning something that won't go in a register. */
4093 rtx value_address = 0;
4094
4095 #ifdef PCC_STATIC_STRUCT_RETURN
4096 if (cfun->returns_pcc_struct)
4097 {
4098 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4099 value_address = assemble_static_space (size);
4100 }
4101 else
4102 #endif
4103 {
4104 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4105 /* Expect to be passed the address of a place to store the value.
4106 If it is passed as an argument, assign_parms will take care of
4107 it. */
4108 if (sv)
4109 {
4110 value_address = gen_reg_rtx (Pmode);
4111 emit_move_insn (value_address, sv);
4112 }
4113 }
4114 if (value_address)
4115 {
4116 rtx x = value_address;
4117 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4118 {
4119 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4120 set_mem_attributes (x, DECL_RESULT (subr), 1);
4121 }
4122 SET_DECL_RTL (DECL_RESULT (subr), x);
4123 }
4124 }
4125 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4126 /* If return mode is void, this decl rtl should not be used. */
4127 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4128 else
4129 {
4130 /* Compute the return value into a pseudo reg, which we will copy
4131 into the true return register after the cleanups are done. */
4132 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4133 if (TYPE_MODE (return_type) != BLKmode
4134 && targetm.calls.return_in_msb (return_type))
4135 /* expand_function_end will insert the appropriate padding in
4136 this case. Use the return value's natural (unpadded) mode
4137 within the function proper. */
4138 SET_DECL_RTL (DECL_RESULT (subr),
4139 gen_reg_rtx (TYPE_MODE (return_type)));
4140 else
4141 {
4142 /* In order to figure out what mode to use for the pseudo, we
4143 figure out what the mode of the eventual return register will
4144 actually be, and use that. */
4145 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4146
4147 /* Structures that are returned in registers are not
4148 aggregate_value_p, so we may see a PARALLEL or a REG. */
4149 if (REG_P (hard_reg))
4150 SET_DECL_RTL (DECL_RESULT (subr),
4151 gen_reg_rtx (GET_MODE (hard_reg)));
4152 else
4153 {
4154 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4155 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4156 }
4157 }
4158
4159 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4160 result to the real return register(s). */
4161 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4162 }
4163
4164 /* Initialize rtx for parameters and local variables.
4165 In some cases this requires emitting insns. */
4166 assign_parms (subr);
4167
4168 /* If function gets a static chain arg, store it. */
4169 if (cfun->static_chain_decl)
4170 {
4171 tree parm = cfun->static_chain_decl;
4172 rtx local = gen_reg_rtx (Pmode);
4173
4174 set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false);
4175 SET_DECL_RTL (parm, local);
4176 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4177
4178 emit_move_insn (local, static_chain_incoming_rtx);
4179 }
4180
4181 /* If the function receives a non-local goto, then store the
4182 bits we need to restore the frame pointer. */
4183 if (cfun->nonlocal_goto_save_area)
4184 {
4185 tree t_save;
4186 rtx r_save;
4187
4188 /* ??? We need to do this save early. Unfortunately, this point
4189 is before the frame variable gets declared. Help out... */
4190 tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
4191 if (!DECL_RTL_SET_P (var))
4192 expand_decl (var);
4193
4194 t_save = build4 (ARRAY_REF, ptr_type_node,
4195 cfun->nonlocal_goto_save_area,
4196 integer_zero_node, NULL_TREE, NULL_TREE);
4197 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4198 r_save = convert_memory_address (Pmode, r_save);
4199
4200 emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
4201 update_nonlocal_goto_save_area ();
4202 }
4203
4204 /* The following was moved from init_function_start.
4205 The move is supposed to make sdb output more accurate. */
4206 /* Indicate the beginning of the function body,
4207 as opposed to parm setup. */
4208 emit_note (NOTE_INSN_FUNCTION_BEG);
4209
4210 gcc_assert (NOTE_P (get_last_insn ()));
4211
4212 parm_birth_insn = get_last_insn ();
4213
4214 if (crtl->profile)
4215 {
4216 #ifdef PROFILE_HOOK
4217 PROFILE_HOOK (current_function_funcdef_no);
4218 #endif
4219 }
4220
4221 /* This point, after the display initializations, is where the
4222 stack checking probe should go. */
4223 if (flag_stack_check)
4224 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4225
4226 /* Make sure there is a line number after the function entry setup code. */
4227 force_next_line_note ();
4228 }
4229 \f
4230 /* Undo the effects of init_dummy_function_start. */
4231 void
4232 expand_dummy_function_end (void)
4233 {
4234 gcc_assert (in_dummy_function);
4235
4236 /* End any sequences that failed to be closed due to syntax errors. */
4237 while (in_sequence_p ())
4238 end_sequence ();
4239
4240 /* Outside function body, can't compute type's actual size
4241 until next function's body starts. */
4242
4243 free_after_parsing (cfun);
4244 free_after_compilation (cfun);
4245 pop_cfun ();
4246 in_dummy_function = false;
4247 }
4248
4249 /* Call DOIT for each hard register used as a return value from
4250 the current function. */
4251
4252 void
4253 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4254 {
4255 rtx outgoing = crtl->return_rtx;
4256
4257 if (! outgoing)
4258 return;
4259
4260 if (REG_P (outgoing))
4261 (*doit) (outgoing, arg);
4262 else if (GET_CODE (outgoing) == PARALLEL)
4263 {
4264 int i;
4265
4266 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4267 {
4268 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
4269
4270 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
4271 (*doit) (x, arg);
4272 }
4273 }
4274 }
4275
4276 static void
4277 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4278 {
4279 emit_clobber (reg);
4280 }
4281
4282 void
4283 clobber_return_register (void)
4284 {
4285 diddle_return_value (do_clobber_return_reg, NULL);
4286
4287 /* In case we use a pseudo to return the value, clobber it too. */
4288 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4289 {
4290 tree decl_result = DECL_RESULT (current_function_decl);
4291 rtx decl_rtl = DECL_RTL (decl_result);
4292 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4293 {
4294 do_clobber_return_reg (decl_rtl, NULL);
4295 }
4296 }
4297 }
4298
4299 static void
4300 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4301 {
4302 emit_use (reg);
4303 }
4304
4305 static void
4306 use_return_register (void)
4307 {
4308 diddle_return_value (do_use_return_reg, NULL);
4309 }
4310
4311 /* Possibly warn about unused parameters. */
4312 void
4313 do_warn_unused_parameter (tree fn)
4314 {
4315 tree decl;
4316
4317 for (decl = DECL_ARGUMENTS (fn);
4318 decl; decl = TREE_CHAIN (decl))
4319 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4320 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
4321 && !TREE_NO_WARNING (decl))
4322 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
4323 }
4324
4325 static GTY(()) rtx initial_trampoline;
4326
4327 /* Generate RTL for the end of the current function. */
4328
4329 void
4330 expand_function_end (void)
4331 {
4332 rtx clobber_after;
4333
4334 /* If arg_pointer_save_area was referenced only from a nested
4335 function, we will not have initialized it yet. Do that now. */
4336 if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
4337 get_arg_pointer_save_area ();
4338
4339 /* If we are doing stack checking and this function makes calls,
4340 do a stack probe at the start of the function to ensure we have enough
4341 space for another stack frame. */
4342 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4343 {
4344 rtx insn, seq;
4345
4346 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4347 if (CALL_P (insn))
4348 {
4349 start_sequence ();
4350 probe_stack_range (STACK_CHECK_PROTECT,
4351 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
4352 seq = get_insns ();
4353 end_sequence ();
4354 emit_insn_before (seq, stack_check_probe_note);
4355 break;
4356 }
4357 }
4358
4359 /* End any sequences that failed to be closed due to syntax errors. */
4360 while (in_sequence_p ())
4361 end_sequence ();
4362
4363 clear_pending_stack_adjust ();
4364 do_pending_stack_adjust ();
4365
4366 /* Output a line number for the end of the function.
4367 SDB depends on this. */
4368 force_next_line_note ();
4369 set_curr_insn_source_location (input_location);
4370
4371 /* Before the return label (if any), clobber the return
4372 registers so that they are not propagated live to the rest of
4373 the function. This can only happen with functions that drop
4374 through; if there had been a return statement, there would
4375 have either been a return rtx, or a jump to the return label.
4376
4377 We delay actual code generation until after
4378 current_function_value_rtx is computed. */
4379 clobber_after = get_last_insn ();
4380
4381 /* Output the label for the actual return from the function. */
4382 emit_label (return_label);
4383
4384 if (USING_SJLJ_EXCEPTIONS)
4385 {
4386 /* Let except.c know where it should emit the call to unregister
4387 the function context for sjlj exceptions. */
4388 if (flag_exceptions)
4389 sjlj_emit_function_exit_after (get_last_insn ());
4390 }
4391 else
4392 {
4393 /* We want to ensure that instructions that may trap are not
4394 moved into the epilogue by scheduling, because we don't
4395 always emit unwind information for the epilogue. */
4396 if (flag_non_call_exceptions)
4397 emit_insn (gen_blockage ());
4398 }
4399
4400 /* If this is an implementation of throw, do what's necessary to
4401 communicate between __builtin_eh_return and the epilogue. */
4402 expand_eh_return ();
4403
4404 /* If scalar return value was computed in a pseudo-reg, or was a named
4405 return value that got dumped to the stack, copy that to the hard
4406 return register. */
4407 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4408 {
4409 tree decl_result = DECL_RESULT (current_function_decl);
4410 rtx decl_rtl = DECL_RTL (decl_result);
4411
4412 if (REG_P (decl_rtl)
4413 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4414 : DECL_REGISTER (decl_result))
4415 {
4416 rtx real_decl_rtl = crtl->return_rtx;
4417
4418 /* This should be set in assign_parms. */
4419 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4420
4421 /* If this is a BLKmode structure being returned in registers,
4422 then use the mode computed in expand_return. Note that if
4423 decl_rtl is memory, then its mode may have been changed,
4424 but that crtl->return_rtx has not. */
4425 if (GET_MODE (real_decl_rtl) == BLKmode)
4426 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4427
4428 /* If a non-BLKmode return value should be padded at the least
4429 significant end of the register, shift it left by the appropriate
4430 amount. BLKmode results are handled using the group load/store
4431 machinery. */
4432 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4433 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4434 {
4435 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4436 REGNO (real_decl_rtl)),
4437 decl_rtl);
4438 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4439 }
4440 /* If a named return value dumped decl_rtl to memory, then
4441 we may need to re-do the PROMOTE_MODE signed/unsigned
4442 extension. */
4443 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4444 {
4445 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4446
4447 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4448 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4449 &unsignedp, 1);
4450
4451 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4452 }
4453 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4454 {
4455 /* If expand_function_start has created a PARALLEL for decl_rtl,
4456 move the result to the real return registers. Otherwise, do
4457 a group load from decl_rtl for a named return. */
4458 if (GET_CODE (decl_rtl) == PARALLEL)
4459 emit_group_move (real_decl_rtl, decl_rtl);
4460 else
4461 emit_group_load (real_decl_rtl, decl_rtl,
4462 TREE_TYPE (decl_result),
4463 int_size_in_bytes (TREE_TYPE (decl_result)));
4464 }
4465 /* In the case of complex integer modes smaller than a word, we'll
4466 need to generate some non-trivial bitfield insertions. Do that
4467 on a pseudo and not the hard register. */
4468 else if (GET_CODE (decl_rtl) == CONCAT
4469 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4470 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4471 {
4472 int old_generating_concat_p;
4473 rtx tmp;
4474
4475 old_generating_concat_p = generating_concat_p;
4476 generating_concat_p = 0;
4477 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4478 generating_concat_p = old_generating_concat_p;
4479
4480 emit_move_insn (tmp, decl_rtl);
4481 emit_move_insn (real_decl_rtl, tmp);
4482 }
4483 else
4484 emit_move_insn (real_decl_rtl, decl_rtl);
4485 }
4486 }
4487
4488 /* If returning a structure, arrange to return the address of the value
4489 in a place where debuggers expect to find it.
4490
4491 If returning a structure PCC style,
4492 the caller also depends on this value.
4493 And cfun->returns_pcc_struct is not necessarily set. */
4494 if (cfun->returns_struct
4495 || cfun->returns_pcc_struct)
4496 {
4497 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4498 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4499 rtx outgoing;
4500
4501 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4502 type = TREE_TYPE (type);
4503 else
4504 value_address = XEXP (value_address, 0);
4505
4506 outgoing = targetm.calls.function_value (build_pointer_type (type),
4507 current_function_decl, true);
4508
4509 /* Mark this as a function return value so integrate will delete the
4510 assignment and USE below when inlining this function. */
4511 REG_FUNCTION_VALUE_P (outgoing) = 1;
4512
4513 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4514 value_address = convert_memory_address (GET_MODE (outgoing),
4515 value_address);
4516
4517 emit_move_insn (outgoing, value_address);
4518
4519 /* Show return register used to hold result (in this case the address
4520 of the result). */
4521 crtl->return_rtx = outgoing;
4522 }
4523
4524 /* Emit the actual code to clobber return register. */
4525 {
4526 rtx seq;
4527
4528 start_sequence ();
4529 clobber_return_register ();
4530 expand_naked_return ();
4531 seq = get_insns ();
4532 end_sequence ();
4533
4534 emit_insn_after (seq, clobber_after);
4535 }
4536
4537 /* Output the label for the naked return from the function. */
4538 emit_label (naked_return_label);
4539
4540 /* @@@ This is a kludge. We want to ensure that instructions that
4541 may trap are not moved into the epilogue by scheduling, because
4542 we don't always emit unwind information for the epilogue. */
4543 if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
4544 emit_insn (gen_blockage ());
4545
4546 /* If stack protection is enabled for this function, check the guard. */
4547 if (crtl->stack_protect_guard)
4548 stack_protect_epilogue ();
4549
4550 /* If we had calls to alloca, and this machine needs
4551 an accurate stack pointer to exit the function,
4552 insert some code to save and restore the stack pointer. */
4553 if (! EXIT_IGNORE_STACK
4554 && cfun->calls_alloca)
4555 {
4556 rtx tem = 0;
4557
4558 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4559 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4560 }
4561
4562 /* ??? This should no longer be necessary since the stupid register
4563 allocator is no longer with us, but some parts of the compiler
4564 (e.g. reload_combine, and sh mach_dep_reorg) still try to compute
4565 their own lifetime info instead of using the general framework. */
4566 use_return_register ();
4567 }
4568
4569 rtx
4570 get_arg_pointer_save_area (void)
4571 {
4572 rtx ret = arg_pointer_save_area;
4573
4574 if (! ret)
4575 {
4576 ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
4577 arg_pointer_save_area = ret;
4578 }
4579
4580 if (! crtl->arg_pointer_save_area_init)
4581 {
4582 rtx seq;
4583
4584 /* Save the arg pointer at the beginning of the function. The
4585 generated stack slot may not be a valid memory address, so we
4586 have to check it and fix it if necessary. */
4587 start_sequence ();
4588 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
4589 seq = get_insns ();
4590 end_sequence ();
4591
4592 push_topmost_sequence ();
4593 emit_insn_after (seq, entry_of_function ());
4594 pop_topmost_sequence ();
4595 }
4596
4597 return ret;
4598 }
4599 \f
4600 /* Extend a vector that records the INSN_UIDs of INSNS
4601 (a list of one or more insns). */
4602
4603 static void
4604 record_insns (rtx insns, VEC(int,heap) **vecp)
4605 {
4606 rtx tmp;
4607
4608 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4609 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4610 }
4611
4612 /* Set the locator of the insn chain starting at INSN to LOC. */
4613 static void
4614 set_insn_locators (rtx insn, int loc)
4615 {
4616 while (insn != NULL_RTX)
4617 {
4618 if (INSN_P (insn))
4619 INSN_LOCATOR (insn) = loc;
4620 insn = NEXT_INSN (insn);
4621 }
4622 }
4623
4624 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4625 be running after reorg, SEQUENCE rtl is possible. */
4626
4627 static int
4628 contains (const_rtx insn, VEC(int,heap) **vec)
4629 {
4630 int i, j;
4631
4632 if (NONJUMP_INSN_P (insn)
4633 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4634 {
4635 int count = 0;
4636 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4637 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4638 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4639 == VEC_index (int, *vec, j))
4640 count++;
4641 return count;
4642 }
4643 else
4644 {
4645 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4646 if (INSN_UID (insn) == VEC_index (int, *vec, j))
4647 return 1;
4648 }
4649 return 0;
4650 }
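/* For instance, a delay-slot SEQUENCE whose branch and filler insn
   were both recorded in VEC makes contains return 2; an ordinary
   insn yields 1 if its UID was recorded and 0 otherwise.  */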
4651
4652 int
4653 prologue_epilogue_contains (const_rtx insn)
4654 {
4655 if (contains (insn, &prologue))
4656 return 1;
4657 if (contains (insn, &epilogue))
4658 return 1;
4659 return 0;
4660 }
4661
4662 int
4663 sibcall_epilogue_contains (const_rtx insn)
4664 {
4665 if (sibcall_epilogue)
4666 return contains (insn, &sibcall_epilogue);
4667 return 0;
4668 }
4669
4670 #ifdef HAVE_return
4671 /* Insert gen_return at the end of block BB. This also means updating
4672 block_for_insn appropriately. */
4673
4674 static void
4675 emit_return_into_block (basic_block bb)
4676 {
4677 emit_jump_insn_after (gen_return (), BB_END (bb));
4678 }
4679 #endif /* HAVE_return */
4680
4681 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
4682 this into place with notes indicating where the prologue ends and where
4683 the epilogue begins. Update the basic block information when possible. */
4684
4685 static void
4686 thread_prologue_and_epilogue_insns (void)
4687 {
4688 int inserted = 0;
4689 edge e;
4690 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
4691 rtx seq;
4692 #endif
4693 #if defined (HAVE_epilogue) || defined(HAVE_return)
4694 rtx epilogue_end = NULL_RTX;
4695 #endif
4696 edge_iterator ei;
4697
4698 #ifdef HAVE_prologue
4699 if (HAVE_prologue)
4700 {
4701 start_sequence ();
4702 seq = gen_prologue ();
4703 emit_insn (seq);
4704
4705 /* Insert an explicit USE for the frame pointer
4706 if the profiling is on and the frame pointer is required. */
4707 if (crtl->profile && frame_pointer_needed)
4708 emit_use (hard_frame_pointer_rtx);
4709
4710 /* Retain a map of the prologue insns. */
4711 record_insns (seq, &prologue);
4712 emit_note (NOTE_INSN_PROLOGUE_END);
4713
4714 #ifndef PROFILE_BEFORE_PROLOGUE
4715 /* Ensure that instructions are not moved into the prologue when
4716 profiling is on. The call to the profiling routine can be
4717 emitted within the live range of a call-clobbered register. */
4718 if (crtl->profile)
4719 emit_insn (gen_blockage ());
4720 #endif
4721
4722 seq = get_insns ();
4723 end_sequence ();
4724 set_insn_locators (seq, prologue_locator);
4725
4726 /* Can't deal with multiple successors of the entry block
4727 at the moment. Function should always have at least one
4728 entry point. */
4729 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
4730
4731 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
4732 inserted = 1;
4733 }
4734 #endif
4735
4736 /* If the exit block has no non-fake predecessors, we don't need
4737 an epilogue. */
4738 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4739 if ((e->flags & EDGE_FAKE) == 0)
4740 break;
4741 if (e == NULL)
4742 goto epilogue_done;
4743
4744 #ifdef HAVE_return
4745 if (optimize && HAVE_return)
4746 {
4747 /* If we're allowed to generate a simple return instruction,
4748 then by definition we don't need a full epilogue. Examine
4749 the block that falls through to EXIT. If it does not
4750 contain any code, examine its predecessors and try to
4751 emit (conditional) return instructions. */
4752
4753 basic_block last;
4754 rtx label;
4755
4756 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4757 if (e->flags & EDGE_FALLTHRU)
4758 break;
4759 if (e == NULL)
4760 goto epilogue_done;
4761 last = e->src;
4762
4763 /* Verify that there are no active instructions in the last block. */
4764 label = BB_END (last);
4765 while (label && !LABEL_P (label))
4766 {
4767 if (active_insn_p (label))
4768 break;
4769 label = PREV_INSN (label);
4770 }
4771
4772 if (BB_HEAD (last) == label && LABEL_P (label))
4773 {
4774 edge_iterator ei2;
4775
4776 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
4777 {
4778 basic_block bb = e->src;
4779 rtx jump;
4780
4781 if (bb == ENTRY_BLOCK_PTR)
4782 {
4783 ei_next (&ei2);
4784 continue;
4785 }
4786
4787 jump = BB_END (bb);
4788 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
4789 {
4790 ei_next (&ei2);
4791 continue;
4792 }
4793
4794 /* If we have an unconditional jump, we can replace that
4795 with a simple return instruction. */
4796 if (simplejump_p (jump))
4797 {
4798 emit_return_into_block (bb);
4799 delete_insn (jump);
4800 }
4801
4802 /* If we have a conditional jump, we can try to replace
4803 that with a conditional return instruction. */
4804 else if (condjump_p (jump))
4805 {
4806 if (! redirect_jump (jump, 0, 0))
4807 {
4808 ei_next (&ei2);
4809 continue;
4810 }
4811
4812 /* If this block has only one successor, it both jumps
4813 and falls through to the fallthru block, so we can't
4814 delete the edge. */
4815 if (single_succ_p (bb))
4816 {
4817 ei_next (&ei2);
4818 continue;
4819 }
4820 }
4821 else
4822 {
4823 ei_next (&ei2);
4824 continue;
4825 }
4826
4827 /* Fix up the CFG for the successful change we just made. */
4828 redirect_edge_succ (e, EXIT_BLOCK_PTR);
4829 }
4830
4831 /* Emit a return insn for the exit fallthru block. Whether
4832 this is still reachable will be determined later. */
4833
4834 emit_barrier_after (BB_END (last));
4835 emit_return_into_block (last);
4836 epilogue_end = BB_END (last);
4837 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
4838 goto epilogue_done;
4839 }
4840 }
4841 #endif
4842 /* Find the edge that falls through to EXIT. Other edges may exist
4843 due to RETURN instructions, but those don't need epilogues.
4844 There really shouldn't be a mixture -- either all should have
4845 been converted or none, however... */
4846
4847 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4848 if (e->flags & EDGE_FALLTHRU)
4849 break;
4850 if (e == NULL)
4851 goto epilogue_done;
4852
4853 #ifdef HAVE_epilogue
4854 if (HAVE_epilogue)
4855 {
4856 start_sequence ();
4857 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
4858 seq = gen_epilogue ();
4859 emit_jump_insn (seq);
4860
4861 /* Retain a map of the epilogue insns. */
4862 record_insns (seq, &epilogue);
4863 set_insn_locators (seq, epilogue_locator);
4864
4865 seq = get_insns ();
4866 end_sequence ();
4867
4868 insert_insn_on_edge (seq, e);
4869 inserted = 1;
4870 }
4871 else
4872 #endif
4873 {
4874 basic_block cur_bb;
4875
4876 if (! next_active_insn (BB_END (e->src)))
4877 goto epilogue_done;
4878 /* We have a fall-through edge to the exit block, the source is not
4879 at the end of the function, and there will be an assembler epilogue
4880 at the end of the function.
4881 We can't use force_nonfallthru here, because that would try to
4882 use return. Inserting a jump 'by hand' is extremely messy, so
4883 we take advantage of cfg_layout_finalize using
4884 fixup_fallthru_exit_predecessor. */
4885 cfg_layout_initialize (0);
4886 FOR_EACH_BB (cur_bb)
4887 if (cur_bb->index >= NUM_FIXED_BLOCKS
4888 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
4889 cur_bb->aux = cur_bb->next_bb;
4890 cfg_layout_finalize ();
4891 }
4892 epilogue_done:
4893
4894 if (inserted)
4895 {
4896 commit_edge_insertions ();
4897
4898 /* The epilogue insns we inserted may cause the exit edge to no longer
4899 be fallthru. */
4900 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4901 {
4902 if (((e->flags & EDGE_FALLTHRU) != 0)
4903 && returnjump_p (BB_END (e->src)))
4904 e->flags &= ~EDGE_FALLTHRU;
4905 }
4906 }
4907
4908 #ifdef HAVE_sibcall_epilogue
4909 /* Emit sibling epilogues before any sibling call sites. */
4910 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
4911 {
4912 basic_block bb = e->src;
4913 rtx insn = BB_END (bb);
4914
4915 if (!CALL_P (insn)
4916 || ! SIBLING_CALL_P (insn))
4917 {
4918 ei_next (&ei);
4919 continue;
4920 }
4921
4922 start_sequence ();
4923 emit_insn (gen_sibcall_epilogue ());
4924 seq = get_insns ();
4925 end_sequence ();
4926
4927 /* Retain a map of the epilogue insns. Used in life analysis to
4928 avoid getting rid of sibcall epilogue insns. Do this before we
4929 actually emit the sequence. */
4930 record_insns (seq, &sibcall_epilogue);
4931 set_insn_locators (seq, epilogue_locator);
4932
4933 emit_insn_before (seq, insn);
4934 ei_next (&ei);
4935 }
4936 #endif
4937
4938 #ifdef HAVE_epilogue
4939 if (epilogue_end)
4940 {
4941 rtx insn, next;
4942
4943 /* Move any NOTE_INSN_FUNCTION_BEG notes that appear after the
4944 epilogue back before it, as those can be relevant for debug
4945 info generation. There is no need, however, to be quite so
4946 picky about the existence of such a note; if none is found,
4947 nothing is moved. */
4948 for (insn = epilogue_end; insn; insn = next)
4949 {
4950 next = NEXT_INSN (insn);
4951 if (NOTE_P (insn)
4952 && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
4953 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
4954 }
4955 }
4956 #endif
4957
4958 /* Threading the prologue and epilogue changes the artificial refs
4959 in the entry and exit blocks. */
4960 epilogue_completed = 1;
4961 df_update_entry_exit_and_calls ();
4962 }
4963
4964 /* Reposition the prologue-end and epilogue-begin notes after instruction
4965 scheduling and delayed branch scheduling. */
4966
4967 void
4968 reposition_prologue_and_epilogue_notes (void)
4969 {
4970 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
4971 rtx insn, last, note;
4972 int len;
4973
4974 if ((len = VEC_length (int, prologue)) > 0)
4975 {
4976 last = 0, note = 0;
4977
4978 /* Scan from the beginning until we reach the last prologue insn.
4979 We apparently can't depend on basic_block_{head,end} after
4980 reorg has run. */
4981 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4982 {
4983 if (NOTE_P (insn))
4984 {
4985 if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
4986 note = insn;
4987 }
4988 else if (contains (insn, &prologue))
4989 {
4990 last = insn;
4991 if (--len == 0)
4992 break;
4993 }
4994 }
4995
4996 if (last)
4997 {
4998 /* Find the prologue-end note if we haven't already, and
4999 move it to just after the last prologue insn. */
5000 if (note == 0)
5001 {
5002 for (note = last; (note = NEXT_INSN (note));)
5003 if (NOTE_P (note)
5004 && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
5005 break;
5006 }
5007
5008 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5009 if (LABEL_P (last))
5010 last = NEXT_INSN (last);
5011 reorder_insns (note, note, last);
5012 }
5013 }
5014
5015 if ((len = VEC_length (int, epilogue)) > 0)
5016 {
5017 last = 0, note = 0;
5018
5019 /* Scan from the end until we reach the first epilogue insn.
5020 We apparently can't depend on basic_block_{head,end} after
5021 reorg has run. */
5022 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5023 {
5024 if (NOTE_P (insn))
5025 {
5026 if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
5027 note = insn;
5028 }
5029 else if (contains (insn, &epilogue))
5030 {
5031 last = insn;
5032 if (--len == 0)
5033 break;
5034 }
5035 }
5036
5037 if (last)
5038 {
5039 /* Find the epilogue-begin note if we haven't already, and
5040 move it to just before the first epilogue insn. */
5041 if (note == 0)
5042 {
5043 for (note = insn; (note = PREV_INSN (note));)
5044 if (NOTE_P (note)
5045 && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
5046 break;
5047 }
5048
5049 if (PREV_INSN (last) != note)
5050 reorder_insns (note, note, PREV_INSN (last));
5051 }
5052 }
5053 #endif /* HAVE_prologue or HAVE_epilogue */
5054 }
5055
5056 /* Returns the name of the current function. */
5057 const char *
5058 current_function_name (void)
5059 {
5060 return lang_hooks.decl_printable_name (cfun->decl, 2);
5061 }
5062
5063 /* Returns the raw (mangled) name of the current function. */
5064 const char *
5065 current_function_assembler_name (void)
5066 {
5067 return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
5068 }
5069 \f
5070
5071 static unsigned int
5072 rest_of_handle_check_leaf_regs (void)
5073 {
5074 #ifdef LEAF_REGISTERS
5075 current_function_uses_only_leaf_regs
5076 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5077 #endif
5078 return 0;
5079 }
5080
5081 /* Insert a TYPE into the used types hash table of CFUN. */
5082 static void
5083 used_types_insert_helper (tree type, struct function *func)
5084 {
5085 if (type != NULL && func != NULL)
5086 {
5087 void **slot;
5088
5089 if (func->used_types_hash == NULL)
5090 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5091 htab_eq_pointer, NULL);
5092 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5093 if (*slot == NULL)
5094 *slot = type;
5095 }
5096 }
5097
5098 /* Given a type, insert it into the used-types hash table of cfun. */
5099 void
5100 used_types_insert (tree t)
5101 {
5102 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5103 t = TREE_TYPE (t);
5104 t = TYPE_MAIN_VARIANT (t);
5105 if (debug_info_level > DINFO_LEVEL_NONE)
5106 used_types_insert_helper (t, cfun);
5107 }
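/* E.g. for a declaration of type int **, the loop above strips both
   pointer layers and records the main variant of int; for an array
   of const char it records the main variant char.  */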
5108
5109 struct rtl_opt_pass pass_leaf_regs =
5110 {
5111 {
5112 RTL_PASS,
5113 NULL, /* name */
5114 NULL, /* gate */
5115 rest_of_handle_check_leaf_regs, /* execute */
5116 NULL, /* sub */
5117 NULL, /* next */
5118 0, /* static_pass_number */
5119 0, /* tv_id */
5120 0, /* properties_required */
5121 0, /* properties_provided */
5122 0, /* properties_destroyed */
5123 0, /* todo_flags_start */
5124 0 /* todo_flags_finish */
5125 }
5126 };
5127
5128 static unsigned int
5129 rest_of_handle_thread_prologue_and_epilogue (void)
5130 {
5131 if (optimize)
5132 cleanup_cfg (CLEANUP_EXPENSIVE);
5133 /* On some machines, the prologue and epilogue code, or parts thereof,
5134 can be represented as RTL. Doing so lets us schedule insns between
5135 it and the rest of the code and also allows delayed branch
5136 scheduling to operate in the epilogue. */
5137
5138 thread_prologue_and_epilogue_insns ();
5139 return 0;
5140 }
5141
5142 struct rtl_opt_pass pass_thread_prologue_and_epilogue =
5143 {
5144 {
5145 RTL_PASS,
5146 "pro_and_epilogue", /* name */
5147 NULL, /* gate */
5148 rest_of_handle_thread_prologue_and_epilogue, /* execute */
5149 NULL, /* sub */
5150 NULL, /* next */
5151 0, /* static_pass_number */
5152 TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
5153 0, /* properties_required */
5154 0, /* properties_provided */
5155 0, /* properties_destroyed */
5156 TODO_verify_flow, /* todo_flags_start */
5157 TODO_dump_func |
5158 TODO_df_verify |
5159 TODO_df_finish | TODO_verify_rtl_sharing |
5160 TODO_ggc_collect /* todo_flags_finish */
5161 }
5162 };
5163 \f
5164
5165 /* This mini-pass fixes fall-out from SSA in asm statements that have
5166 in-out constraints. Say you start with
5167
5168 orig = inout;
5169 asm ("": "+mr" (inout));
5170 use (orig);
5171
5172 which is transformed very early to use explicit output and match operands:
5173
5174 orig = inout;
5175 asm ("": "=mr" (inout) : "0" (inout));
5176 use (orig);
5177
5178 Or, after SSA and copyprop,
5179
5180 asm ("": "=mr" (inout_2) : "0" (inout_1));
5181 use (inout_1);
5182
5183 Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
5184 they represent two separate values, so they will get different pseudo
5185 registers during expansion. Then, since the two operands need to match
5186 per the constraints, but use different pseudo registers, reload is
5187 forced to generate a reload for these operands. But reloads can only be
5188 satisfied by hardregs, not by memory, so we need a register for this
5189 reload, just because we are presented with non-matching operands.
5190 So, even though we allow memory for this operand, no memory can be
5191 used for it, just because the two operands don't match. This can
5192 cause reload failures on register-starved targets.
5193
5194 So it's a symptom of reload not being able to use memory for reloads
5195 or, alternatively, a symptom of both operands not coming into
5196 reload as matching (in which case the pseudo could go to memory just
5197 fine, as the alternative allows it, and no reload would be necessary).
5198 We fix the latter problem here, by transforming
5199
5200 asm ("": "=mr" (inout_2) : "0" (inout_1));
5201
5202 back to
5203
5204 inout_2 = inout_1;
5205 asm ("": "=mr" (inout_2) : "0" (inout_2)); */
5206
5207 static void
5208 match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
5209 {
5210 int i;
5211 bool changed = false;
5212 rtx op = SET_SRC (p_sets[0]);
5213 int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
5214 rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
5215 bool *output_matched = XALLOCAVEC (bool, noutputs);
5216
5217 memset (output_matched, 0, noutputs * sizeof (bool));
5218 for (i = 0; i < ninputs; i++)
5219 {
5220 rtx input, output, insns;
5221 const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
5222 char *end;
5223 int match, j;
5224
5225 match = strtoul (constraint, &end, 10);
5226 if (end == constraint)
5227 continue;
5228
5229 gcc_assert (match < noutputs);
5230 output = SET_DEST (p_sets[match]);
5231 input = RTVEC_ELT (inputs, i);
5232 /* Only do the transformation for pseudos. */
5233 if (! REG_P (output)
5234 || rtx_equal_p (output, input)
5235 || (GET_MODE (input) != VOIDmode
5236 && GET_MODE (input) != GET_MODE (output)))
5237 continue;
5238
5239 /* We can't do anything if the output is also used as input,
5240 as we're going to overwrite it. */
5241 for (j = 0; j < ninputs; j++)
5242 if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
5243 break;
5244 if (j != ninputs)
5245 continue;
5246
5247 /* Avoid changing the same input several times. For
5248 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
5249 only change in once (to out1), rather than changing it
5250 first to out1 and afterwards to out2. */
5251 if (i > 0)
5252 {
5253 for (j = 0; j < noutputs; j++)
5254 if (output_matched[j] && input == SET_DEST (p_sets[j]))
5255 break;
5256 if (j != noutputs)
5257 continue;
5258 }
5259 output_matched[match] = true;
5260
5261 start_sequence ();
5262 emit_move_insn (output, input);
5263 insns = get_insns ();
5264 end_sequence ();
5265 emit_insn_before (insns, insn);
5266
5267 /* Now replace all mentions of the input with output. We can't
5268 just replace the occurrence in inputs[i], as the register might
5269 also be used in some other input (or even in an address of an
5270 output), which would mean possibly increasing the number of
5271 inputs by one (namely 'output' in addition), which might pose
5272 too complicated a problem for reload to solve. E.g. this situation:
5273
5274 asm ("" : "=r" (output), "=m" (input) : "0" (input))
5275
5276 Here 'input' is used in two occurrences as input (once for the
5277 input operand, once for the address in the second output operand).
5278 If we replaced only the occurrence of the input operand (to
5279 make the match) we would be left with this:
5280
5281 output = input
5282 asm ("" : "=r" (output), "=m" (input) : "0" (output))
5283
5284 Now we suddenly have two different input values (containing the same
5285 value, but different pseudos) where we formerly had only one.
5286 With more complicated asms this might lead to reload failures
5287 which wouldn't have happened without this pass. So, iterate over
5288 all operands and replace all occurrences of the register used. */
5289 for (j = 0; j < noutputs; j++)
5290 if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
5291 && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
5292 SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
5293 input, output);
5294 for (j = 0; j < ninputs; j++)
5295 if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
5296 RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
5297 input, output);
5298
5299 changed = true;
5300 }
5301
5302 if (changed)
5303 df_insn_rescan (insn);
5304 }
5305
5306 static unsigned
5307 rest_of_match_asm_constraints (void)
5308 {
5309 basic_block bb;
5310 rtx insn, pat, *p_sets;
5311 int noutputs;
5312
5313 if (!crtl->has_asm_statement)
5314 return 0;
5315
5316 df_set_flags (DF_DEFER_INSN_RESCAN);
5317 FOR_EACH_BB (bb)
5318 {
5319 FOR_BB_INSNS (bb, insn)
5320 {
5321 if (!INSN_P (insn))
5322 continue;
5323
5324 pat = PATTERN (insn);
5325 if (GET_CODE (pat) == PARALLEL)
5326 p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
5327 else if (GET_CODE (pat) == SET)
5328 p_sets = &PATTERN (insn), noutputs = 1;
5329 else
5330 continue;
5331
5332 if (GET_CODE (*p_sets) == SET
5333 && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
5334 match_asm_constraints_1 (insn, p_sets, noutputs);
5335 }
5336 }
5337
5338 return TODO_df_finish;
5339 }
5340
5341 struct rtl_opt_pass pass_match_asm_constraints =
5342 {
5343 {
5344 RTL_PASS,
5345 "asmcons", /* name */
5346 NULL, /* gate */
5347 rest_of_match_asm_constraints, /* execute */
5348 NULL, /* sub */
5349 NULL, /* next */
5350 0, /* static_pass_number */
5351 0, /* tv_id */
5352 0, /* properties_required */
5353 0, /* properties_provided */
5354 0, /* properties_destroyed */
5355 0, /* todo_flags_start */
5356 TODO_dump_func /* todo_flags_finish */
5357 }
5358 };
5359
5360
5361 #include "gt-function.h"