/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "rtl-error.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-pass.h"
#include "bb-reorder.h"

/* So we can assign to cfun in this file.  */
#undef cfun
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
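
/* A quick illustration of the two macros above (example values, not from
   the original source): with ALIGN == 8, FLOOR_ROUND (-3, 8) == ((-3) & ~7)
   == -8, and CEIL_ROUND (13, 8) == ((13 + 7) & ~7) == 16.  The masking
   trick is only correct because ALIGN is assumed to be a power of two, so
   ~(ALIGN - 1) clears exactly the low bits; unlike division-based
   rounding, it also behaves predictably for negative VALUEs.  */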
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t prologue_insn_hash;
static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
  htab_t epilogue_insn_hash;

htab_t types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
static void reorder_blocks_1 (rtx, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
static void set_insn_locations (rtx, int) ATTRIBUTE_UNUSED;
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

typedef struct function *function_p;

static vec<function_p> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (HOST_WIDE_INT offset, tree func)
{
  unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;

  if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
	      /* Leave room for the fixed part of the frame.  */
	      - 64 * UNITS_PER_WORD)
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return TRUE;
    }

  return FALSE;
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, enum machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front-end to (possibly) increase the alignment of this
     type.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length,
		     HOST_WIDE_INT size, unsigned int alignment,
		     HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = STARTING_FRAME_OFFSET % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  /* We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (FLOOR_ROUND (start + length - size - frame_phase,
		      (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);
  else
    this_frame_offset
      = (CEIL_ROUND (start - frame_phase,
		     (unsigned HOST_WIDE_INT) alignment)
	 + frame_phase);

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (frame_offset == start && this_frame_offset < frame_offset)
    frame_offset = this_frame_offset;
  else if (this_frame_offset < start)
    return false;
  else if (start + length == frame_offset
	   && this_frame_offset + size > start + length)
    frame_offset = this_frame_offset + size;
  else if (this_frame_offset + size > start + length)
    return false;

  *poffset = this_frame_offset;
  return true;
}
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end)
{
  struct frame_space *space = ggc_alloc_frame_space ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */
rtx
assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size,
		      int align, int kind)
{
  rtx x, addr;
  int bigend_correction = 0;
  HOST_WIDE_INT slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;
  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || size == 0
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }
  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;
  if (mode != BLKmode || size != 0)
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      *psp = space->next;
	      if (slot_offset > space->start)
		add_frame_space (space->start, slot_offset);
	      if (slot_offset + size < space->start + space->length)
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }
  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > frame_offset)
	    add_frame_space (frame_offset, slot_offset);
	  if (slot_offset + size < old_frame_offset)
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment,
			   &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (slot_offset > old_frame_offset)
	    add_frame_space (old_frame_offset, slot_offset);
	  if (slot_offset + size < frame_offset)
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
    bigend_correction = size - GET_MODE_SIZE (mode);

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + STARTING_FRAME_OFFSET, Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with the last parameter set to
   ASLK_RECORD_PAD.  */

rtx
assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
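
/* Illustrative use of the wrapper (a sketch, not code from this file):

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   ALIGN == 0 requests the natural alignment of SImode.  Until
   instantiate_virtual_regs runs, the returned MEM is addressed via
   virtual_stack_vars_rtx rather than the hard frame pointer.  */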
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

struct GTY(()) temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};
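
/* Example of the size/full_size distinction (illustrative values): a
   12-byte slot carved out with 16 bytes of aligned space records
   size == 12 but full_size == 16 starting at base_offset, so
   combine_temp_slots can later merge it with a neighbor that abuts the
   padded 16-byte boundary rather than the 12-byte payload.  */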
/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table;
static size_t n_temp_slots_in_use;

/* Entry for the above hash table.  */
struct GTY(()) temp_slot_address_entry
{
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;

  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}
/* Return the hash value for an address -> temp slot mapping.  */
static hashval_t
temp_slot_address_hash (const void *p)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) p;
  return t->hash;
}
/* Compare two address -> temp slot mapping entries.  */
static int
temp_slot_address_eq (const void *p1, const void *p2)
{
  const struct temp_slot_address_entry *t1, *t2;
  t1 = (const struct temp_slot_address_entry *) p1;
  t2 = (const struct temp_slot_address_entry *) p2;
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  void **slot;
  struct temp_slot_address_entry *t = ggc_alloc_temp_slot_address_entry ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT);
  *slot = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
static int
remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED)
{
  const struct temp_slot_address_entry *t;
  t = (const struct temp_slot_address_entry *) *slot;
  if (! t->temp_slot->in_use)
    htab_clear_slot (temp_slot_address_table, slot);

  return 1;
}
/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    htab_traverse (temp_slot_address_table,
		   remove_unused_temp_slot_addresses_1,
		   NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = (struct temp_slot_address_entry *)
    htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && CONST_INT_P (XEXP (x, 1)))
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  if (INTVAL (XEXP (x, 1)) >= p->base_offset
	      && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)
	    return p;
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
			    tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  gcc_assert (size != -1);

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align && p->size >= size
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0 || best_p->size > p->size
		  || (best_p->size == p->size && best_p->align > p->align)))
	    {
	      if (p->align == align && p->size == size)
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }
  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = ggc_alloc_temp_slot ();
	      p->in_use = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }
  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = ggc_alloc_temp_slot ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? CEIL_ROUND (size,
						     (int) align
						     / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }
  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
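
/* Example of the BLKmode splitting done above (illustrative values, not
   from the original source): if the best available slot is 32 bytes with
   8-byte alignment and the request is for 12 bytes, rounded_size == 16,
   so the slot is split into a 16-byte slot handed to the caller and a
   new 16-byte temp_slot at base_offset + 16 that goes back on the
   availability list.  */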
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  enum machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
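
/* Illustrative use (a sketch, not code from this file): expanding a call
   that returns a struct in memory might do

     rtx mem = assign_temp (TREE_TYPE (exp), 1, 1);

   MEMORY_REQUIRED == 1 forces a stack slot even for small modes, whereas
   assign_temp (integer_type_node, 0, 0) would normally come back as a
   fresh pseudo register instead.  */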
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (p->base_offset + p->full_size == q->base_offset)
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (q->base_offset + q->full_size == p->base_offset)
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
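
/* Example of the merge rule above (illustrative values): two free BLKmode
   slots with (base_offset, full_size) of (0, 16) and (16, 16) are
   adjacent, so Q is folded into P, leaving a single free slot recorded
   as (0, 32).  */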
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and there is a register
     in common between them, try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
	move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}
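
/* Typical nesting discipline (a sketch, not code from this file):

     push_temp_slots ();
     ... expand a statement, allocating temporaries ...
     free_temp_slots ();
     pop_temp_slots ();

   Temporaries created between push and pop die with the level unless
   preserve_temp_slots has moved them one level up.  */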
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = htab_create_ggc (32,
					       temp_slot_address_hash,
					       temp_slot_address_eq,
					       NULL);
  else
    htab_empty (temp_slot_address_table);
}
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
typedef struct GTY(()) initial_value_pair
{
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
/* ???  This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
typedef struct GTY(()) initial_value_struct
{
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}
/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc_initial_value_struct ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc_vec_initial_value_pair (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
				    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (enum machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
	  && REGNO (ivs->entries[i].hard_reg) == regno)
	return ivs->entries[i].pseudo;

  return NULL_RTX;
}
static unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}
/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */

bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
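
/* Illustrative use (a sketch; RA_REGNUM is a placeholder for a
   target-specific hard register number, not a real macro):

     rtx ra = get_hard_reg_initial_val (Pmode, RA_REGNUM);

   Repeated calls return the same pseudo, and emit_initial_value_sets
   later emits the single copy from the hard register into that pseudo
   at function entry.  */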
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
static int cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (crtl->outgoing_args_size						      \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
	: REG_PARM_STACK_SPACE (FNDECL)))				      \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)					      \
((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0)		      \
 + (STACK_POINTER_OFFSET))
#endif
#endif
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
{
  rtx new_rtx;
  HOST_WIDE_INT offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
	{
	  /* Replace virtual_incoming_args_rtx with internal arg
	     pointer if DRAP is used to realign stack.  */
	  new_rtx = crtl->args.internal_arg_pointer;
	  offset = 0;
	}
      else
	new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
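
/* Illustrative mapping (not from the original source): once the offsets
   are computed, an address such as

     (plus (reg virtual-stack-vars) (const_int 8))

   is rewritten by the code below into

     (plus (reg frame-pointer) (const_int var_offset + 8))

   with the addition folded into a single constant.  */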
/* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
   Instantiate any virtual registers present inside of *LOC.  The expression
   is simplified, as much as possible, but is not to be considered "valid"
   in any sense implied by the target.  If any change is made, set CHANGED
   in DATA.  */

static int
instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
{
  HOST_WIDE_INT offset;
  bool *changed = (bool *) data;
  rtx x, new_rtx;

  x = *loc;
  if (x == 0)
    return 0;

  switch (GET_CODE (x))
    {
    case REG:
      new_rtx = instantiate_new_reg (x, &offset);
      if (new_rtx)
	{
	  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
	  if (changed)
	    *changed = true;
	}
      return -1;

    case PLUS:
      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
      if (new_rtx)
	{
	  new_rtx = plus_constant (GET_MODE (x), new_rtx, offset);
	  *loc = simplify_gen_binary (PLUS, GET_MODE (x), new_rtx, XEXP (x, 1));
	  if (changed)
	    *changed = true;
	  return -1;
	}

      /* FIXME -- from old code */
      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
	 we can commute the PLUS and SUBREG because pointers into the
	 frame are well-behaved.  */
      break;

    default:
      break;
    }

  return 0;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx insn)
{
  HOST_WIDE_INT offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x, seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   GEN_INT (-offset));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}
      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx && offset != 0
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
				   new_rtx, GEN_INT (offset), SET_DEST (set),
				   1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}
      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && CONST_INT_P (recog_data.operand[2])
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  offset += INTVAL (recog_data.operand[2]);

	  /* If the sum is zero, then replace with a plain move.  */
	  if (offset == 0
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);
	    bool changed = false;

	    for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
	    if (!changed)
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;
	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset == 0)
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       GEN_INT (offset), NULL_RTX,
				       1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;
	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (offset != 0)
	    {
	      start_sequence ();
	      new_rtx = expand_simple_binop (GET_MODE (new_rtx), PLUS, new_rtx,
					     GEN_INT (offset), NULL_RTX,
					     1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;

	default:
	  continue;
	}
      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }
  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  /* For asm goto, instead of fixing up all the edges
	     just clear the template and clear input operands
	     (asm goto doesn't have any output operands).  */
	  if (JUMP_P (insn))
	    {
	      rtx asm_op = extract_asm_operands (PATTERN (insn));
	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
	    }
	  else
	    delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
	{
	  if (DECL_RTL_SET_P (t))
	    instantiate_decl_rtl (DECL_RTL (t));
	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
	      && DECL_INCOMING_RTL (t))
	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
	  if ((TREE_CODE (t) == VAR_DECL
	       || TREE_CODE (t) == RESULT_DECL)
	      && DECL_HAS_VALUE_EXPR_P (t))
	    {
	      tree v = DECL_VALUE_EXPR (t);
	      walk_tree (&v, instantiate_expr, NULL, NULL);
	    }
	}
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
	instantiate_decl_rtl (DECL_RTL (t));
      if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
	{
	  tree v = DECL_VALUE_EXPR (t);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
	instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree v = DECL_VALUE_EXPR (decl);
	  walk_tree (&v, instantiate_expr, NULL, NULL);
	}
    }

  /* Now process all variables defined in the function or its subblocks.  */
  instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = STARTING_FRAME_OFFSET;
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
	if (JUMP_TABLE_DATA_P (insn)
	    || GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT)
	  continue;
	else if (DEBUG_INSN_P (insn))
	  for_each_rtx (&INSN_VAR_LOCATION (insn),
			instantiate_virtual_regs_in_rtx, NULL);
	else
	  instantiate_virtual_regs_in_insn (insn);

	if (INSN_DELETED_P (insn))
	  continue;

	for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (CALL_P (insn))
	  for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
			instantiate_virtual_regs_in_rtx, NULL);
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
struct rtl_opt_pass pass_instantiate_virtual_regs =
{
 {
  RTL_PASS,
  "vregs",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  instantiate_virtual_regs,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	{
	  tree fndecl = get_callee_fndecl (fntype);
	  fntype = (fndecl
		    ? TREE_TYPE (fndecl)
		    : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype))));
	}
	break;
      case FUNCTION_DECL:
	fntype = TREE_TYPE (fntype);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = NULL_TREE;
	break;
      default:
	/* We don't expect other tree types here.  */
	gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  switch (TREE_CODE (TREE_TYPE (decl)))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* When not optimizing, disregard register keyword for variables with
	 types containing methods, otherwise the methods won't be callable
	 from the debugger.  */
      if (TYPE_METHODS (TREE_TYPE (decl)))
	return false;
      break;
    default:
      break;
    }

  return true;
}
/* Return true if TYPE should be passed by invisible reference.  */

bool
pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                   tree type, bool named_arg)
{
  if (type)
    {
      /* If this type contains non-trivial constructors, then it is
         forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
        return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
        return true;

      /* If a record type should be passed the same as its first (and only)
         member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
        {
          type = TREE_TYPE (first_field (type));
          mode = TYPE_MODE (type);
        }
    }

  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
                                          type, named_arg);
}
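/* Worked example (hypothetical types, not from the original source):
   for a C99 variable-length array parameter `int a[n]', TYPE_SIZE is
   not an INTEGER_CST, so the variable-size test above forces
   pass-by-reference before the target hook is ever consulted.
   Likewise, a C++ class type whose front end marked it
   TREE_ADDRESSABLE (e.g. one with a non-trivial copy constructor)
   returns true from the first test.  */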
/* Return true if TYPE, which is passed by reference, should be callee
   copied instead of caller copied.  */

bool
reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                         tree type, bool named_arg)
{
  if (type && TREE_ADDRESSABLE (type))
    return false;
  return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
                                      named_arg);
}
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx first_conversion_insn;
  rtx last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  enum machine_mode nominal_mode;
  enum machine_mode passed_mode;
  enum machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
                        current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef REG_PARM_STACK_SPACE
  all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split each entry into two
   entries of the component type.  The vector ARGS is updated in place.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        {
          tree decl;
          tree subtype = TREE_TYPE (type);
          bool addressable = TREE_ADDRESSABLE (p);

          /* Rewrite the PARM_DECL's type with its component.  */
          p = copy_node (p);
          TREE_TYPE (p) = subtype;
          DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
          DECL_MODE (p) = VOIDmode;
          DECL_SIZE (p) = NULL;
          DECL_SIZE_UNIT (p) = NULL;
          /* If this arg must go in memory, put it in a pseudo here.
             We can't allow it to go in memory as per normal parms,
             because the usual place might not have the imag part
             adjacent to the real part.  */
          DECL_ARTIFICIAL (p) = addressable;
          DECL_IGNORED_P (p) = addressable;
          TREE_ADDRESSABLE (p) = 0;
          layout_decl (p, 0);
          (*args)[i] = p;

          /* Build a second synthetic decl.  */
          decl = build_decl (EXPR_LOCATION (p),
                             PARM_DECL, NULL_TREE, subtype);
          DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
          DECL_ARTIFICIAL (decl) = addressable;
          DECL_IGNORED_P (decl) = addressable;
          layout_decl (decl, 0);
          args->safe_insert (++i, decl);
        }
    }
}
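/* Illustrative sketch (hypothetical declaration, not from the original
   source): if targetm.calls.split_complex_arg accepts COMPLEX_TYPE
   arguments, then during expansion

       double cmag (_Complex double z);

   is handled as if it had been declared

       double cmag (double z_real, double z_imag);

   where the first PARM_DECL is the original decl rewritten to the
   component type and the second is the synthetic decl built above.  */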
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
                         PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                             struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  enum machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
         or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
                         passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = Pmode;
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
                                         TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
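/* Worked example (target-dependent, not from the original source): on
   a target whose promote_function_mode widens sub-word integers, a
   `short' parameter leaves this routine with passed_mode == HImode,
   nominal_mode == HImode and promoted_mode == SImode.  A parameter
   that pass_by_reference accepted leaves instead with passed_pointer
   set and both passed_mode and nominal_mode forced to Pmode.  */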
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (all->args_so_far,
                                        data->promoted_mode,
                                        data->passed_type,
                                        &varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
                                                    data->promoted_mode,
                                                    data->passed_type,
                                                    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = entry_parm != 0;
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
        {
          rtx tem;
          tem = targetm.calls.function_incoming_arg (all->args_so_far,
                                                     data->promoted_mode,
                                                     data->passed_type, true);
          in_regs = tem != NULL;
        }
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
                                        data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
                                                 data->promoted_mode,
                                                 data->passed_type,
                                                 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
         register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
        {
          /* Part of this argument is passed in registers and part
             is passed on the stack.  Ask the prologue code to extend
             the stack part so that we can recreate the full value.

             PRETEND_BYTES is the size of the registers we need to store.
             CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
             stack space that the prologue should allocate.

             Internally, gcc assumes that the argument pointer is aligned
             to STACK_BOUNDARY bits.  This is used both for alignment
             optimizations (see init_emit) and to locate arguments that are
             aligned to more than PARM_BOUNDARY bits.  We must preserve this
             invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
             a stack boundary.  */

          /* We assume at most one partial arg, and it must be the first
             argument on the stack.  */
          gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

          pretend_bytes = partial;
          all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

          /* We want to align relative to the actual stack pointer, so
             don't include this in the stack size until later.  */
          all->extra_pretend_bytes = all->pretend_args_size;
        }
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
                       entry_parm ? data->partial : 0, current_function_decl,
                       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
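/* Worked example (assumed target values, not from the original
   source): with STACK_BOUNDARY == 64, so STACK_BYTES == 8, an
   argument whose first 12 bytes arrive in registers gives

       pretend_bytes          = 12
       all->pretend_args_size = CEIL_ROUND (12, 8)
                              = (12 + 8 - 1) & ~(8 - 1) = 16

   i.e. the prologue allocates 16 bytes of pretend space so the
   argument-pointer alignment invariant described above survives.  */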
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
                           struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
           && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
         while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
          && data->promoted_mode != DECL_MODE (parm))
        {
          set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
          if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
            {
              int offset = subreg_lowpart_offset (DECL_MODE (parm),
                                                  data->promoted_mode);
              if (offset)
                set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
            }
        }
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  if (data->locate.where_pad != downward || data->entry_parm)
    align = boundary;
  else if (CONST_INT_P (offset_rtx))
    {
      align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
      align = align & -align;
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
         locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        emit_group_store (validize_mem (stack_parm), entry_parm,
                          data->passed_type,
                          int_size_in_bytes (data->passed_type));
      else
        {
          gcc_assert (data->partial % UNITS_PER_WORD == 0);
          move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
                               data->partial / UNITS_PER_WORD);
        }

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
           && data->nominal_mode != BLKmode
           && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
        if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
            && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
            && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
                == data->passed_mode)
            && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
          {
            entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
            break;
          }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
                        GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
           && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
          || (data->nominal_type
              && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
              && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
           && data->nominal_mode != BLKmode
           && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
           && (flag_stack_protect == 2
               || data->passed_pointer
               || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;
#endif

  return false;
}
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
                         tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
      stack_parm = assign_stack_local (BLKmode, size_stored,
                                       DECL_ALIGN (parm));
      if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
        PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
         So we have to be careful to ensure that we allocate an
         integral number of words.  We do this above when we call
         assign_stack_local if space was not allocated in the argument
         list.  If it was, this will not work if PARM_BOUNDARY is not
         a multiple of BITS_PER_WORD.  It isn't clear how to fix this
         if it becomes a problem.  Exception is when BLKmode arrives
         with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
        ;
      else if (GET_CODE (entry_parm) == PARALLEL)
        ;
      else
        gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (stack_parm);

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        {
          push_to_sequence2 (all->first_conversion_insn,
                             all->last_conversion_insn);
          emit_group_store (mem, entry_parm, data->passed_type, size);
          all->first_conversion_insn = get_insns ();
          all->last_conversion_insn = get_last_insn ();
          end_sequence ();
        }

      else if (size == 0)
        ;

      /* If SIZE is that of a mode no bigger than a word, just use
         that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
        {
          enum machine_mode mode
            = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);

          if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
              && (size == UNITS_PER_WORD
                  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                      != (BYTES_BIG_ENDIAN ? upward : downward)))
#endif
              )
            {
              rtx reg;

              /* We are really truncating a word_mode value containing
                 SIZE bytes into a value of mode MODE.  If such an
                 operation requires no actual instructions, we can refer
                 to the value directly in mode MODE, otherwise we must
                 start with the register in word_mode and explicitly
                 convert it.  */
              if (TRULY_NOOP_TRUNCATION (size * BITS_PER_UNIT, BITS_PER_WORD))
                reg = gen_rtx_REG (mode, REGNO (entry_parm));
              else
                {
                  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
                  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
                }
              emit_move_insn (change_address (mem, mode, 0), reg);
            }

          /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
             machine must be aligned to the left before storing
             to memory.  Note that the previous test doesn't
             handle all cases (e.g. SIZE == 3).  */
          else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                       == downward)
#else
                   && BYTES_BIG_ENDIAN
#endif
                   )
            {
              rtx tem, x;
              int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
              rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

              x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
              tem = change_address (mem, word_mode, 0);
              emit_move_insn (tem, x);
            }
          else
            move_block_from_reg (REGNO (entry_parm), mem,
                                 size_stored / UNITS_PER_WORD);
        }
      else
        move_block_from_reg (REGNO (entry_parm), mem,
                             size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
                       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  data->stack_parm = stack_parm;
  SET_DECL_RTL (parm, stack_parm);
}
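/* Worked example (assumed target values, not from the original
   source): storing a 3-byte BLKmode argument from a register on a
   big-endian machine with UNITS_PER_WORD == 4 takes the shift path
   above with

       by = (UNITS_PER_WORD - size) * BITS_PER_UNIT = (4 - 3) * 8 = 8

   so the value is shifted left 8 bits, left-aligning the 3 meaningful
   bytes in the word before the word_mode store into the slot.  */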
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
                       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  enum machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
                             TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);

  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set DECL_RTL appropriately.  */
  if (data->passed_pointer)
    {
      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (x, parm, 1);
      SET_DECL_RTL (parm, x);
    }
  else
    SET_DECL_RTL (parm, parmreg);

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (data->entry_parm);

  need_conversion = (data->nominal_mode != data->passed_mode
                     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
         mode, by the caller.  We now have to convert it to
         NOMINAL_MODE, if different.  However, PARMREG may be in
         a different mode than NOMINAL_MODE if it is being stored
         promoted.

         If ENTRY_PARM is a hard register, it might be in a register
         not valid for operating in its mode (e.g., an odd-numbered
         register for a DFmode).  In that case, moves are the only
         thing valid, so we can't do a convert from there.  This
         occurs when the calling sequence allow such misaligned
         usages.

         In addition, the conversion may involve a call, which could
         clobber parameters which haven't been copied to pseudo
         registers yet.

         First, we try to emit an insn which performs the necessary
         conversion.  We verify that this insn does not clobber any
         hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
                            unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
          && insn_operand_matches (icode, 0, op0)
          && insn_operand_matches (icode, 1, op1))
        {
          enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
          rtx insn, insns, t = op1;
          HARD_REG_SET hardregs;

          start_sequence ();
          /* If op1 is a hard register that is likely spilled, first
             force it into a pseudo, otherwise combiner might extend
             its lifetime too much.  */
          if (GET_CODE (t) == SUBREG)
            t = SUBREG_REG (t);
          if (REG_P (t)
              && HARD_REGISTER_P (t)
              && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
              && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
            {
              t = gen_reg_rtx (GET_MODE (op1));
              emit_move_insn (t, op1);
            }
          else
            t = op1;
          insn = gen_extend_insn (op0, t, promoted_nominal_mode,
                                  data->passed_mode, unsignedp);
          emit_insn (insn);
          insns = get_insns ();

          moved = true;
          CLEAR_HARD_REG_SET (hardregs);
          for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
            {
              if (INSN_P (insn))
                note_stores (PATTERN (insn), record_hard_reg_sets,
                             &hardregs);
              if (!hard_reg_set_empty_p (hardregs))
                moved = false;
            }

          end_sequence ();

          if (moved)
            {
              emit_insn (insns);
              if (equiv_stack_parm != NULL_RTX)
                equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
                                                  equiv_stack_parm);
            }
        }
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
         generated appeared unsafe.  We must first copy the parm to a
         pseudo reg, and save the conversion until after all
         parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (GET_CODE (tempreg) == SUBREG
          && GET_MODE (tempreg) == data->nominal_mode
          && REG_P (SUBREG_REG (tempreg))
          && data->nominal_mode == data->passed_mode
          && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
          && GET_MODE_SIZE (GET_MODE (tempreg))
             < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
        {
          /* The argument is already sign/zero extended, so note it
             into the subreg.  */
          SUBREG_PROMOTED_VAR_P (tempreg) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
        }

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, put it in one.  */
  if (data->passed_pointer
      && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
      /* If by-reference argument was promoted, demote it.  */
      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
          || use_register_for_decl (parm)))
    {
      /* We can't use nominal_mode, because it will have been set to
         Pmode above.  We must use the actual mode of the parm.  */
      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
      mark_user_reg (parmreg);

      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
        {
          rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
          int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

          push_to_sequence2 (all->first_conversion_insn,
                             all->last_conversion_insn);
          emit_move_insn (tempreg, DECL_RTL (parm));
          tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
          emit_move_insn (parmreg, tempreg);
          all->first_conversion_insn = get_insns ();
          all->last_conversion_insn = get_last_insn ();
          end_sequence ();

          did_conversion = true;
        }
      else
        emit_move_insn (parmreg, DECL_RTL (parm));

      SET_DECL_RTL (parm, parmreg);

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
         now the parm.  */
      data->stack_parm = NULL;
    }

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
                          XEXP (data->stack_parm, 0)))
    {
      rtx linsn = get_last_insn ();
      rtx sinsn, set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
        {
          enum machine_mode submode
            = GET_MODE_INNER (GET_MODE (parmreg));
          int regnor = REGNO (XEXP (parmreg, 0));
          int regnoi = REGNO (XEXP (parmreg, 1));
          rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
          rtx stacki = adjust_address_nv (data->stack_parm, submode,
                                          GET_MODE_SIZE (submode));

          /* Scan backwards for the set of the real and
             imaginary parts.  */
          for (sinsn = linsn; sinsn != 0;
               sinsn = prev_nonnote_insn (sinsn))
            {
              set = single_set (sinsn);
              if (set == 0)
                continue;

              if (SET_DEST (set) == regno_reg_rtx [regnoi])
                set_unique_reg_note (sinsn, REG_EQUIV, stacki);
              else if (SET_DEST (set) == regno_reg_rtx [regnor])
                set_unique_reg_note (sinsn, REG_EQUIV, stackr);
            }
        }
      else
        set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
                      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
                         struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (data->entry_parm));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
                                          TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
        {
          int offset = subreg_lowpart_offset (data->nominal_mode,
                                              GET_MODE (data->stack_parm));
          /* ??? This may need a big-endian conversion on sparc64.  */
          data->stack_parm
            = adjust_address (data->stack_parm, data->nominal_mode, 0);
          if (offset && MEM_OFFSET_KNOWN_P (data->stack_parm))
            set_mem_offset (data->stack_parm,
                            MEM_OFFSET (data->stack_parm) + offset);
        }
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
        {
          int align = STACK_SLOT_ALIGNMENT (data->passed_type,
                                            GET_MODE (data->entry_parm),
                                            TYPE_ALIGN (data->passed_type));
          data->stack_parm
            = assign_stack_local (GET_MODE (data->entry_parm),
                                  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
                                  align);
          set_mem_attributes (data->stack_parm, parm, 1);
        }

      dest = validize_mem (data->stack_parm);
      src = validize_mem (data->entry_parm);

      if (MEM_P (src))
        {
          /* Use a block move to handle potentially misaligned entry_parm.  */
          if (!to_conversion)
            push_to_sequence2 (all->first_conversion_insn,
                               all->last_conversion_insn);
          to_conversion = true;

          emit_block_move (dest, src,
                           GEN_INT (int_size_in_bytes (data->passed_type)),
                           BLOCK_OP_NORMAL);
        }
      else
        emit_move_insn (dest, src);
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  SET_DECL_RTL (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
                              vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
        {
          rtx tmp, real, imag;
          enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));

          real = DECL_RTL (fnargs[i]);
          imag = DECL_RTL (fnargs[i + 1]);
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }

          if (TREE_ADDRESSABLE (parm))
            {
              rtx rmem, imem;
              HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
              int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
                                                DECL_MODE (parm),
                                                TYPE_ALIGN (TREE_TYPE (parm)));

              /* split_complex_arg put the real and imag parts in
                 pseudos.  Move them to memory.  */
              tmp = assign_stack_local (DECL_MODE (parm), size, align);
              set_mem_attributes (tmp, parm, 1);
              rmem = adjust_address_nv (tmp, inner, 0);
              imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
              push_to_sequence2 (all->first_conversion_insn,
                                 all->last_conversion_insn);
              emit_move_insn (rmem, real);
              emit_move_insn (imem, imag);
              all->first_conversion_insn = get_insns ();
              all->last_conversion_insn = get_last_insn ();
              end_sequence ();
            }
          else
            tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          SET_DECL_RTL (parm, tmp);

          real = DECL_INCOMING_RTL (fnargs[i]);
          imag = DECL_INCOMING_RTL (fnargs[i + 1]);
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }
          tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          set_decl_incoming_rtl (parm, tmp, false);
          i++;
        }
    }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
        {
          SET_DECL_RTL (parm, const0_rtx);
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
          continue;
        }

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        {
          unsigned int align
            = targetm.calls.function_arg_boundary (data.promoted_mode,
                                                   data.passed_type);
          align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
                                     align);
          if (TYPE_ALIGN (data.nominal_type) > align)
            align = MINIMUM_ALIGNMENT (data.nominal_type,
                                       TYPE_MODE (data.nominal_type),
                                       TYPE_ALIGN (data.nominal_type));
          if (crtl->stack_alignment_estimated < align)
            {
              gcc_assert (!crtl->stack_realign_processed);
              crtl->stack_alignment_estimated = align;
            }
        }

      if (cfun->stdarg && !DECL_CHAIN (parm))
        assign_parms_setup_varargs (&all, &data, false);

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
        {
          assign_parm_find_stack_rtl (parm, &data);
          assign_parm_adjust_entry_rtl (&data);
        }

      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
        {
          rtx incoming_rtl
            = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
                           data.entry_parm);
          set_decl_incoming_rtl (parm, incoming_rtl, true);
        }
      else
        set_decl_incoming_rtl (parm, data.entry_parm, false);

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
                                          data.passed_type, data.named_arg);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
        assign_parm_setup_block (&all, parm, &data);
      else if (data.passed_pointer || use_register_for_decl (parm))
        assign_parm_setup_reg (&all, parm, &data);
      else
        assign_parm_setup_stack (&all, parm, &data);
    }

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
        {
          tree type = TREE_TYPE (DECL_RESULT (fndecl));
          enum machine_mode mode = TYPE_MODE (type);

          if (mode != BLKmode
              && mode != VOIDmode
              && !AGGREGATE_TYPE_P (type))
            {
              unsigned int align = GET_MODE_ALIGNMENT (mode);
              if (crtl->stack_alignment_estimated < align)
                {
                  gcc_assert (!crtl->stack_realign_processed);
                  crtl->stack_alignment_estimated = align;
                }
            }
        }
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
        {
          SET_DECL_VALUE_EXPR (result, all.function_result_decl);
          x = addr;
        }
      else
        {
          SET_DECL_VALUE_EXPR (result,
                               build1 (INDIRECT_REF, TREE_TYPE (result),
                                       all.function_result_decl));
          addr = convert_memory_address (Pmode, addr);
          x = gen_rtx_MEM (DECL_MODE (result), addr);
          set_mem_attributes (x, result, 1);
        }

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      SET_DECL_RTL (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

#ifdef REG_PARM_STACK_SPACE
  crtl->args.size = MAX (crtl->args.size,
                         REG_PARM_STACK_SPACE (fndecl));
#endif

  crtl->args.size = CEIL_ROUND (crtl->args.size,
                                PARM_BOUNDARY / BITS_PER_UNIT);

#ifdef ARGS_GROW_DOWNWARD
  crtl->args.arg_offset_rtx
    = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
       : expand_expr (size_diffop (all.stack_args_size.var,
                                   size_int (-all.stack_args_size.constant)),
                      NULL_RTX, VOIDmode, EXPAND_NORMAL));
#else
  crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
#endif

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
                                                         TREE_TYPE (fndecl),
                                                         crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl;

          real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
                                                        fndecl, true);
          REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
          /* The delay slot scheduler assumes that crtl->return_rtx
             holds the hard register containing the return value, not a
             temporary pseudo.  */
          crtl->return_rtx = real_decl_rtl;
        }
    }
}
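/* Worked example (assumed target values, not from the original
   source): with PARM_BOUNDARY == 32 the incoming argument block is
   rounded as crtl->args.size = CEIL_ROUND (size, 32 / 8), so a
   13-byte argument area becomes (13 + 3) & ~3 == 16 bytes before the
   back end lays out the frame.  */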
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
        *walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
               && !TYPE_SIZES_GIMPLIFIED (t))
        {
          gimplify_type_sizes (t, (gimple_seq *) data);
          *walk_subtrees = 1;
        }
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (void)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
        continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
                                          data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
         SAVE_EXPRs (amongst others) onto a pending sizes list.  This
         turned out to be less than manageable in the gimple world.
         Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
                                    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
        {
          gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
          gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
        }

      if (data.passed_pointer)
        {
          tree type = TREE_TYPE (data.passed_type);
          if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
                                       type, data.named_arg))
            {
              tree local, t;

              /* For constant-sized objects, this is trivial; for
                 variable-sized objects, we have to play games.  */
              if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
                  && !(flag_stack_check == GENERIC_STACK_CHECK
                       && compare_tree_int (DECL_SIZE_UNIT (parm),
                                            STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  local = create_tmp_var (type, get_name (parm));
                  DECL_IGNORED_P (local) = 0;
                  /* If PARM was addressable, move that flag over
                     to the local copy, as its address will be taken,
                     not the PARMs.  Keep the parms address taken
                     as we'll query that flag during gimplification.  */
                  if (TREE_ADDRESSABLE (parm))
                    TREE_ADDRESSABLE (local) = 1;
                  else if (TREE_CODE (type) == COMPLEX_TYPE
                           || TREE_CODE (type) == VECTOR_TYPE)
                    DECL_GIMPLE_REG_P (local) = 1;
                }
              else
                {
                  tree ptr_type, addr;

                  ptr_type = build_pointer_type (type);
                  addr = create_tmp_reg (ptr_type, get_name (parm));
                  DECL_IGNORED_P (addr) = 0;
                  local = build_fold_indirect_ref (addr);

                  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
                  t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
                                       size_int (DECL_ALIGN (parm)));
                  /* The call has been built for a variable-sized object.  */
                  CALL_ALLOCA_FOR_VAR_P (t) = 1;
                  t = fold_convert (ptr_type, t);
                  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
                  gimplify_and_add (t, &stmts);
                }

              gimplify_assign (local, parm, &stmts);

              SET_DECL_VALUE_EXPR (parm, local);
              DECL_HAS_VALUE_EXPR_P (parm) = 1;
            }
        }
    }

  fnargs.release ();

  return stmts;
}
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */

void
locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
                     int partial, tree fndecl ATTRIBUTE_UNUSED,
                     struct args_size *initial_offset_ptr,
                     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  enum direction where_pad;
  unsigned int boundary, round_boundary;
  int reg_parm_stack_space = 0;
  int part_size_in_regs;

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
        {
          if (initial_offset_ptr->var)
            {
              initial_offset_ptr->var
                = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
                              ssize_int (reg_parm_stack_space));
              initial_offset_ptr->constant = 0;
            }
          else if (initial_offset_ptr->constant < reg_parm_stack_space)
            initial_offset_ptr->constant = reg_parm_stack_space;
        }
    }
#endif /* REG_PARM_STACK_SPACE */

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
                                                              type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
         realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = boundary;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized
                          && crtl->stack_realign_needed);
            }
        }
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

#ifdef ARGS_GROW_DOWNWARD
  locate->slot_offset.constant = -initial_offset_ptr->constant;
  if (initial_offset_ptr->var)
    locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
                                          initial_offset_ptr->var);

  {
    tree s2 = sizetree;
    if (where_pad != none
        && (!host_integerp (sizetree, 1)
            || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
      s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
    SUB_PARM_SIZE (locate->slot_offset, s2);
  }

  locate->slot_offset.constant += part_size_in_regs;

  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
     )
    pad_to_arg_alignment (&locate->slot_offset, boundary,
                          &locate->alignment_pad);

  locate->size.constant = (-initial_offset_ptr->constant
                           - locate->slot_offset.constant);
  if (initial_offset_ptr->var)
    locate->size.var = size_binop (MINUS_EXPR,
                                   size_binop (MINUS_EXPR,
                                               ssize_int (0),
                                               initial_offset_ptr->var),
                                   locate->slot_offset.var);

  /* Pad_below needs the pre-rounded size to know how much to pad
     below.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (initial_offset_ptr, boundary,
                          &locate->alignment_pad);
  locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  locate->offset = locate->slot_offset;
  if (where_pad == downward)
    pad_below (&locate->offset, passed_mode, sizetree);

  if (where_pad != none
      && (!host_integerp (sizetree, 1)
          || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % round_boundary))
    sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;
#endif /* ARGS_GROW_DOWNWARD */

#ifdef FUNCTION_ARG_OFFSET
  locate->offset.constant += FUNCTION_ARG_OFFSET (passed_mode, type);
#endif
}
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
                      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
        {
          tree sp_offset_tree = ssize_int (sp_offset);
          tree offset = size_binop (PLUS_EXPR,
                                    ARGS_SIZE_TREE (*offset_ptr),
                                    sp_offset_tree);
#ifdef ARGS_GROW_DOWNWARD
          tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
#else
          tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
#endif

          offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
          /* ARGS_SIZE_TREE includes constant term.  */
          offset_ptr->constant = 0;
          if (boundary > PARM_BOUNDARY)
            alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
                                             save_var);
        }
      else
        {
          offset_ptr->constant = -sp_offset +
#ifdef ARGS_GROW_DOWNWARD
            FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#else
            CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
#endif
          if (boundary > PARM_BOUNDARY)
            alignment_pad->constant = offset_ptr->constant - save_constant;
        }
    }
}
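/* Worked example (assumed values, not from the original source): with
   STACK_POINTER_OFFSET == 4, a constant offset of 20 and a 128-bit
   boundary (boundary_in_bytes == 16), the constant branch computes

       offset_ptr->constant = -4 + CEIL_ROUND (20 + 4, 16)
                            = -4 + 32 = 28

   so it is %sp + STACK_POINTER_OFFSET + offset, not the raw offset,
   that ends up 16-byte aligned.  */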
static void
pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode,
           tree sizetree)
{
  if (passed_mode != BLKmode)
    {
      if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
        offset_ptr->constant
          += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
               / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
              - GET_MODE_SIZE (passed_mode));
    }
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
          || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
        {
          /* Round the size up to multiple of PARM_BOUNDARY bits.  */
          tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
          /* Add it in.  */
          ADD_PARM_SIZE (*offset_ptr, s2);
          SUB_PARM_SIZE (*offset_ptr, sizetree);
        }
    }
}
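/* Worked example (assumed PARM_BOUNDARY == 32, not from the original
   source): a 3-byte BLKmode argument is padded below by rounding its
   size up to s2 = 4 and adjusting the offset by s2 - size = 1 byte
   (ADD_PARM_SIZE of 4 followed by SUB_PARM_SIZE of 3), which pushes
   the value to the top of its padded slot.  */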
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
           || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
          && REGNO_REG_SET_P (setjmp_crosses, regno));
}
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after calling flow analysis and before register
   allocation, since register allocation will clobber the pseudo-regs to
   hard regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (TREE_CODE (decl) == VAR_DECL
          && DECL_RTL_SET_P (decl)
          && REG_P (DECL_RTL (decl))
          && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
        warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
                 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
        && REG_P (DECL_RTL (decl))
        && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
               "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
               decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
          || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
              != prev_super))
        BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
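
/* For instance, a fragment chain F1 -> F2 -> F3 reached through
   BLOCK_FRAGMENT_CHAIN comes out of the loop above as F3 -> F2 -> F1,
   with every fragment's BLOCK_SUPERCONTEXT retargeted to the super
   fragment's origin; the caller stores the returned head back into
   the origin's BLOCK_FRAGMENT_CHAIN.  */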
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
          && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
        {
          BLOCK_FRAGMENT_CHAIN (block)
            = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
          if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
            BLOCK_SAME_RANGE (block) = 0;
        }
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);
  vec<tree> block_stack;

  if (block == NULL_TREE)
    return;

  block_stack.create (10);

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));

  block_stack.release ();
}
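
/* The effect, for a function whose insn stream carries
   NOTE_INSN_BLOCK_BEG/NOTE_INSN_BLOCK_END pairs nested as, say,
   { A { B } { C } }, is to rebuild DECL_INITIAL so that A's
   BLOCK_SUBBLOCKS lists B and C in the order their notes appear,
   regardless of how earlier passes reordered or duplicated them.  */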
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx insns, tree current_block, vec<tree> *p_block_stack)
{
  rtx insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
        {
          if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
            {
              tree block = NOTE_BLOCK (insn);
              tree origin;

              gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
              origin = block;

              if (prev_end)
                BLOCK_SAME_RANGE (prev_end) = 0;
              prev_end = NULL_TREE;

              /* If we have seen this block before, that means it now
                 spans multiple address regions.  Create a new fragment.  */
              if (TREE_ASM_WRITTEN (block))
                {
                  tree new_block = copy_node (block);

                  BLOCK_SAME_RANGE (new_block) = 0;
                  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
                  BLOCK_FRAGMENT_CHAIN (new_block)
                    = BLOCK_FRAGMENT_CHAIN (origin);
                  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

                  NOTE_BLOCK (insn) = new_block;
                  block = new_block;
                }

              if (prev_beg == current_block && prev_beg)
                BLOCK_SAME_RANGE (block) = 1;

              prev_beg = origin;

              BLOCK_SUBBLOCKS (block) = 0;
              TREE_ASM_WRITTEN (block) = 1;
              /* When there's only one block for the entire function,
                 current_block == block and we mustn't do this, it
                 will cause infinite recursion.  */
              if (block != current_block)
                {
                  tree super;
                  if (block != origin)
                    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
                                || BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
                                                                (origin))
                                   == current_block);
                  if (p_block_stack->is_empty ())
                    super = current_block;
                  else
                    {
                      super = p_block_stack->last ();
                      gcc_assert (super == current_block
                                  || BLOCK_FRAGMENT_ORIGIN (super)
                                     == current_block);
                    }
                  BLOCK_SUPERCONTEXT (block) = super;
                  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
                  BLOCK_SUBBLOCKS (current_block) = block;
                  current_block = origin;
                }
              p_block_stack->safe_push (block);
            }
          else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
            {
              NOTE_BLOCK (insn) = p_block_stack->pop ();
              current_block = BLOCK_SUPERCONTEXT (current_block);
              if (BLOCK_FRAGMENT_ORIGIN (current_block))
                current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
              prev_beg = NULL_TREE;
              prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
                         ? NOTE_BLOCK (insn) : NULL_TREE;
            }
        }
      else
        {
          prev_beg = NULL_TREE;
          if (prev_end)
            BLOCK_SAME_RANGE (prev_end) = 0;
          prev_end = NULL_TREE;
        }
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
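
/* E.g. block_chainon (A -> B, C -> D) rewires B's BLOCK_CHAIN to C
   and returns A, yielding the single chain A -> B -> C -> D; the
   ENABLE_TREE_CHECKING loop above guards against accidentally
   creating a cycle when op2 already reaches the tail of op1.  */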
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
        vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
                              vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
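
/* With VECTOR non-null, a tree shaped { A { B { D } } { C } } is laid
   out depth-first as A, B, D, C, and the return value 4 tells the
   caller how many slots were filled.  */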
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For SDB and XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);

  return;
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
                   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
                   : optimization_default_node);

      if (!opts)
        opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
        {
          optimization_current_node = opts;
          cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
        }

      targetm.set_current_function (fndecl);
      this_fn_optabs = this_target_optabs;

      if (opts != optimization_default_node)
        {
          init_tree_optimization_optabs (opts);
          if (TREE_OPTIMIZATION_OPTABS (opts))
            this_fn_optabs = (struct target_optabs *)
              TREE_OPTIMIZATION_OPTABS (opts);
        }
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun)
{
  if (cfun != new_cfun)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
    }
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static vec<function_p> cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   current_function_decl accordingly.  */

void
push_cfun (struct function *new_cfun)
{
  gcc_assert ((!cfun && !current_function_decl)
              || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
  set_cfun (new_cfun);
}

/* Pop cfun from the stack.  Also set current_function_decl accordingly.  */

void
pop_cfun (void)
{
  struct function *new_cfun = cfun_stack.pop ();
  /* When in_dummy_function, we do have a cfun but current_function_decl is
     NULL.  We also allow pushing NULL cfun and subsequently changing
     current_function_decl to something else and have both restored by
     pop_cfun.  */
  gcc_checking_assert (in_dummy_function
                       || !cfun
                       || current_function_decl == cfun->decl);
  set_cfun (new_cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
}
/* Return the current value of funcdef_no and increase it.  */
int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Return the current value of funcdef_no.  */
int
get_last_funcdef_no (void)
{
  return funcdef_no;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_alloc_cleared_function ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();
    }

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      tree result = DECL_RESULT (fndecl);
      if (!abstract_p && aggregate_value_p (result, fndecl))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          cfun->returns_pcc_struct = 1;
#endif
          cfun->returns_struct = 1;
        }

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
         but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
    }
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  /* When in_dummy_function we might be in the middle of a pop_cfun and
     current_function_decl and cfun may not match.  */
  gcc_assert (in_dummy_function
              || (!cfun && !current_function_decl)
              || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = fndecl;
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!crtl->emit.x_last_insn);
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info)
    {
      cfun->su = ggc_alloc_cleared_stack_usage ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  gcc_assert (!in_dummy_function);
  in_dummy_function = true;
  push_struct_function (NULL_TREE);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  if (subr && DECL_STRUCT_FUNCTION (subr))
    set_cfun (DECL_STRUCT_FUNCTION (subr));
  else
    allocate_struct_function (subr, false);
  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}

void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set		0
# define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
        {
          emit_insn (insn);
          return;
        }
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
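
/* Conceptually the sequence emitted above is just

     guard_slot = guard;

   where guard_slot is the canary slot named by
   crtl->stack_protect_guard and guard is the target's guard value
   (typically the __stack_chk_guard variable); the optional
   stack_protect_set pattern lets a target do the copy without leaving
   the guard value live in a scratch register.  */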
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

#ifndef HAVE_stack_protect_test
# define HAVE_stack_protect_test	0
# define gen_stack_protect_test(x, y, z)	(gcc_unreachable (), NULL_RTX)
#endif

void
stack_protect_epilogue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx label = gen_label_rtx ();
  rtx x, y, tmp;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to compare Y with X without leaking either into
     a register.  */
  switch (HAVE_stack_protect_test != 0)
    {
    case 1:
      tmp = gen_stack_protect_test (x, y, label);
      if (tmp)
        {
          emit_insn (tmp);
          break;
        }
      /* FALLTHRU */

    default:
      emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
      break;
    }

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing
     anything except adding the prediction by hand.  */
  tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
  free_temp_slots ();
  emit_label (label);
}
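
/* The emitted check is morally equivalent to

     if (guard_slot != guard)
       __stack_chk_fail ();

   with the failure call supplied by targetm.stack_protect_fail () and
   the equality branch falling through to the real return path.  */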
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr), subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (sv)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, sv);
            }
        }
      if (value_address)
        {
          rtx x = value_address;
          if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
            {
              x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
              set_mem_attributes (x, DECL_RESULT (subr), 1);
            }
          SET_DECL_RTL (DECL_RESULT (subr), x);
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
         into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (DECL_RESULT (subr));
      if (TYPE_MODE (return_type) != BLKmode
          && targetm.calls.return_in_msb (return_type))
        /* expand_function_end will insert the appropriate padding in
           this case.  Use the return value's natural (unpadded) mode
           within the function proper.  */
        SET_DECL_RTL (DECL_RESULT (subr),
                      gen_reg_rtx (TYPE_MODE (return_type)));
      else
        {
          /* In order to figure out what mode to use for the pseudo, we
             figure out what the mode of the eventual return register will
             actually be, and use that.  */
          rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

          /* Structures that are returned in registers are not
             aggregate_value_p, so we may see a PARALLEL or a REG.  */
          if (REG_P (hard_reg))
            SET_DECL_RTL (DECL_RESULT (subr),
                          gen_reg_rtx (GET_MODE (hard_reg)));
          else
            {
              gcc_assert (GET_CODE (hard_reg) == PARALLEL);
              SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
            }
        }

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
         result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain, insn;

      local = gen_reg_rtx (Pmode);
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      SET_DECL_RTL (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
          && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
        set_dst_reg_note (insn, REG_EQUIV, chain, local);
    }

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      gcc_assert (DECL_RTL_SET_P (var));

      t_save = build4 (ARRAY_REF,
                       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                       cfun->nonlocal_goto_save_area,
                       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
}
/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_cfun ();
  in_dummy_function = false;
}
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  rtx outgoing = crtl->return_rtx;

  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
        {
          rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

          if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
            (*doit) (x, arg);
        }
    }
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
        do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Possibly warn about unused parameters.  */
void
do_warn_unused_parameter (tree fn)
{
  tree decl;

  for (decl = DECL_ARGUMENTS (fn);
       decl; decl = DECL_CHAIN (decl))
    if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
        && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl)
        && !TREE_NO_WARNING (decl))
      warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
}
static GTY(()) rtx initial_trampoline;

/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  rtx clobber_after;

  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx insn, seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (CALL_P (insn))
          {
            rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
            start_sequence ();
            if (STACK_CHECK_MOVING_SP)
              anti_adjust_stack_and_probe (max_frame_size, true);
            else
              probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
            seq = get_insns ();
            end_sequence ();
            set_insn_locations (seq, prologue_location);
            emit_insn_before (seq, stack_check_probe_note);
            break;
          }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depends on this.  */
  set_curr_insn_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
         the function context for sjlj exceptions.  */
      if (flag_exceptions)
        sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
         moved into the epilogue by scheduling, because we don't
         always emit unwind information for the epilogue.  */
      if (cfun->can_throw_non_call_exceptions)
        emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl = crtl->return_rtx;

          /* This should be set in assign_parms.  */
          gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

          /* If this is a BLKmode structure being returned in registers,
             then use the mode computed in expand_return.  Note that if
             decl_rtl is memory, then its mode may have been changed,
             but that crtl->return_rtx has not.  */
          if (GET_MODE (real_decl_rtl) == BLKmode)
            PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

          /* If a non-BLKmode return value should be padded at the least
             significant end of the register, shift it left by the appropriate
             amount.  BLKmode results are handled using the group load/store
             machinery.  */
          if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
              && REG_P (real_decl_rtl)
              && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
            {
              emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
                                           REGNO (real_decl_rtl)),
                              decl_rtl);
              shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
            }
          /* If a named return value dumped decl_return to memory, then
             we may need to re-do the PROMOTE_MODE signed/unsigned
             extension.  */
          else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
            {
              int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
              promote_function_mode (TREE_TYPE (decl_result),
                                     GET_MODE (decl_rtl), &unsignedp,
                                     TREE_TYPE (current_function_decl), 1);

              convert_move (real_decl_rtl, decl_rtl, unsignedp);
            }
          else if (GET_CODE (real_decl_rtl) == PARALLEL)
            {
              /* If expand_function_start has created a PARALLEL for decl_rtl,
                 move the result to the real return registers.  Otherwise, do
                 a group load from decl_rtl for a named return.  */
              if (GET_CODE (decl_rtl) == PARALLEL)
                emit_group_move (real_decl_rtl, decl_rtl);
              else
                emit_group_load (real_decl_rtl, decl_rtl,
                                 TREE_TYPE (decl_result),
                                 int_size_in_bytes (TREE_TYPE (decl_result)));
            }
          /* In the case of complex integer modes smaller than a word, we'll
             need to generate some non-trivial bitfield insertions.  Do that
             on a pseudo and not the hard register.  */
          else if (GET_CODE (decl_rtl) == CONCAT
                   && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
                   && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
            {
              int old_generating_concat_p;
              rtx tmp;

              old_generating_concat_p = generating_concat_p;
              generating_concat_p = 0;
              tmp = gen_reg_rtx (GET_MODE (decl_rtl));
              generating_concat_p = old_generating_concat_p;

              emit_move_insn (tmp, decl_rtl);
              emit_move_insn (real_decl_rtl, tmp);
            }
          else
            emit_move_insn (real_decl_rtl, decl_rtl);
        }
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if (cfun->returns_struct
      || cfun->returns_pcc_struct)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
        type = TREE_TYPE (type);
      else
        value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
                                               current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      value_address = convert_memory_address (GET_MODE (outgoing),
                                              value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
         of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  */
  {
    rtx seq;

    start_sequence ();
    clobber_return_register ();
    seq = get_insns ();
    end_sequence ();

    emit_insn_after (seq, clobber_after);
  }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0, seq;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      rtx seq;

      /* Save the arg pointer at the beginning of the function.  The
         generated stack slot may not be a valid memory address, so we
         have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (ret),
                      crtl->args.internal_arg_pointer);
      seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx insns, rtx end, htab_t *hashp)
{
  rtx tmp;
  htab_t hash = *hashp;

  if (hash == NULL)
    *hashp = hash
      = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      void **slot = htab_find_slot (hash, tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
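
/* The hashes filled in here are keyed on the insns themselves
   (htab_hash_pointer), so later membership tests in this file, such
   as contains () below, are simple pointer lookups rather than
   pattern comparisons.  */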
/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
   basic block, splitting or peephole optimization.  If INSN is a prologue
   or epilogue insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  htab_t hash;
  void **slot;

  hash = epilogue_insn_hash;
  if (!hash || !htab_find (hash, insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !htab_find (hash, insn))
        return;
    }

  slot = htab_find_slot (hash, copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Set the location of the insn chain starting at INSN to LOC.  */
static void
set_insn_locations (rtx insn, int loc)
{
  while (insn != NULL_RTX)
    {
      if (INSN_P (insn))
        INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const_rtx insn, htab_t hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int i;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i)))
          return true;
      return false;
    }

  return htab_find (hash, insn) != NULL;
}

int
prologue_epilogue_contains (const_rtx insn)
{
  if (contains (insn, prologue_insn_hash))
    return 1;
  if (contains (insn, epilogue_insn_hash))
    return 1;
  return 0;
}
#ifdef HAVE_simple_return

/* Return true if INSN requires the stack frame to be set up.
   PROLOGUE_USED contains the hard registers used in the function
   prologue.  SET_UP_BY_PROLOGUE is the set of registers we expect the
   prologue to set up for the function.  */
static bool
requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
                        HARD_REG_SET set_up_by_prologue)
{
  df_ref *df_rec;
  HARD_REG_SET hardregs;
  unsigned regno;

  if (CALL_P (insn))
    return !SIBLING_CALL_P (insn);

  /* We need a frame to get the unique CFA expected by the unwinder.  */
  if (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
    return true;

  CLEAR_HARD_REG_SET (hardregs);
  for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
    {
      rtx dreg = DF_REF_REG (*df_rec);

      if (!REG_P (dreg))
        continue;

      add_to_hard_reg_set (&hardregs, GET_MODE (dreg),
                           REGNO (dreg));
    }
  if (hard_reg_set_intersect_p (hardregs, prologue_used))
    return true;
  AND_COMPL_HARD_REG_SET (hardregs, call_used_reg_set);
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (TEST_HARD_REG_BIT (hardregs, regno)
        && df_regs_ever_live_p (regno))
      return true;

  for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
    {
      rtx reg = DF_REF_REG (*df_rec);

      if (!REG_P (reg))
        continue;

      add_to_hard_reg_set (&hardregs, GET_MODE (reg),
                           REGNO (reg));
    }
  if (hard_reg_set_intersect_p (hardregs, set_up_by_prologue))
    return true;

  return false;
}
/* See whether BB has a single successor that uses [REGNO, END_REGNO),
   and if BB is its only predecessor.  Return that block if so,
   otherwise return null.  */

static basic_block
next_block_for_reg (basic_block bb, int regno, int end_regno)
{
  edge e, live_edge;
  edge_iterator ei;
  bitmap live;
  int i;

  live_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      live = df_get_live_in (e->dest);
      for (i = regno; i < end_regno; i++)
        if (REGNO_REG_SET_P (live, i))
          {
            if (live_edge && live_edge != e)
              return NULL;
            live_edge = e;
          }
    }

  /* We can sometimes encounter dead code.  Don't try to move it
     into the exit block.  */
  if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR)
    return NULL;

  /* Reject targets of abnormal edges.  This is needed for correctness
     on ports like Alpha and MIPS, whose pic_offset_table_rtx can die on
     exception edges even though it is generally treated as call-saved
     for the majority of the compilation.  Moving across abnormal edges
     isn't going to be interesting for shrink-wrap usage anyway.  */
  if (live_edge->flags & EDGE_ABNORMAL)
    return NULL;

  if (EDGE_COUNT (live_edge->dest->preds) > 1)
    return NULL;

  return live_edge->dest;
}
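
/* Example, with hypothetical registers: if BB ends in "r10 = r3" and
   only one successor's live-in set mentions r10, and that successor
   has BB as its sole predecessor, the successor is returned and the
   copy can be sunk into it; any ambiguity (two live successors, an
   abnormal edge, the exit block) makes this bail out with NULL.  */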
/* Try to move INSN from BB to a successor.  Return true on success.
   USES and DEFS are the set of registers that are used and defined
   after INSN in BB.  */

static bool
move_insn_for_shrink_wrap (basic_block bb, rtx insn,
                           const HARD_REG_SET uses,
                           const HARD_REG_SET defs)
{
  rtx set, src, dest;
  bitmap live_out, live_in, bb_uses, bb_defs;
  unsigned int i, dregno, end_dregno, sregno, end_sregno;
  basic_block next_block;

  /* Look for a simple register copy.  */
  set = single_set (insn);
  if (!set)
    return false;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (!REG_P (dest) || !REG_P (src))
    return false;

  /* Make sure that the source register isn't defined later in BB.  */
  sregno = REGNO (src);
  end_sregno = END_REGNO (src);
  if (overlaps_hard_reg_set_p (defs, GET_MODE (src), sregno))
    return false;

  /* Make sure that the destination register isn't referenced later in BB.  */
  dregno = REGNO (dest);
  end_dregno = END_REGNO (dest);
  if (overlaps_hard_reg_set_p (uses, GET_MODE (dest), dregno)
      || overlaps_hard_reg_set_p (defs, GET_MODE (dest), dregno))
    return false;

  /* See whether there is a successor block to which we could move INSN.  */
  next_block = next_block_for_reg (bb, dregno, end_dregno);
  if (!next_block)
    return false;

  /* At this point we are committed to moving INSN, but let's try to
     move it as far as we can.  */
  do
    {
      live_out = df_get_live_out (bb);
      live_in = df_get_live_in (next_block);
      bb = next_block;

      /* Check whether BB uses DEST or clobbers DEST.  We need to add
         INSN to BB if so.  Either way, DEST is no longer live on entry,
         except for any part that overlaps SRC (next loop).  */
      bb_uses = &DF_LR_BB_INFO (bb)->use;
      bb_defs = &DF_LR_BB_INFO (bb)->def;
      for (i = dregno; i < end_dregno; i++)
        {
          if (REGNO_REG_SET_P (bb_uses, i) || REGNO_REG_SET_P (bb_defs, i))
            next_block = NULL;
          CLEAR_REGNO_REG_SET (live_out, i);
          CLEAR_REGNO_REG_SET (live_in, i);
        }

      /* Check whether BB clobbers SRC.  We need to add INSN to BB if so.
         Either way, SRC is now live on entry.  */
      for (i = sregno; i < end_sregno; i++)
        {
          if (REGNO_REG_SET_P (bb_defs, i))
            next_block = NULL;
          SET_REGNO_REG_SET (live_out, i);
          SET_REGNO_REG_SET (live_in, i);
        }

      /* If we don't need to add the move to BB, look for a single
         successor block.  */
      if (next_block)
        next_block = next_block_for_reg (next_block, dregno, end_dregno);
    }
  while (next_block);

  /* BB now defines DEST.  It only uses the parts of DEST that overlap SRC
     (next loop).  */
  for (i = dregno; i < end_dregno; i++)
    {
      CLEAR_REGNO_REG_SET (bb_uses, i);
      SET_REGNO_REG_SET (bb_defs, i);
    }

  /* BB now uses SRC.  */
  for (i = sregno; i < end_sregno; i++)
    SET_REGNO_REG_SET (bb_uses, i);

  emit_insn_after (PATTERN (insn), bb_note (bb));
  delete_insn (insn);
  return true;
}
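
/* A sketch of the overall effect, again with hypothetical registers:
   an entry-block copy "r12 = r4" (an incoming argument being saved in
   a call-saved register) is sunk past blocks that neither use nor set
   r12 and do not set r4, landing in the one block that actually needs
   r12.  Blocks skipped over this way may then no longer require the
   prologue, which is what prepare_shrink_wrap is after.  */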
/* Look for register copies in the first block of the function, and move
   them down into successor blocks if the register is used only on one
   path.  This exposes more opportunities for shrink-wrapping.  These
   kinds of sets often occur when incoming argument registers are moved
   to call-saved registers because their values are live across one or
   more calls during the function.  */

static void
prepare_shrink_wrap (basic_block entry_block)
{
  rtx insn, curr, x;
  HARD_REG_SET uses, defs;
  df_ref *ref;

  CLEAR_HARD_REG_SET (uses);
  CLEAR_HARD_REG_SET (defs);
  FOR_BB_INSNS_REVERSE_SAFE (entry_block, insn, curr)
    if (NONDEBUG_INSN_P (insn)
        && !move_insn_for_shrink_wrap (entry_block, insn, uses, defs))
      {
        /* Add all defined registers to DEFS.  */
        for (ref = DF_INSN_DEFS (insn); *ref; ref++)
          {
            x = DF_REF_REG (*ref);
            if (REG_P (x) && HARD_REGISTER_P (x))
              SET_HARD_REG_BIT (defs, REGNO (x));
          }

        /* Add all used registers to USES.  */
        for (ref = DF_INSN_USES (insn); *ref; ref++)
          {
            x = DF_REF_REG (*ref);
            if (REG_P (x) && HARD_REGISTER_P (x))
              SET_HARD_REG_BIT (uses, REGNO (x));
          }
      }
}

#endif
/* Insert use of return register before the end of BB.  */

static void
emit_use_return_register_into_block (basic_block bb)
{
  rtx seq, insn;
  start_sequence ();
  use_return_register ();
  seq = get_insns ();
  end_sequence ();
  insn = BB_END (bb);
#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    insn = prev_cc0_setter (insn);
#endif
  emit_insn_before (seq, insn);
}
/* Create a return pattern, either simple_return or return, depending on
   simple_p.  */

static rtx
gen_return_pattern (bool simple_p)
{
#ifdef HAVE_simple_return
  return simple_p ? gen_simple_return () : gen_return ();
#else
  gcc_assert (!simple_p);
  return gen_return ();
#endif
}
/* Insert an appropriate return pattern at the end of block BB.  This
   also means updating block_for_insn appropriately.  SIMPLE_P is
   the same as in gen_return_pattern and passed to it.  */

static void
emit_return_into_block (bool simple_p, basic_block bb)
{
  rtx jump, pat;
  jump = emit_jump_insn_after (gen_return_pattern (simple_p), BB_END (bb));
  pat = PATTERN (jump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  gcc_assert (ANY_RETURN_P (pat));
  JUMP_LABEL (jump) = pat;
}
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
#ifdef HAVE_simple_return
/* Create a copy of BB instructions and insert at BEFORE.  Redirect
   preds of BB to COPY_BB if they don't appear in NEED_PROLOGUE.  */
static void
dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx before,
                        bitmap_head *need_prologue)
{
  edge_iterator ei;
  edge e;
  rtx insn = BB_END (bb);

  /* We know BB has a single successor, so there is no need to copy a
     simple jump at the end of BB.  */
  if (simplejump_p (insn))
    insn = PREV_INSN (insn);

  start_sequence ();
  duplicate_insn_chain (BB_HEAD (bb), insn);
  if (dump_file)
    {
      unsigned count = 0;
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        if (active_insn_p (insn))
          ++count;
      fprintf (dump_file, "Duplicating bb %d to bb %d, %u active insns.\n",
               bb->index, copy_bb->index, count);
    }
  insn = get_insns ();
  end_sequence ();
  emit_insn_before (insn, before);

  /* Redirect all the paths that need no prologue into copy_bb.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    if (!bitmap_bit_p (need_prologue, e->src->index))
      {
        int freq = EDGE_FREQUENCY (e);
        copy_bb->count += e->count;
        copy_bb->frequency += EDGE_FREQUENCY (e);
        e->dest->count -= e->count;
        if (e->dest->count < 0)
          e->dest->count = 0;
        e->dest->frequency -= freq;
        if (e->dest->frequency < 0)
          e->dest->frequency = 0;
        redirect_edge_and_branch_force (e, copy_bb);
      }
    else
      ei_next (&ei);
}
#endif
#if defined (HAVE_return) || defined (HAVE_simple_return)
/* Return true if there are any active insns between HEAD and TAIL.  */
static bool
active_insn_between (rtx head, rtx tail)
{
  while (tail)
    {
      if (active_insn_p (tail))
        return true;
      if (tail == head)
        return false;
      tail = PREV_INSN (tail);
    }
  return false;
}
/* LAST_BB is a block that exits, and empty of active instructions.
   Examine its predecessors for jumps that can be converted to
   (conditional) returns.  */
static vec<edge>
convert_jumps_to_returns (basic_block last_bb, bool simple_p,
                          vec<edge> unconverted ATTRIBUTE_UNUSED)
{
  int i;
  basic_block bb;
  rtx label;
  edge_iterator ei;
  edge e;
  vec<basic_block> src_bbs;

  src_bbs.create (EDGE_COUNT (last_bb->preds));
  FOR_EACH_EDGE (e, ei, last_bb->preds)
    if (e->src != ENTRY_BLOCK_PTR)
      src_bbs.quick_push (e->src);

  label = BB_HEAD (last_bb);

  FOR_EACH_VEC_ELT (src_bbs, i, bb)
    {
      rtx jump = BB_END (bb);

      if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
        continue;

      e = find_edge (bb, last_bb);

      /* If we have an unconditional jump, we can replace that
         with a simple return instruction.  */
      if (simplejump_p (jump))
        {
          /* The use of the return register might be present in the exit
             fallthru block.  Either:
             - removing the use is safe, and we should remove the use in
               the exit fallthru block, or
             - removing the use is not safe, and we should add it here.
             For now, we conservatively choose the latter.  Either of the
             2 helps in crossjumping.  */
          emit_use_return_register_into_block (bb);

          emit_return_into_block (simple_p, bb);
          delete_insn (jump);
        }

      /* If we have a conditional jump branching to the last
         block, we can try to replace that with a conditional
         return instruction.  */
      else if (condjump_p (jump))
        {
          rtx dest;

          if (simple_p)
            dest = simple_return_rtx;
          else
            dest = ret_rtx;
          if (!redirect_jump (jump, dest, 0))
            {
#ifdef HAVE_simple_return
              if (simple_p)
                {
                  if (dump_file)
                    fprintf (dump_file,
                             "Failed to redirect bb %d branch.\n", bb->index);
                  unconverted.safe_push (e);
                }
#endif
              continue;
            }

          /* See comment in simplejump_p case above.  */
          emit_use_return_register_into_block (bb);

          /* If this block has only one successor, it both jumps
             and falls through to the fallthru block, so we can't
             delete the edge.  */
          if (single_succ_p (bb))
            continue;
        }
      else
        {
#ifdef HAVE_simple_return
          if (simple_p)
            {
              if (dump_file)
                fprintf (dump_file,
                         "Failed to redirect bb %d branch.\n", bb->index);
              unconverted.safe_push (e);
            }
#endif
          continue;
        }

      /* Fix up the CFG for the successful change we just made.  */
      redirect_edge_succ (e, EXIT_BLOCK_PTR);
      e->flags &= ~EDGE_CROSSING;
    }
  src_bbs.release ();
  return unconverted;
}
/* Emit a return insn for the exit fallthru block.  */
static basic_block
emit_return_for_exit (edge exit_fallthru_edge, bool simple_p)
{
  basic_block last_bb = exit_fallthru_edge->src;

  if (JUMP_P (BB_END (last_bb)))
    {
      last_bb = split_edge (exit_fallthru_edge);
      exit_fallthru_edge = single_succ_edge (last_bb);
    }
  emit_barrier_after (BB_END (last_bb));
  emit_return_into_block (simple_p, last_bb);
  exit_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  return last_bb;
}
#endif
5849 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5850 this into place with notes indicating where the prologue ends and where
5851 the epilogue begins. Update the basic block information when possible.
5853 Notes on epilogue placement:
5854 There are several kinds of edges to the exit block:
5855 * a single fallthru edge from LAST_BB
5856 * possibly, edges from blocks containing sibcalls
5857 * possibly, fake edges from infinite loops
5859 The epilogue is always emitted on the fallthru edge from the last basic
5860 block in the function, LAST_BB, into the exit block.
5862 If LAST_BB is empty except for a label, it is the target of every
5863 other basic block in the function that ends in a return. If a
5864 target has a return or simple_return pattern (possibly with
5865 conditional variants), these basic blocks can be changed so that a
5866 return insn is emitted into them, and their target is adjusted to
5867 the real exit block.
5869 Notes on shrink wrapping: We implement a fairly conservative
5870 version of shrink-wrapping rather than the textbook one. We only
5871 generate a single prologue and a single epilogue. This is
5872 sufficient to catch a number of interesting cases involving early
5875 First, we identify the blocks that require the prologue to occur before
5876 them. These are the ones that modify a call-saved register, or reference
5877 any of the stack or frame pointer registers. To simplify things, we then
5878 mark everything reachable from these blocks as also requiring a prologue.
5879 This takes care of loops automatically, and avoids the need to examine
5880 whether MEMs reference the frame, since it is sufficient to check for
5881 occurrences of the stack or frame pointer.
5883 We then compute the set of blocks for which the need for a prologue
5884 is anticipatable (borrowing terminology from the shrink-wrapping
5885 description in Muchnick's book). These are the blocks which either
5886 require a prologue themselves, or those that have only successors
5887 where the prologue is anticipatable. The prologue needs to be
5888 inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
5889 is not. For the moment, we ensure that only one such edge exists.
5891 The epilogue is placed as described above, but we make a
5892 distinction between inserting return and simple_return patterns
5893 when modifying other blocks that end in a return. Blocks that end
5894 in a sibcall omit the sibcall_epilogue if the block is not in
5898 thread_prologue_and_epilogue_insns (void)
5901 #ifdef HAVE_simple_return
5902 vec
<edge
> unconverted_simple_returns
= vNULL
;
5903 bool nonempty_prologue
;
5904 bitmap_head bb_flags
;
5905 unsigned max_grow_size
;
5908 rtx seq ATTRIBUTE_UNUSED
, epilogue_end ATTRIBUTE_UNUSED
;
5909 rtx prologue_seq ATTRIBUTE_UNUSED
, split_prologue_seq ATTRIBUTE_UNUSED
;
5910 edge e
, entry_edge
, orig_entry_edge
, exit_fallthru_edge
;
5915 rtl_profile_for_bb (ENTRY_BLOCK_PTR
);
5919 epilogue_end
= NULL_RTX
;
5920 returnjump
= NULL_RTX
;
5922 /* Can't deal with multiple successors of the entry block at the
5923 moment. Function should always have at least one entry
5925 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR
));
5926 entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR
);
5927 orig_entry_edge
= entry_edge
;
5929 split_prologue_seq
= NULL_RTX
;
5930 if (flag_split_stack
5931 && (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun
->decl
))
5934 #ifndef HAVE_split_stack_prologue
5937 gcc_assert (HAVE_split_stack_prologue
);
5940 emit_insn (gen_split_stack_prologue ());
5941 split_prologue_seq
= get_insns ();
5944 record_insns (split_prologue_seq
, NULL
, &prologue_insn_hash
);
5945 set_insn_locations (split_prologue_seq
, prologue_location
);
5949 prologue_seq
= NULL_RTX
;
5950 #ifdef HAVE_prologue
5954 seq
= gen_prologue ();
5957 /* Insert an explicit USE for the frame pointer
5958 if the profiling is on and the frame pointer is required. */
5959 if (crtl
->profile
&& frame_pointer_needed
)
5960 emit_use (hard_frame_pointer_rtx
);
5962 /* Retain a map of the prologue insns. */
5963 record_insns (seq
, NULL
, &prologue_insn_hash
);
5964 emit_note (NOTE_INSN_PROLOGUE_END
);
5966 /* Ensure that instructions are not moved into the prologue when
5967 profiling is on. The call to the profiling routine can be
5968 emitted within the live range of a call-clobbered register. */
5969 if (!targetm
.profile_before_prologue () && crtl
->profile
)
5970 emit_insn (gen_blockage ());
5972 prologue_seq
= get_insns ();
5974 set_insn_locations (prologue_seq
, prologue_location
);
5978 #ifdef HAVE_simple_return
5979 bitmap_initialize (&bb_flags
, &bitmap_default_obstack
);
5981 /* Try to perform a kind of shrink-wrapping, making sure the
5982 prologue/epilogue is emitted only around those parts of the
5983 function that require it. */
5985 nonempty_prologue
= false;
5986 for (seq
= prologue_seq
; seq
; seq
= NEXT_INSN (seq
))
5987 if (!NOTE_P (seq
) || NOTE_KIND (seq
) != NOTE_INSN_PROLOGUE_END
)
5989 nonempty_prologue
= true;
5993 if (flag_shrink_wrap
&& HAVE_simple_return
5994 && (targetm
.profile_before_prologue () || !crtl
->profile
)
5995 && nonempty_prologue
&& !crtl
->calls_eh_return
)
5997 HARD_REG_SET prologue_clobbered
, prologue_used
, live_on_edge
;
5998 struct hard_reg_set_container set_up_by_prologue
;
6000 vec
<basic_block
> vec
;
6002 bitmap_head bb_antic_flags
;
6003 bitmap_head bb_on_list
;
6004 bitmap_head bb_tail
;
6007 fprintf (dump_file
, "Attempting shrink-wrapping optimization.\n");
6009 /* Compute the registers set and used in the prologue. */
6010 CLEAR_HARD_REG_SET (prologue_clobbered
);
6011 CLEAR_HARD_REG_SET (prologue_used
);
6012 for (p_insn
= prologue_seq
; p_insn
; p_insn
= NEXT_INSN (p_insn
))
6014 HARD_REG_SET this_used
;
6015 if (!NONDEBUG_INSN_P (p_insn
))
6018 CLEAR_HARD_REG_SET (this_used
);
6019 note_uses (&PATTERN (p_insn
), record_hard_reg_uses
,
6021 AND_COMPL_HARD_REG_SET (this_used
, prologue_clobbered
);
6022 IOR_HARD_REG_SET (prologue_used
, this_used
);
6023 note_stores (PATTERN (p_insn
), record_hard_reg_sets
,
6024 &prologue_clobbered
);
6027 prepare_shrink_wrap (entry_edge
->dest
);
6029 bitmap_initialize (&bb_antic_flags
, &bitmap_default_obstack
);
6030 bitmap_initialize (&bb_on_list
, &bitmap_default_obstack
);
6031 bitmap_initialize (&bb_tail
, &bitmap_default_obstack
);
6033 /* Find the set of basic blocks that require a stack frame,
6034 and blocks that are too big to be duplicated. */
6036 vec
.create (n_basic_blocks
);
6038 CLEAR_HARD_REG_SET (set_up_by_prologue
.set
);
6039 add_to_hard_reg_set (&set_up_by_prologue
.set
, Pmode
,
6040 STACK_POINTER_REGNUM
);
6041 add_to_hard_reg_set (&set_up_by_prologue
.set
, Pmode
, ARG_POINTER_REGNUM
);
6042 if (frame_pointer_needed
)
6043 add_to_hard_reg_set (&set_up_by_prologue
.set
, Pmode
,
6044 HARD_FRAME_POINTER_REGNUM
);
6045 if (pic_offset_table_rtx
)
6046 add_to_hard_reg_set (&set_up_by_prologue
.set
, Pmode
,
6047 PIC_OFFSET_TABLE_REGNUM
);
6049 add_to_hard_reg_set (&set_up_by_prologue
.set
,
6050 GET_MODE (crtl
->drap_reg
),
6051 REGNO (crtl
->drap_reg
));
6052 if (targetm
.set_up_by_prologue
)
6053 targetm
.set_up_by_prologue (&set_up_by_prologue
);
6055 /* We don't use a different max size depending on
6056 optimize_bb_for_speed_p because increasing shrink-wrapping
6057 opportunities by duplicating tail blocks can actually result
6058 in an overall decrease in code size. */
6059 max_grow_size
= get_uncond_jump_length ();
6060 max_grow_size
*= PARAM_VALUE (PARAM_MAX_GROW_COPY_BB_INSNS
);
      FOR_EACH_BB (bb)
        {
          rtx insn;
          unsigned size = 0;

          FOR_BB_INSNS (bb, insn)
            if (NONDEBUG_INSN_P (insn))
              {
                if (requires_stack_frame_p (insn, prologue_used,
                                            set_up_by_prologue.set))
                  {
                    if (bb == entry_edge->dest)
                      goto fail_shrinkwrap;
                    bitmap_set_bit (&bb_flags, bb->index);
                    vec.quick_push (bb);
                    break;
                  }
                else if (size <= max_grow_size)
                  {
                    size += get_attr_min_length (insn);
                    if (size > max_grow_size)
                      bitmap_set_bit (&bb_on_list, bb->index);
                  }
              }
        }

      /* Blocks that really need a prologue, or are too big for tails.  */
      bitmap_ior_into (&bb_on_list, &bb_flags);

      /* For every basic block that needs a prologue, mark all blocks
         reachable from it, so as to ensure they are also seen as
         requiring a prologue.  */
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();

          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
            if (e->dest != EXIT_BLOCK_PTR
                && bitmap_set_bit (&bb_flags, e->dest->index))
              vec.quick_push (e->dest);
        }
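      /* This is a simple forward-reachability worklist: bitmap_set_bit
         returns true only the first time a bit is set, so each block is
         pushed at most once.  */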
      /* Find the set of basic blocks that need no prologue, have a
         single successor, can be duplicated, meet a max size
         requirement, and go to the exit via like blocks.  */
      vec.quick_push (EXIT_BLOCK_PTR);
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();

          FOR_EACH_EDGE (e, ei, tmp_bb->preds)
            if (single_succ_p (e->src)
                && !bitmap_bit_p (&bb_on_list, e->src->index)
                && can_duplicate_block_p (e->src))
              {
                edge pe;
                edge_iterator pei;

                /* If there is a predecessor of e->src which doesn't
                   need a prologue and the edge is complex,
                   we might not be able to redirect the branch
                   to a copy of e->src.  */
                FOR_EACH_EDGE (pe, pei, e->src->preds)
                  if ((pe->flags & EDGE_COMPLEX) != 0
                      && !bitmap_bit_p (&bb_flags, pe->src->index))
                    break;
                if (pe == NULL
                    && bitmap_set_bit (&bb_tail, e->src->index))
                  vec.quick_push (e->src);
              }
        }
      /* Now walk backwards from every block that is marked as needing
         a prologue to compute the bb_antic_flags bitmap.  Exclude
         tail blocks; they can be duplicated to be used on paths not
         needing a prologue.  */
      bitmap_clear (&bb_on_list);
      bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
      FOR_EACH_BB (bb)
        {
          if (!bitmap_bit_p (&bb_antic_flags, bb->index))
            continue;
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
                && bitmap_set_bit (&bb_on_list, e->src->index))
              vec.quick_push (e->src);
        }
      while (!vec.is_empty ())
        {
          basic_block tmp_bb = vec.pop ();
          bool all_set = true;

          bitmap_clear_bit (&bb_on_list, tmp_bb->index);
          FOR_EACH_EDGE (e, ei, tmp_bb->succs)
            if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
              {
                all_set = false;
                break;
              }

          if (all_set)
            {
              bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
              FOR_EACH_EDGE (e, ei, tmp_bb->preds)
                if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
                    && bitmap_set_bit (&bb_on_list, e->src->index))
                  vec.quick_push (e->src);
            }
        }
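      /* BB_ANTIC_FLAGS now marks the blocks where the prologue is
         anticipated: a marked block either needs the prologue itself or
         has only marked successors, so the prologue will be needed no
         matter how execution continues from it.  */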
      /* Find exactly one edge that leads to a block in ANTIC from
         a block that isn't.  */
      if (!bitmap_bit_p (&bb_antic_flags, entry_edge->dest->index))
        FOR_EACH_BB (bb)
          {
            if (!bitmap_bit_p (&bb_antic_flags, bb->index))
              continue;
            FOR_EACH_EDGE (e, ei, bb->preds)
              if (!bitmap_bit_p (&bb_antic_flags, e->src->index))
                {
                  if (entry_edge != orig_entry_edge)
                    {
                      entry_edge = orig_entry_edge;
                      if (dump_file)
                        fprintf (dump_file, "More than one candidate edge.\n");
                      goto fail_shrinkwrap;
                    }
                  if (dump_file)
                    fprintf (dump_file, "Found candidate edge for "
                             "shrink-wrapping, %d->%d.\n", e->src->index,
                             e->dest->index);
                  entry_edge = e;
                }
          }

      if (entry_edge != orig_entry_edge)
        {
          /* Test whether the prologue is known to clobber any register
             (other than FP or SP) that is live on the edge.  */
          CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
          if (frame_pointer_needed)
            CLEAR_HARD_REG_BIT (prologue_clobbered, HARD_FRAME_POINTER_REGNUM);
          REG_SET_TO_HARD_REG_SET (live_on_edge,
                                   df_get_live_in (entry_edge->dest));
          if (hard_reg_set_intersect_p (live_on_edge, prologue_clobbered))
            {
              entry_edge = orig_entry_edge;
              if (dump_file)
                fprintf (dump_file,
                         "Shrink-wrapping aborted due to clobber.\n");
            }
        }
      if (entry_edge != orig_entry_edge)
        {
          crtl->shrink_wrapped = true;
          if (dump_file)
            fprintf (dump_file, "Performing shrink-wrapping.\n");

          /* Find tail blocks reachable from both blocks needing a
             prologue and blocks not needing a prologue.  */
          if (!bitmap_empty_p (&bb_tail))
            FOR_EACH_BB (bb)
              {
                bool some_pro, some_no_pro;
                if (!bitmap_bit_p (&bb_tail, bb->index))
                  continue;
                some_pro = some_no_pro = false;
                FOR_EACH_EDGE (e, ei, bb->preds)
                  {
                    if (bitmap_bit_p (&bb_flags, e->src->index))
                      some_pro = true;
                    else
                      some_no_pro = true;
                  }
                if (some_pro && some_no_pro)
                  vec.quick_push (bb);
                else
                  bitmap_clear_bit (&bb_tail, bb->index);
              }

          /* Find the head of each tail.  */
          while (!vec.is_empty ())
            {
              basic_block tbb = vec.pop ();

              if (!bitmap_bit_p (&bb_tail, tbb->index))
                continue;

              while (single_succ_p (tbb))
                {
                  tbb = single_succ (tbb);
                  bitmap_clear_bit (&bb_tail, tbb->index);
                }
            }
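          /* Only the head block of each tail chain is still set in
             BB_TAIL; the single-successor blocks below it were cleared
             here and will be duplicated together with their head in the
             loop below.  */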
          /* Now duplicate the tails.  */
          if (!bitmap_empty_p (&bb_tail))
            FOR_EACH_BB_REVERSE (bb)
              {
                basic_block copy_bb, tbb;
                rtx insert_point;
                int eflags;

                if (!bitmap_clear_bit (&bb_tail, bb->index))
                  continue;

                /* Create a copy of BB, instructions and all, for
                   use on paths that don't need a prologue.
                   Ideal placement of the copy is on a fall-thru edge
                   or after a block that would jump to the copy.  */
                FOR_EACH_EDGE (e, ei, bb->preds)
                  if (!bitmap_bit_p (&bb_flags, e->src->index)
                      && single_succ_p (e->src))
                    break;
                if (e)
                  {
                    copy_bb = create_basic_block (NEXT_INSN (BB_END (e->src)),
                                                  NULL_RTX, e->src);
                    BB_COPY_PARTITION (copy_bb, e->src);
                  }
                else
                  {
                    /* Otherwise put the copy at the end of the function.  */
                    copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
                                                  EXIT_BLOCK_PTR->prev_bb);
                    BB_COPY_PARTITION (copy_bb, bb);
                  }

                insert_point = emit_note_after (NOTE_INSN_DELETED,
                                                BB_END (copy_bb));
                emit_barrier_after (BB_END (copy_bb));

                tbb = bb;
                while (1)
                  {
                    dup_block_and_redirect (tbb, copy_bb, insert_point,
                                            &bb_flags);
                    tbb = single_succ (tbb);
                    if (tbb == EXIT_BLOCK_PTR)
                      break;
                    e = split_block (copy_bb, PREV_INSN (insert_point));
                    copy_bb = e->dest;
                  }

                /* Quiet verify_flow_info by (ab)using EDGE_FAKE.
                   We have yet to add a simple_return to the tails,
                   as we'd like to first convert_jumps_to_returns in
                   case the block is no longer used after that.  */
                eflags = EDGE_FAKE;
                if (CALL_P (PREV_INSN (insert_point))
                    && SIBLING_CALL_P (PREV_INSN (insert_point)))
                  eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
                make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);

                /* verify_flow_info doesn't like a note after a
                   sibling call.  */
                delete_insn (insert_point);
                if (bitmap_empty_p (&bb_tail))
                  break;
              }
        }

    fail_shrinkwrap:
      bitmap_clear (&bb_tail);
      bitmap_clear (&bb_antic_flags);
      bitmap_clear (&bb_on_list);
      vec.release ();
    }
#endif
  if (split_prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
      inserted = true;
    }
  if (prologue_seq != NULL_RTX)
    {
      insert_insn_on_edge (prologue_seq, entry_edge);
      inserted = true;
    }
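  /* Note the asymmetry: a split prologue always goes on the original
     entry edge, while the prologue proper goes on the entry edge that
     shrink-wrapping may have moved deeper into the function.  */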
  /* If the exit block has no non-fake predecessors, we don't need
     an epilogue.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if ((e->flags & EDGE_FAKE) == 0)
      break;
  if (e == NULL)
    goto epilogue_done;

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);

  /* If we're allowed to generate a simple return instruction, then by
     definition we don't need a full epilogue.  If the last basic
     block before the exit block does not contain active instructions,
     examine its predecessors and try to emit (conditional) return
     instructions.  */
#ifdef HAVE_simple_return
  if (entry_edge != orig_entry_edge)
    {
      if (optimize)
        {
          unsigned i, last;

          /* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
             (but won't remove).  Stop at end of current preds.  */
          last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
          for (i = 0; i < last; i++)
            {
              e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
              if (LABEL_P (BB_HEAD (e->src))
                  && !bitmap_bit_p (&bb_flags, e->src->index)
                  && !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
                unconverted_simple_returns
                  = convert_jumps_to_returns (e->src, true,
                                              unconverted_simple_returns);
            }
        }

      if (exit_fallthru_edge != NULL
          && EDGE_COUNT (exit_fallthru_edge->src->preds) != 0
          && !bitmap_bit_p (&bb_flags, exit_fallthru_edge->src->index))
        {
          basic_block last_bb;

          last_bb = emit_return_for_exit (exit_fallthru_edge, true);
          returnjump = BB_END (last_bb);
          exit_fallthru_edge = NULL;
        }
    }
#endif
#ifdef HAVE_return
  if (HAVE_return)
    {
      if (exit_fallthru_edge == NULL)
        goto epilogue_done;

      if (optimize)
        {
          basic_block last_bb = exit_fallthru_edge->src;

          if (LABEL_P (BB_HEAD (last_bb))
              && !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
            convert_jumps_to_returns (last_bb, false, vNULL);

          if (EDGE_COUNT (last_bb->preds) != 0
              && single_succ_p (last_bb))
            {
              last_bb = emit_return_for_exit (exit_fallthru_edge, false);
              epilogue_end = returnjump = BB_END (last_bb);
#ifdef HAVE_simple_return
              /* Emitting the return may add a basic block.
                 Fix bb_flags for the added block.  */
              if (last_bb != exit_fallthru_edge->src)
                bitmap_set_bit (&bb_flags, last_bb->index);
#endif
              goto epilogue_done;
            }
        }
    }
#endif

  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;

#ifdef HAVE_eh_return
  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      rtx prev, last, trial;

      if (e->flags & EDGE_FALLTHRU)
        continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
        continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      if (trial == last)
        continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }
#endif

  /* If nothing falls through into the exit block, we don't need an
     epilogue.  */

  if (exit_fallthru_edge == NULL)
    goto epilogue_done;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      start_sequence ();
      epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
      seq = gen_epilogue ();
      if (seq)
        emit_jump_insn (seq);

      /* Retain a map of the epilogue insns.  */
      record_insns (seq, NULL, &epilogue_insn_hash);
      set_insn_locations (seq, epilogue_location);

      seq = get_insns ();
      returnjump = get_last_insn ();
      end_sequence ();

      insert_insn_on_edge (seq, exit_fallthru_edge);
      inserted = true;

      if (JUMP_P (returnjump))
        set_return_jump_label (returnjump);
    }
  else
#endif
    {
      basic_block cur_bb;

      if (! next_active_insn (BB_END (exit_fallthru_edge->src)))
        goto epilogue_done;
      /* We have a fall-through edge to the exit block, the source is not
         at the end of the function, and there will be an assembler epilogue
         at the end of the function.
         We can't use force_nonfallthru here, because that would try to
         use return.  Inserting a jump 'by hand' is extremely messy, so
         we take advantage of cfg_layout_finalize using
         fixup_fallthru_exit_predecessor.  */
      cfg_layout_initialize (0);
      FOR_EACH_BB (cur_bb)
        if (cur_bb->index >= NUM_FIXED_BLOCKS
            && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
          cur_bb->aux = cur_bb->next_bb;
      cfg_layout_finalize ();
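      /* Chaining each block's aux field to its successor above records
         the current block order, so cfg_layout_finalize keeps that order
         and only performs the exit-predecessor fixup we are after.  */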
    }

epilogue_done:

  default_rtl_profile ();

  if (inserted)
    {
      sbitmap blocks;

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      blocks = sbitmap_alloc (last_basic_block);
      bitmap_clear (blocks);
      bitmap_set_bit (blocks, entry_edge->dest->index);
      bitmap_set_bit (blocks, orig_entry_edge->dest->index);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);

      /* The epilogue insns we inserted may cause the exit edge to no longer
         be fallthru.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          if (((e->flags & EDGE_FALLTHRU) != 0)
              && returnjump_p (BB_END (e->src)))
            e->flags &= ~EDGE_FALLTHRU;
        }
    }
#ifdef HAVE_simple_return
  /* If there were branches to an empty LAST_BB which we tried to
     convert to conditional simple_returns, but couldn't for some
     reason, create a block to hold a simple_return insn and redirect
     those remaining edges.  */
  if (!unconverted_simple_returns.is_empty ())
    {
      basic_block simple_return_block_hot = NULL;
      basic_block simple_return_block_cold = NULL;
      edge pending_edge_hot = NULL;
      edge pending_edge_cold = NULL;
      basic_block exit_pred = EXIT_BLOCK_PTR->prev_bb;
      int i;

      gcc_assert (entry_edge != orig_entry_edge);

      /* See if we can reuse the last insn that was emitted for the
         epilogue.  */
      if (returnjump != NULL_RTX
          && JUMP_LABEL (returnjump) == simple_return_rtx)
        {
          e = split_block (BLOCK_FOR_INSN (returnjump), PREV_INSN (returnjump));
          if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
            simple_return_block_hot = e->dest;
          else
            simple_return_block_cold = e->dest;
        }

      /* Also check returns we might need to add to tail blocks.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
          {
            if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
              pending_edge_hot = e;
            else
              pending_edge_cold = e;
          }

      FOR_EACH_VEC_ELT (unconverted_simple_returns, i, e)
        {
          basic_block *pdest_bb;
          edge pending;

          if (BB_PARTITION (e->src) == BB_HOT_PARTITION)
            {
              pdest_bb = &simple_return_block_hot;
              pending = pending_edge_hot;
            }
          else
            {
              pdest_bb = &simple_return_block_cold;
              pending = pending_edge_cold;
            }

          if (*pdest_bb == NULL && pending != NULL)
            {
              emit_return_into_block (true, pending->src);
              pending->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
              *pdest_bb = pending->src;
            }
          else if (*pdest_bb == NULL)
            {
              basic_block bb;
              rtx start;

              bb = create_basic_block (NULL, NULL, exit_pred);
              BB_COPY_PARTITION (bb, e->src);
              start = emit_jump_insn_after (gen_simple_return (),
                                            BB_END (bb));
              JUMP_LABEL (start) = simple_return_rtx;
              emit_barrier_after (start);

              *pdest_bb = bb;
              make_edge (bb, EXIT_BLOCK_PTR, 0);
            }
          redirect_edge_and_branch_force (e, *pdest_bb);
        }
      unconverted_simple_returns.release ();
    }
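  /* At most one simple_return block was created per partition above, so
     none of the redirected edges had to cross the hot/cold boundary.  */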
  if (entry_edge != orig_entry_edge)
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        if (EDGE_COUNT (e->src->preds) != 0
            && (e->flags & EDGE_FAKE) != 0
            && !bitmap_bit_p (&bb_flags, e->src->index))
          {
            emit_return_into_block (true, e->src);
            e->flags &= ~(EDGE_FALLTHRU | EDGE_FAKE);
          }
    }
#endif

#ifdef HAVE_sibcall_epilogue
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      basic_block bb = e->src;
      rtx insn = BB_END (bb);
      rtx ep_seq;

      if (!CALL_P (insn)
          || ! SIBLING_CALL_P (insn)
#ifdef HAVE_simple_return
          || (entry_edge != orig_entry_edge
              && !bitmap_bit_p (&bb_flags, bb->index))
#endif
          )
        {
          ei_next (&ei);
          continue;
        }

      ep_seq = gen_sibcall_epilogue ();
      if (ep_seq)
        {
          start_sequence ();
          emit_note (NOTE_INSN_EPILOGUE_BEG);
          emit_insn (ep_seq);
          seq = get_insns ();
          end_sequence ();

          /* Retain a map of the epilogue insns.  Used in life analysis to
             avoid getting rid of sibcall epilogue insns.  Do this before we
             actually emit the sequence.  */
          record_insns (seq, NULL, &epilogue_insn_hash);
          set_insn_locations (seq, epilogue_location);

          emit_insn_before (seq, insn);
        }
      ei_next (&ei);
    }
#endif
#ifdef HAVE_epilogue
  if (epilogue_end)
    {
      rtx insn, next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_end; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_end));
        }
    }
#endif

#ifdef HAVE_simple_return
  bitmap_clear (&bb_flags);
#endif

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
    || defined (HAVE_sibcall_epilogue)
  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = htab_elements (prologue_insn_hash);
      rtx insn, last = NULL, note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
         (1) The prologue can contain loops (typically probing the stack),
             which means that the end of the prologue isn't in the first bb.
         (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue_insn_hash))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          if (note == NULL)
            {
              /* Scan forward looking for the PROLOGUE_END note.  It should
                 be right at the beginning of the block, possibly with other
                 insn notes that got moved there.  */
              for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
                if (NOTE_P (note)
                    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                  break;
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          rtx insn, first = NULL, note = NULL;
          basic_block bb = e->src;

          /* Scan from the beginning until we reach the first epilogue
             insn.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (NOTE_P (insn))
                {
                  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                    {
                      note = insn;
                      if (first != NULL)
                        break;
                    }
                }
              else if (first == NULL && contains (insn, epilogue_insn_hash))
                {
                  first = insn;
                  if (note != NULL)
                    break;
                }
            }

          if (note)
            {
              /* If the function has a single basic block, and no real
                 epilogue insns (e.g. sibcall with no cleanup), the
                 epilogue note can get scheduled before the prologue
                 note.  If we have frame related prologue insns, having
                 them scanned during the epilogue will result in a crash.
                 In this case re-order the epilogue note to just before
                 the last insn in the block.  */
              if (first == NULL)
                first = BB_END (bb);

              if (PREV_INSN (first) != note)
                reorder_insns (note, note, PREV_INSN (first));
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}
/* Returns the name of function declared by FNDECL.  */
const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 2);
}

/* Returns the name of function FN.  */
const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif

  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      void **slot;

      if (func->used_types_hash == NULL)
        func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
                                                 htab_eq_pointer, NULL);
      slot = htab_find_slot (func->used_types_hash, type, INSERT);
      if (*slot == NULL)
        *slot = type;
    }
}

/* Given a type, insert it into the used hash table in cfun.  */
void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
        used_types_insert_helper (t, cfun);
      else
        {
          /* So this might be a type referenced by a global variable.
             Record that type so that we can later decide to emit its
             debug information.  */
          vec_safe_push (types_used_by_cur_var_decl, t);
        }
    }
}
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
                                iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
types_used_by_vars_do_hash (const void *x)
{
  const struct types_used_by_vars_entry *entry =
    (const struct types_used_by_vars_entry *) x;

  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

int
types_used_by_vars_eq (const void *x1, const void *x2)
{
  const struct types_used_by_vars_entry *e1 =
    (const struct types_used_by_vars_entry *) x1;
  const struct types_used_by_vars_entry *e2 =
    (const struct types_used_by_vars_entry *) x2;

  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}

/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      void **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
        types_used_by_vars_hash =
          htab_create_ggc (37, types_used_by_vars_do_hash,
                           types_used_by_vars_eq, NULL);
      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
                                       hash_types_used_by_vars_entry (&e),
                                       INSERT);
      if (*slot == NULL)
        {
          struct types_used_by_vars_entry *entry;
          entry = ggc_alloc_types_used_by_vars_entry ();
          entry->type = type;
          entry->var_decl = var_decl;
          *slot = entry;
        }
    }
}
struct rtl_opt_pass pass_leaf_regs =
{
 {
  RTL_PASS,
  "*leaf_regs",                         /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
struct rtl_opt_pass pass_thread_prologue_and_epilogue =
{
 {
  RTL_PASS,
  "pro_and_epilogue",                   /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  rest_of_handle_thread_prologue_and_epilogue, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_THREAD_PROLOGUE_AND_EPILOGUE,      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_verify_flow,                     /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads,
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
static void
match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output, insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
        constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || (GET_MODE (input) != VOIDmode
              && GET_MODE (input) != GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change IN once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, input);
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         a too complicated problem for reload to solve.  E.g. this situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we would replace only the occurrence of the input operand (to
         make the matching) we would be left with this:

           output = input
           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the same
         value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                               input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
static unsigned
rest_of_match_asm_constraints (void)
{
  basic_block bb;
  rtx insn, pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}
struct rtl_opt_pass pass_match_asm_constraints =
{
 {
  RTL_PASS,
  "asmcons",                            /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  rest_of_match_asm_constraints,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};


#include "gt-function.h"