/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "coretypes.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "rtl-error.h"
#include "hard-reg-set.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-pass.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "function-abi.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next integer that meets the required
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
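
/* Worked example, taking ALIGN as 8:
   FLOOR_ROUND (13, 8) == (13 & ~7) == 8, and
   CEIL_ROUND (13, 8) == ((13 + 7) & ~7) == 16;
   already-aligned values such as 16 are returned unchanged by both.  */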
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;
/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

hash_table<insn_cache_hasher> *prologue_insn_hash;
hash_table<insn_cache_hasher> *epilogue_insn_hash;

hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations.  */

static class temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **);
static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
/* Stack of nested functions; keeps track of the cfun stack.  */

static vec<function *> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

poly_int64
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (poly_int64 offset, tree func)
{
  poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
  unsigned HOST_WIDE_INT limit
    = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
       /* Leave room for the fixed part of the frame.  */
       - 64 * UNITS_PER_WORD);

  if (!coeffs_in_range_p (size, 0U, limit))
    {
      unsigned HOST_WIDE_INT hwisize;
      if (size.is_constant (&hwisize))
        error_at (DECL_SOURCE_LOCATION (func),
                  "total size of local objects %wu exceeds maximum %wu",
                  hwisize, limit);
      else
        error_at (DECL_SOURCE_LOCATION (func),
                  "total size of local objects exceeds maximum %wu",
                  limit);
      return true;
    }

  return false;
}
/* Return the minimum spill slot alignment for a register of mode MODE.  */

unsigned int
spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
{
  return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (poly_int64 start, poly_int64 length,
                     poly_int64 size, unsigned int alignment,
                     poly_int64_pod *poffset)
{
  poly_int64 this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = targetm.starting_frame_offset () % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (aligned_lower_bound (start + length - size - frame_phase, alignment)
         + frame_phase);
  else
    this_frame_offset
      = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (maybe_lt (this_frame_offset, start))
    {
      if (known_eq (frame_offset, start))
        frame_offset = this_frame_offset;
      else
        return false;
    }
  else if (maybe_gt (this_frame_offset + size, start + length))
    {
      if (known_eq (frame_offset, start + length))
        frame_offset = this_frame_offset + size;
      else
        return false;
    }

  *poffset = this_frame_offset;
  return true;
}
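
/* Worked example, assuming frame_phase is 0: fitting SIZE 8 with
   ALIGNMENT 4 into a gap with START 2 and LENGTH 14 yields *POFFSET 4 on
   an upward-growing frame, or 8 (the highest aligned position that still
   fits) when FRAME_GROWS_DOWNWARD; either way 8 bytes of the 14-byte gap
   are left over for the caller to re-record.  */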
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (poly_int64 start, poly_int64 end)
{
  class frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */
rtx
assign_stack_local_1 (machine_mode mode, poly_int64 size,
                      int align, int kind)
{
  rtx x, addr;
  poly_int64 bigend_correction = 0;
  poly_int64 slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = aligned_upper_bound (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
    }
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || known_eq (size, 0)
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;
  if (mode != BLKmode || maybe_ne (size, 0))
    {
      if (kind & ASLK_RECORD_PAD)
        {
          class frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              class frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (known_gt (slot_offset, space->start))
                add_frame_space (space->start, slot_offset);
              if (known_lt (slot_offset + size, space->start + space->length))
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (known_gt (slot_offset, frame_offset))
            add_frame_space (frame_offset, slot_offset);
          if (known_lt (slot_offset + size, old_frame_offset))
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment,
                           &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (known_gt (slot_offset, old_frame_offset))
            add_frame_space (old_frame_offset, slot_offset);
          if (known_lt (slot_offset + size, frame_offset))
            add_frame_space (slot_offset + size, frame_offset);
        }
    }
 found_space:
  /* On a big-endian machine, if we are allocating more space than we will
     use, use the least significant bytes of those that are allocated.  */
  if (mode != BLKmode)
    {
      /* The slot size can sometimes be smaller than the mode size;
         e.g. the rs6000 port allocates slots with a vector mode
         that have the size of only one element.  However, the slot
         size must always be ordered wrt to the mode size, in the
         same way as for a subreg.  */
      gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
      if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
        bigend_correction = size - GET_MODE_SIZE (mode);
    }

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + targetm.starting_frame_offset (), Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  vec_safe_push (stack_slot_list, x);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
/* Wrap up assign_stack_local_1 with last parameter as ASLK_RECORD_PAD.  */

rtx
assign_stack_local (machine_mode mode, poly_int64 size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
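
/* A typical use from a back end or from expansion code is, illustratively,

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   which reserves a frame slot aligned according to DImode; the returned
   MEM is based on virtual_stack_vars_rtx until instantiate_virtual_regs
   (below) rewrites it in terms of the frame pointer.  */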
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

class GTY(()) temp_slot {
public:
  /* Points to next temporary slot.  */
  class temp_slot *next;
  /* Points to previous temporary slot.  */
  class temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  poly_int64 size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  poly_int64 base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  poly_int64 full_size;
};
/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  class temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (class temp_slot *temp, class temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (class temp_slot *temp, class temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static class temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1, true);

  return &(*used_temp_slots)[level];
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (class temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));

  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (class temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}
/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}
/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
                            temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, class temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = copy_rtx (address);
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}
/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}
/* Find the temp slot corresponding to the object at address X.  */

static class temp_slot *
find_temp_slot_from_address (rtx x)
{
  class temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  poly_int64 offset;
  if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          if (known_in_range_p (offset, p->base_offset, p->full_size))
            return p;
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */
rtx
assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
{
  unsigned int align;
  class temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  gcc_assert (known_size_p (size));

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align
              && known_ge (p->size, size)
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0
                  || (known_eq (best_p->size, p->size)
                      ? best_p->align > p->align
                      : known_ge (best_p->size, p->size))))
            {
              if (p->align == align && known_eq (p->size, size))
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          poly_int64 rounded_size = aligned_upper_bound (size, alignment);

          if (known_ge (best_p->size - rounded_size, alignment))
            {
              p = ggc_alloc<temp_slot> ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode,
                                           rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              vec_safe_push (stack_slot_list, p->slot);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      poly_int64 frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? aligned_upper_bound (size,
                                                              (int) align
                                                              / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  vec_safe_push (stack_slot_list, slot);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, poly_int64 size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
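
/* For example, expansion code that needs scratch memory for a DImode
   value can take (illustratively)

     rtx mem = assign_stack_temp (DImode, GET_MODE_SIZE (DImode));

   and the slot becomes reusable once free_temp_slots is called at the
   end of the statement.  */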
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  if (mode == BLKmode || memory_required)
    {
      poly_int64 size;
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
        size = max_int_size_in_bytes (type);

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (known_eq (size, 0))
        size = 1;

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl
          && !known_size_p (size)
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
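
/* So a caller that must have addressable stack memory for TYPE asks,
   illustratively, for

     rtx mem = assign_temp (type, /1 for MEMORY_REQUIRED/ 1, 0);

   while BLKmode types take the memory path even when MEMORY_REQUIRED
   is 0.  */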
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  class temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (known_eq (p->base_offset + p->full_size, q->base_offset))
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (known_eq (q->base_offset + q->full_size, p->base_offset))
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  class temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
     in common between them, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
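
/* For example, if OLD_RTX is (plus (reg A) (reg B)) and NEW_RTX is
   (plus (reg A) (reg C)), the shared operand (reg A) matches, so the code
   above recurses on the remaining pair (reg B) / (reg C).  (Illustrative
   registers only.)  */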
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  class temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  class temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
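
/* The nesting protocol, as used by expansion code (an illustrative
   sketch, not a quote of an actual call site):

     push_temp_slots ();
     rtx tmp = assign_stack_temp (mode, size);
     ... emit code that uses tmp ...
     pop_temp_slots ();

   pop_temp_slots frees every slot allocated at the popped level;
   preserve_temp_slots moves a slot up one level when a ({...}) statement
   expression's value must outlive the statement.  */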
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
};

/* ??? This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
};
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}
/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
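
/* For instance, a back end that needs the incoming value of its return
   address register can request something like

     rtx ra = get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);

   where RETURN_ADDR_REGNUM stands in for a target-specific register
   number.  emit_initial_value_sets below emits the copies that
   initialize such pseudos at function entry.  */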
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
          && REGNO (ivs->entries[i].hard_reg) == regno)
        return ivs->entries[i].pseudo;

  return NULL_RTX;
}
unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}
/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */

bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following five variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static poly_int64 in_arg_offset;
static poly_int64 var_offset;
static poly_int64 dynamic_offset;
static poly_int64 out_arg_offset;
static poly_int64 cfa_offset;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL)					\
((ACCUMULATE_OUTGOING_ARGS						\
  ? (crtl->outgoing_args_size						\
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
     : INCOMING_REG_PARM_STACK_SPACE (FNDECL)))				\
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)					\
  ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
   + (STACK_POINTER_OFFSET))
#endif
#endif
/* Given a piece of RTX and a pointer to a poly_int64, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, poly_int64_pod *poffset)
{
  rtx new_rtx;
  poly_int64 offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
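
/* Thus, once var_offset is known, the callers below rewrite a reference
   such as (plus (reg virtual-stack-vars) (const_int 8)) into
   (plus (reg frame-pointer) (const_int <8 + var_offset>)), and similarly
   for the other virtual registers.  */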
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
        {
          rtx new_rtx;
          poly_int64 offset;
          switch (GET_CODE (x))
            {
            case REG:
              new_rtx = instantiate_new_reg (x, &offset);
              if (new_rtx)
                {
                  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
                  changed = true;
                }
              iter.skip_subrtxes ();
              break;

            case PLUS:
              new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
              if (new_rtx)
                {
                  XEXP (x, 0) = new_rtx;
                  *loc = plus_constant (GET_MODE (x), x, offset, true);
                  changed = true;
                  iter.skip_subrtxes ();
                  break;
                }

              /* FIXME -- from old code */
              /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
                 we can commute the PLUS and SUBREG because pointers into the
                 frame are well-behaved.  */
              break;

            default:
              break;
            }
        }
    }
  return changed;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx_insn *insn)
{
  poly_int64 offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x;
  rtx_insn *seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          instantiate_virtual_regs_in_rtx (&SET_SRC (set));
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   gen_int_mode (-offset,
                                                 GET_MODE (new_rtx)));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }
      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx
          && maybe_ne (offset, 0)
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
                                   gen_int_mode (offset,
                                                 GET_MODE (SET_DEST (set))),
                                   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);
      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      poly_int64 delta;
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && poly_int_rtx_p (recog_data.operand[2], &delta)
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += delta;

          /* If the sum is zero, then replace with a plain move.  */
          if (known_eq (offset, 0)
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }
  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);

            if (!instantiate_virtual_regs_in_rtx (&addr))
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr, true);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr, true);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (known_eq (offset, 0))
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is a valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       gen_int_mode (offset, GET_MODE (x)),
                                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (maybe_ne (offset, 0))
            {
              start_sequence ();
              new_rtx = expand_simple_binop
                (GET_MODE (new_rtx), PLUS, new_rtx,
                 gen_int_mode (offset, GET_MODE (new_rtx)),
                 NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }
  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          /* For asm goto, instead of fixing up all the edges
             just clear the template and clear input and output operands
             and strip away clobbers.  */
          if (JUMP_P (insn))
            {
              rtx asm_op = extract_asm_operands (PATTERN (insn));
              PATTERN (insn) = asm_op;
              PUT_MODE (asm_op, VOIDmode);
              ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
              ASM_OPERANDS_OUTPUT_CONSTRAINT (asm_op) = "";
              ASM_OPERANDS_OUTPUT_IDX (asm_op) = 0;
              ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
              ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
            }
          else
            delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
        {
          if (DECL_RTL_SET_P (t))
            instantiate_decl_rtl (DECL_RTL (t));
          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
              && DECL_INCOMING_RTL (t))
            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
          if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
              && DECL_HAS_VALUE_EXPR_P (t))
            {
              tree v = DECL_VALUE_EXPR (t);
              walk_tree (&v, instantiate_expr, NULL, NULL);
            }
        }
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
        instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process the saved static chain if it exists.  */
  decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
  if (decl && DECL_HAS_VALUE_EXPR_P (decl))
    instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));

  /* Now process all variables defined in the function or its subblocks.  */
  if (DECL_INITIAL (fndecl))
    instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx_insn *insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = targetm.starting_frame_offset ();
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ASM_INPUT
            || DEBUG_MARKER_INSN_P (insn))
          continue;
        else if (DEBUG_BIND_INSN_P (insn))
          instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
        else
          instantiate_virtual_regs_in_insn (insn);

        if (insn->deleted ())
          continue;

        instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
namespace {

const pass_data pass_data_instantiate_virtual_regs =
{
  RTL_PASS, /* type */
  "vregs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_instantiate_virtual_regs : public rtl_opt_pass
{
public:
  pass_instantiate_virtual_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return instantiate_virtual_regs ();
    }

}; // class pass_instantiate_virtual_regs

} // anon namespace

rtl_opt_pass *
make_pass_instantiate_virtual_regs (gcc::context *ctxt)
{
  return new pass_instantiate_virtual_regs (ctxt);
}
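
/* The pass is scheduled from passes.def (as pass_instantiate_virtual_regs),
   running once, shortly after expansion and before the bulk of the RTL
   optimizers.  */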

/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	{
	  tree fndecl = get_callee_fndecl (fntype);
	  if (fndecl)
	    fntype = TREE_TYPE (fndecl);
	  else if (CALL_EXPR_FN (fntype))
	    fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
	  else
	    /* For internal functions, assume nothing needs to be
	       returned in memory.  */
	    return 0;
	}
	break;
      case FUNCTION_DECL:
	fntype = TREE_TYPE (fntype);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
	break;
      case IDENTIFIER_NODE:
	fntype = NULL_TREE;
	break;
      default:
	/* We don't expect other tree types here.  */
	gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (TYPE_EMPTY_P (type))
    return 0;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  /* Use the default ABI if the type of the function isn't known.
     The scheme for handling interoperability between different ABIs
     requires us to be able to tell when we're calling a function with
     a nondefault ABI.  */
  const predefined_function_abi &abi = (fntype
					? fntype_abi (fntype)
					: default_function_abi);
  regno = REGNO (reg);
  nregs = hard_regno_nregs (regno, TYPE_MODE (type));
  for (i = 0; i < nregs; i++)
    if (!fixed_regs[regno + i] && !abi.clobbers_full_reg_p (regno + i))
      return 1;

  return 0;
}

/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      /* We often try to use the SSA_NAME, instead of its underlying
	 decl, to get type information and guide decisions, to avoid
	 differences of behavior between anonymous and named
	 variables, but in this one case we have to go for the actual
	 variable if there is one.  The main reason is that, at least
	 at -O0, we want to place user variables on the stack, but we
	 don't mind using pseudos for anonymous or ignored temps.
	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
	 should go in pseudos, whereas their corresponding variables
	 might have to go on the stack.  So, disregarding the decl
	 here would negatively impact debug info at -O0, enable
	 coalescing between SSA_NAMEs that ought to get different
	 stack/pseudo assignments, and get the incoming argument
	 processing thoroughly confused by PARM_DECLs expected to live
	 in stack slots but assigned to pseudos.  */
      if (!SSA_NAME_VAR (decl))
	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));

      decl = SSA_NAME_VAR (decl);
    }

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* RESULT_DECLs are a bit special in that they're assigned without
     regard to use_register_for_decl, but we generally only store in
     them.  If we coalesce their SSA NAMEs, we'd better return a
     result that matches the assignment in expand_function_start.  */
  if (TREE_CODE (decl) == RESULT_DECL)
    {
      /* If it's not an aggregate, we're going to use a REG or a
	 PARALLEL containing a REG.  */
      if (!aggregate_value_p (decl, current_function_decl))
	return true;

      /* If expand_function_start determines the return value, we'll
	 use MEM if it's not by reference.  */
      if (cfun->returns_pcc_struct
	  || (targetm.calls.struct_value_rtx
	      (TREE_TYPE (current_function_decl), 1)))
	return DECL_BY_REFERENCE (decl);

      /* Otherwise, we're taking an extra all.function_result_decl
	 argument.  It's set up in assign_parms_augmented_arg_list,
	 under the (negated) conditions above, and then it's used to
	 set up the RESULT_DECL rtl in assign_params, after looping
	 over all parameters.  Now, if the RESULT_DECL is not by
	 reference, we'll use a MEM either way.  */
      if (!DECL_BY_REFERENCE (decl))
	return false;

      /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
	 the function_result_decl's assignment.  Since it's a pointer,
	 we can short-circuit a number of the tests below, and we must
	 duplicate them because we don't have the function_result_decl
	 to test.  */
      if (!targetm.calls.allocate_stack_slots_for_args ())
	return true;
      /* We don't set DECL_IGNORED_P for the function_result_decl.  */
      if (optimize)
	return true;
      if (cfun->tail_call_marked)
	return true;
      /* We don't set DECL_REGISTER for the function_result_decl.  */
      return false;
    }

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  /* Thunks force a tail call even at -O0 so we need to avoid creating a
     dangling reference in case the parameter is passed by reference.  */
  if (TREE_CODE (decl) == PARM_DECL && cfun->tail_call_marked)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  /* When not optimizing, disregard register keyword for types that
     could have methods, otherwise the methods won't be callable from
     the debugger.  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
    return false;

  return true;
}
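
/* Concretely, at -O0 a named user variable usually fails the
   DECL_IGNORED_P, optimize and DECL_REGISTER tests above and therefore
   lives on the stack where the debugger can find it, while a
   compiler-generated temporary (which is DECL_IGNORED_P) is given a
   pseudo register.  */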

/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx_insn *first_conversion_insn;
  rtx_insn *last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  function_arg_info arg;
  rtx entry_parm;
  rtx stack_parm;
  machine_mode nominal_mode;
  machine_mode passed_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
};

/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
			current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef INCOMING_REG_PARM_STACK_SPACE
  all->reg_parm_stack_space
    = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
#endif
}

/* If ARGS contains entries with complex types, split the entry into two
   entries of the component type.  Return a new list if substitutions are
   needed, else the old list.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (type))
	{
	  tree decl;
	  tree subtype = TREE_TYPE (type);
	  bool addressable = TREE_ADDRESSABLE (p);

	  /* Rewrite the PARM_DECL's type with its component.  */
	  p = copy_node (p);
	  TREE_TYPE (p) = subtype;
	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
	  SET_DECL_MODE (p, VOIDmode);
	  DECL_SIZE (p) = NULL;
	  DECL_SIZE_UNIT (p) = NULL;
	  /* If this arg must go in memory, put it in a pseudo here.
	     We can't allow it to go in memory as per normal parms,
	     because the usual place might not have the imag part
	     adjacent to the real part.  */
	  DECL_ARTIFICIAL (p) = addressable;
	  DECL_IGNORED_P (p) = addressable;
	  TREE_ADDRESSABLE (p) = 0;
	  layout_decl (p, 0);
	  (*args)[i] = p;

	  /* Build a second synthetic decl.  */
	  decl = build_decl (EXPR_LOCATION (p),
			     PARM_DECL, NULL_TREE, subtype);
	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
	  DECL_ARTIFICIAL (decl) = addressable;
	  DECL_IGNORED_P (decl) = addressable;
	  layout_decl (decl, 0);
	  args->safe_insert (++i, decl);
	}
    }
}
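
/* For instance, a _Complex double parameter on a target whose
   split_complex_arg hook returns true is rewritten here into two
   consecutive DFmode PARM_DECLs, the original decl for the real part and
   a synthetic one for the imaginary part; assign_parms_unsplit_complex
   below reassembles the CONCAT once both halves have locations.  */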

/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;
      /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
	 changes, the end of the RESULT_DECL handling block in
	 use_register_for_decl must be adjusted to match.  */

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
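
/* E.g. for a function returning a large struct by value on a target
   without a struct_value_rtx, the list built here gains a leading
   artificial ".result_ptr" PARM_DECL that carries the address of the
   caller-provided return slot.  */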

/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  int unsignedp;

#ifndef BROKEN_VALUE_INITIALIZATION
  *data = assign_parm_data_one ();
#else
  /* Old versions of GCC used to miscompile the above by only initializing
     the members with explicit constructors and copying garbage
     to the other members.  */
  assign_parm_data_one zero_data = {};
  *data = zero_data;
#endif

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
  if (!cfun->stdarg)
    data->arg.named = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->arg.named = 1;  /* Not the last non-variadic parm. */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->arg.named = 1;  /* Only variadic ones are unnamed.  */
  else
    data->arg.named = 0;  /* Treat as variadic.  */

  data->nominal_type = TREE_TYPE (parm);
  data->arg.type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || data->arg.type == NULL
      || VOID_TYPE_P (data->nominal_type))
    {
      data->nominal_type = data->arg.type = void_type_node;
      data->nominal_mode = data->passed_mode = data->arg.mode = VOIDmode;
      return;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  data->passed_mode = data->arg.mode = TYPE_MODE (data->arg.type);
  data->nominal_mode = TYPE_MODE (data->nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if (RECORD_OR_UNION_TYPE_P (data->arg.type)
      && TYPE_TRANSPARENT_AGGR (data->arg.type))
    data->arg.type = TREE_TYPE (first_field (data->arg.type));

  /* See if this arg was passed by invisible reference.  */
  if (apply_pass_by_reference_rules (&all->args_so_far_v, data->arg))
    {
      data->nominal_type = data->arg.type;
      data->passed_mode = data->nominal_mode = data->arg.mode;
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (data->arg.type);
  data->arg.mode
    = promote_function_mode (data->arg.type, data->arg.mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 0);
}
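
/* As an example of the promotion above: on targets that widen small
   integer arguments, a short parameter keeps passed_mode == HImode and
   nominal_mode == HImode, while arg.mode becomes the wider register mode
   (e.g. SImode) in which the value actually arrives.  */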

/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  function_arg_info last_named_arg = data->arg;
  last_named_arg.named = true;
  targetm.calls.setup_incoming_varargs (all->args_so_far, last_named_arg,
					&varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}

/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->arg.mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  targetm.calls.warn_parameter_passing_abi (all->args_so_far,
					    data->arg.type);

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
						    data->arg);
  if (entry_parm == 0)
    data->arg.mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = (entry_parm != 0);
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->arg.named)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
	{
	  rtx tem;
	  function_arg_info named_arg = data->arg;
	  named_arg.named = true;
	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
						     named_arg);
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->arg))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far, data->arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->arg.mode, data->arg.type, in_regs,
		       all->reg_parm_stack_space,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
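
/* A worked example of the rounding above: with 8-byte stack slots
   (STACK_BYTES == 8), a partial argument with 12 bytes passed in
   registers yields pretend_args_size = CEIL_ROUND (12, 8)
   = (12 + 7) & ~7 = 16, preserving the STACK_BOUNDARY alignment of the
   argument pointer.  */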

/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
			   struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}

/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->arg.mode, stack_parm);

  if (!data->arg.pass_by_reference)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 while promoted mode's size is needed.  */
      if (data->arg.mode != BLKmode
	  && data->arg.mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm, GET_MODE_SIZE (data->arg.mode));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
	    {
	      poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
							 data->arg.mode);
	      if (maybe_ne (offset, 0))
		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  poly_int64 offset;
  if (data->locate.where_pad == PAD_NONE || data->entry_parm)
    align = boundary;
  else if (data->locate.where_pad == PAD_UPWARD)
    {
      align = boundary;
      /* If the argument offset is actually more aligned than the nominal
	 stack slot boundary, take advantage of that excess alignment.
	 Don't make any assumptions if STACK_POINTER_OFFSET is in use.  */
      if (poly_int_rtx_p (offset_rtx, &offset)
	  && known_eq (STACK_POINTER_OFFSET, 0))
	{
	  unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
	  if (offset_align == 0 || offset_align > STACK_BOUNDARY)
	    offset_align = STACK_BOUNDARY;
	  align = MAX (align, offset_align);
	}
    }
  else if (poly_int_rtx_p (offset_rtx, &offset))
    {
      align = least_bit_hwi (boundary);
      unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
      if (offset_align != 0)
	align = MIN (align, offset_align);
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
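
/* Example of the alignment guess above: a parameter at constant offset
   24 from the incoming argument pointer has known_alignment (24) == 8
   bytes, i.e. 64 bits, so in the PAD_UPWARD case the slot's recorded
   alignment may be raised to 64 bits (capped at STACK_BOUNDARY).  */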

/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
			  data->arg.type, int_size_in_bytes (data->arg.type));
      else
	{
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm),
			       validize_mem (copy_rtx (stack_parm)),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}

/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->arg.type,
			GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
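
/* For instance, an argument that arrives as a PARALLEL of two word-sized
   pieces but whose PARALLEL carries a single wide mode is gathered by the
   emit_group_store above into one wide pseudo, so later code can treat it
   as a single register value.  The piece layout itself comes from the
   target's function_incoming_arg hook.  */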

/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm)
	   && ((optab_handler (movmisalign_optab, data->nominal_mode)
		!= CODE_FOR_nothing)
	       || targetm.slow_unaligned_access (data->nominal_mode,
						 MEM_ALIGN (stack_parm))))
	  || (data->nominal_type
	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
	   && data->nominal_mode != BLKmode
	   && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
	   && (flag_stack_protect == SPCT_FLAG_ALL
	       || data->arg.pass_by_reference
	       || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
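
/* The stack-protector case exists because the incoming argument area sits
   on the caller's side of the canary: a pointer left in its passed slot
   would not be covered by the guard, so the slot is dropped and the value
   is kept elsewhere.  */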

/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && known_lt (GET_MODE_SIZE (data->arg.mode), UNITS_PER_WORD)
      && (BLOCK_REG_PADDING (data->passed_mode, data->arg.type, 1)
	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    return true;
#endif

  return false;
}

/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  rtx target_reg = NULL_RTX;
  bool in_conversion_seq = false;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  /* If we want the parameter in a pseudo, don't use a stack slot.  */
  if (is_gimple_reg (parm) && use_register_for_decl (parm))
    {
      tree def = ssa_default_def (cfun, parm);
      gcc_assert (def);
      machine_mode mode = promote_ssa_mode (def, NULL);
      rtx reg = gen_reg_rtx (mode);
      if (GET_CODE (reg) != CONCAT)
	stack_parm = reg;
      else
	{
	  target_reg = reg;
	  /* Avoid allocating a stack slot, if there isn't one
	     preallocated by the ABI.  It might seem like we should
	     always prefer a pseudo, but converting between
	     floating-point and integer modes goes through the stack
	     on various machines, so it's better to use the reserved
	     stack slot than to risk wasting it and allocating more
	     for the conversion.  */
	  if (stack_parm == NULL_RTX)
	    {
	      int save = generating_concat_p;
	      generating_concat_p = 0;
	      stack_parm = gen_reg_rtx (mode);
	      generating_concat_p = save;
	    }
	}
      data->stack_parm = NULL;
    }

  size = int_size_in_bytes (data->arg.type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      HOST_WIDE_INT parm_align
	= (STRICT_ALIGNMENT
	   ? MAX (DECL_ALIGN (parm), BITS_PER_WORD) : DECL_ALIGN (parm));

      SET_DECL_ALIGN (parm, parm_align);
      if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
	{
	  rtx allocsize = gen_int_mode (size_stored, Pmode);
	  get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
	  stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
					   MAX_SUPPORTED_STACK_ALIGNMENT);
	  rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
					    DECL_ALIGN (parm));
	  mark_reg_pointer (addr, DECL_ALIGN (parm));
	  stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
	  MEM_NOTRAP_P (stack_parm) = 1;
	}
      else
	stack_parm = assign_stack_local (BLKmode, size_stored,
					 DECL_ALIGN (parm));
      if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (copy_rtx (stack_parm));

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
	emit_group_store (mem, entry_parm, data->arg.type, size);
      else if (GET_CODE (entry_parm) == PARALLEL)
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->arg.type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	  in_conversion_seq = true;
	}

      else if (size == 0)
	;

      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  unsigned int bits = size * BITS_PER_UNIT;
	  machine_mode mode = int_mode_for_size (bits, 0).else_blk ();

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->arg.type, 1)
		      != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (mode == word_mode
		  || TRULY_NOOP_TRUNCATION_MODES_P (mode, word_mode))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

#ifdef BLOCK_REG_PADDING
	  /* Storing the register in memory as a full word, as
	     move_block_from_reg below would do, and then using the
	     MEM in a smaller mode, has the effect of shifting right
	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
	     shifting must be explicit.  */
	  else if (!MEM_P (mem))
	    {
	      rtx x;

	      /* If the assert below fails, we should have taken the
		 mode != BLKmode path above, unless we have downward
		 padding of smaller-than-word arguments on a machine
		 with little-endian bytes, which would likely require
		 additional changes to work correctly.  */
	      gcc_checking_assert (BYTES_BIG_ENDIAN
				   && (BLOCK_REG_PADDING (mode,
							  data->arg.type, 1)
				       == PAD_UPWARD));

	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
				NULL_RTX, 1);
	      x = force_reg (word_mode, x);
	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);

	      emit_move_insn (mem, x);
	    }
#endif

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->arg.type, 1)
		       == PAD_DOWNWARD)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else if (!MEM_P (mem))
	{
	  gcc_checking_assert (size > UNITS_PER_WORD);
#ifdef BLOCK_REG_PADDING
	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
						  data->arg.type, 0)
			       == PAD_UPWARD);
#endif
	  emit_move_insn (mem, entry_parm);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0 && !TYPE_EMPTY_P (data->arg.type))
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
      in_conversion_seq = true;
    }

  if (target_reg)
    {
      if (!in_conversion_seq)
	emit_move_insn (target_reg, stack_parm);
      else
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (target_reg, stack_parm);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}
      stack_parm = target_reg;
    }

  data->stack_parm = stack_parm;
  set_parm_rtl (parm, stack_parm);
}

/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
		       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;
  enum insn_code icode;
  rtx rtl;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
			     TREE_TYPE (current_function_decl), 2);

  parmreg = gen_reg_rtx (promoted_nominal_mode);
  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set rtl appropriately.  */
  if (data->arg.pass_by_reference)
    {
      rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->arg.type)), parmreg);
      set_mem_attributes (rtl, parm, 1);
    }
  else
    rtl = parmreg;

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (copy_rtx (data->entry_parm));

  need_conversion = (data->nominal_mode != data->passed_mode
		     || promoted_nominal_mode != data->arg.mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
	 mode, by the caller.  We now have to convert it to
	 NOMINAL_MODE, if different.  However, PARMREG may be in
	 a different mode than NOMINAL_MODE if it is being stored
	 promoted.

	 If ENTRY_PARM is a hard register, it might be in a register
	 not valid for operating in its mode (e.g., an odd-numbered
	 register for a DFmode).  In that case, moves are the only
	 thing valid, so we can't do a convert from there.  This
	 occurs when the calling sequence allow such misaligned
	 usages.

	 In addition, the conversion may involve a call, which could
	 clobber parameters which haven't been copied to pseudo
	 registers yet.

	 First, we try to emit an insn which performs the necessary
	 conversion.  We verify that this insn does not clobber any
	 hard registers.  */

      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
			    unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
	  && insn_operand_matches (icode, 0, op0)
	  && insn_operand_matches (icode, 1, op1))
	{
	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
	  rtx_insn *insn, *insns;
	  rtx t = op1;
	  HARD_REG_SET hardregs;

	  start_sequence ();
	  /* If op1 is a hard register that is likely spilled, first
	     force it into a pseudo, otherwise combiner might extend
	     its lifetime too much.  */
	  if (GET_CODE (t) == SUBREG)
	    t = SUBREG_REG (t);
	  if (REG_P (t)
	      && HARD_REGISTER_P (t)
	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
	    {
	      t = gen_reg_rtx (GET_MODE (op1));
	      emit_move_insn (t, op1);
	    }
	  else
	    t = op1;
	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
					   data->passed_mode, unsignedp);
	  emit_insn (pat);
	  insns = get_insns ();
	  end_sequence ();

	  moved = true;
	  CLEAR_HARD_REG_SET (hardregs);
	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		note_stores (insn, record_hard_reg_sets, &hardregs);
	      if (!hard_reg_set_empty_p (hardregs))
		moved = false;
	    }

	  if (moved)
	    {
	      emit_insn (insns);
	      if (equiv_stack_parm != NULL_RTX)
		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
						  equiv_stack_parm);
	    }
	}
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
	 generated appeared unsafe.  We must first copy the parm to a
	 pseudo reg, and save the conversion until after all
	 parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (partial_subreg_p (tempreg)
	  && GET_MODE (tempreg) == data->nominal_mode
	  && REG_P (SUBREG_REG (tempreg))
	  && data->nominal_mode == data->passed_mode
	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
	{
	  /* The argument is already sign/zero extended, so note it
	     into the subreg.  */
	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
	}

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      SET_DECL_RTL (parm, rtl);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      SET_DECL_RTL (parm, NULL_RTX);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else if (MEM_P (data->entry_parm)
	   && GET_MODE_ALIGNMENT (promoted_nominal_mode)
	      > MEM_ALIGN (data->entry_parm)
	   && (((icode = optab_handler (movmisalign_optab,
					promoted_nominal_mode))
		!= CODE_FOR_nothing)
	       || targetm.slow_unaligned_access (promoted_nominal_mode,
						 MEM_ALIGN (data->entry_parm))))
    {
      if (icode != CODE_FOR_nothing)
	emit_insn (GEN_FCN (icode) (parmreg, validated_mem));
      else
	rtl = parmreg = extract_bit_field (validated_mem,
			GET_MODE_BITSIZE (promoted_nominal_mode), 0,
			unsignedp, parmreg,
			promoted_nominal_mode, VOIDmode, false, NULL);
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can live in a register,
     retrieve it and use it directly.  Note that we cannot use nominal_mode,
     because it will have been set to Pmode above, we must use the actual mode
     of the parameter instead.  */
  if (data->arg.pass_by_reference && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    {
      /* Use a stack slot for debugging purposes if possible.  */
      if (use_register_for_decl (parm))
	{
	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
	  mark_user_reg (parmreg);
	}
      else
	{
	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
					    TYPE_MODE (TREE_TYPE (parm)),
					    TYPE_ALIGN (TREE_TYPE (parm)));
	  parmreg
	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
				  align);
	  set_mem_attributes (parmreg, parm, 1);
	}

      /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
	 the debug info in case it is not legitimate.  */
      if (GET_MODE (parmreg) != GET_MODE (rtl))
	{
	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (tempreg, rtl);
	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
			  tempreg);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();

	  did_conversion = true;
	}
      else
	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);

      rtl = parmreg;

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
	 now the parm.  */
      data->stack_parm = NULL;
    }

  set_parm_rtl (parm, rtl);

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
			  XEXP (data->stack_parm, 0)))
    {
      rtx_insn *linsn = get_last_insn ();
      rtx_insn *sinsn;
      rtx set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
	{
	  scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
	  int regnor = REGNO (XEXP (parmreg, 0));
	  int regnoi = REGNO (XEXP (parmreg, 1));
	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
					  GET_MODE_SIZE (submode));

	  /* Scan backwards for the set of the real and
	     imaginary parts.  */
	  for (sinsn = linsn; sinsn != 0;
	       sinsn = prev_nonnote_insn (sinsn))
	    {
	      set = single_set (sinsn);
	      if (set == 0)
		continue;

	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
	    }
	}
      else
	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}

/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
			 struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->arg.mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  poly_int64 offset
	    = subreg_lowpart_offset (data->nominal_mode,
				     GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    MEM_OFFSET (data->stack_parm) + offset);
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  int align = STACK_SLOT_ALIGNMENT (data->arg.type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->arg.type));
	  if (align < (int)GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm))
	      && ((optab_handler (movmisalign_optab,
				  GET_MODE (data->entry_parm))
		   != CODE_FOR_nothing)
		  || targetm.slow_unaligned_access (GET_MODE (data->entry_parm),
						    align)))
	    align = GET_MODE_ALIGNMENT (GET_MODE (data->entry_parm));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  align = MEM_ALIGN (data->stack_parm);
	  set_mem_attributes (data->stack_parm, parm, 1);
	  set_mem_align (data->stack_parm, align);
	}

      dest = validize_mem (copy_rtx (data->stack_parm));
      src = validize_mem (copy_rtx (data->entry_parm));

      if (TYPE_EMPTY_P (data->arg.type))
	/* Empty types don't really need to be copied.  */;
      else if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->arg.type)),
			   BLOCK_OP_NORMAL);
	}
      else
	{
	  if (!REG_P (src))
	    src = force_reg (GET_MODE (src), src);
	  emit_move_insn (dest, src);
	}
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  set_parm_rtl (parm, data->stack_parm);
}
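
/* Putting the pieces together: an argument whose ABI mode differs from
   its nominal mode is first copied into a pseudo and converted in the
   deferred conversion sequence, and then stored into its stack slot,
   using a block move when the source is a (possibly misaligned) MEM and
   a plain register store otherwise.  */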

/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (fnargs[i]);
	  imag = DECL_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_parm_rtl (parm, tmp);

	  real = DECL_INCOMING_RTL (fnargs[i]);
	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  i++;
	}
    }
}

/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	{
	  unsigned int align
	    = targetm.calls.function_arg_boundary (data.arg.mode,
						   data.arg.type);
	  align = MINIMUM_ALIGNMENT (data.arg.type, data.arg.mode, align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	  /* For arguments that occupy no space in the parameter
	     passing area, have non-zero size and have address taken,
	     force creation of a stack slot so that they have distinct
	     address from other parameters.  */
	  if (TYPE_EMPTY_P (data.arg.type)
	      && TREE_ADDRESSABLE (parm)
	      && data.entry_parm == data.stack_parm
	      && MEM_P (data.entry_parm)
	      && int_size_in_bytes (data.arg.type))
	    data.stack_parm = NULL_RTX;
	}

      /* Record permanently how this parm was passed.  */
      if (data.arg.pass_by_reference)
	{
	  rtx incoming_rtl
	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.arg.type)),
			   data.entry_parm);
	  set_decl_incoming_rtl (parm, incoming_rtl, true);
	}
      else
	set_decl_incoming_rtl (parm, data.entry_parm, false);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
	assign_parm_setup_block (&all, parm, &data);
      else if (data.arg.pass_by_reference || use_register_for_decl (parm))
	assign_parm_setup_reg (&all, parm, &data);
      else
	assign_parm_setup_stack (&all, parm, &data);

      if (cfun->stdarg && !DECL_CHAIN (parm))
	assign_parms_setup_varargs (&all, &data, false);

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.arg);
    }

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result. As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	{
	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
	  x = addr;
	}
      else
	{
	  SET_DECL_VALUE_EXPR (result,
			       build1 (INDIRECT_REF, TREE_TYPE (result),
				       all.function_result_decl));
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      set_parm_rtl (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = aligned_upper_bound (crtl->args.size,
					 PARM_BOUNDARY / BITS_PER_UNIT);

  if (ARGS_GROW_DOWNWARD)
    {
      crtl->args.arg_offset_rtx
	= (all.stack_args_size.var == 0
	   ? gen_int_mode (-all.stack_args_size.constant, Pmode)
	   : expand_expr (size_diffop (all.stack_args_size.var,
				       size_int (-all.stack_args_size.constant)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
    }
  else
    crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
							 TREE_TYPE (fndecl),
							 crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}

/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
	*walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
	       && !TYPE_SIZES_GIMPLIFIED (t))
	{
	  gimplify_type_sizes (t, (gimple_seq *) data);
	  *walk_subtrees = 1;
	}
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (gimple_seq *cleanup)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.arg.type,
				    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.arg.pass_by_reference)
	{
	  tree type = TREE_TYPE (data.arg.type);
	  function_arg_info orig_arg (type, data.arg.named);
	  if (reference_callee_copied (&all.args_so_far_v, orig_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over
		     to the local copy, as its address will be taken,
		     not the PARM's.  Keep the PARM's address taken flag
		     as we'll query it during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		  if (DECL_NOT_GIMPLE_REG_P (parm))
		    DECL_NOT_GIMPLE_REG_P (local) = 1;

		  if (!is_gimple_reg (local)
		      && flag_stack_reuse != SR_NONE)
		    {
		      tree clobber = build_clobber (type);
		      gimple *clobber_stmt;
		      clobber_stmt = gimple_build_assign (local, clobber);
		      gimple_seq_add_stmt (cleanup, clobber_stmt);
		    }
		}
	      else
		{
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_reg (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
					      DECL_ALIGN (parm),
					      max_int_size_in_bytes (type));
		  /* The call has been built for a variable-sized object.  */
		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      gimplify_assign (local, parm, &stmts);

	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  fnargs.release ();

  return stmts;
}
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of the stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specified boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
   INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
   callers pass in the total size of args so far as
   INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
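/* A worked example of the two roundings under assumed values (an editor's
   illustration, not target documentation): with PARM_BOUNDARY == 32 and
   TARGET_FUNCTION_ARG_BOUNDARY returning 64, a 4-byte argument at initial
   offset 4 first has its starting offset rounded up to the 8-byte boundary
   (4 -> 8); independently, its size is rounded up to a multiple of
   PARM_BOUNDARY (4 bytes is already a multiple, so it stays 4).  Both use
   the standard power-of-two round-up:  */
#if 0
  offset = (offset + align - 1) & ~(align - 1);	/* 4 -> 8 for align == 8 */
#endif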
void
locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
		     int reg_parm_stack_space, int partial,
		     tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  pad_direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var
	      || !ordered_p (initial_offset_ptr->constant,
			     reg_parm_stack_space))
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else
	    initial_offset_ptr->constant
	      = ordered_max (initial_offset_ptr->constant,
			     reg_parm_stack_space);
	}
    }

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree = (type
	      ? arg_size_in_bytes (type)
	      : size_int (GET_MODE_SIZE (passed_mode)));
  where_pad = targetm.calls.function_arg_padding (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							      type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  if (ARGS_GROW_DOWNWARD)
    {
      locate->slot_offset.constant = -initial_offset_ptr->constant;
      if (initial_offset_ptr->var)
	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					      initial_offset_ptr->var);

      {
	tree s2 = sizetree;
	if (where_pad != PAD_NONE
	    && (!tree_fits_uhwi_p (sizetree)
		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
	SUB_PARM_SIZE (locate->slot_offset, s2);
      }

      locate->slot_offset.constant += part_size_in_regs;

      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (&locate->slot_offset, boundary,
			      &locate->alignment_pad);

      locate->size.constant = (-initial_offset_ptr->constant
			       - locate->slot_offset.constant);
      if (initial_offset_ptr->var)
	locate->size.var = size_binop (MINUS_EXPR,
				       size_binop (MINUS_EXPR,
						   ssize_int (0),
						   initial_offset_ptr->var),
				       locate->slot_offset.var);

      /* Pad_below needs the pre-rounded size to know how much to pad
	 below.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
	pad_below (&locate->offset, passed_mode, sizetree);
    }
  else
    {
      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (initial_offset_ptr, boundary,
			      &locate->alignment_pad);
      locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
      if (passed_mode != BLKmode)
	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

      /* Pad_below needs the pre-rounded size to know how much to pad below
	 so this must be done before rounding up.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
	pad_below (&locate->offset, passed_mode, sizetree);

      if (where_pad != PAD_NONE
	  && (!tree_fits_uhwi_p (sizetree)
	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

      ADD_PARM_SIZE (locate->size, sizetree);

      locate->size.constant -= part_size_in_regs;
    }

  locate->offset.constant
    += targetm.calls.function_arg_offset (passed_mode, type);
}
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  poly_int64 save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  poly_int64 sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      unsigned int misalign;
      if (offset_ptr->var
	  || !known_misalignment (offset_ptr->constant + sp_offset,
				  boundary_in_bytes, &misalign))
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
	  tree rounded;
	  if (ARGS_GROW_DOWNWARD)
	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
	  else
	    rounded = round_up (offset, boundary / BITS_PER_UNIT);

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  if (ARGS_GROW_DOWNWARD)
	    offset_ptr->constant -= misalign;
	  else
	    offset_ptr->constant += -misalign & (boundary_in_bytes - 1);

	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
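/* Editor's sketch of the constant-offset path above: adding
   (-misalign & (boundary_in_bytes - 1)) is the usual branch-free round-up.
   For example, with boundary_in_bytes == 8 and offset 13, misalign == 5
   and 13 + (-5 & 7) == 13 + 3 == 16.  A stand-alone equivalent:  */
#if 0
#include <stdint.h>
static inline int64_t
round_up_to (int64_t offset, int64_t align)	/* ALIGN is a power of two.  */
{
  int64_t misalign = offset & (align - 1);
  return offset + (-misalign & (align - 1));
}
#endif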
static void
pad_below (struct args_size *offset_ptr, machine_mode passed_mode,
	   tree sizetree)
{
  unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
  unsigned int misalign;
  if (passed_mode != BLKmode
      && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
    offset_ptr->constant += -misalign & (align - 1);
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, align);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}
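/* A minimal user-level example of the situation this predicate detects
   (editor's illustration): V is a register candidate live across setjmp
   and set again afterward, so after longjmp it may hold either value
   unless it is declared volatile.  */
#if 0
#include <setjmp.h>
jmp_buf env;
int
g (void)
{
  int v = 1;			/* register candidate crossing setjmp */
  if (setjmp (env) == 0)
    {
      v = 2;			/* set again: may be lost by longjmp */
      longjmp (env, 1);
    }
  return v;			/* 1 or 2; -Wclobbered warns about V */
}
#endif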
/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after calling flow_analysis before register
   allocation since that will clobber the pseudo-regs to hard
   regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (VAR_P (decl)
	  && DECL_RTL_SET_P (decl)
	  && REG_P (DECL_RTL (decl))
	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
		 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
}
/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
	      != prev_super))
	BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
	{
	  BLOCK_FRAGMENT_CHAIN (block)
	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
	    BLOCK_SAME_RANGE (block) = 0;
	}
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);

  if (block == NULL_TREE)
    return;

  auto_vec<tree, 10> block_stack;

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
}
/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx_insn *insns, tree current_block,
		  vec<tree> *p_block_stack)
{
  rtx_insn *insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      tree origin;

	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
	      origin = block;

	      if (prev_end)
		BLOCK_SAME_RANGE (prev_end) = 0;
	      prev_end = NULL_TREE;

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);

		  BLOCK_SAME_RANGE (new_block) = 0;
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      if (prev_beg == current_block && prev_beg)
		BLOCK_SAME_RANGE (block) = 1;

	      prev_beg = origin;

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  tree super;
		  if (block != origin)
		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
								      (origin))
				   == current_block);
		  if (p_block_stack->is_empty ())
		    super = current_block;
		  else
		    {
		      super = p_block_stack->last ();
		      gcc_assert (super == current_block
				  || BLOCK_FRAGMENT_ORIGIN (super)
				     == current_block);
		    }
		  BLOCK_SUPERCONTEXT (block) = super;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = origin;
		}
	      p_block_stack->safe_push (block);
	    }
	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = p_block_stack->pop ();
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
	      prev_beg = NULL_TREE;
	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
			 ? NOTE_BLOCK (insn) : NULL_TREE;
	    }
	}
      else
	{
	  prev_beg = NULL_TREE;
	  if (prev_end)
	    BLOCK_SAME_RANGE (prev_end) = 0;
	  prev_end = NULL_TREE;
	}
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}
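/* The loop above is the standard three-pointer in-place reversal of a
   singly linked chain; an equivalent stand-alone sketch (editor's
   illustration using a hypothetical node type):  */
#if 0
struct node { struct node *chain; };
static struct node *
nreverse (struct node *t)
{
  struct node *prev = 0, *next;
  for (; t; t = next)
    {
      next = t->chain;
      t->chain = prev;		/* point each node back at its predecessor */
      prev = t;
    }
  return prev;			/* old last element is the new head */
}
#endif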
/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}
/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}
static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}
/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
		   : optimization_default_node);

      if (!opts)
	opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
	{
	  optimization_current_node = opts;
	  cl_optimization_restore (&global_options, &global_options_set,
				   TREE_OPTIMIZATION (opts));
	}

      targetm.set_current_function (fndecl);
      this_fn_optabs = this_target_optabs;

      /* Initialize global alignment variables after op.  */
      parse_alignment_opts ();

      if (opts != optimization_default_node)
	{
	  init_tree_optimization_optabs (opts);
	  if (TREE_OPTIMIZATION_OPTABS (opts))
	    this_fn_optabs = (struct target_optabs *)
	      TREE_OPTIMIZATION_OPTABS (opts);
	}
    }
}
/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun, bool force)
{
  if (cfun != new_cfun || force)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
      redirect_edge_var_map_empty ();
    }
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static vec<function *> cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   current_function_decl accordingly.  */

void
push_cfun (struct function *new_cfun)
{
  gcc_assert ((!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
  set_cfun (new_cfun);
}

/* Pop cfun from the stack.  Also set current_function_decl accordingly.  */

void
pop_cfun (void)
{
  struct function *new_cfun = cfun_stack.pop ();
  /* When in_dummy_function, we do have a cfun but current_function_decl is
     NULL.  We also allow pushing NULL cfun and subsequently changing
     current_function_decl to something else and have both restored by
     pop_cfun.  */
  gcc_checking_assert (in_dummy_function
		       || !cfun
		       || current_function_decl == cfun->decl);
  set_cfun (new_cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
}
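/* Typical usage of the pair above (editor's sketch; OTHER_FNDECL is a
   hypothetical FUNCTION_DECL): temporarily enter another function's
   context, work against its cfun, and restore the previous context.
   Pushes and pops must nest.  */
#if 0
  push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
  /* ... inspect or emit into OTHER_FNDECL's function body ... */
  pop_cfun ();
#endif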
/* Return value of funcdef and increase it.  */
int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Return value of funcdef.  */
int
get_last_funcdef_no (void)
{
  return funcdef_no;
}

/* Allocate and initialize the stack usage info data structure for the
   current function.  */
static void
allocate_stack_usage_info (void)
{
  gcc_assert (!cfun->su);
  cfun->su = ggc_cleared_alloc<stack_usage> ();
  cfun->su->static_stack_size = -1;
}
/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_cleared_alloc<function> ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();
    }

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      tree result = DECL_RESULT (fndecl);

      if (!abstract_p)
	{
	  /* Now that we have activated any function-specific attributes
	     that might affect layout, particularly vector modes, relayout
	     each of the parameters and the result.  */
	  relayout_decl (result);
	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
	       parm = DECL_CHAIN (parm))
	    relayout_decl (parm);

	  /* Similarly relayout the function decl.  */
	  targetm.target_option.relayout_function (fndecl);
	}

      if (!abstract_p && aggregate_value_p (result, fndecl))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  cfun->returns_pcc_struct = 1;
#endif
	  cfun->returns_struct = 1;
	}

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
	 but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
      cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;

      if (!profile_flag && !flag_instrument_function_entry_exit)
	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;

      if (flag_callgraph_info)
	allocate_stack_usage_info ();
    }

  /* Don't enable begin stmt markers if var-tracking at assignments is
     disabled.  The markers make little sense without the variable
     binding annotations among them.  */
  cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
    && MAY_HAVE_DEBUG_MARKER_STMTS;
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  /* When in_dummy_function we might be in the middle of a pop_cfun and
     current_function_decl and cfun may not match.  */
  gcc_assert (in_dummy_function
	      || (!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = fndecl;
  allocate_struct_function (fndecl, false);
}
/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!get_last_insn ());

  if (in_dummy_function)
    crtl->abi = &default_function_abi;
  else
    crtl->abi = &fndecl_abi (cfun->decl).base_abi ();

  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info && !flag_callgraph_info)
    allocate_stack_usage_info ();

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
void
push_dummy_function (bool with_decl)
{
  tree fn_decl, fn_type, fn_result_decl;

  gcc_assert (!in_dummy_function);
  in_dummy_function = true;

  if (with_decl)
    {
      fn_type = build_function_type_list (void_type_node, NULL_TREE);
      fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			    fn_type);
      fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
				   NULL_TREE, void_type_node);
      DECL_RESULT (fn_decl) = fn_result_decl;
    }
  else
    fn_decl = NULL_TREE;

  push_struct_function (fn_decl);
}
/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  push_dummy_function (false);
  prepare_function_start ();
}
/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  /* Initialize backend, if needed.  */
  initialize_rtl ();

  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

void
stack_protect_epilogue (void)
{
  tree guard_decl = crtl->stack_protect_guard_decl;
  rtx_code_label *label = gen_label_rtx ();
  rtx x, y;
  rtx_insn *seq = NULL;

  x = expand_normal (crtl->stack_protect_guard);

  if (targetm.have_stack_protect_combined_test () && guard_decl)
    {
      gcc_assert (DECL_P (guard_decl));
      y = DECL_RTL (guard_decl);
      /* Allow the target to compute address of Y and compare it with X without
	 leaking Y into a register.  This combined address + compare pattern
	 allows the target to prevent spilling of any intermediate results by
	 splitting it after register allocator.  */
      seq = targetm.gen_stack_protect_combined_test (x, y, label);
    }
  else
    {
      if (guard_decl)
	y = expand_normal (guard_decl);
      else
	y = const0_rtx;

      /* Allow the target to compare Y with X without leaking either into
	 a register.  */
      if (targetm.have_stack_protect_test ())
	seq = targetm.gen_stack_protect_test (x, y, label);
    }

  if (seq)
    emit_insn (seq);
  else
    emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  rtx_insn *tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
  free_temp_slots ();
  emit_label (label);
}
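/* What the emitted RTL boils down to, expressed as C (editor's sketch;
   the real comparison may be a single combined target pattern as above,
   and FRAME_GUARD_SLOT is a hypothetical name for the in-frame copy):  */
#if 0
  if (frame_guard_slot != __stack_chk_guard)	/* X vs. Y above */
    __stack_chk_fail ();			/* noreturn, predicted cold */
  /* otherwise fall through to the normal epilogue at LABEL */
#endif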
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  tree res = DECL_RESULT (subr);
  if (aggregate_value_p (res, subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (res));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (res))
	    {
	      x = gen_rtx_MEM (DECL_MODE (res), x);
	      set_mem_attributes (x, res, 1);
	    }
	  set_parm_rtl (res, x);
	}
    }
  else if (DECL_MODE (res) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    set_parm_rtl (res, NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (res);

      /* If we may coalesce this result, make sure it has the expected mode
	 in case it was promoted.  But we need not bother about BLKmode.  */
      machine_mode promoted_mode
	= flag_tree_coalesce_vars && is_gimple_reg (res)
	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
	  : BLKmode;

      if (promoted_mode != BLKmode)
	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
      else if (TYPE_MODE (return_type) != BLKmode
	       && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      set_parm_rtl (res, gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (res) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain;
      rtx_insn *insn;
      int unsignedp;

      local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      set_parm_rtl (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      if (GET_MODE (local) != GET_MODE (chain))
	{
	  convert_move (local, chain, unsignedp);
	  insn = get_last_insn ();
	}
      else
	insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
	set_dst_reg_note (insn, REG_EQUIV, chain, local);

      /* If we aren't optimizing, save the static chain onto the stack.  */
      if (!optimize)
	{
	  tree saved_static_chain_decl
	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
			  DECL_NAME (parm), TREE_TYPE (parm));
	  rtx saved_static_chain_rtx
	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
	  emit_move_insn (saved_static_chain_rtx, chain);
	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
	}
    }

  /* The following was moved from init_function_start.
     The move was supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      gcc_assert (DECL_RTL_SET_P (var));

      t_save = build4 (ARRAY_REF,
		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, hard_frame_pointer_rtx);
      update_nonlocal_goto_save_area ();
    }

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
}
void
pop_dummy_function (void)
{
  pop_cfun ();
  in_dummy_function = false;
}

/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_dummy_function ();
}
/* Helper for diddle_return_value.  */

static void
diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
{
  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  diddle_return_value_1 (doit, arg, crtl->return_rtx);
}
static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx_insn *insn, *seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
	    start_sequence ();
	    if (STACK_CHECK_MOVING_SP)
	      anti_adjust_stack_and_probe (max_frame_size, true);
	    else
	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
	    seq = get_insns ();
	    end_sequence ();
	    set_insn_locations (seq, prologue_location);
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depended on this.  */
  set_curr_insn_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  rtx_insn *clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard
      && targetm.stack_protect_runtime_enabled_p ()
      && naked_return_label == NULL_RTX)
    stack_protect_epilogue ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = crtl->return_rtx;
	  complex_mode cmode;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that crtl->return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && REG_P (real_decl_rtl)
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
		   && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));

	      promote_function_mode (TREE_TYPE (decl_result),
				     GET_MODE (decl_rtl), &unsignedp,
				     TREE_TYPE (current_function_decl), 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if ((cfun->returns_struct || cfun->returns_pcc_struct)
      && !targetm.calls.omit_struct_return_reg)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
      value_address = convert_memory_address (mode, value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  Don't emit
     it if clobber_after is a barrier, then the previous basic block
     certainly doesn't fall thru into the exit block.  */
  if (!BARRIER_P (clobber_after))
    {
      start_sequence ();
      clobber_return_register ();
      rtx_insn *seq = get_insns ();
      end_sequence ();

      emit_insn_after (seq, clobber_after);
    }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard
      && targetm.stack_protect_runtime_enabled_p ()
      && naked_return_label)
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (copy_rtx (ret)),
		      crtl->args.internal_arg_pointer);
      rtx_insn *seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* If debugging dumps are requested, dump information about how the
   target handled -fstack-check=clash for the prologue.

   PROBES describes what if any probes were emitted.

   RESIDUALS indicates if the prologue had any residual allocation
   (i.e. total allocation was not a multiple of PROBE_INTERVAL).  */

void
dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
{
  if (!dump_file)
    return;

  switch (probes)
    {
    case NO_PROBE_NO_FRAME:
      fprintf (dump_file,
	       "Stack clash no probe no stack adjustment in prologue.\n");
      break;
    case NO_PROBE_SMALL_FRAME:
      fprintf (dump_file,
	       "Stack clash no probe small stack adjustment in prologue.\n");
      break;
    case PROBE_INLINE:
      fprintf (dump_file, "Stack clash inline probes in prologue.\n");
      break;
    case PROBE_LOOP:
      fprintf (dump_file, "Stack clash probe loop in prologue.\n");
      break;
    }

  if (residuals)
    fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
  else
    fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");

  if (frame_pointer_needed)
    fprintf (dump_file, "Stack clash frame pointer needed.\n");
  else
    fprintf (dump_file, "Stack clash no frame pointer needed.\n");

  if (TREE_THIS_VOLATILE (cfun->decl))
    fprintf (dump_file,
	     "Stack clash noreturn prologue, assuming no implicit"
	     " probes in caller.\n");
  else
    fprintf (dump_file,
	     "Stack clash not noreturn prologue.\n");
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
{
  rtx_insn *tmp;
  hash_table<insn_cache_hasher> *hash = *hashp;

  if (hash == NULL)
    *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      rtx *slot = hash->find_slot (tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}
/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  hash_table<insn_cache_hasher> *hash;
  rtx *slot;

  hash = epilogue_insn_hash;
  if (!hash || !hash->find (insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !hash->find (insn))
	return;
    }

  slot = hash->find_slot (copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i;
      for (i = seq->len () - 1; i >= 0; i--)
	if (hash->find (seq->element (i)))
	  return true;
      return false;
    }

  return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
}
bool
prologue_contains (const rtx_insn *insn)
{
  return contains (insn, prologue_insn_hash);
}

bool
epilogue_contains (const rtx_insn *insn)
{
  return contains (insn, epilogue_insn_hash);
}

bool
prologue_epilogue_contains (const rtx_insn *insn)
{
  if (contains (insn, prologue_insn_hash))
    return true;
  if (contains (insn, epilogue_insn_hash))
    return true;
  return false;
}

void
record_prologue_seq (rtx_insn *seq)
{
  record_insns (seq, NULL, &prologue_insn_hash);
}

void
record_epilogue_seq (rtx_insn *seq)
{
  record_insns (seq, NULL, &epilogue_insn_hash);
}
/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx_insn *returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
/* Return a sequence to be used as the split prologue for the current
   function, or NULL.  */

static rtx_insn *
make_split_prologue_seq (void)
{
  if (!flag_split_stack
      || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
    return NULL;

  start_sequence ();
  emit_insn (targetm.gen_split_stack_prologue ());
  rtx_insn *seq = get_insns ();
  end_sequence ();

  record_insns (seq, NULL, &prologue_insn_hash);
  set_insn_locations (seq, prologue_location);

  return seq;
}
/* Return a sequence to be used as the prologue for the current function,
   or NULL.  */

static rtx_insn *
make_prologue_seq (void)
{
  if (!targetm.have_prologue ())
    return NULL;

  start_sequence ();
  rtx_insn *seq = targetm.gen_prologue ();
  emit_insn (seq);

  /* Insert an explicit USE for the frame pointer
     if the profiling is on and the frame pointer is required.  */
  if (crtl->profile && frame_pointer_needed)
    emit_use (hard_frame_pointer_rtx);

  /* Retain a map of the prologue insns.  */
  record_insns (seq, NULL, &prologue_insn_hash);
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Ensure that instructions are not moved into the prologue when
     profiling is on.  The call to the profiling routine can be
     emitted within the live range of a call-clobbered register.  */
  if (!targetm.profile_before_prologue () && crtl->profile)
    emit_insn (gen_blockage ());

  seq = get_insns ();
  end_sequence ();
  set_insn_locations (seq, prologue_location);

  return seq;
}
/* Emit a sequence of insns to zero the call-used registers before RET
   according to ZERO_REGS_TYPE.  */

static void
gen_call_used_regs_seq (rtx_insn *ret, unsigned int zero_regs_type)
{
  bool only_gpr = true;
  bool only_used = true;
  bool only_arg = true;

  /* No need to zero call-used-regs in main ().  */
  if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
    return;

  /* No need to zero call-used-regs if __builtin_eh_return is called
     since it isn't a normal function return.  */
  if (crtl->calls_eh_return)
    return;

  /* If only_gpr is true, only zero call-used registers that are
     general-purpose registers; if only_used is true, only zero
     call-used registers that are used in the current function;
     if only_arg is true, only zero call-used registers that pass
     parameters defined by the platform's calling convention.  */

  using namespace zero_regs_flags;

  only_gpr = zero_regs_type & ONLY_GPR;
  only_used = zero_regs_type & ONLY_USED;
  only_arg = zero_regs_type & ONLY_ARG;

  /* For each of the hard registers, we should zero it if:
     1. it is a call-used register;
     and 2. it is not a fixed register;
     and 3. it is not live at the return of the routine;
     and 4. it is a general register if only_gpr is true;
     and 5. it is used in the routine if only_used is true;
     and 6. it is a register that passes parameters if only_arg is true.  */

  /* First, prepare the data flow information.  */
  basic_block bb = BLOCK_FOR_INSN (ret);
  auto_bitmap live_out;
  bitmap_copy (live_out, df_get_live_out (bb));
  df_simulate_initialize_backwards (bb, live_out);
  df_simulate_one_insn_backwards (bb, ret, live_out);

  HARD_REG_SET selected_hardregs;
  CLEAR_HARD_REG_SET (selected_hardregs);
  for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    {
      if (!crtl->abi->clobbers_full_reg_p (regno))
	continue;
      if (fixed_regs[regno])
	continue;
      if (REGNO_REG_SET_P (live_out, regno))
	continue;
      if (only_gpr
	  && !TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], regno))
	continue;
      if (only_used && !df_regs_ever_live_p (regno))
	continue;
      if (only_arg && !FUNCTION_ARG_REGNO_P (regno))
	continue;
#ifdef LEAF_REG_REMAP
      if (crtl->uses_only_leaf_regs && LEAF_REG_REMAP (regno) < 0)
	continue;
#endif

      /* Now this is a register that we might want to zero.  */
      SET_HARD_REG_BIT (selected_hardregs, regno);
    }

  if (hard_reg_set_empty_p (selected_hardregs))
    return;

  /* Now that we have a hard register set that needs to be zeroed, pass it to
     target to generate zeroing sequence.  */
  HARD_REG_SET zeroed_hardregs;
  start_sequence ();
  zeroed_hardregs = targetm.calls.zero_call_used_regs (selected_hardregs);
  rtx_insn *seq = get_insns ();
  end_sequence ();
  if (seq)
    {
      /* Emit the memory blockage and register clobber asm volatile before
	 the whole sequence.  */
      start_sequence ();
      expand_asm_reg_clobber_mem_blockage (zeroed_hardregs);
      rtx_insn *seq_barrier = get_insns ();
      end_sequence ();

      emit_insn_before (seq_barrier, ret);
      emit_insn_before (seq, ret);

      /* Update the data flow information.  */
      crtl->must_be_zero_on_return |= zeroed_hardregs;
      df_set_bb_dirty (EXIT_BLOCK_PTR_FOR_FN (cfun));
    }
}
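/* Editor's sketch of how ZERO_REGS_TYPE is composed (assuming the usual
   mapping of -fzero-call-used-regs=used-gpr-arg onto these flags): all
   three restricting bits are set, so only general-purpose argument
   registers that the function actually used are zeroed.  */
#if 0
  unsigned int zero_regs_type = zero_regs_flags::ONLY_GPR
				| zero_regs_flags::ONLY_USED
				| zero_regs_flags::ONLY_ARG;
  gen_call_used_regs_seq (ret, zero_regs_type);
#endif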
/* Return a sequence to be used as the epilogue for the current function,
   or NULL.  */

static rtx_insn *
make_epilogue_seq (void)
{
  if (!targetm.have_epilogue ())
    return NULL;

  start_sequence ();
  emit_note (NOTE_INSN_EPILOGUE_BEG);
  rtx_insn *seq = targetm.gen_epilogue ();
  if (seq)
    emit_jump_insn (seq);

  /* Retain a map of the epilogue insns.  */
  record_insns (seq, NULL, &epilogue_insn_hash);
  set_insn_locations (seq, epilogue_location);

  seq = get_insns ();
  rtx_insn *returnjump = get_last_insn ();
  end_sequence ();

  if (JUMP_P (returnjump))
    set_return_jump_label (returnjump);

  return seq;
}
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
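
/* An illustrative example (not part of the original sources): a function
   with a cheap early exit, e.g.

     int f (struct s *p) { return p ? heavy_work (p) : -1; }

   benefits from this scheme, since the prologue can be sunk past the
   null check and the early-return path performs no frame setup at all.
   Here 'heavy_work' is a hypothetical callee.  */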
void
thread_prologue_and_epilogue_insns (void)
{
  df_analyze ();

  /* Can't deal with multiple successors of the entry block at the
     moment.  Function should always have at least one entry
     point.  */
  gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  edge orig_entry_edge = entry_edge;

  rtx_insn *split_prologue_seq = make_split_prologue_seq ();
  rtx_insn *prologue_seq = make_prologue_seq ();
  rtx_insn *epilogue_seq = make_epilogue_seq ();

  /* Try to perform a kind of shrink-wrapping, making sure the
     prologue/epilogue is emitted only around those parts of the
     function that require it.  */
  try_shrink_wrapping (&entry_edge, prologue_seq);

  /* If the target can handle splitting the prologue/epilogue into separate
     components, try to shrink-wrap these components separately.  */
  try_shrink_wrapping_separate (entry_edge->dest);

  /* If that did anything for any component we now need to generate the
     "main" prologue again.  Because some targets require some of these
     to be called in a specific order (i386 requires the split prologue
     to be first, for example), we create all three sequences again here.
     If this does not work for some target, that target should not enable
     separate shrink-wrapping.  */
  if (crtl->shrink_wrapped_separate)
    {
      split_prologue_seq = make_split_prologue_seq ();
      prologue_seq = make_prologue_seq ();
      epilogue_seq = make_epilogue_seq ();
    }

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;

  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      rtx_insn *prev, *last, *trial;

      if (e->flags & EDGE_FALLTHRU)
        continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
        continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      if (trial == last)
        continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }

  edge exit_fallthru_edge
    = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);

  if (exit_fallthru_edge)
    {
      if (epilogue_seq)
        {
          insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
          commit_edge_insertions ();

          /* The epilogue insns we inserted may cause the exit edge to no
             longer be fallthru.  */
          FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
            if (((e->flags & EDGE_FALLTHRU) != 0)
                && returnjump_p (BB_END (e->src)))
              e->flags &= ~EDGE_FALLTHRU;
        }
      else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
        {
          /* We have a fall-through edge to the exit block, the source is not
             at the end of the function, and there will be an assembler
             epilogue at the end of the function.
             We can't use force_nonfallthru here, because that would try to
             use return.  Inserting a jump 'by hand' is extremely messy, so
             we take advantage of cfg_layout_finalize using
             fixup_fallthru_exit_predecessor.  */
          cfg_layout_initialize (0);
          basic_block cur_bb;
          FOR_EACH_BB_FN (cur_bb, cfun)
            if (cur_bb->index >= NUM_FIXED_BLOCKS
                && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
              cur_bb->aux = cur_bb->next_bb;
          cfg_layout_finalize ();
        }
    }

  /* Insert the prologue.  */

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (split_prologue_seq || prologue_seq)
    {
      rtx_insn *split_prologue_insn = split_prologue_seq;
      if (split_prologue_seq)
        {
          while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
            split_prologue_insn = NEXT_INSN (split_prologue_insn);
          insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
        }

      rtx_insn *prologue_insn = prologue_seq;
      if (prologue_seq)
        {
          while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
            prologue_insn = NEXT_INSN (prologue_insn);
          insert_insn_on_edge (prologue_seq, entry_edge);
        }

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      if (split_prologue_insn
          && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
        split_prologue_insn = NULL;
      if (prologue_insn
          && BLOCK_FOR_INSN (prologue_insn) == NULL)
        prologue_insn = NULL;
      if (split_prologue_insn || prologue_insn)
        {
          auto_sbitmap blocks (last_basic_block_for_fn (cfun));
          bitmap_clear (blocks);
          if (split_prologue_insn)
            bitmap_set_bit (blocks,
                            BLOCK_FOR_INSN (split_prologue_insn)->index);
          if (prologue_insn)
            bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
          find_many_sub_basic_blocks (blocks);
        }
    }

  default_rtl_profile ();

  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei));
       ei_next (&ei))
    {
      /* Skip those already handled, the ones that run without prologue.  */
      if (e->flags & EDGE_IGNORE)
        {
          e->flags &= ~EDGE_IGNORE;
          continue;
        }

      rtx_insn *insn = BB_END (e->src);

      if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
        continue;

      if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
        {
          start_sequence ();
          emit_note (NOTE_INSN_EPILOGUE_BEG);
          emit_insn (ep_seq);
          rtx_insn *seq = get_insns ();
          end_sequence ();

          /* Retain a map of the epilogue insns.  Used in life analysis to
             avoid getting rid of sibcall epilogue insns.  Do this before we
             actually emit the sequence.  */
          record_insns (seq, NULL, &epilogue_insn_hash);
          set_insn_locations (seq, epilogue_location);

          emit_insn_before (seq, insn);
        }
    }

  if (epilogue_seq)
    {
      rtx_insn *insn, *next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
         of such a note.  Also possibly move
         NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
         info generation.  */
      for (insn = epilogue_seq; insn; insn = next)
        {
          next = NEXT_INSN (insn);
          if (NOTE_P (insn)
              && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
            reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
        }
    }

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
  if (!targetm.have_prologue ()
      && !targetm.have_epilogue ()
      && !targetm.have_sibcall_epilogue ())
    return;

  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = prologue_insn_hash->elements ();
      rtx_insn *insn, *last = NULL, *note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
         (1) The prologue can contain loops (typically probing the stack),
             which means that the end of the prologue isn't in the first bb.
         (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (NOTE_P (insn))
            {
              if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
                note = insn;
            }
          else if (contains (insn, prologue_insn_hash))
            {
              last = insn;
              if (--len == 0)
                break;
            }
        }

      if (last)
        {
          if (note == NULL)
            {
              /* Scan forward looking for the PROLOGUE_END note.  It should
                 be right at the beginning of the block, possibly with other
                 insn notes that got moved there.  */
              for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
                {
                  if (NOTE_P (note)
                      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
                    break;
                }
            }

          /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
          if (LABEL_P (last))
            last = NEXT_INSN (last);
          reorder_insns (note, note, last);
        }
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
        {
          rtx_insn *insn, *first = NULL, *note = NULL;
          basic_block bb = e->src;

          /* Scan from the beginning until we reach the first epilogue
             insn.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (NOTE_P (insn))
                {
                  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
                    {
                      note = insn;
                      if (first != NULL)
                        break;
                    }
                }
              else if (first == NULL && contains (insn, epilogue_insn_hash))
                {
                  first = insn;
                  if (note != NULL)
                    break;
                }
            }

          if (note)
            {
              /* If the function has a single basic block, and no real
                 epilogue insns (e.g. sibcall with no cleanup), the
                 epilogue note can get scheduled before the prologue
                 note.  If we have frame related prologue insns, having
                 them scanned during the epilogue will result in a crash.
                 In this case re-order the epilogue note to just before
                 the last insn in the block.  */
              if (first == NULL)
                first = BB_END (bb);

              if (PREV_INSN (first) != note)
                reorder_insns (note, note, PREV_INSN (first));
            }
        }
    }
}
/* Returns the name of function declared by FNDECL.  */

const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 1);
}

/* Returns the name of function FN.  */

const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */

const char *
current_function_name (void)
{
  return function_name (cfun);
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      if (func->used_types_hash == NULL)
        func->used_types_hash = hash_set<tree>::create_ggc (37);

      func->used_types_hash->add (type);
    }
}
/* Given a type, insert it into the used hash table in cfun.  */

void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
        used_types_insert_helper (t, cfun);
      else
        {
          /* So this might be a type referenced by a global variable.
             Record that type so that we can later decide to emit its
             debug information.  */
          vec_safe_push (types_used_by_cur_var_decl, t);
        }
    }
}
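
/* An illustrative note (not part of the original sources): for a
   declaration such as

     struct foo *arr[4];

   the loop above strips the unnamed array and pointer layers, so it is
   the named type 'struct foo' that gets recorded as used.  */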
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
                                iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}

/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
                         types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
        types_used_by_vars_hash
          = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
        {
          struct types_used_by_vars_entry *entry;
          entry = ggc_alloc<types_used_by_vars_entry> ();
          entry->type = type;
          entry->var_decl = var_decl;
          *slot = entry;
        }
    }
}
namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  /* prepare_shrink_wrap is sensitive to the block structure of the control
     flow graph, so clean it up first.  */
  if (optimize)
    cleanup_cfg (CLEANUP_EXPENSIVE);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Some non-cold blocks may now be only reachable from cold blocks.
     Fix that up.  */
  fixup_partitions ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue;
     clean those up.  */
  cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info || flag_callgraph_info)
    output_stack_usage ();

  return 0;
}
/* Record a final call to CALLEE at LOCATION.  */

void
record_final_call (tree callee, location_t location)
{
  struct callinfo_callee datum = { location, callee };
  vec_safe_push (cfun->su->callees, datum);
}

/* Record a dynamic allocation made for DECL_OR_EXP.  */

void
record_dynamic_alloc (tree decl_or_exp)
{
  struct callinfo_dalloc datum;

  if (DECL_P (decl_or_exp))
    {
      datum.location = DECL_SOURCE_LOCATION (decl_or_exp);
      const char *name = lang_hooks.decl_printable_name (decl_or_exp, 2);
      const char *dot = strrchr (name, '.');
      if (dot)
        name = dot + 1;
      datum.name = ggc_strdup (name);
    }
  else
    {
      datum.location = EXPR_LOCATION (decl_or_exp);
      datum.name = NULL;
    }

  vec_safe_push (cfun->su->dallocs, datum);
}
namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
namespace {

const pass_data pass_data_zero_call_used_regs =
{
  RTL_PASS, /* type */
  "zero_call_used_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_zero_call_used_regs : public rtl_opt_pass
{
public:
  pass_zero_call_used_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_zero_call_used_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_zero_call_used_regs

unsigned int
pass_zero_call_used_regs::execute (function *fun)
{
  using namespace zero_regs_flags;
  unsigned int zero_regs_type = UNSET;

  tree attr_zero_regs = lookup_attribute ("zero_call_used_regs",
                                          DECL_ATTRIBUTES (fun->decl));

  /* Get the type of zero_call_used_regs from function attribute.
     We have filtered out invalid attribute values already at this point.  */
  if (attr_zero_regs)
    {
      /* The TREE_VALUE of an attribute is a TREE_LIST whose TREE_VALUE
         is the attribute argument's value.  */
      attr_zero_regs = TREE_VALUE (attr_zero_regs);
      gcc_assert (TREE_CODE (attr_zero_regs) == TREE_LIST);
      attr_zero_regs = TREE_VALUE (attr_zero_regs);
      gcc_assert (TREE_CODE (attr_zero_regs) == STRING_CST);
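
      /* An illustrative note (not part of the original sources): for
           __attribute__ ((zero_call_used_regs ("used-gpr")))
         the STRING_CST extracted above is "used-gpr", which the loop
         below maps to the corresponding zero_call_used_regs_opts
         flag.  */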
      for (unsigned int i = 0; zero_call_used_regs_opts[i].name != NULL; ++i)
        if (strcmp (TREE_STRING_POINTER (attr_zero_regs),
                    zero_call_used_regs_opts[i].name) == 0)
          {
            zero_regs_type = zero_call_used_regs_opts[i].flag;
            break;
          }
    }

  if (!zero_regs_type)
    zero_regs_type = flag_zero_call_used_regs;

  /* No need to zero call-used-regs when no user request is present.  */
  if (!(zero_regs_type & ENABLED))
    return 0;

  edge e;
  edge_iterator ei;

  /* This pass needs data flow information.  */
  df_analyze ();

  /* Iterate over the function's return instructions and insert any
     register zeroing required by the -fzero-call-used-regs command-line
     option or the "zero_call_used_regs" function attribute.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      rtx_insn *insn = BB_END (e->src);
      if (JUMP_P (insn) && ANY_RETURN_P (JUMP_LABEL (insn)))
        gen_call_used_regs_seq (insn, zero_regs_type);
    }

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_zero_call_used_regs (gcc::context *ctxt)
{
  return new pass_zero_call_used_regs (ctxt);
}
/* If CONSTRAINT is a matching constraint, then return its number.
   Otherwise, return -1.  */
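
/* Illustrative examples (not part of the original sources):
   "0" yields 0; "%1" yields 1 once the '%' commutative marker is
   skipped; "r" is not a matching constraint and yields -1.  */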
static int
matching_constraint_num (const char *constraint)
{
  if (*constraint == '%')
    constraint++;

  if (IN_RANGE (*constraint, '0', '9'))
    return strtoul (constraint, NULL, 10);

  return -1;
}
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     asm ("": "+mr" (inout));

   which is transformed very early to use explicit output and match operands:

     asm ("": "=mr" (inout) : "0" (inout));

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively, it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      int match, j;

      match = matching_constraint_num (constraint);
      if (match < 0)
        continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
          || rtx_equal_p (output, input)
          || !(REG_P (input) || SUBREG_P (input)
               || MEM_P (input) || CONSTANT_P (input))
          || !general_operand (input, GET_MODE (output)))
        continue;

      /* We can't do anything if the output is also used as input,
         as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
          break;
      if (j != ninputs)
        continue;

      /* Avoid changing the same input several times.  For
         asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
         only change it once (to out1), rather than changing it
         first to out1 and afterwards to out2.  */
      if (i > 0)
        {
          for (j = 0; j < noutputs; j++)
            if (output_matched[j] && input == SET_DEST (p_sets[j]))
              break;
          if (j != noutputs)
            continue;
        }
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, copy_rtx (input));
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      constraint
        = ASM_OPERANDS_OUTPUT_CONSTRAINT (SET_SRC (p_sets[match]));
      bool early_clobber_p = strchr (constraint, '&') != NULL;

      /* Now replace all mentions of the input with output.  We can't
         just replace the occurrence in inputs[i], as the register might
         also be used in some other input (or even in an address of an
         output), which would mean possibly increasing the number of
         inputs by one (namely 'output' in addition), which might pose
         a too complicated problem for reload to solve.  E.g. this
         situation:

           asm ("" : "=r" (output), "=m" (input) : "0" (input))

         Here 'input' is used in two occurrences as input (once for the
         input operand, once for the address in the second output operand).
         If we would replace only the occurrence of the input operand (to
         make the matching) we would be left with this:

           asm ("" : "=r" (output), "=m" (input) : "0" (output))

         Now we suddenly have two different input values (containing the
         same value, but different pseudos) where we formerly had only one.
         With more complicated asms this might lead to reload failures
         which wouldn't have happened without this pass.  So, iterate over
         all operands and replace all occurrences of the register used.

         However, if one or more of the 'input' uses have a non-matching
         constraint and the matched output operand is an early clobber
         operand, then do not replace the input operand, since by definition
         it conflicts with the output operand and cannot share the same
         register.  See PR89313 for details.  */

      for (j = 0; j < noutputs; j++)
        if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
            && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
          SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
                                              input, output);
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
          {
            if (!early_clobber_p
                || match == matching_constraint_num
                              (ASM_OPERANDS_INPUT_CONSTRAINT (op, j)))
              RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
                                                   input, output);
          }

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (VAR_P (d));
  vec_safe_push (fun->local_decls, d);
}
namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints

unsigned int
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;
  rtx pat, *p_sets;
  int noutputs;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (!INSN_P (insn))
            continue;

          pat = PATTERN (insn);
          if (GET_CODE (pat) == PARALLEL)
            p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
          else if (GET_CODE (pat) == SET)
            p_sets = &PATTERN (insn), noutputs = 1;
          else
            continue;

          if (GET_CODE (*p_sets) == SET
              && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
            match_asm_constraints_1 (insn, p_sets, noutputs);
        }
    }

  return TODO_df_finish;
}

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}
#include "gt-function.h"