/* Expands front end tree to back end RTL for GCC.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple-expr.h"
#include "stringpool.h"
#include "rtl-error.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-pass.h"
#include "cfgcleanup.h"
#include "cfgexpand.h"
#include "shrink-wrap.h"
#include "stringpool.h"
/* So we can assign to cfun in this file.  */
#undef cfun

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
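/* Worked example (illustrative, not part of the original source): with
   ALIGN == 8,

     FLOOR_ROUND (13, 8) == (13 & ~7)        ==  8
     CEIL_ROUND (13, 8)  == ((13 + 7) & ~7)  == 16

   Because only masking is used, FLOOR_ROUND (-13, 8) == -16 as well;
   plain integer division would truncate toward zero and give -8, which
   is why division is avoided for possibly-negative values.  */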
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;
/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;
/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);
/* The currently compiled function.  */
struct function *cfun = 0;
/* These hashes record the prologue and epilogue insns.  */

struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache))
  hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
  hash_table<insn_cache_hasher> *epilogue_insn_hash;
hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;

vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);
/* Stack of nested functions.  */
/* Keep track of the cfun stack.  */

static vec<function *> function_context_stack;

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.  */

void
push_function_context (void)
{
  if (cfun == 0)
    allocate_struct_function (NULL, false);

  function_context_stack.safe_push (cfun);
  set_cfun (NULL);
}
/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context (void)
{
  struct function *p = function_context_stack.pop ();
  set_cfun (p);
  current_function_decl = p->decl;

  /* Reset variables that have known state during rtx generation.  */
  virtuals_instantiated = 0;
  generating_concat_p = 1;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been parsed, but not compiled, to let
   garbage collection reclaim the memory.  */

void
free_after_parsing (struct function *f)
{
  f->language = 0;
}
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

poly_int64
get_frame_size (void)
{
  if (FRAME_GROWS_DOWNWARD)
    return -frame_offset;
  else
    return frame_offset;
}
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (poly_int64 offset, tree func)
{
  poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
  unsigned HOST_WIDE_INT limit
    = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
       /* Leave room for the fixed part of the frame.  */
       - 64 * UNITS_PER_WORD);

  if (!coeffs_in_range_p (size, 0U, limit))
    {
      unsigned HOST_WIDE_INT hwisize;
      if (size.is_constant (&hwisize))
        error_at (DECL_SOURCE_LOCATION (func),
                  "total size of local objects %wu exceeds maximum %wu",
                  hwisize, limit);
      else
        error_at (DECL_SOURCE_LOCATION (func),
                  "total size of local objects exceeds maximum %wu",
                  limit);
      return true;
    }

  return false;
}
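/* Illustrative arithmetic (assuming a typical 64-bit target; not part of
   the original source): with GET_MODE_BITSIZE (Pmode) == 64 and
   UNITS_PER_WORD == 8, the limit above is

     (HOST_WIDE_INT_1U << 63) - 64 * 8  ==  2^63 - 512 bytes,

   so a frame is rejected unless every coefficient of its (possibly
   variable) size polynomial provably lies in [0, 2^63 - 512].  */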
/* Return the minimum spill slot alignment for a register of mode MODE.  */

unsigned int
spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
{
  return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
}
/* Return stack slot alignment in bits for TYPE and MODE.  */

static unsigned int
get_stack_local_alignment (tree type, machine_mode mode)
{
  unsigned int alignment;

  if (mode == BLKmode)
    alignment = BIGGEST_ALIGNMENT;
  else
    alignment = GET_MODE_ALIGNMENT (mode);

  /* Allow the front end to (possibly) increase the alignment of this
     stack slot.  */
  if (! type)
    type = lang_hooks.types.type_for_mode (mode, 0);

  return STACK_SLOT_ALIGNMENT (type, mode, alignment);
}
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (poly_int64 start, poly_int64 length,
                     poly_int64 size, unsigned int alignment,
                     poly_int64_pod *poffset)
{
  poly_int64 this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = targetm.starting_frame_offset () % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  */

  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (aligned_lower_bound (start + length - size - frame_phase, alignment)
         + frame_phase);
  else
    this_frame_offset
      = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (maybe_lt (this_frame_offset, start))
    {
      if (known_eq (frame_offset, start))
        frame_offset = this_frame_offset;
      else
        return false;
    }
  else if (maybe_gt (this_frame_offset + size, start + length))
    {
      if (known_eq (frame_offset, start + length))
        frame_offset = this_frame_offset + size;
      else
        return false;
    }

  *poffset = this_frame_offset;
  return true;
}
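/* Worked example (illustrative, not part of the original source): on a
   downward-growing frame with targetm.starting_frame_offset () == 0
   (so frame_phase == 0), a request for size == 8 with alignment == 8 in
   a free area [start = 0, length = 16) computes

     this_frame_offset = aligned_lower_bound (0 + 16 - 8 - 0, 8) == 8,

   which fits (8 >= start, and 8 + 8 <= start + length), so *POFFSET is
   set to 8 and true is returned without growing the frame.  */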
/* Create a new frame_space structure describing free space in the stack
   frame beginning at START and ending at END, and chain it into the
   function's frame_space_list.  */

static void
add_frame_space (poly_int64 start, poly_int64 end)
{
  struct frame_space *space = ggc_alloc<frame_space> ();
  space->next = crtl->frame_space_list;
  crtl->frame_space_list = space;
  space->start = start;
  space->length = end - start;
}
/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   -2 means use BITS_PER_UNIT,
   positive specifies alignment boundary in bits.

   KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
   alignment and ASLK_RECORD_PAD bit set if we should remember
   extra space we allocated for alignment purposes.  When we are
   called from assign_stack_temp_for_type, it is not set so we don't
   track the same stack slot in two independent lists.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local_1 (machine_mode mode, poly_int64 size,
                      int align, int kind)
{
  rtx x, addr;
  poly_int64 bigend_correction = 0;
  poly_int64 slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = aligned_upper_bound (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = MAX_SUPPORTED_STACK_ALIGNMENT / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
        {
          if (!crtl->stack_realign_processed)
            crtl->stack_alignment_estimated = alignment_in_bits;
          else
            {
              /* If stack is realigned and stack alignment value
                 hasn't been finalized, it is OK not to increase
                 stack_alignment_estimated.  The bigger alignment
                 requirement is recorded in stack_alignment_needed
                 below.  */
              gcc_assert (!crtl->stack_realign_finalized);
              if (!crtl->stack_realign_needed)
                {
                  /* It is OK to reduce the alignment as long as the
                     requested size is 0 or the estimated stack
                     alignment >= mode alignment.  */
                  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
                              || known_eq (size, 0)
                              || (crtl->stack_alignment_estimated
                                  >= GET_MODE_ALIGNMENT (mode)));
                  alignment_in_bits = crtl->stack_alignment_estimated;
                  alignment = alignment_in_bits / BITS_PER_UNIT;
                }
            }
        }
    }

  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || maybe_ne (size, 0))
    {
      if (kind & ASLK_RECORD_PAD)
        {
          struct frame_space **psp;

          for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
            {
              struct frame_space *space = *psp;
              if (!try_fit_stack_local (space->start, space->length, size,
                                        alignment, &slot_offset))
                continue;
              *psp = space->next;
              if (known_gt (slot_offset, space->start))
                add_frame_space (space->start, slot_offset);
              if (known_lt (slot_offset + size, space->start + space->length))
                add_frame_space (slot_offset + size,
                                 space->start + space->length);
              goto found_space;
            }
        }
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (known_gt (slot_offset, frame_offset))
            add_frame_space (frame_offset, slot_offset);
          if (known_lt (slot_offset + size, old_frame_offset))
            add_frame_space (slot_offset + size, old_frame_offset);
        }
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment,
                           &slot_offset);

      if (kind & ASLK_RECORD_PAD)
        {
          if (known_gt (slot_offset, old_frame_offset))
            add_frame_space (old_frame_offset, slot_offset);
          if (known_lt (slot_offset + size, frame_offset))
            add_frame_space (slot_offset + size, frame_offset);
        }
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (mode != BLKmode)
    {
      /* The slot size can sometimes be smaller than the mode size;
         e.g. the rs6000 port allocates slots with a vector mode
         that have the size of only one element.  However, the slot
         size must always be ordered wrt to the mode size, in the
         same way as for a subreg.  */
      gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
      if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
        bigend_correction = size - GET_MODE_SIZE (mode);
    }

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction
                           + targetm.starting_frame_offset (), Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
                          trunc_int_for_mode
                          (slot_offset + bigend_correction,
                           Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  vec_safe_push (stack_slot_list, x);

  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}

/* Wrap up assign_stack_local_1 with last parameter as false.  */

rtx
assign_stack_local (machine_mode mode, poly_int64 size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
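/* Usage sketch (illustrative, not part of the original source): a typical
   caller allocates a mode-aligned slot and moves a pseudo through it:

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);
     rtx tmp = gen_reg_rtx (DImode);
     emit_move_insn (tmp, slot);

   Passing ALIGN == 0 gives the slot DImode's natural alignment, and the
   ASLK_RECORD_PAD flag implied by this wrapper records any padding in
   crtl->frame_space_list so later allocations can reuse it.  */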
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.  */

struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  poly_int64 size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  poly_int64 base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  poly_int64 full_size;
};
/* Entry for the below hash table.  */
struct GTY((for_user)) temp_slot_address_entry {
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};

/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
static size_t n_temp_slots_in_use;
/* Removes temporary slot TEMP from LIST.  */

static void
cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
{
  if (temp->next)
    temp->next->prev = temp->prev;
  if (temp->prev)
    temp->prev->next = temp->next;
  else
    *list = temp->next;

  temp->prev = temp->next = NULL;
}
/* Inserts temporary slot TEMP to LIST.  */

static void
insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
{
  temp->next = *list;
  temp->prev = NULL;
  if (*list)
    (*list)->prev = temp;
  *list = temp;
}
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}
/* Returns the maximal temporary slot level.  */

static int
max_slot_level (void)
{
  if (!used_temp_slots)
    return -1;

  return used_temp_slots->length () - 1;
}
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  temp->level = -1;
  n_temp_slots_in_use--;
}
/* Compute the hash value for an address -> temp slot mapping.
   The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
                   &do_not_record, NULL, false);
}

/* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  return t->hash;
}

/* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
                            temp_slot_address_entry *t2)
{
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}
/* Remove an address -> temp slot mapping entry if the temp slot is
   not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
int
remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
{
  const struct temp_slot_address_entry *t = *slot;
  if (! t->temp_slot->in_use)
    temp_slot_address_table->clear_slot (slot);
  return 1;
}
/* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
           && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  */
  poly_int64 offset;
  if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
        for (p = *temp_slots_at_level (i); p; p = p->next)
          if (known_in_range_p (offset, p->base_offset, p->full_size))
            return p;
    }

  return NULL;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   TYPE is the type that will be used for the stack slot.  */

rtx
assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  gcc_assert (known_size_p (size));

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
        {
          if (p->align >= align
              && known_ge (p->size, size)
              && GET_MODE (p->slot) == mode
              && objects_must_conflict_p (p->type, type)
              && (best_p == 0
                  || (known_eq (best_p->size, p->size)
                      ? best_p->align > p->align
                      : known_ge (best_p->size, p->size))))
            {
              if (p->align == align && known_eq (p->size, size))
                {
                  selected = p;
                  cut_slot_from_list (selected, &avail_temp_slots);
                  best_p = 0;
                  break;
                }
              best_p = p;
            }
        }
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = best_p->align / BITS_PER_UNIT;
          poly_int64 rounded_size = aligned_upper_bound (size, alignment);

          if (known_ge (best_p->size - rounded_size, alignment))
            {
              p = ggc_alloc<temp_slot> ();
              p->in_use = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
              p->align = best_p->align;
              p->type = best_p->type;
              insert_slot_to_list (p, &avail_temp_slots);

              vec_safe_push (stack_slot_list, p->slot);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      poly_int64 frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
         One side effect of that is assign_stack_local will not round SIZE
         to ensure the frame offset remains suitably aligned.

         So for requests which depended on the rounding of SIZE, we go ahead
         and round it now.  We also make sure ALIGNMENT is at least
         BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
                                      (mode == BLKmode
                                       ? aligned_upper_bound (size,
                                                              (int) align
                                                              / BITS_PER_UNIT)
                                       : size),
                                      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
        p->size = frame_offset_old - frame_offset;
      else
        p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
        {
          p->base_offset = frame_offset;
          p->full_size = frame_offset_old - frame_offset;
        }
      else
        {
          p->base_offset = frame_offset_old;
          p->full_size = frame_offset - frame_offset_old;
        }

      selected = p;
    }

  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  vec_safe_push (stack_slot_list, slot);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
/* Allocate a temporary stack slot and record it for possible later
   reuse.  First two arguments are same as in preceding function.  */

rtx
assign_stack_temp (machine_mode mode, poly_int64 size)
{
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
/* Assign a temporary.
   If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
   and so that should be used in error messages.  In either case, we
   allocate of the given type.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (tree type_or_decl, int memory_required,
             int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  if (mode == BLKmode || memory_required)
    {
      poly_int64 size;
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we can find a fixed upper limit on
         the size, so try that instead.  */
      if (!poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
        size = max_int_size_in_bytes (type);

      /* Zero sized arrays are a GNU C extension.  Set size to 1 to avoid
         problems with allocating the stack space.  */
      if (known_eq (size, 0))
        size = 1;

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
         this to things that aren't compiler-generated temporaries.  The
         rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl
          && !known_size_p (size)
          && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
        {
          error ("size of variable %q+D is too large", decl);
          size = 1;
        }

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
        continue;

      for (q = p->next; q; q = next_q)
        {
          int delete_q = 0;

          next_q = q->next;

          if (GET_MODE (q->slot) != BLKmode)
            continue;

          if (known_eq (p->base_offset + p->full_size, q->base_offset))
            {
              /* Q comes after P; combine Q into P.  */
              p->size += q->size;
              p->full_size += q->full_size;
              delete_q = 1;
            }
          else if (known_eq (q->base_offset + q->full_size, p->base_offset))
            {
              /* P comes after Q; combine P into Q.  */
              q->size += p->size;
              q->full_size += p->full_size;
              delete_p = 1;
              break;
            }
          if (delete_q)
            cut_slot_from_list (q, &avail_temp_slots);
        }

      /* Either delete P or advance past it.  */
      if (delete_p)
        cut_slot_from_list (p, &avail_temp_slots);
    }
}
/* Indicate that NEW_RTX is an alternate way of referring to the temp
   slot that previously was known by OLD_RTX.  */

void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
     in common between them.  If so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
        return;

      if (REG_P (new_rtx))
        {
          update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
          update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
          return;
        }
      else if (GET_CODE (new_rtx) != PLUS)
        return;

      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
        update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      if (p->level == temp_slot_level)
        move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
/* Free all temporaries used so far.  This is normally called at the
   end of generating code for a statement.  */

void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots (void)
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots (void)
{
  free_temp_slots ();
  temp_slot_level--;
}
/* Initialize temporary slots.  */

void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
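/* Usage sketch (illustrative, not part of the original source): callers
   normally bracket the expansion of one statement with a temp-slot level,
   so its temporaries die at the end of the statement:

     push_temp_slots ();
     rtx t = assign_temp (type, 1, 0);    (memory_required == 1)
     ... emit code that stores into and reads from t ...
     pop_temp_slots ();

   A temporary that must outlive the statement, such as the value of a
   ({...}) expression, is instead kept alive by preserve_temp_slots,
   which moves its slot one nesting level up.  */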
/* Functions and data structures to keep track of the values hard regs
   had at the start of the function.  */

/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
struct GTY(()) initial_value_pair {
  rtx hard_reg;
  rtx pseudo;
};

/* ??? This could be a VEC but there is currently no way to define an
   opaque VEC type.  This could be worked around by defining struct
   initial_value_pair in function.h.  */
struct GTY(()) initial_value_struct {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
};
/* If a pseudo represents an initial hard reg (or expression), return
   it, else return NULL_RTX.  */

rtx
get_hard_reg_initial_reg (rtx reg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}
/* Make sure that there's a pseudo register of mode MODE that stores the
   initial value of hard register REGNO.  Return an rtx for such a pseudo.  */

rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
                                    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
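/* Usage sketch (illustrative, not part of the original source): a back
   end implementing __builtin_return_address can ask for the entry-time
   value of its link register (LR_REGNO here is a hypothetical name):

     rtx lr = get_hard_reg_initial_val (Pmode, LR_REGNO);

   The returned pseudo can be used anywhere in the function; the actual
   copy from the hard register is emitted at the entry point later, by
   emit_initial_value_sets below.  */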
/* See if get_hard_reg_initial_val has been used to create a pseudo
   for the initial value of hard register REGNO in mode MODE.  Return
   the associated pseudo if so, otherwise return NULL.  */

rtx
has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  int i;

  ivs = crtl->hard_reg_initial_vals;
  if (ivs != 0)
    for (i = 0; i < ivs->num_entries; i++)
      if (GET_MODE (ivs->entries[i].hard_reg) == mode
          && REGNO (ivs->entries[i].hard_reg) == regno)
        return ivs->entries[i].pseudo;

  return NULL_RTX;
}
unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}
/* Return the hardreg-pseudoreg initial values pair entry I and
   TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */

bool
initial_value_entry (int i, rtx *hreg, rtx *preg)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  if (!ivs || i >= ivs->num_entries)
    return false;

  *hreg = ivs->entries[i].hard_reg;
  *preg = ivs->entries[i].pseudo;
  return true;
}
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static poly_int64 in_arg_offset;
static poly_int64 var_offset;
static poly_int64 dynamic_offset;
static poly_int64 out_arg_offset;
static poly_int64 cfa_offset;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL) \
((ACCUMULATE_OUTGOING_ARGS \
  ? (crtl->outgoing_args_size \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
        : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL) \
  ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
   + (STACK_POINTER_OFFSET))
#endif
#endif
/* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
   is a virtual register, return the equivalent hard register and set the
   offset indirectly through the pointer.  Otherwise, return 0.  */

static rtx
instantiate_new_reg (rtx x, poly_int64_pod *poffset)
{
  rtx new_rtx;
  poly_int64 offset;

  if (x == virtual_incoming_args_rtx)
    {
      if (stack_realign_drap)
        {
          /* Replace virtual_incoming_args_rtx with internal arg
             pointer if DRAP is used to realign stack.  */
          new_rtx = crtl->args.internal_arg_pointer;
          offset = 0;
        }
      else
        new_rtx = arg_pointer_rtx, offset = in_arg_offset;
    }
  else if (x == virtual_stack_vars_rtx)
    new_rtx = frame_pointer_rtx, offset = var_offset;
  else if (x == virtual_stack_dynamic_rtx)
    new_rtx = stack_pointer_rtx, offset = dynamic_offset;
  else if (x == virtual_outgoing_args_rtx)
    new_rtx = stack_pointer_rtx, offset = out_arg_offset;
  else if (x == virtual_cfa_rtx)
    {
#ifdef FRAME_POINTER_CFA_OFFSET
      new_rtx = frame_pointer_rtx;
#else
      new_rtx = arg_pointer_rtx;
#endif
      offset = cfa_offset;
    }
  else if (x == virtual_preferred_stack_boundary_rtx)
    {
      new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
      offset = 0;
    }
  else
    return NULL_RTX;

  *poffset = offset;
  return new_rtx;
}
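/* Illustrative mapping (not part of the original source): once the
   per-function offsets have been computed by instantiate_virtual_regs,
   a use such as (plus (reg virtual-stack-vars) (const_int 8)) is
   rewritten to (plus (reg frame-pointer) (const_int var_offset + 8)).
   Likewise virtual-incoming-args maps to the arg pointer plus
   in_arg_offset, and virtual-outgoing-args to the stack pointer plus
   out_arg_offset.  */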
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
        {
          rtx new_rtx;
          poly_int64 offset;
          switch (GET_CODE (x))
            {
            case REG:
              new_rtx = instantiate_new_reg (x, &offset);
              if (new_rtx)
                {
                  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
                  changed = true;
                }
              iter.skip_subrtxes ();
              break;

            case PLUS:
              new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
              if (new_rtx)
                {
                  XEXP (x, 0) = new_rtx;
                  *loc = plus_constant (GET_MODE (x), x, offset, true);
                  changed = true;
                  iter.skip_subrtxes ();
                  break;
                }

              /* FIXME -- from old code */
              /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
                 we can commute the PLUS and SUBREG because pointers into the
                 frame are well-behaved.  */
              break;

            default:
              break;
            }
        }
    }
  return changed;
}
/* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
   matches the predicate for insn CODE operand OPERAND.  */

static bool
safe_insn_predicate (int code, int operand, rtx x)
{
  return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
}
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx_insn *insn)
{
  poly_int64 offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x;
  rtx_insn *seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
         to mean that the underlying register gets assigned the inverse
         transformation.  This is used, for example, in the handling of
         non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
        {
          start_sequence ();

          instantiate_virtual_regs_in_rtx (&SET_SRC (set));
          x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
                                   gen_int_mode (-offset, GET_MODE (new_rtx)));
          x = force_operand (x, new_rtx);
          if (x != new_rtx)
            emit_move_insn (new_rtx, x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      /* Handle a straight copy from a virtual register by generating a
         new add insn.  The difference between this and falling through
         to the generic case is avoiding a new pseudo and eliminating a
         move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx
          && maybe_ne (offset, 0)
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
        {
          start_sequence ();

          x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
                                   gen_int_mode (offset,
                                                 GET_MODE (SET_DEST (set))),
                                   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
          if (x != SET_DEST (set))
            emit_move_insn (SET_DEST (set), x);

          seq = get_insns ();
          end_sequence ();

          emit_insn_before (seq, insn);
          delete_insn (insn);
          return;
        }

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
         operands remain valid if they're modified in place.  */
      poly_int64 delta;
      if (GET_CODE (SET_SRC (set)) == PLUS
          && recog_data.n_operands >= 3
          && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
          && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
          && poly_int_rtx_p (recog_data.operand[2], &delta)
          && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
        {
          offset += delta;

          /* If the sum is zero, then replace with a plain move.  */
          if (known_eq (offset, 0)
              && REG_P (SET_DEST (set))
              && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
            {
              start_sequence ();
              emit_move_insn (SET_DEST (set), new_rtx);
              seq = get_insns ();
              end_sequence ();

              emit_insn_before (seq, insn);
              delete_insn (insn);
              return;
            }

          x = gen_int_mode (offset, recog_data.operand_mode[2]);

          /* Using validate_change and apply_change_group here leaves
             recog_data in an invalid state.  Since we know exactly what
             we want to check, do those two by hand.  */
          if (safe_insn_predicate (insn_code, 1, new_rtx)
              && safe_insn_predicate (insn_code, 2, x))
            {
              *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
              *recog_data.operand_loc[2] = recog_data.operand[2] = x;
              any_change = true;

              /* Fall through into the regular operand fixup loop in
                 order to take care of operands other than 1 and 2.  */
            }
        }
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
        {
        case MEM:
          {
            rtx addr = XEXP (x, 0);

            if (!instantiate_virtual_regs_in_rtx (&addr))
              continue;

            start_sequence ();
            x = replace_equiv_address (x, addr, true);
            /* It may happen that the address with the virtual reg
               was valid (e.g. based on the virtual stack reg, which might
               be acceptable to the predicates with all offsets), whereas
               the address now isn't anymore, for instance when the address
               is still offsetted, but the base reg isn't virtual-stack-reg
               anymore.  Below we would do a force_reg on the whole operand,
               but this insn might actually only accept memory.  Hence,
               before doing that last resort, try to reload the address into
               a register, so this operand stays a MEM.  */
            if (!safe_insn_predicate (insn_code, i, x))
              {
                addr = force_reg (GET_MODE (addr), addr);
                x = replace_equiv_address (x, addr, true);
              }
            seq = get_insns ();
            end_sequence ();
            if (seq)
              emit_insn_before (seq, insn);
          }
          break;

        case REG:
          new_rtx = instantiate_new_reg (x, &offset);
          if (new_rtx == NULL)
            continue;
          if (known_eq (offset, 0))
            x = new_rtx;
          else
            {
              start_sequence ();

              /* Careful, special mode predicates may have stuff in
                 insn_data[insn_code].operand[i].mode that isn't useful
                 to us for computing a new value.  */
              /* ??? Recognize address_operand and/or "p" constraints
                 to see if (plus new offset) is a valid before we put
                 this through expand_simple_binop.  */
              x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
                                       gen_int_mode (offset, GET_MODE (x)),
                                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          break;

        case SUBREG:
          new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
          if (new_rtx == NULL)
            continue;
          if (maybe_ne (offset, 0))
            {
              start_sequence ();
              new_rtx = expand_simple_binop
                (GET_MODE (new_rtx), PLUS, new_rtx,
                 gen_int_mode (offset, GET_MODE (new_rtx)),
                 NULL_RTX, 1, OPTAB_LIB_WIDEN);
              seq = get_insns ();
              end_sequence ();
              emit_insn_before (seq, insn);
            }
          x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
                                   GET_MODE (new_rtx), SUBREG_BYTE (x));
          gcc_assert (x);
          break;

        default:
          continue;
        }

      /* At this point, X contains the new value for the operand.
         Validate the new value vs the insn predicate.  Note that
         asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
        {
          start_sequence ();
          if (REG_P (x))
            {
              gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
              x = copy_to_reg (x);
            }
          else
            x = force_reg (insn_data[insn_code].operand[i].mode, x);
          seq = get_insns ();
          end_sequence ();
          if (seq)
            emit_insn_before (seq, insn);
        }

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
        *recog_data.dup_loc[i]
          = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
        {
          error_for_asm (insn, "impossible constraint in %<asm%>");
          /* For asm goto, instead of fixing up all the edges
             just clear the template and clear input operands
             (asm goto doesn't have any output operands).  */
          if (JUMP_P (insn))
            {
              rtx asm_op = extract_asm_operands (PATTERN (insn));
              ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
              ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
              ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
            }
          else
            delete_insn (insn);
        }
    }
  else
    {
      if (recog_memoized (insn) < 0)
        fatal_insn_not_found (insn);
    }
}
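/* Example transformation (illustrative, not part of the original source):
   assuming var_offset == 16, an insn

     (set (reg:DI 100) (plus:DI (reg:DI virtual-stack-vars) (const_int 8)))

   is rewritten in place, when the target's add pattern accepts the new
   operands, to

     (set (reg:DI 100) (plus:DI (reg:DI frame-pointer) (const_int 24)))

   and otherwise the sum is materialized through a fresh pseudo in a
   sequence emitted before the insn.  */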
/* Subroutine of instantiate_decls.  Given RTL representing a decl,
   do any instantiation required.  */

void
instantiate_decl_rtl (rtx x)
{
  rtx addr;

  if (x == 0)
    return;

  /* If this is a CONCAT, recurse for the pieces.  */
  if (GET_CODE (x) == CONCAT)
    {
      instantiate_decl_rtl (XEXP (x, 0));
      instantiate_decl_rtl (XEXP (x, 1));
      return;
    }

  /* If this is not a MEM, no need to do anything.  Similarly if the
     address is a constant or a register that is not a virtual register.  */
  if (!MEM_P (x))
    return;

  addr = XEXP (x, 0);
  if (CONSTANT_P (addr)
      || (REG_P (addr)
          && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
              || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
    return;

  instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
}
/* Helper for instantiate_decls called via walk_tree: Process all decls
   in the given DECL_VALUE_EXPR.  */

static tree
instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  if (! EXPR_P (t))
    {
      *walk_subtrees = 0;
      if (DECL_P (t))
        {
          if (DECL_RTL_SET_P (t))
            instantiate_decl_rtl (DECL_RTL (t));
          if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
              && DECL_INCOMING_RTL (t))
            instantiate_decl_rtl (DECL_INCOMING_RTL (t));
          if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
              && DECL_HAS_VALUE_EXPR_P (t))
            {
              tree v = DECL_VALUE_EXPR (t);
              walk_tree (&v, instantiate_expr, NULL, NULL);
            }
        }
    }
  return NULL;
}
/* Subroutine of instantiate_decls: Process all decls in the given
   BLOCK node and all its subblocks.  */

static void
instantiate_decls_1 (tree let)
{
  tree t;

  for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
    {
      if (DECL_RTL_SET_P (t))
        instantiate_decl_rtl (DECL_RTL (t));
      if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
        {
          tree v = DECL_VALUE_EXPR (t);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process all subblocks.  */
  for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
    instantiate_decls_1 (t);
}
/* Scan all decls in FNDECL (both variables and parameters) and instantiate
   all virtual registers in their DECL_RTL's.  */

static void
instantiate_decls (tree fndecl)
{
  tree decl;
  unsigned ix;

  /* Process all parameters of the function.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
    {
      instantiate_decl_rtl (DECL_RTL (decl));
      instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  if ((decl = DECL_RESULT (fndecl))
      && TREE_CODE (decl) == RESULT_DECL)
    {
      if (DECL_RTL_SET_P (decl))
        instantiate_decl_rtl (DECL_RTL (decl));
      if (DECL_HAS_VALUE_EXPR_P (decl))
        {
          tree v = DECL_VALUE_EXPR (decl);
          walk_tree (&v, instantiate_expr, NULL, NULL);
        }
    }

  /* Process the saved static chain if it exists.  */
  decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
  if (decl && DECL_HAS_VALUE_EXPR_P (decl))
    instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));

  /* Now process all variables defined in the function or its subblocks.  */
  if (DECL_INITIAL (fndecl))
    instantiate_decls_1 (DECL_INITIAL (fndecl));

  FOR_EACH_LOCAL_DECL (cfun, ix, decl)
    if (DECL_RTL_SET_P (decl))
      instantiate_decl_rtl (DECL_RTL (decl));
  vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx_insn *insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = targetm.starting_frame_offset ();
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        /* These patterns in the instruction stream can never be recognized.
           Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
            || GET_CODE (PATTERN (insn)) == CLOBBER
            || GET_CODE (PATTERN (insn)) == ASM_INPUT
            || DEBUG_MARKER_INSN_P (insn))
          continue;
        else if (DEBUG_BIND_INSN_P (insn))
          instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
        else
          instantiate_virtual_regs_in_insn (insn);

        if (insn->deleted ())
          continue;

        instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));

        /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
        if (CALL_P (insn))
          instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
namespace {

const pass_data pass_data_instantiate_virtual_regs =
{
  RTL_PASS, /* type */
  "vregs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_instantiate_virtual_regs : public rtl_opt_pass
{
public:
  pass_instantiate_virtual_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return instantiate_virtual_regs ();
    }

}; // class pass_instantiate_virtual_regs

} // anon namespace

rtl_opt_pass *
make_pass_instantiate_virtual_regs (gcc::context *ctxt)
{
  return new pass_instantiate_virtual_regs (ctxt);
}
/* Return 1 if EXP is an aggregate type (or a value with aggregate type).
   This means a type for which function calls must pass an address to the
   function or get an address back from the function.
   EXP may be a type node or an expression (whose type is tested).  */

int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
        {
          tree fndecl = get_callee_fndecl (fntype);
          if (fndecl)
            fntype = TREE_TYPE (fndecl);
          else if (CALL_EXPR_FN (fntype))
            fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
          else
            /* For internal functions, assume nothing needs to be
               returned in memory.  */
            return 0;
        }
        break;
      case FUNCTION_DECL:
        fntype = TREE_TYPE (fntype);
        break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
        fntype = NULL_TREE;
        break;
      default:
        /* We don't expect other tree types here.  */
        gcc_unreachable ();
      }

  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  if (TYPE_EMPTY_P (type))
    return 0;

  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  regno = REGNO (reg);
  nregs = hard_regno_nregs (regno, TYPE_MODE (type));
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
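/* Illustrative consequence (not part of the original source): for a call
   returning a large aggregate, say

     struct big { char c[64]; };
     struct big f (void);

   aggregate_value_p normally returns 1 on common targets, because the
   target's return_in_memory hook rejects the type, so callers of f pass
   a hidden address for the return value instead of expecting it back in
   registers.  */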
/* Return true if we should assign DECL a pseudo register; false if it
   should live on the local stack.  */

bool
use_register_for_decl (const_tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      /* We often try to use the SSA_NAME, instead of its underlying
         decl, to get type information and guide decisions, to avoid
         differences of behavior between anonymous and named
         variables, but in this one case we have to go for the actual
         variable if there is one.  The main reason is that, at least
         at -O0, we want to place user variables on the stack, but we
         don't mind using pseudos for anonymous or ignored temps.
         Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
         should go in pseudos, whereas their corresponding variables
         might have to go on the stack.  So, disregarding the decl
         here would negatively impact debug info at -O0, enable
         coalescing between SSA_NAMEs that ought to get different
         stack/pseudo assignments, and get the incoming argument
         processing thoroughly confused by PARM_DECLs expected to live
         in stack slots but assigned to pseudos.  */
      if (!SSA_NAME_VAR (decl))
        return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
               && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));

      decl = SSA_NAME_VAR (decl);
    }

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* RESULT_DECLs are a bit special in that they're assigned without
     regard to use_register_for_decl, but we generally only store in
     them.  If we coalesce their SSA NAMEs, we'd better return a
     result that matches the assignment in expand_function_start.  */
  if (TREE_CODE (decl) == RESULT_DECL)
    {
      /* If it's not an aggregate, we're going to use a REG or a
         PARALLEL containing a REG.  */
      if (!aggregate_value_p (decl, current_function_decl))
        return true;

      /* If expand_function_start determines the return value, we'll
         use MEM if it's not by reference.  */
      if (cfun->returns_pcc_struct
          || (targetm.calls.struct_value_rtx
              (TREE_TYPE (current_function_decl), 1)))
        return DECL_BY_REFERENCE (decl);

      /* Otherwise, we're taking an extra all.function_result_decl
         argument.  It's set up in assign_parms_augmented_arg_list,
         under the (negated) conditions above, and then it's used to
         set up the RESULT_DECL rtl in assign_parms, after looping
         over all parameters.  Now, if the RESULT_DECL is not by
         reference, we'll use a MEM either way.  */
      if (!DECL_BY_REFERENCE (decl))
        return false;

      /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
         the function_result_decl's assignment.  Since it's a pointer,
         we can short-circuit a number of the tests below, and we must
         duplicate them because we don't have the
         function_result_decl to test.  */
      if (!targetm.calls.allocate_stack_slots_for_args ())
        return true;
      /* We don't set DECL_IGNORED_P for the function_result_decl.  */
      if (optimize)
        return true;
      /* We don't set DECL_REGISTER for the function_result_decl.  */
      return false;
    }

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  if (!DECL_REGISTER (decl))
    return false;

  /* When not optimizing, disregard register keyword for types that
     could have methods, otherwise the methods won't be callable from
     the debugger.  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
    return false;

  return true;
}
/* Structures to communicate between the subroutines of assign_parms.
   The first holds data persistent across all parameters, the second
   is cleared out for each parameter.  */

struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  struct args_size stack_args_size;
  tree function_result_decl;
  tree orig_fnargs;
  rtx_insn *first_conversion_insn;
  rtx_insn *last_conversion_insn;
  HOST_WIDE_INT pretend_args_size;
  HOST_WIDE_INT extra_pretend_bytes;
  int reg_parm_stack_space;
};

struct assign_parm_data_one
{
  tree nominal_type;
  tree passed_type;
  rtx entry_parm;
  rtx stack_parm;
  machine_mode nominal_mode;
  machine_mode passed_mode;
  machine_mode promoted_mode;
  struct locate_and_pad_arg_data locate;
  int partial;
  BOOL_BITFIELD named_arg : 1;
  BOOL_BITFIELD passed_pointer : 1;
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
/* A subroutine of assign_parms.  Initialize ALL.  */

static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
                        current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

#ifdef INCOMING_REG_PARM_STACK_SPACE
  all->reg_parm_stack_space
    = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
/* If ARGS contains entries with complex types, split each entry into two
   entries of the component type, updating ARGS in place when substitutions
   are needed.  */

static void
split_complex_args (vec<tree> *args)
{
  unsigned i;
  tree p;

  FOR_EACH_VEC_ELT (*args, i, p)
    {
      tree type = TREE_TYPE (p);
      if (TREE_CODE (type) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (type))
        {
          tree decl;
          tree subtype = TREE_TYPE (type);
          bool addressable = TREE_ADDRESSABLE (p);

          /* Rewrite the PARM_DECL's type with its component.  */
          p = copy_node (p);
          TREE_TYPE (p) = subtype;
          DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
          SET_DECL_MODE (p, VOIDmode);
          DECL_SIZE (p) = NULL;
          DECL_SIZE_UNIT (p) = NULL;
          /* If this arg must go in memory, put it in a pseudo here.
             We can't allow it to go in memory as per normal parms,
             because the usual place might not have the imag part
             adjacent to the real part.  */
          DECL_ARTIFICIAL (p) = addressable;
          DECL_IGNORED_P (p) = addressable;
          TREE_ADDRESSABLE (p) = 0;
          layout_decl (p, 0);
          (*args)[i] = p;

          /* Build a second synthetic decl.  */
          decl = build_decl (EXPR_LOCATION (p),
                             PARM_DECL, NULL_TREE, subtype);
          DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
          DECL_ARTIFICIAL (decl) = addressable;
          DECL_IGNORED_P (decl) = addressable;
          layout_decl (decl, 0);
          args->safe_insert (++i, decl);
        }
    }
}
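
/* For illustration (hypothetical front-end input, not from this file):
   given "void f (_Complex double z)" on a target whose
   split_complex_arg hook returns true, the single PARM_DECL Z is
   rewritten above into a DOUBLE_TYPE PARM_DECL for the real part,
   plus a second synthetic PARM_DECL for the imaginary part.  */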
/* A subroutine of assign_parms.  Adjust the parameter list to incorporate
   the hidden struct return argument, and (abi willing) complex args.
   Return the new parameter list.  */

static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
                         PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;
      /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
         changes, the end of the RESULT_DECL handling block in
         use_register_for_decl must be adjusted to match.  */

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
/* A subroutine of assign_parms.  Examine PARM and pull out type and mode
   data for the parameter.  Incorporate ABI specifics such as pass-by-
   reference and type promotion.  */

static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
                             struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'.  */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm.  */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */
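
  /* For illustration (assumed prototype, not from this file): given
     "void f (int a, int b, ...)", A is named because DECL_CHAIN (a) is
     non-null; B, the last non-variadic parm, is named only when the
     target's strict_argument_naming hook returns true, and is
     otherwise treated as variadic.  */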
  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
         or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
                         passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = TYPE_MODE (nominal_type);
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
                                         TREE_TYPE (current_function_decl), 0);

 egress:
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
/* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */

static void
assign_parms_setup_varargs (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data, bool no_rtl)
{
  int varargs_pretend_bytes = 0;

  targetm.calls.setup_incoming_varargs (all->args_so_far,
                                        data->promoted_mode,
                                        data->passed_type,
                                        &varargs_pretend_bytes, no_rtl);

  /* If the back-end has requested extra stack space, record how much is
     needed.  Do not change pretend_args_size otherwise since it may be
     nonzero from an earlier partial argument.  */
  if (varargs_pretend_bytes > 0)
    all->pretend_args_size = varargs_pretend_bytes;
}
/* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
   the incoming location of the current parameter.  */

static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
                            struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  targetm.calls.warn_parameter_passing_abi (all->args_so_far,
                                            data->passed_type);

  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
                                                    data->promoted_mode,
                                                    data->passed_type,
                                                    data->named_arg);

  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = (entry_parm != 0);
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
        {
          rtx tem;
          tem = targetm.calls.function_incoming_arg (all->args_so_far,
                                                     data->promoted_mode,
                                                     data->passed_type, true);
          in_regs = tem != NULL;
        }
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
                                        data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
                                                 data->promoted_mode,
                                                 data->passed_type,
                                                 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
         register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
        {
          /* Part of this argument is passed in registers and part
             is passed on the stack.  Ask the prologue code to extend
             the stack part so that we can recreate the full value.

             PRETEND_BYTES is the size of the registers we need to store.
             CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
             stack space that the prologue should allocate.

             Internally, gcc assumes that the argument pointer is aligned
             to STACK_BOUNDARY bits.  This is used both for alignment
             optimizations (see init_emit) and to locate arguments that are
             aligned to more than PARM_BOUNDARY bits.  We must preserve this
             invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
             a stack boundary.  */

          /* We assume at most one partial arg, and it must be the first
             argument on the stack.  */
          gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

          pretend_bytes = partial;
          all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
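
          /* Worked example (assumed values, not target-specific): with
             STACK_BOUNDARY == 64, STACK_BYTES is 8; a partial argument
             with PRETEND_BYTES == 12 therefore records
             CEIL_ROUND (12, 8) == 16, preserving the STACK_BOUNDARY
             alignment invariant described above.  */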
          /* We want to align relative to the actual stack pointer, so
             don't include this in the stack size until later.  */
          all->extra_pretend_bytes = all->pretend_args_size;
        }
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
                       all->reg_parm_stack_space,
                       entry_parm ? data->partial : 0, current_function_decl,
                       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  If there is actually space on the stack
   for this parm, count it in stack_args_size and return true.  */

static bool
assign_parm_is_stack_parm (struct assign_parm_data_all *all,
                           struct assign_parm_data_one *data)
{
  /* Trivially true if we've no incoming register.  */
  if (data->entry_parm == NULL)
    ;
  /* Also true if we're partially in registers and partially not,
     since we've arranged to drop the entire argument on the stack.  */
  else if (data->partial != 0)
    ;
  /* Also true if the target says that it's passed in both registers
     and on the stack.  */
  else if (GET_CODE (data->entry_parm) == PARALLEL
           && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
    ;
  /* Also true if the target says that there's stack allocated for
     all register parameters.  */
  else if (all->reg_parm_stack_space > 0)
    ;
  /* Otherwise, no, this parameter has no ABI defined stack slot.  */
  else
    return false;

  all->stack_args_size.constant += data->locate.size.constant;
  if (data->locate.size.var)
    ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);

  return true;
}
/* A subroutine of assign_parms.  Given that this parameter is allocated
   stack space by the ABI, find it.  */

static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
         while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
          && data->promoted_mode != DECL_MODE (parm))
        {
          set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
          if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
            {
              poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
                                                         data->promoted_mode);
              if (maybe_ne (offset, 0))
                set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
            }
        }
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  poly_int64 offset;
  if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm)
    align = boundary;
  else if (poly_int_rtx_p (offset_rtx, &offset))
    {
      align = least_bit_hwi (boundary);
      unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
      if (offset_align != 0)
        align = MIN (align, offset_align);
    }
  set_mem_align (stack_parm, align);

  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
   always valid and contiguous.  */

static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
         locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
        emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
                          data->passed_type,
                          int_size_in_bytes (data->passed_type));
      else
        {
          gcc_assert (data->partial % UNITS_PER_WORD == 0);
          move_block_from_reg (REGNO (entry_parm),
                               validize_mem (copy_rtx (stack_parm)),
                               data->partial / UNITS_PER_WORD);
        }

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
           && data->nominal_mode != BLKmode
           && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      for (i = 0; i < len; i++)
        if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
            && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
            && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
                == data->passed_mode)
            && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
          {
            entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
            break;
          }
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Reconstitute any values which were
   passed in multiple registers and would fit in a single register.  */

static void
assign_parm_remove_parallels (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;

  /* Convert the PARALLEL to a REG of the same mode as the parallel.
     This can be done with register operations rather than on the
     stack, even if we will store the reconstituted parameter on the
     stack later.  */
  if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
    {
      rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
      emit_group_store (parmreg, entry_parm, data->passed_type,
                        GET_MODE_SIZE (GET_MODE (entry_parm)));
      entry_parm = parmreg;
    }

  data->entry_parm = entry_parm;
}
/* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
   always valid and properly aligned.  */

static void
assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
{
  rtx stack_parm = data->stack_parm;

  /* If we can't trust the parm stack slot to be aligned enough for its
     ultimate type, don't use that slot after entry.  We'll make another
     stack slot, if we need one.  */
  if (stack_parm
      && ((STRICT_ALIGNMENT
           && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
          || (data->nominal_type
              && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
              && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
    stack_parm = NULL;

  /* If parm was passed in memory, and we need to convert it on entry,
     don't store it back in that same slot.  */
  else if (data->entry_parm == stack_parm
           && data->nominal_mode != BLKmode
           && data->nominal_mode != data->passed_mode)
    stack_parm = NULL;

  /* If stack protection is in effect for this function, don't leave any
     pointers in their passed stack slots.  */
  else if (crtl->stack_protect_guard
           && (flag_stack_protect == 2
               || data->passed_pointer
               || POINTER_TYPE_P (data->nominal_type)))
    stack_parm = NULL;

  data->stack_parm = stack_parm;
}
/* A subroutine of assign_parms.  Return true if the current parameter
   should be stored as a BLKmode in the current frame.  */

static bool
assign_parm_setup_block_p (struct assign_parm_data_one *data)
{
  if (data->nominal_mode == BLKmode)
    return true;
  if (GET_MODE (data->entry_parm) == BLKmode)
    return true;

#ifdef BLOCK_REG_PADDING
  /* Only assign_parm_setup_block knows how to deal with register arguments
     that are padded at the least significant end.  */
  if (REG_P (data->entry_parm)
      && known_lt (GET_MODE_SIZE (data->promoted_mode), UNITS_PER_WORD)
      && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
          == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
    return true;
#endif

  return false;
}
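
/* For illustration (assumed target properties): a 3-byte argument
   passed in a register on a big-endian target whose BLOCK_REG_PADDING
   yields PAD_UPWARD occupies the most-significant end of the register,
   i.e. it is padded at the least significant end -- the case the
   comment above refers to.  */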
/* A subroutine of assign_parms.  Arrange for the parameter to be
   present and valid in DATA->STACK_RTL.  */

static void
assign_parm_setup_block (struct assign_parm_data_all *all,
                         tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  rtx target_reg = NULL_RTX;
  bool in_conversion_seq = false;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  /* If we want the parameter in a pseudo, don't use a stack slot.  */
  if (is_gimple_reg (parm) && use_register_for_decl (parm))
    {
      tree def = ssa_default_def (cfun, parm);
      gcc_assert (def);
      machine_mode mode = promote_ssa_mode (def, NULL);
      rtx reg = gen_reg_rtx (mode);
      if (GET_CODE (reg) != CONCAT)
        stack_parm = reg;
      else
        {
          target_reg = reg;
          /* Avoid allocating a stack slot, if there isn't one
             preallocated by the ABI.  It might seem like we should
             always prefer a pseudo, but converting between
             floating-point and integer modes goes through the stack
             on various machines, so it's better to use the reserved
             stack slot than to risk wasting it and allocating more
             for the conversion.  */
          if (stack_parm == NULL_RTX)
            {
              int save = generating_concat_p;
              generating_concat_p = 0;
              stack_parm = gen_reg_rtx (mode);
              generating_concat_p = save;
            }
        }
      data->stack_parm = NULL;
    }

  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
      if (DECL_ALIGN (parm) > MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          rtx allocsize = gen_int_mode (size_stored, Pmode);
          get_dynamic_stack_size (&allocsize, 0, DECL_ALIGN (parm), NULL);
          stack_parm = assign_stack_local (BLKmode, UINTVAL (allocsize),
                                           MAX_SUPPORTED_STACK_ALIGNMENT);
          rtx addr = align_dynamic_address (XEXP (stack_parm, 0),
                                            DECL_ALIGN (parm));
          mark_reg_pointer (addr, DECL_ALIGN (parm));
          stack_parm = gen_rtx_MEM (GET_MODE (stack_parm), addr);
          MEM_NOTRAP_P (stack_parm) = 1;
        }
      else
        stack_parm = assign_stack_local (BLKmode, size_stored,
                                         DECL_ALIGN (parm));
      if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
        PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
         So we have to be careful to ensure that we allocate an
         integral number of words.  We do this above when we call
         assign_stack_local if space was not allocated in the argument
         list.  If it was, this will not work if PARM_BOUNDARY is not
         a multiple of BITS_PER_WORD.  It isn't clear how to fix this
         if it becomes a problem.  Exception is when BLKmode arrives
         with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
        ;
      else if (GET_CODE (entry_parm) == PARALLEL)
        ;
      else
        gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (copy_rtx (stack_parm));

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
        emit_group_store (mem, entry_parm, data->passed_type, size);
      else if (GET_CODE (entry_parm) == PARALLEL)
        {
          push_to_sequence2 (all->first_conversion_insn,
                             all->last_conversion_insn);
          emit_group_store (mem, entry_parm, data->passed_type, size);
          all->first_conversion_insn = get_insns ();
          all->last_conversion_insn = get_last_insn ();
          end_sequence ();
          in_conversion_seq = true;
        }

      else if (size == 0)
        ;

      /* If SIZE is that of a mode no bigger than a word, just use
         that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
        {
          unsigned int bits = size * BITS_PER_UNIT;
          machine_mode mode = int_mode_for_size (bits, 0).else_blk ();

          if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
              && (size == UNITS_PER_WORD
                  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                      != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
#endif
              )
            {
              rtx reg;

              /* We are really truncating a word_mode value containing
                 SIZE bytes into a value of mode MODE.  If such an
                 operation requires no actual instructions, we can refer
                 to the value directly in mode MODE, otherwise we must
                 start with the register in word_mode and explicitly
                 convert it.  */
              if (targetm.truly_noop_truncation (size * BITS_PER_UNIT,
                                                 BITS_PER_WORD))
                reg = gen_rtx_REG (mode, REGNO (entry_parm));
              else
                {
                  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
                  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
                }
              emit_move_insn (change_address (mem, mode, 0), reg);
            }

#ifdef BLOCK_REG_PADDING
          /* Storing the register in memory as a full word, as
             move_block_from_reg below would do, and then using the
             MEM in a smaller mode, has the effect of shifting right
             if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
             shifting must be explicit.  */
          else if (!MEM_P (mem))
            {
              rtx x;

              /* If the assert below fails, we should have taken the
                 mode != BLKmode path above, unless we have downward
                 padding of smaller-than-word arguments on a machine
                 with little-endian bytes, which would likely require
                 additional changes to work correctly.  */
              gcc_checking_assert (BYTES_BIG_ENDIAN
                                   && (BLOCK_REG_PADDING (mode,
                                                          data->passed_type, 1)
                                       == PAD_UPWARD));

              int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

              x = gen_rtx_REG (word_mode, REGNO (entry_parm));
              x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
                                NULL_RTX, 1);
              x = force_reg (word_mode, x);
              x = gen_lowpart_SUBREG (GET_MODE (mem), x);

              emit_move_insn (mem, x);
            }
#endif
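
          /* Worked example (assumed values): with UNITS_PER_WORD == 8
             and SIZE == 3, BY is (8 - 3) * 8 == 40, so the value is
             shifted right by 40 bits -- the same effect a full-word
             store followed by a narrower load would have had on a
             BYTES_BIG_ENDIAN target.  */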
          /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
             machine must be aligned to the left before storing
             to memory.  Note that the previous test doesn't
             handle all cases (e.g. SIZE == 3).  */
          else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
                       == PAD_DOWNWARD)
#else
                   && BYTES_BIG_ENDIAN
#endif
                   )
            {
              rtx tem, x;
              int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
              rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

              x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
              tem = change_address (mem, word_mode, 0);
              emit_move_insn (tem, x);
            }
          else
            move_block_from_reg (REGNO (entry_parm), mem,
                                 size_stored / UNITS_PER_WORD);
        }
      else if (!MEM_P (mem))
        {
          gcc_checking_assert (size > UNITS_PER_WORD);
#ifdef BLOCK_REG_PADDING
          gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
                                                  data->passed_type, 0)
                               == PAD_UPWARD);
#endif
          emit_move_insn (mem, entry_parm);
        }
      else
        move_block_from_reg (REGNO (entry_parm), mem,
                             size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
                       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
      in_conversion_seq = true;
    }

  if (target_reg)
    {
      if (!in_conversion_seq)
        emit_move_insn (target_reg, stack_parm);
      else
        {
          push_to_sequence2 (all->first_conversion_insn,
                             all->last_conversion_insn);
          emit_move_insn (target_reg, stack_parm);
          all->first_conversion_insn = get_insns ();
          all->last_conversion_insn = get_last_insn ();
          end_sequence ();
        }
      stack_parm = target_reg;
    }

  data->stack_parm = stack_parm;
  set_parm_rtl (parm, stack_parm);
}
/* A subroutine of assign_parms.  Allocate a pseudo to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
                       struct assign_parm_data_one *data)
{
  rtx parmreg, validated_mem;
  rtx equiv_stack_parm;
  machine_mode promoted_nominal_mode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
  bool did_conversion = false;
  bool need_conversion, moved;
  rtx rtl;

  /* Store the parm in a pseudoregister during the function, but we may
     need to do it in a wider mode.  Using 2 here makes the result
     consistent with promote_decl_mode and thus expand_expr_real_1.  */
  promoted_nominal_mode
    = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
                             TREE_TYPE (current_function_decl), 2);
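
  /* For illustration (assumed target behavior): on targets that
     promote sub-word integers for function arguments, a QImode parm
     with signed char nominal type typically yields SImode here, so
     the pseudo below is allocated in the wider mode, matching what
     promote_decl_mode would choose.  */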
  parmreg = gen_reg_rtx (promoted_nominal_mode);
  if (!DECL_ARTIFICIAL (parm))
    mark_user_reg (parmreg);

  /* If this was an item that we received a pointer to,
     set rtl appropriately.  */
  if (data->passed_pointer)
    {
      rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
      set_mem_attributes (rtl, parm, 1);
    }
  else
    rtl = parmreg;

  assign_parm_remove_parallels (data);

  /* Copy the value into the register, thus bridging between
     assign_parm_find_data_types and expand_expr_real_1.  */

  equiv_stack_parm = data->stack_parm;
  validated_mem = validize_mem (copy_rtx (data->entry_parm));

  need_conversion = (data->nominal_mode != data->passed_mode
                     || promoted_nominal_mode != data->promoted_mode);
  moved = false;

  if (need_conversion
      && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
      && data->nominal_mode == data->passed_mode
      && data->nominal_mode == GET_MODE (data->entry_parm))
    {
      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
         mode, by the caller.  We now have to convert it to
         NOMINAL_MODE, if different.  However, PARMREG may be in
         a different mode than NOMINAL_MODE if it is being stored
         promoted.

         If ENTRY_PARM is a hard register, it might be in a register
         not valid for operating in its mode (e.g., an odd-numbered
         register for a DFmode).  In that case, moves are the only
         thing valid, so we can't do a convert from there.  This
         occurs when the calling sequence allows such misaligned
         usages.

         In addition, the conversion may involve a call, which could
         clobber parameters which haven't been copied to pseudo
         registers yet.

         First, we try to emit an insn which performs the necessary
         conversion.  We verify that this insn does not clobber any
         hard registers.  */

      enum insn_code icode;
      rtx op0, op1;

      icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
                            unsignedp);

      op0 = parmreg;
      op1 = validated_mem;
      if (icode != CODE_FOR_nothing
          && insn_operand_matches (icode, 0, op0)
          && insn_operand_matches (icode, 1, op1))
        {
          enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
          rtx_insn *insn, *insns;
          rtx t = op1;
          HARD_REG_SET hardregs;

          start_sequence ();
          /* If op1 is a hard register that is likely spilled, first
             force it into a pseudo, otherwise combiner might extend
             its lifetime too much.  */
          if (GET_CODE (t) == SUBREG)
            t = SUBREG_REG (t);
          if (REG_P (t)
              && HARD_REGISTER_P (t)
              && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
              && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
            {
              t = gen_reg_rtx (GET_MODE (op1));
              emit_move_insn (t, op1);
            }
          else
            t = op1;
          rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
                                           data->passed_mode, unsignedp);
          emit_insn (pat);
          insns = get_insns ();

          moved = true;
          CLEAR_HARD_REG_SET (hardregs);
          for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
            {
              if (INSN_P (insn))
                note_stores (PATTERN (insn), record_hard_reg_sets,
                             &hardregs);
              if (!hard_reg_set_empty_p (hardregs))
                moved = false;
            }

          end_sequence ();

          if (moved)
            {
              emit_insn (insns);
              if (equiv_stack_parm != NULL_RTX)
                equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
                                                  equiv_stack_parm);
            }
        }
    }

  if (moved)
    /* Nothing to do.  */
    ;
  else if (need_conversion)
    {
      /* We did not have an insn to convert directly, or the sequence
         generated appeared unsafe.  We must first copy the parm to a
         pseudo reg, and save the conversion until after all
         parameters have been moved.  */

      int save_tree_used;
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validated_mem);

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);

      if (partial_subreg_p (tempreg)
          && GET_MODE (tempreg) == data->nominal_mode
          && REG_P (SUBREG_REG (tempreg))
          && data->nominal_mode == data->passed_mode
          && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
        {
          /* The argument is already sign/zero extended, so note it
             into the subreg.  */
          SUBREG_PROMOTED_VAR_P (tempreg) = 1;
          SUBREG_PROMOTED_SET (tempreg, unsignedp);
        }

      /* TREE_USED gets set erroneously during expand_assignment.  */
      save_tree_used = TREE_USED (parm);
      SET_DECL_RTL (parm, rtl);
      expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
      SET_DECL_RTL (parm, NULL_RTX);
      TREE_USED (parm) = save_tree_used;
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();

      did_conversion = true;
    }
  else
    emit_move_insn (parmreg, validated_mem);

  /* If we were passed a pointer but the actual value can safely live
     in a register, retrieve it and use it directly.  */
  if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
    {
      /* We can't use nominal_mode, because it will have been set to
         Pmode above.  We must use the actual mode of the parm.  */
      if (use_register_for_decl (parm))
        {
          parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
          mark_user_reg (parmreg);
        }
      else
        {
          int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
                                            TYPE_MODE (TREE_TYPE (parm)),
                                            TYPE_ALIGN (TREE_TYPE (parm)));
          parmreg
            = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
                                  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
                                  align);
          set_mem_attributes (parmreg, parm, 1);
        }

      /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
         the debug info in case it is not legitimate.  */
      if (GET_MODE (parmreg) != GET_MODE (rtl))
        {
          rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
          int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));

          push_to_sequence2 (all->first_conversion_insn,
                             all->last_conversion_insn);
          emit_move_insn (tempreg, rtl);
          tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
          emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
                          tempreg);
          all->first_conversion_insn = get_insns ();
          all->last_conversion_insn = get_last_insn ();
          end_sequence ();

          did_conversion = true;
        }
      else
        emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);

      rtl = parmreg;

      /* STACK_PARM is the pointer, not the parm, and PARMREG is
         now the parm.  */
      data->stack_parm = NULL;
    }

  set_parm_rtl (parm, rtl);

  /* Mark the register as eliminable if we did no conversion and it was
     copied from memory at a fixed offset, and the arg pointer was not
     copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
     offset formed an invalid address, such memory-equivalences as we
     make here would screw up life analysis for it.  */
  if (data->nominal_mode == data->passed_mode
      && !did_conversion
      && data->stack_parm != 0
      && MEM_P (data->stack_parm)
      && data->locate.offset.var == 0
      && reg_mentioned_p (virtual_incoming_args_rtx,
                          XEXP (data->stack_parm, 0)))
    {
      rtx_insn *linsn = get_last_insn ();
      rtx_insn *sinsn;
      rtx set;

      /* Mark complex types separately.  */
      if (GET_CODE (parmreg) == CONCAT)
        {
          scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
          int regnor = REGNO (XEXP (parmreg, 0));
          int regnoi = REGNO (XEXP (parmreg, 1));
          rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
          rtx stacki = adjust_address_nv (data->stack_parm, submode,
                                          GET_MODE_SIZE (submode));

          /* Scan backwards for the set of the real and
             imaginary parts.  */
          for (sinsn = linsn; sinsn != 0;
               sinsn = prev_nonnote_insn (sinsn))
            {
              set = single_set (sinsn);
              if (set == 0)
                continue;

              if (SET_DEST (set) == regno_reg_rtx [regnoi])
                set_unique_reg_note (sinsn, REG_EQUIV, stacki);
              else if (SET_DEST (set) == regno_reg_rtx [regnor])
                set_unique_reg_note (sinsn, REG_EQUIV, stackr);
            }
        }
      else
        set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
    }

  /* For pointer data type, suggest pointer register.  */
  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    mark_reg_pointer (parmreg,
                      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
}
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
                         struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
                                          TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
        {
          poly_int64 offset
            = subreg_lowpart_offset (data->nominal_mode,
                                     GET_MODE (data->stack_parm));
          /* ??? This may need a big-endian conversion on sparc64.  */
          data->stack_parm
            = adjust_address (data->stack_parm, data->nominal_mode, 0);
          if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
            set_mem_offset (data->stack_parm,
                            MEM_OFFSET (data->stack_parm) + offset);
        }
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
        {
          int align = STACK_SLOT_ALIGNMENT (data->passed_type,
                                            GET_MODE (data->entry_parm),
                                            TYPE_ALIGN (data->passed_type));
          data->stack_parm
            = assign_stack_local (GET_MODE (data->entry_parm),
                                  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
                                  align);
          set_mem_attributes (data->stack_parm, parm, 1);
        }

      dest = validize_mem (copy_rtx (data->stack_parm));
      src = validize_mem (copy_rtx (data->entry_parm));

      if (MEM_P (src))
        {
          /* Use a block move to handle potentially misaligned entry_parm.  */
          if (!to_conversion)
            push_to_sequence2 (all->first_conversion_insn,
                               all->last_conversion_insn);
          to_conversion = true;

          emit_block_move (dest, src,
                           GEN_INT (int_size_in_bytes (data->passed_type)),
                           BLOCK_OP_NORMAL);
        }
      else
        {
          if (!REG_P (src))
            src = force_reg (GET_MODE (src), src);
          emit_move_insn (dest, src);
        }
    }

  if (to_conversion)
    {
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  set_parm_rtl (parm, data->stack_parm);
}
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
                              vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
          && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
        {
          rtx tmp, real, imag;
          scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));

          real = DECL_RTL (fnargs[i]);
          imag = DECL_RTL (fnargs[i + 1]);
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }

          if (TREE_ADDRESSABLE (parm))
            {
              rtx rmem, imem;
              HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
              int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
                                                DECL_MODE (parm),
                                                TYPE_ALIGN (TREE_TYPE (parm)));

              /* split_complex_arg put the real and imag parts in
                 pseudos.  Move them to memory.  */
              tmp = assign_stack_local (DECL_MODE (parm), size, align);
              set_mem_attributes (tmp, parm, 1);
              rmem = adjust_address_nv (tmp, inner, 0);
              imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
              push_to_sequence2 (all->first_conversion_insn,
                                 all->last_conversion_insn);
              emit_move_insn (rmem, real);
              emit_move_insn (imem, imag);
              all->first_conversion_insn = get_insns ();
              all->last_conversion_insn = get_last_insn ();
              end_sequence ();
            }
          else
            tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          set_parm_rtl (parm, tmp);

          real = DECL_INCOMING_RTL (fnargs[i]);
          imag = DECL_INCOMING_RTL (fnargs[i + 1]);
          if (inner != GET_MODE (real))
            {
              real = gen_lowpart_SUBREG (inner, real);
              imag = gen_lowpart_SUBREG (inner, imag);
            }
          tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
          set_decl_incoming_rtl (parm, tmp, false);
          i++;
        }
    }
}
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
        {
          SET_DECL_RTL (parm, const0_rtx);
          DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
          continue;
        }

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        {
          unsigned int align
            = targetm.calls.function_arg_boundary (data.promoted_mode,
                                                   data.passed_type);
          align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
                                     align);
          if (TYPE_ALIGN (data.nominal_type) > align)
            align = MINIMUM_ALIGNMENT (data.nominal_type,
                                       TYPE_MODE (data.nominal_type),
                                       TYPE_ALIGN (data.nominal_type));
          if (crtl->stack_alignment_estimated < align)
            {
              gcc_assert (!crtl->stack_realign_processed);
              crtl->stack_alignment_estimated = align;
            }
        }

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
        {
          assign_parm_find_stack_rtl (parm, &data);
          assign_parm_adjust_entry_rtl (&data);
        }

      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
        {
          rtx incoming_rtl
            = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
                           data.entry_parm);
          set_decl_incoming_rtl (parm, incoming_rtl, true);
        }
      else
        set_decl_incoming_rtl (parm, data.entry_parm, false);

      assign_parm_adjust_stack_rtl (&data);

      if (assign_parm_setup_block_p (&data))
        assign_parm_setup_block (&all, parm, &data);
      else if (data.passed_pointer || use_register_for_decl (parm))
        assign_parm_setup_reg (&all, parm, &data);
      else
        assign_parm_setup_stack (&all, parm, &data);

      if (cfun->stdarg && !DECL_CHAIN (parm))
        assign_parms_setup_varargs (&all, &data, false);

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
                                          data.passed_type, data.named_arg);
    }

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
        {
          tree type = TREE_TYPE (DECL_RESULT (fndecl));
          machine_mode mode = TYPE_MODE (type);

          if (mode != BLKmode
              && mode != VOIDmode
              && !AGGREGATE_TYPE_P (type))
            {
              unsigned int align = GET_MODE_ALIGNMENT (mode);
              if (crtl->stack_alignment_estimated < align)
                {
                  gcc_assert (!crtl->stack_realign_processed);
                  crtl->stack_alignment_estimated = align;
                }
            }
        }
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result.  As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
        {
          SET_DECL_VALUE_EXPR (result, all.function_result_decl);
          x = addr;
        }
      else
        {
          SET_DECL_VALUE_EXPR (result,
                               build1 (INDIRECT_REF, TREE_TYPE (result),
                                       all.function_result_decl));
          addr = convert_memory_address (Pmode, addr);
          x = gen_rtx_MEM (DECL_MODE (result), addr);
          set_mem_attributes (x, result, 1);
        }

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      set_parm_rtl (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = aligned_upper_bound (crtl->args.size,
                                         PARM_BOUNDARY / BITS_PER_UNIT);
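
  /* Worked example (assumed values): with PARM_BOUNDARY == 64, a
     13-byte argument block is rounded up to
     aligned_upper_bound (13, 8) == 16 bytes, after reg_parm_stack_space
     has been applied as a lower bound.  */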
  if (ARGS_GROW_DOWNWARD)
    {
      crtl->args.arg_offset_rtx
        = (all.stack_args_size.var == 0
           ? gen_int_mode (-all.stack_args_size.constant, Pmode)
           : expand_expr (size_diffop (all.stack_args_size.var,
                                       size_int (-all.stack_args_size.constant)),
                          NULL_RTX, VOIDmode, EXPAND_NORMAL));
    }
  else
    crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
                                                         TREE_TYPE (fndecl),
                                                         crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
          ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
          : DECL_REGISTER (decl_result))
        {
          rtx real_decl_rtl;

          real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
                                                        fndecl, true);
          REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
          /* The delay slot scheduler assumes that crtl->return_rtx
             holds the hard register containing the return value, not a
             temporary pseudo.  */
          crtl->return_rtx = real_decl_rtl;
        }
    }
}
/* A subroutine of gimplify_parameters, invoked via walk_tree.
   For all seen types, gimplify their sizes.  */

static tree
gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  *walk_subtrees = 0;
  if (TYPE_P (t))
    {
      if (POINTER_TYPE_P (t))
        *walk_subtrees = 1;
      else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
               && !TYPE_SIZES_GIMPLIFIED (t))
        {
          gimplify_type_sizes (t, (gimple_seq *) data);
          *walk_subtrees = 1;
        }
    }

  return NULL;
}
/* Gimplify the parameter list for current_function_decl.  This involves
   evaluating SAVE_EXPRs of variable sized parameters and generating code
   to implement callee-copies reference parameters.  Returns a sequence of
   statements to add to the beginning of the function.  */

gimple_seq
gimplify_parameters (gimple_seq *cleanup)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
        continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
                                          data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
         SAVE_EXPRs (amongst others) onto a pending sizes list.  This
         turned out to be less than manageable in the gimple world.
         Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
                                    gimplify_parm_type, &stmts);

      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
        {
          gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
          gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
        }

      if (data.passed_pointer)
        {
          tree type = TREE_TYPE (data.passed_type);
          if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
                                       type, data.named_arg))
            {
              tree local, t;

              /* For constant-sized objects, this is trivial; for
                 variable-sized objects, we have to play games.  */
              if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
                  && !(flag_stack_check == GENERIC_STACK_CHECK
                       && compare_tree_int (DECL_SIZE_UNIT (parm),
                                            STACK_CHECK_MAX_VAR_SIZE) > 0))
                {
                  local = create_tmp_var (type, get_name (parm));
                  DECL_IGNORED_P (local) = 0;
                  /* If PARM was addressable, move that flag over
                     to the local copy, as its address will be taken,
                     not the PARMs.  Keep the parms address taken
                     as we'll query that flag during gimplification.  */
                  if (TREE_ADDRESSABLE (parm))
                    TREE_ADDRESSABLE (local) = 1;
                  else if (TREE_CODE (type) == COMPLEX_TYPE
                           || TREE_CODE (type) == VECTOR_TYPE)
                    DECL_GIMPLE_REG_P (local) = 1;

                  if (!is_gimple_reg (local)
                      && flag_stack_reuse != SR_NONE)
                    {
                      tree clobber = build_constructor (type, NULL);
                      gimple *clobber_stmt;
                      TREE_THIS_VOLATILE (clobber) = 1;
                      clobber_stmt = gimple_build_assign (local, clobber);
                      gimple_seq_add_stmt (cleanup, clobber_stmt);
                    }
                }
              else
                {
                  tree ptr_type, addr;

                  ptr_type = build_pointer_type (type);
                  addr = create_tmp_reg (ptr_type, get_name (parm));
                  DECL_IGNORED_P (addr) = 0;
                  local = build_fold_indirect_ref (addr);

                  t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
                                              DECL_ALIGN (parm),
                                              max_int_size_in_bytes (type));
                  /* The call has been built for a variable-sized object.  */
                  CALL_ALLOCA_FOR_VAR_P (t) = 1;
                  t = fold_convert (ptr_type, t);
                  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
                  gimplify_and_add (t, &stmts);
                }

              gimplify_assign (local, parm, &stmts);

              SET_DECL_VALUE_EXPR (parm, local);
              DECL_HAS_VALUE_EXPR_P (parm) = 1;
            }
        }
    }

  fnargs.release ();

  return stmts;
}
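
/* For illustration (hypothetical case): on an ABI where reference
   parameters are callee-copied, a parameter of variable-sized type
   takes the second branch above -- its copy is allocated with a
   CALL_ALLOCA_FOR_VAR_P alloca call -- while a fixed-size parameter
   gets an ordinary temporary via create_tmp_var.  */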
/* Compute the size and offset from the start of the stacked arguments for a
   parm passed in mode PASSED_MODE and with type TYPE.

   INITIAL_OFFSET_PTR points to the current offset into the stacked
   arguments.

   The starting offset and size for this parm are returned in
   LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
   nonzero, the offset is that of stack slot, which is returned in
   LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
   padding required from the initial offset ptr to the stack slot.

   IN_REGS is nonzero if the argument will be passed in registers.  It will
   never be set if REG_PARM_STACK_SPACE is not defined.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   FNDECL is the function in which the argument was defined.

   There are two types of rounding that are done.  The first, controlled by
   TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
   argument list to be aligned to the specific boundary (in bits).  This
   rounding affects the initial and starting offsets, but not the argument
   size.

   The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
   optionally rounds the size of the parm to PARM_BOUNDARY.  The
   initial offset is not affected by this rounding, while the size always
   is and the starting offset may be.  */

/*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
    callers pass in the total size of args so far as
    INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
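
/* Worked example (assumed values): for a 6-byte BLKmode parm with a
   TARGET_FUNCTION_ARG_BOUNDARY of 32 bits and PARM_BOUNDARY of 32
   bits, the first rounding aligns the slot offset to a 4-byte
   boundary, while the second rounds the recorded size up to 8 bytes;
   the parm's offset within its slot is unchanged.  */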
void
locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
		     int reg_parm_stack_space, int partial,
		     tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  pad_direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var
	      || !ordered_p (initial_offset_ptr->constant,
			     reg_parm_stack_space))
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else
	    initial_offset_ptr->constant
	      = ordered_max (initial_offset_ptr->constant,
			     reg_parm_stack_space);
	}
    }

  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  sizetree = (type
	      ? arg_size_in_bytes (type)
	      : size_int (GET_MODE_SIZE (passed_mode)));
  where_pad = targetm.calls.function_arg_padding (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							      type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
	{
	  if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

  if (ARGS_GROW_DOWNWARD)
    {
      locate->slot_offset.constant = -initial_offset_ptr->constant;
      if (initial_offset_ptr->var)
	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					      initial_offset_ptr->var);

      {
	tree s2 = sizetree;
	if (where_pad != PAD_NONE
	    && (!tree_fits_uhwi_p (sizetree)
		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
	SUB_PARM_SIZE (locate->slot_offset, s2);
      }

      locate->slot_offset.constant += part_size_in_regs;

      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (&locate->slot_offset, boundary,
			      &locate->alignment_pad);

      locate->size.constant = (-initial_offset_ptr->constant
			       - locate->slot_offset.constant);
      if (initial_offset_ptr->var)
	locate->size.var = size_binop (MINUS_EXPR,
				       size_binop (MINUS_EXPR,
						   ssize_int (0),
						   initial_offset_ptr->var),
				       locate->slot_offset.var);

      /* Pad_below needs the pre-rounded size to know how much to pad
	 below.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
	pad_below (&locate->offset, passed_mode, sizetree);
    }
  else
    {
      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (initial_offset_ptr, boundary,
			      &locate->alignment_pad);
      locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
      if (passed_mode != BLKmode)
	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

      /* Pad_below needs the pre-rounded size to know how much to pad below
	 so this must be done before rounding up.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
	pad_below (&locate->offset, passed_mode, sizetree);
    }

  if (where_pad != PAD_NONE
      && (!tree_fits_uhwi_p (sizetree)
	  || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
    sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

  ADD_PARM_SIZE (locate->size, sizetree);

  locate->size.constant -= part_size_in_regs;

  locate->offset.constant
    += targetm.calls.function_arg_offset (passed_mode, type);
}
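
/* Illustrative sketch (not part of the original source): a worked example of
   the two roundings described above, assuming a hypothetical target with
   PARM_BOUNDARY == 32 and a function_arg_boundary of 64 bits for the parm.
   For a 4-byte parm at initial offset 4:
     - the first rounding aligns the slot offset up to 64 bits (8 bytes):
	 (4 + 8 - 1) & ~(8 - 1) == 8, so LOCATE->SLOT_OFFSET.CONSTANT == 8
       and LOCATE->ALIGNMENT_PAD records the 4 bytes of padding;
     - the second rounding pads the size to PARM_BOUNDARY (4 bytes):
	 round_up (4, 4) == 4, so LOCATE->SIZE is unchanged here.  */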
/* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
   BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */

static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  poly_int64 save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  poly_int64 sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      unsigned int misalign;
      if (offset_ptr->var
	  || !known_misalignment (offset_ptr->constant + sp_offset,
				  boundary_in_bytes, &misalign))
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
	  tree rounded;
	  if (ARGS_GROW_DOWNWARD)
	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
	  else
	    rounded = round_up (offset, boundary / BITS_PER_UNIT);

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  if (ARGS_GROW_DOWNWARD)
	    offset_ptr->constant -= misalign;
	  else
	    offset_ptr->constant += -misalign & (boundary_in_bytes - 1);

	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
static void
pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
{
  unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
  unsigned int misalign;
  if (passed_mode != BLKmode
      && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
    offset_ptr->constant += -misalign & (align - 1);
  else
    {
      if (TREE_CODE (sizetree) != INTEGER_CST
	  || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
	{
	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
	  tree s2 = round_up (sizetree, align);
	  /* Add it in.  */
	  ADD_PARM_SIZE (*offset_ptr, s2);
	  SUB_PARM_SIZE (*offset_ptr, sizetree);
	}
    }
}
/* True if register REGNO was alive at a place where `setjmp' was
   called and was set more than once or is an argument.  Such regs may
   be clobbered by `longjmp'.  */

static bool
regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
{
  /* There appear to be cases where some local vars never reach the
     backend but have bogus regnos.  */
  if (regno >= max_reg_num ())
    return false;

  return ((REG_N_SETS (regno) > 1
	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       regno))
	  && REGNO_REG_SET_P (setjmp_crosses, regno));
}

/* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
   vfork.  This is done after calling flow analysis before register
   allocation since that will clobber the pseudo-regs to hard
   regs.  */

static void
setjmp_vars_warning (bitmap setjmp_crosses, tree block)
{
  tree decl, sub;

  for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
    {
      if (VAR_P (decl)
	  && DECL_RTL_SET_P (decl)
	  && REG_P (DECL_RTL (decl))
	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
		 " %<longjmp%> or %<vfork%>", decl);
    }

  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
    setjmp_vars_warning (setjmp_crosses, sub);
}
/* Do the appropriate part of setjmp_vars_warning
   but for arguments instead of local variables.  */

static void
setjmp_args_warning (bitmap setjmp_crosses)
{
  tree decl;
  for (decl = DECL_ARGUMENTS (current_function_decl);
       decl; decl = DECL_CHAIN (decl))
    if (DECL_RTL (decl) != 0
	&& REG_P (DECL_RTL (decl))
	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
      warning (OPT_Wclobbered,
	       "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
	       decl);
}

/* Generate warning messages for variables live across setjmp.  */

void
generate_setjmp_warnings (void)
{
  bitmap setjmp_crosses = regstat_get_setjmp_crosses ();

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
      || bitmap_empty_p (setjmp_crosses))
    return;

  setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
  setjmp_args_warning (setjmp_crosses);
}
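
/* Illustrative sketch (not part of the original source): the kind of user
   code these warnings target.  "v" lives in a register across the setjmp
   and is set more than once, so a longjmp may restore a stale value:

     #include <setjmp.h>
     jmp_buf env;
     int f (void)
     {
       int v = 1;              // may be clobbered by longjmp
       if (setjmp (env) == 0)
	 {
	   v = 2;
	   g ();               // hypothetical helper that may longjmp (env, 1)
	 }
       return v;               // -Wclobbered warns about v
     }
*/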
/* Reverse the order of elements in the fragment chain T of blocks,
   and return the new head of the chain (old last element).
   In addition to that clear BLOCK_SAME_RANGE flags when needed
   and adjust BLOCK_SUPERCONTEXT from the super fragment to
   its super fragment origin.  */

static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      if ((prev && !BLOCK_SAME_RANGE (prev))
	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
	      != prev_super))
	BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}

/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).
   Also do the same on subblocks and reverse the order of elements
   in BLOCK_FRAGMENT_CHAIN as well.  */

static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      if (BLOCK_FRAGMENT_CHAIN (block)
	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
	{
	  BLOCK_FRAGMENT_CHAIN (block)
	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
	    BLOCK_SAME_RANGE (block) = 0;
	}
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}

/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks (void)
{
  tree block = DECL_INITIAL (current_function_decl);

  if (block == NULL_TREE)
    return;

  auto_vec<tree, 10> block_stack;

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  clear_block_marks (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
}

/* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */

void
clear_block_marks (tree block)
{
  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;
      clear_block_marks (BLOCK_SUBBLOCKS (block));
      block = BLOCK_CHAIN (block);
    }
}
static void
reorder_blocks_1 (rtx_insn *insns, tree current_block,
		  vec<tree> *p_block_stack)
{
  rtx_insn *insn;
  tree prev_beg = NULL_TREE, prev_end = NULL_TREE;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (NOTE_P (insn))
	{
	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);
	      tree origin;

	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
	      origin = block;

	      if (prev_end)
		BLOCK_SAME_RANGE (prev_end) = 0;
	      prev_end = NULL_TREE;

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);

		  BLOCK_SAME_RANGE (new_block) = 0;
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      if (prev_beg == current_block && prev_beg)
		BLOCK_SAME_RANGE (block) = 1;

	      prev_beg = origin;

	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      /* When there's only one block for the entire function,
		 current_block == block and we mustn't do this, it
		 will cause infinite recursion.  */
	      if (block != current_block)
		{
		  tree super;
		  if (block != origin)
		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
							  (origin))
				   == current_block);
		  if (p_block_stack->is_empty ())
		    super = current_block;
		  else
		    {
		      super = p_block_stack->last ();
		      gcc_assert (super == current_block
				  || BLOCK_FRAGMENT_ORIGIN (super)
				     == current_block);
		    }
		  BLOCK_SUPERCONTEXT (block) = super;
		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
		  BLOCK_SUBBLOCKS (current_block) = block;
		  current_block = origin;
		}
	      p_block_stack->safe_push (block);
	    }
	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = p_block_stack->pop ();
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
	      prev_beg = NULL_TREE;
	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
			 ? NOTE_BLOCK (insn) : NULL_TREE;
	    }
	}
      else
	{
	  prev_beg = NULL_TREE;
	  if (prev_end)
	    BLOCK_SAME_RANGE (prev_end) = 0;
	  prev_end = NULL_TREE;
	}
    }
}
/* Reverse the order of elements in the chain T of blocks,
   and return the new head of the chain (old last element).  */

tree
blocks_nreverse (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      prev = block;
    }
  return prev;
}

/* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.  */

tree
block_chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
    continue;
  BLOCK_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    tree t2;
    for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}

/* Count the subblocks of the list starting with BLOCK.  If VECTOR is
   non-NULL, list them all into VECTOR, in a depth-first preorder
   traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
   blocks.  */

static int
all_blocks (tree block, tree *vector)
{
  int n_blocks = 0;

  while (block)
    {
      TREE_ASM_WRITTEN (block) = 0;

      /* Record this block.  */
      if (vector)
	vector[n_blocks] = block;

      ++n_blocks;

      /* Record the subblocks, and their subblocks...  */
      n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
			      vector ? vector + n_blocks : 0);
      block = BLOCK_CHAIN (block);
    }

  return n_blocks;
}
/* Return a vector containing all the blocks rooted at BLOCK.  The
   number of elements in the vector is stored in N_BLOCKS_P.  The
   vector is dynamically allocated; it is the caller's responsibility
   to call `free' on the pointer returned.  */

static tree *
get_block_vector (tree block, int *n_blocks_p)
{
  tree *block_vector;

  *n_blocks_p = all_blocks (block, NULL);
  block_vector = XNEWVEC (tree, *n_blocks_p);
  all_blocks (block, block_vector);

  return block_vector;
}

static GTY(()) int next_block_index = 2;

/* Set BLOCK_NUMBER for all the blocks in FN.  */

void
number_blocks (tree fn)
{
  int i;
  int n_blocks;
  tree *block_vector;

  /* For XCOFF debugging output, we start numbering the blocks
     from 1 within each function, rather than keeping a running
     count.  */
#if defined (XCOFF_DEBUGGING_INFO)
  if (write_symbols == XCOFF_DEBUG)
    next_block_index = 1;
#endif

  block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);

  /* The top-level BLOCK isn't numbered at all.  */
  for (i = 1; i < n_blocks; ++i)
    /* We number the blocks from two.  */
    BLOCK_NUMBER (block_vector[i]) = next_block_index++;

  free (block_vector);
}

/* If VAR is present in a subblock of BLOCK, return the subblock.  */

DEBUG_FUNCTION tree
debug_find_var_in_block_tree (tree var, tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
    if (t == var)
      return block;

  for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
    {
      tree ret = debug_find_var_in_block_tree (var, t);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Keep track of whether we're in a dummy function context.  If we are,
   we don't want to invoke the set_current_function hook, because we'll
   get into trouble if the hook calls target_reinit () recursively or
   when the initial initialization is not yet complete.  */

static bool in_dummy_function;

/* Invoke the target hook when setting cfun.  Update the optimization options
   if the function uses different options than the default.  */

static void
invoke_set_current_function_hook (tree fndecl)
{
  if (!in_dummy_function)
    {
      tree opts = ((fndecl)
		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
		   : optimization_default_node);

      if (!opts)
	opts = optimization_default_node;

      /* Change optimization options if needed.  */
      if (optimization_current_node != opts)
	{
	  optimization_current_node = opts;
	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
	}

      targetm.set_current_function (fndecl);
      this_fn_optabs = this_target_optabs;

      /* Initialize global alignment variables after op.  */
      parse_alignment_opts ();

      if (opts != optimization_default_node)
	{
	  init_tree_optimization_optabs (opts);
	  if (TREE_OPTIMIZATION_OPTABS (opts))
	    this_fn_optabs = (struct target_optabs *)
	      TREE_OPTIMIZATION_OPTABS (opts);
	}
    }
}

/* cfun should never be set directly; use this function.  */

void
set_cfun (struct function *new_cfun, bool force)
{
  if (cfun != new_cfun || force)
    {
      cfun = new_cfun;
      invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
      redirect_edge_var_map_empty ();
    }
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */

static vec<function *> cfun_stack;

/* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
   current_function_decl accordingly.  */

void
push_cfun (struct function *new_cfun)
{
  gcc_assert ((!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
  set_cfun (new_cfun);
}

/* Pop cfun from the stack.  Also set current_function_decl accordingly.  */

void
pop_cfun (void)
{
  struct function *new_cfun = cfun_stack.pop ();
  /* When in_dummy_function, we do have a cfun but current_function_decl is
     NULL.  We also allow pushing NULL cfun and subsequently changing
     current_function_decl to something else and have both restored by
     pop_cfun.  */
  gcc_checking_assert (in_dummy_function
		       || !cfun
		       || current_function_decl == cfun->decl);
  set_cfun (new_cfun);
  current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
}
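
/* Illustrative sketch (not part of the original source): the push/pop pair
   is used to temporarily switch compilation context to another function.
   "other_fndecl" here is a hypothetical FUNCTION_DECL with a struct function:

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     ... examine or emit code while cfun/current_function_decl
	 refer to other_fndecl ...
     pop_cfun ();   // restores the previous cfun and current_function_decl
*/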
/* Return value of funcdef and increase it.  */
int
get_next_funcdef_no (void)
{
  return funcdef_no++;
}

/* Return value of funcdef.  */
int
get_last_funcdef_no (void)
{
  return funcdef_no;
}

/* Allocate a function structure for FNDECL and set its contents
   to the defaults.  Set cfun to the newly-allocated object.
   Some of the helper functions invoked during initialization assume
   that cfun has already been set.  Therefore, assign the new object
   directly into cfun and invoke the back end hook explicitly at the
   very end, rather than initializing a temporary and calling set_cfun
   on it.

   ABSTRACT_P is true if this is a function that will never be seen by
   the middle-end.  Such functions are front-end concepts (like C++
   function templates) that do not correspond directly to functions
   placed in object files.  */

void
allocate_struct_function (tree fndecl, bool abstract_p)
{
  tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;

  cfun = ggc_cleared_alloc<function> ();

  init_eh_for_function ();

  if (init_machine_status)
    cfun->machine = (*init_machine_status) ();

#ifdef OVERRIDE_ABI_FORMAT
  OVERRIDE_ABI_FORMAT (fndecl);
#endif

  if (fndecl != NULL_TREE)
    {
      DECL_STRUCT_FUNCTION (fndecl) = cfun;
      cfun->decl = fndecl;
      current_function_funcdef_no = get_next_funcdef_no ();
    }

  invoke_set_current_function_hook (fndecl);

  if (fndecl != NULL_TREE)
    {
      tree result = DECL_RESULT (fndecl);

      if (!abstract_p)
	{
	  /* Now that we have activated any function-specific attributes
	     that might affect layout, particularly vector modes, relayout
	     each of the parameters and the result.  */
	  relayout_decl (result);
	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
	       parm = DECL_CHAIN (parm))
	    relayout_decl (parm);

	  /* Similarly relayout the function decl.  */
	  targetm.target_option.relayout_function (fndecl);
	}

      if (!abstract_p && aggregate_value_p (result, fndecl))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  cfun->returns_pcc_struct = 1;
#endif
	  cfun->returns_struct = 1;
	}

      cfun->stdarg = stdarg_p (fntype);

      /* Assume all registers in stdarg functions need to be saved.  */
      cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

      /* ??? This could be set on a per-function basis by the front-end
	 but is this worth the hassle?  */
      cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
      cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;

      if (!profile_flag && !flag_instrument_function_entry_exit)
	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
    }

  /* Don't enable begin stmt markers if var-tracking at assignments is
     disabled.  The markers make little sense without the variable
     binding annotations among them.  */
  cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
				&& MAY_HAVE_DEBUG_MARKER_STMTS;
}
/* This is like allocate_struct_function, but pushes a new cfun for FNDECL
   instead of just setting it.  */

void
push_struct_function (tree fndecl)
{
  /* When in_dummy_function we might be in the middle of a pop_cfun and
     current_function_decl and cfun may not match.  */
  gcc_assert (in_dummy_function
	      || (!cfun && !current_function_decl)
	      || (cfun && current_function_decl == cfun->decl));
  cfun_stack.safe_push (cfun);
  current_function_decl = fndecl;
  allocate_struct_function (fndecl, false);
}

/* Reset crtl and other non-struct-function variables to defaults as
   appropriate for emitting rtl at the start of a function.  */

static void
prepare_function_start (void)
{
  gcc_assert (!get_last_insn ());
  init_temp_slots ();
  init_emit ();
  init_varasm_status ();
  init_expr ();
  default_rtl_profile ();

  if (flag_stack_usage_info)
    {
      cfun->su = ggc_cleared_alloc<stack_usage> ();
      cfun->su->static_stack_size = -1;
    }

  cse_not_expected = ! optimize;

  /* Caller save not needed yet.  */
  caller_save_needed = 0;

  /* We haven't done register allocation yet.  */
  reg_renumber = 0;

  /* Indicate that we have not instantiated virtual registers yet.  */
  virtuals_instantiated = 0;

  /* Indicate that we want CONCATs now.  */
  generating_concat_p = 1;

  /* Indicate we have no need of a frame pointer yet.  */
  frame_pointer_needed = 0;
}
void
push_dummy_function (bool with_decl)
{
  tree fn_decl, fn_type, fn_result_decl;

  gcc_assert (!in_dummy_function);
  in_dummy_function = true;

  if (with_decl)
    {
      fn_type = build_function_type_list (void_type_node, NULL_TREE);
      fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
			    fn_type);
      fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
				   NULL_TREE, void_type_node);
      DECL_RESULT (fn_decl) = fn_result_decl;
    }
  else
    fn_decl = NULL_TREE;

  push_struct_function (fn_decl);
}

/* Initialize the rtl expansion mechanism so that we can do simple things
   like generate sequences.  This is used to provide a context during global
   initialization of some passes.  You must call expand_dummy_function_end
   to exit this context.  */

void
init_dummy_function_start (void)
{
  push_dummy_function (false);
  prepare_function_start ();
}

/* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
   and initialize static variables for generating RTL for the statements
   of the function.  */

void
init_function_start (tree subr)
{
  /* Initialize backend, if needed.  */
  initialize_rtl ();

  prepare_function_start ();
  decide_function_section (subr);

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning (OPT_Waggregate_return, "function returns an aggregate");
}
/* Expand code to verify the stack_protect_guard.  This is invoked at
   the end of a function to be protected.  */

void
stack_protect_epilogue (void)
{
  tree guard_decl = crtl->stack_protect_guard_decl;
  rtx_code_label *label = gen_label_rtx ();
  rtx x, y;
  rtx_insn *seq = NULL;

  x = expand_normal (crtl->stack_protect_guard);

  if (targetm.have_stack_protect_combined_test () && guard_decl)
    {
      gcc_assert (DECL_P (guard_decl));
      y = DECL_RTL (guard_decl);
      /* Allow the target to compute address of Y and compare it with X without
	 leaking Y into a register.  This combined address + compare pattern
	 allows the target to prevent spilling of any intermediate results by
	 splitting it after register allocator.  */
      seq = targetm.gen_stack_protect_combined_test (x, y, label);
    }
  else
    {
      if (guard_decl)
	y = expand_normal (guard_decl);
      else
	y = const0_rtx;

      /* Allow the target to compare Y with X without leaking either into
	 a register.  */
      if (targetm.have_stack_protect_test ())
	seq = targetm.gen_stack_protect_test (x, y, label);
    }

  if (seq)
    emit_insn (seq);
  else
    emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);

  /* The noreturn predictor has been moved to the tree level.  The rtl-level
     predictors estimate this branch about 20%, which isn't enough to get
     things moved out of line.  Since this is the only extant case of adding
     a noreturn function at the rtl level, it doesn't seem worth doing aught
     except adding the prediction by hand.  */
  rtx_insn *tmp = get_last_insn ();
  if (JUMP_P (tmp))
    predict_insn_def (tmp, PRED_NORETURN, TAKEN);

  expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
  free_temp_slots ();
  emit_label (label);
}
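
/* Illustrative sketch (not part of the original source): at the C level,
   the check emitted above behaves roughly like

     if (frame_guard == reference_guard)   // x == y: guard intact
       goto label;                         // continue to the normal return
     stack_protect_fail ();                // noreturn failure handler,
					   // typically __stack_chk_fail

   where "frame_guard" and "reference_guard" stand for the expanded
   crtl->stack_protect_guard and guard_decl values.  */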
/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (tree subr)
{
  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  crtl->profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  crtl->limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  tree res = DECL_RESULT (subr);
  if (aggregate_value_p (res, subr))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (cfun->returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (res));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (sv)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, sv);
	    }
	}
      if (value_address)
	{
	  rtx x = value_address;
	  if (!DECL_BY_REFERENCE (res))
	    {
	      x = gen_rtx_MEM (DECL_MODE (res), x);
	      set_mem_attributes (x, res, 1);
	    }
	  set_parm_rtl (res, x);
	}
    }
  else if (DECL_MODE (res) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    set_parm_rtl (res, NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */
      tree return_type = TREE_TYPE (res);

      /* If we may coalesce this result, make sure it has the expected mode
	 in case it was promoted.  But we need not bother about BLKmode.  */
      machine_mode promoted_mode
	= flag_tree_coalesce_vars && is_gimple_reg (res)
	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
	  : BLKmode;

      if (promoted_mode != BLKmode)
	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
      else if (TYPE_MODE (return_type) != BLKmode
	       && targetm.calls.return_in_msb (return_type))
	/* expand_function_end will insert the appropriate padding in
	   this case.  Use the return value's natural (unpadded) mode
	   within the function proper.  */
	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
      else
	{
	  /* In order to figure out what mode to use for the pseudo, we
	     figure out what the mode of the eventual return register will
	     actually be, and use that.  */
	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);

	  /* Structures that are returned in registers are not
	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
	  if (REG_P (hard_reg))
	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
	  else
	    {
	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
	      set_parm_rtl (res, gen_group_rtx (hard_reg));
	    }
	}

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (res) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */
  assign_parms (subr);

  /* If function gets a static chain arg, store it.  */
  if (cfun->static_chain_decl)
    {
      tree parm = cfun->static_chain_decl;
      rtx local, chain;
      rtx_insn *insn;
      int unsignedp;

      local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
      chain = targetm.calls.static_chain (current_function_decl, true);

      set_decl_incoming_rtl (parm, chain, false);
      set_parm_rtl (parm, local);
      mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));

      if (GET_MODE (local) != GET_MODE (chain))
	{
	  convert_move (local, chain, unsignedp);
	  insn = get_last_insn ();
	}
      else
	insn = emit_move_insn (local, chain);

      /* Mark the register as eliminable, similar to parameters.  */
      if (MEM_P (chain)
	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
	set_dst_reg_note (insn, REG_EQUIV, chain, local);

      /* If we aren't optimizing, save the static chain onto the stack.  */
      if (!optimize)
	{
	  tree saved_static_chain_decl
	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
			  DECL_NAME (parm), TREE_TYPE (parm));
	  rtx saved_static_chain_rtx
	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
	  emit_move_insn (saved_static_chain_rtx, chain);
	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
	}
    }

  /* The following was moved from init_function_start.
     The move was supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NOTE_INSN_FUNCTION_BEG);

  gcc_assert (NOTE_P (get_last_insn ()));

  parm_birth_insn = get_last_insn ();

  /* If the function receives a non-local goto, then store the
     bits we need to restore the frame pointer.  */
  if (cfun->nonlocal_goto_save_area)
    {
      tree t_save;
      rtx r_save;

      tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
      gcc_assert (DECL_RTL_SET_P (var));

      t_save = build4 (ARRAY_REF,
		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		       cfun->nonlocal_goto_save_area,
		       integer_zero_node, NULL_TREE, NULL_TREE);
      r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
      gcc_assert (GET_MODE (r_save) == Pmode);

      emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
      update_nonlocal_goto_save_area ();
    }

  if (crtl->profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* If we are doing generic stack checking, the probe should go here.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
}
void
pop_dummy_function (void)
{
  pop_cfun ();
  in_dummy_function = false;
}

/* Undo the effects of init_dummy_function_start.  */

void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_dummy_function ();
}

/* Helper for diddle_return_value.  */

static void
diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
{
  if (! outgoing)
    return;

  if (REG_P (outgoing))
    (*doit) (outgoing, arg);
  else if (GET_CODE (outgoing) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (outgoing, 0); i++)
	{
	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);

	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	    (*doit) (x, arg);
	}
    }
}

/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  diddle_return_value_1 (doit, arg, crtl->return_rtx);
}

static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}

void
clobber_return_register (void)
{
  diddle_return_value (do_clobber_return_reg, NULL);

  /* In case we do use pseudo to return value, clobber it too.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);
      if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
	do_clobber_return_reg (decl_rtl, NULL);
    }
}

static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}

static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}

/* Set the location of the insn chain starting at INSN to LOC.  */

static void
set_insn_locations (rtx_insn *insn, int loc)
{
  while (insn != NULL)
    {
      if (INSN_P (insn))
	INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}
/* Generate RTL for the end of the current function.  */

void
expand_function_end (void)
{
  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx_insn *insn, *seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
	    start_sequence ();
	    if (STACK_CHECK_MOVING_SP)
	      anti_adjust_stack_and_probe (max_frame_size, true);
	    else
	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
	    seq = get_insns ();
	    end_sequence ();
	    set_insn_locations (seq, prologue_location);
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depended on this.  */
  set_curr_insn_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  rtx_insn *clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = crtl->return_rtx;
	  complex_mode cmode;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that crtl->return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && REG_P (real_decl_rtl)
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
		   && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
	      promote_function_mode (TREE_TYPE (decl_result),
				     GET_MODE (decl_rtl), &unsignedp,
				     TREE_TYPE (current_function_decl), 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if ((cfun->returns_struct || cfun->returns_pcc_struct)
      && !targetm.calls.omit_struct_return_reg)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
      value_address = convert_memory_address (mode, value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  Don't emit
     it if clobber_after is a barrier, then the previous basic block
     certainly doesn't fall thru into the exit block.  */
  if (!BARRIER_P (clobber_after))
    {
      start_sequence ();
      clobber_return_register ();
      rtx_insn *seq = get_insns ();
      end_sequence ();

      emit_insn_after (seq, clobber_after);
    }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (copy_rtx (ret)),
		      crtl->args.internal_arg_pointer);
      rtx_insn *seq = get_insns ();
      end_sequence ();

      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
/* If debugging dumps are requested, dump information about how the
   target handled -fstack-check=clash for the prologue.

   PROBES describes what if any probes were emitted.

   RESIDUALS indicates if the prologue had any residual allocation
   (i.e. total allocation was not a multiple of PROBE_INTERVAL).  */

void
dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
{
  if (!dump_file)
    return;

  switch (probes)
    {
    case NO_PROBE_NO_FRAME:
      fprintf (dump_file,
	       "Stack clash no probe no stack adjustment in prologue.\n");
      break;
    case NO_PROBE_SMALL_FRAME:
      fprintf (dump_file,
	       "Stack clash no probe small stack adjustment in prologue.\n");
      break;
    case PROBE_INLINE:
      fprintf (dump_file, "Stack clash inline probes in prologue.\n");
      break;
    case PROBE_LOOP:
      fprintf (dump_file, "Stack clash probe loop in prologue.\n");
      break;
    }

  if (residuals)
    fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
  else
    fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");

  if (frame_pointer_needed)
    fprintf (dump_file, "Stack clash frame pointer needed.\n");
  else
    fprintf (dump_file, "Stack clash no frame pointer needed.\n");

  if (TREE_THIS_VOLATILE (cfun->decl))
    fprintf (dump_file,
	     "Stack clash noreturn prologue, assuming no implicit"
	     " probes in caller.\n");
  else
    fprintf (dump_file,
	     "Stack clash not noreturn prologue.\n");
}
/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
   for the first time.  */

static void
record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
{
  rtx_insn *tmp;
  hash_table<insn_cache_hasher> *hash = *hashp;

  if (hash == NULL)
    *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);

  for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
    {
      rtx *slot = hash->find_slot (tmp, INSERT);
      gcc_assert (*slot == NULL);
      *slot = tmp;
    }
}

/* INSN has been duplicated or replaced by COPY, perhaps by duplicating a
   basic block, splitting or peepholes.  If INSN is a prologue or epilogue
   insn, then record COPY as well.  */

void
maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
{
  hash_table<insn_cache_hasher> *hash;
  rtx *slot;

  hash = epilogue_insn_hash;
  if (!hash || !hash->find (insn))
    {
      hash = prologue_insn_hash;
      if (!hash || !hash->find (insn))
	return;
    }

  slot = hash->find_slot (copy, INSERT);
  gcc_assert (*slot == NULL);
  *slot = copy;
}
/* Determine if any INSNs in HASH are, or are part of, INSN.  Because
   we can be running after reorg, SEQUENCE rtl is possible.  */

static bool
contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
{
  if (hash == NULL)
    return false;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
      int i;
      for (i = seq->len () - 1; i >= 0; i--)
	if (hash->find (seq->element (i)))
	  return true;
      return false;
    }

  return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
}

bool
prologue_contains (const rtx_insn *insn)
{
  return contains (insn, prologue_insn_hash);
}

bool
epilogue_contains (const rtx_insn *insn)
{
  return contains (insn, epilogue_insn_hash);
}

bool
prologue_epilogue_contains (const rtx_insn *insn)
{
  if (contains (insn, prologue_insn_hash))
    return true;
  if (contains (insn, epilogue_insn_hash))
    return true;
  return false;
}

void
record_prologue_seq (rtx_insn *seq)
{
  record_insns (seq, NULL, &prologue_insn_hash);
}

void
record_epilogue_seq (rtx_insn *seq)
{
  record_insns (seq, NULL, &epilogue_insn_hash);
}

/* Set JUMP_LABEL for a return insn.  */

void
set_return_jump_label (rtx_insn *returnjump)
{
  rtx pat = PATTERN (returnjump);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (ANY_RETURN_P (pat))
    JUMP_LABEL (returnjump) = pat;
  else
    JUMP_LABEL (returnjump) = ret_rtx;
}
/* Return a sequence to be used as the split prologue for the current
   function, or NULL.  */

static rtx_insn *
make_split_prologue_seq (void)
{
  if (!flag_split_stack
      || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
    return NULL;

  start_sequence ();
  emit_insn (targetm.gen_split_stack_prologue ());
  rtx_insn *seq = get_insns ();
  end_sequence ();

  record_insns (seq, NULL, &prologue_insn_hash);
  set_insn_locations (seq, prologue_location);

  return seq;
}

/* Return a sequence to be used as the prologue for the current function,
   or NULL.  */

static rtx_insn *
make_prologue_seq (void)
{
  if (!targetm.have_prologue ())
    return NULL;

  start_sequence ();
  rtx_insn *seq = targetm.gen_prologue ();
  emit_insn (seq);

  /* Insert an explicit USE for the frame pointer
     if the profiling is on and the frame pointer is required.  */
  if (crtl->profile && frame_pointer_needed)
    emit_use (hard_frame_pointer_rtx);

  /* Retain a map of the prologue insns.  */
  record_insns (seq, NULL, &prologue_insn_hash);
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Ensure that instructions are not moved into the prologue when
     profiling is on.  The call to the profiling routine can be
     emitted within the live range of a call-clobbered register.  */
  if (!targetm.profile_before_prologue () && crtl->profile)
    emit_insn (gen_blockage ());

  seq = get_insns ();
  end_sequence ();
  set_insn_locations (seq, prologue_location);

  return seq;
}
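
/* Illustrative sketch (not part of the original source): the
   start_sequence/end_sequence idiom used throughout this file.  Insns
   emitted between the two calls are collected into a detached chain
   instead of the main insn stream, so they can be spliced in elsewhere:

     start_sequence ();
     emit_insn (gen_blockage ());       // emitted into the sequence
     rtx_insn *seq = get_insns ();      // head of the detached chain
     end_sequence ();                   // back to the enclosing stream
     emit_insn_before (seq, some_insn); // splice it wherever needed

   "some_insn" is a stand-in for whatever insertion point applies.  */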
/* Return a sequence to be used as the epilogue for the current function,
   or NULL.  */

static rtx_insn *
make_epilogue_seq (void)
{
  if (!targetm.have_epilogue ())
    return NULL;

  start_sequence ();
  emit_note (NOTE_INSN_EPILOGUE_BEG);
  rtx_insn *seq = targetm.gen_epilogue ();
  if (seq)
    emit_jump_insn (seq);

  /* Retain a map of the epilogue insns.  */
  record_insns (seq, NULL, &epilogue_insn_hash);
  set_insn_locations (seq, epilogue_location);

  seq = get_insns ();
  rtx_insn *returnjump = get_last_insn ();
  end_sequence ();

  if (JUMP_P (returnjump))
    set_return_jump_label (returnjump);

  return seq;
}
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */
5838 thread_prologue_and_epilogue_insns (void)
5842 /* Can't deal with multiple successors of the entry block at the
5843 moment. Function should always have at least one entry
5845 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun
)));
5847 edge entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5848 edge orig_entry_edge
= entry_edge
;
5850 rtx_insn
*split_prologue_seq
= make_split_prologue_seq ();
5851 rtx_insn
*prologue_seq
= make_prologue_seq ();
5852 rtx_insn
*epilogue_seq
= make_epilogue_seq ();
5854 /* Try to perform a kind of shrink-wrapping, making sure the
5855 prologue/epilogue is emitted only around those parts of the
5856 function that require it. */
5857 try_shrink_wrapping (&entry_edge
, prologue_seq
);
5859 /* If the target can handle splitting the prologue/epilogue into separate
5860 components, try to shrink-wrap these components separately. */
5861 try_shrink_wrapping_separate (entry_edge
->dest
);
5863 /* If that did anything for any component we now need the generate the
5864 "main" prologue again. Because some targets require some of these
5865 to be called in a specific order (i386 requires the split prologue
5866 to be first, for example), we create all three sequences again here.
5867 If this does not work for some target, that target should not enable
5868 separate shrink-wrapping. */
5869 if (crtl
->shrink_wrapped_separate
)
5871 split_prologue_seq
= make_split_prologue_seq ();
5872 prologue_seq
= make_prologue_seq ();
5873 epilogue_seq
= make_epilogue_seq ();
5876 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5878 /* A small fib -- epilogue is not yet completed, but we wish to re-use
5879 this marker for the splits of EH_RETURN patterns, and nothing else
5880 uses the flag in the meantime. */
5881 epilogue_completed
= 1;
5883 /* Find non-fallthru edges that end with EH_RETURN instructions. On
5884 some targets, these get split to a special version of the epilogue
5885 code. In order to be able to properly annotate these with unwind
5886 info, try to split them now. If we get a valid split, drop an
5887 EPILOGUE_BEG note and mark the insns as epilogue insns. */
5890 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5892 rtx_insn
*prev
, *last
, *trial
;
5894 if (e
->flags
& EDGE_FALLTHRU
)
5896 last
= BB_END (e
->src
);
5897 if (!eh_returnjump_p (last
))
5900 prev
= PREV_INSN (last
);
5901 trial
= try_split (PATTERN (last
), last
, 1);
5905 record_insns (NEXT_INSN (prev
), NEXT_INSN (trial
), &epilogue_insn_hash
);
5906 emit_note_after (NOTE_INSN_EPILOGUE_BEG
, prev
);
5909 edge exit_fallthru_edge
= find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
);
5911 if (exit_fallthru_edge
)
5915 insert_insn_on_edge (epilogue_seq
, exit_fallthru_edge
);
5916 commit_edge_insertions ();
5918 /* The epilogue insns we inserted may cause the exit edge to no longer
5920 FOR_EACH_EDGE (e
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5922 if (((e
->flags
& EDGE_FALLTHRU
) != 0)
5923 && returnjump_p (BB_END (e
->src
)))
5924 e
->flags
&= ~EDGE_FALLTHRU
;
5927 else if (next_active_insn (BB_END (exit_fallthru_edge
->src
)))
5929 /* We have a fall-through edge to the exit block, the source is not
5930 at the end of the function, and there will be an assembler epilogue
5931 at the end of the function.
5932 We can't use force_nonfallthru here, because that would try to
5933 use return. Inserting a jump 'by hand' is extremely messy, so
5934 we take advantage of cfg_layout_finalize using
5935 fixup_fallthru_exit_predecessor. */
5936 cfg_layout_initialize (0);
5938 FOR_EACH_BB_FN (cur_bb
, cfun
)
5939 if (cur_bb
->index
>= NUM_FIXED_BLOCKS
5940 && cur_bb
->next_bb
->index
>= NUM_FIXED_BLOCKS
)
5941 cur_bb
->aux
= cur_bb
->next_bb
;
5942 cfg_layout_finalize ();
5946 /* Insert the prologue. */
5948 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
  if (split_prologue_seq || prologue_seq)
    {
      rtx_insn *split_prologue_insn = split_prologue_seq;
      if (split_prologue_seq)
	{
	  while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
	    split_prologue_insn = NEXT_INSN (split_prologue_insn);
	  insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
	}

      rtx_insn *prologue_insn = prologue_seq;
      if (prologue_seq)
	{
	  while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
	    prologue_insn = NEXT_INSN (prologue_insn);
	  insert_insn_on_edge (prologue_seq, entry_edge);
	}

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      if (split_prologue_insn
	  && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
	split_prologue_insn = NULL;
      if (prologue_insn
	  && BLOCK_FOR_INSN (prologue_insn) == NULL)
	prologue_insn = NULL;
      if (split_prologue_insn || prologue_insn)
	{
	  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
	  bitmap_clear (blocks);
	  if (split_prologue_insn)
	    bitmap_set_bit (blocks,
			    BLOCK_FOR_INSN (split_prologue_insn)->index);
	  if (prologue_insn)
	    bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
	  find_many_sub_basic_blocks (blocks);
	}
    }

  default_rtl_profile ();
  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei));
       ei_next (&ei))
    {
      /* Skip those already handled, the ones that run without prologue.  */
      if (e->flags & EDGE_IGNORE)
	{
	  e->flags &= ~EDGE_IGNORE;
	  continue;
	}

      rtx_insn *insn = BB_END (e->src);

      if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
	continue;

      if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
	{
	  start_sequence ();
	  emit_note (NOTE_INSN_EPILOGUE_BEG);
	  emit_insn (ep_seq);
	  rtx_insn *seq = get_insns ();
	  end_sequence ();

	  /* Retain a map of the epilogue insns.  Used in life analysis to
	     avoid getting rid of sibcall epilogue insns.  Do this before we
	     actually emit the sequence.  */
	  record_insns (seq, NULL, &epilogue_insn_hash);
	  set_insn_locations (seq, epilogue_location);

	  emit_insn_before (seq, insn);
	}
    }
  if (epilogue_seq)
    {
      rtx_insn *insn, *next;

      /* Similarly, move any line notes that appear after the epilogue.
	 There is no need, however, to be quite so anal about the existence
	 of such a note.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_seq; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
	}
    }

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling.  */

void
reposition_prologue_and_epilogue_notes (void)
{
  if (!targetm.have_prologue ()
      && !targetm.have_epilogue ()
      && !targetm.have_sibcall_epilogue ())
    return;

  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = prologue_insn_hash->elements ();
      rtx_insn *insn, *last = NULL, *note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
	 (1) The prologue can contain loops (typically probing the stack),
	     which means that the end of the prologue isn't in the first bb.
	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  if (note == NULL)
	    {
	      /* Scan forward looking for the PROLOGUE_END note.  It should
		 be right at the beginning of the block, possibly with other
		 insn notes that got moved there.  */
	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
		{
		  if (NOTE_P (note)
		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		    break;
		}
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  reorder_insns (note, note, last);
	}
    }
  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  rtx_insn *insn, *first = NULL, *note = NULL;
	  basic_block bb = e->src;

	  /* Scan from the beginning until we reach the first epilogue
	     insn.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NOTE_P (insn))
		{
		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		    {
		      note = insn;
		      if (first != NULL)
			break;
		    }
		}
	      else if (first == NULL && contains (insn, epilogue_insn_hash))
		{
		  first = insn;
		  if (note != NULL)
		    break;
		}
	    }

	  if (note)
	    {
	      /* If the function has a single basic block, and no real
		 epilogue insns (e.g. sibcall with no cleanup), the
		 epilogue note can get scheduled before the prologue
		 note.  If we have frame related prologue insns, having
		 them scanned during the epilogue will result in a crash.
		 In this case re-order the epilogue note to just before
		 the last insn in the block.  */
	      if (first == NULL)
		first = BB_END (bb);

	      if (PREV_INSN (first) != note)
		reorder_insns (note, note, PREV_INSN (first));
	    }
	}
    }
}
/* Returns the name of function declared by FNDECL.  */

const char *
fndecl_name (tree fndecl)
{
  if (fndecl == NULL)
    return "(nofn)";
  return lang_hooks.decl_printable_name (fndecl, 1);
}

/* Returns the name of function FN.  */

const char *
function_name (struct function *fn)
{
  tree fndecl = (fn == NULL) ? NULL : fn->decl;
  return fndecl_name (fndecl);
}

/* Returns the name of the current function.  */

const char *
current_function_name (void)
{
  return function_name (cfun);
}
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
/* Insert a TYPE into the used types hash table of CFUN.  */

static void
used_types_insert_helper (tree type, struct function *func)
{
  if (type != NULL && func != NULL)
    {
      if (func->used_types_hash == NULL)
	func->used_types_hash = hash_set<tree>::create_ggc (37);

      func->used_types_hash->add (type);
    }
}
/* Given a type, insert it into the used hash table in cfun.  */

void
used_types_insert (tree t)
{
  while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
    if (TYPE_NAME (t))
      break;
    else
      t = TREE_TYPE (t);
  if (TREE_CODE (t) == ERROR_MARK)
    return;
  if (TYPE_NAME (t) == NULL_TREE
      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
    t = TYPE_MAIN_VARIANT (t);
  if (debug_info_level > DINFO_LEVEL_NONE)
    {
      if (cfun)
	used_types_insert_helper (t, cfun);
      else
	{
	  /* So this might be a type referenced by a global variable.
	     Record that type so that we can later decide to emit its
	     debug information.  */
	  vec_safe_push (types_used_by_cur_var_decl, t);
	}
    }
}
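/* For illustration (a hypothetical example, not part of the compiler):
   given a declaration such as

     struct foo **a[10];

   the stripping loop above walks through the unnamed array and pointer
   types wrapping the element type, so that it is the named type
   `struct foo' itself that gets recorded in the used-types hash for
   debug info purposes.  */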
/* Helper to hash a struct types_used_by_vars_entry.  */

static hashval_t
hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
{
  gcc_assert (entry && entry->var_decl && entry->type);

  return iterative_hash_object (entry->type,
				iterative_hash_object (entry->var_decl, 0));
}

/* Hash function of the types_used_by_vars_entry hash table.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}
/* Equality function of the types_used_by_vars_entry hash table.  */

bool
used_type_hasher::equal (types_used_by_vars_entry *e1,
			 types_used_by_vars_entry *e2)
{
  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
}
/* Inserts an entry into the types_used_by_vars_hash hash table.  */

void
types_used_by_var_decl_insert (tree type, tree var_decl)
{
  if (type != NULL && var_decl != NULL)
    {
      types_used_by_vars_entry **slot;
      struct types_used_by_vars_entry e;
      e.var_decl = var_decl;
      e.type = type;
      if (types_used_by_vars_hash == NULL)
	types_used_by_vars_hash
	  = hash_table<used_type_hasher>::create_ggc (37);

      slot = types_used_by_vars_hash->find_slot (&e, INSERT);
      if (*slot == NULL)
	{
	  struct types_used_by_vars_entry *entry;
	  entry = ggc_alloc<types_used_by_vars_entry> ();
	  entry->type = type;
	  entry->var_decl = var_decl;
	  *slot = entry;
	}
    }
}
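/* Example (a hypothetical caller, for illustration only): a front end
   recording that global variable G_DECL uses its own type would do

     types_used_by_var_decl_insert (TREE_TYPE (g_decl), g_decl);

   Since hashing and equality are defined over the (type, var_decl)
   pair, repeated insertions of the same pair find the same slot and
   leave only one entry in the table.  */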
namespace {

const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace

rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  /* prepare_shrink_wrap is sensitive to the block structure of the control
     flow graph, so clean it up first.  */
  if (optimize)
    cleanup_cfg (0);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Some non-cold blocks may now be only reachable from cold blocks.
     Fix that up.  */
  fixup_partitions ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     so clean up the CFG again.  */
  cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
namespace {

const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace

rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
/* This mini-pass fixes fall-out from SSA in asm statements that have
   in-out constraints.  Say you start with

     orig = inout;
     asm ("": "+mr" (inout));
     use (orig);

   which is transformed very early to use explicit output and match operands:

     orig = inout;
     asm ("": "=mr" (inout) : "0" (inout));
     use (orig);

   Or, after SSA and copyprop,

     asm ("": "=mr" (inout_2) : "0" (inout_1));
     use (inout_1);

   Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
   they represent two separate values, so they will get different pseudo
   registers during expansion.  Then, since the two operands need to match
   per the constraints, but use different pseudo registers, reload can
   only register a reload for these operands.  But reloads can only be
   satisfied by hardregs, not by memory, so we need a register for this
   reload, just because we are presented with non-matching operands.
   So, even though we allow memory for this operand, no memory can be
   used for it, just because the two operands don't match.  This can
   cause reload failures on register-starved targets.

   So it's a symptom of reload not being able to use memory for reloads
   or, alternatively it's also a symptom of both operands not coming into
   reload as matching (in which case the pseudo could go to memory just
   fine, as the alternative allows it, and no reload would be necessary).
   We fix the latter problem here, by transforming

     asm ("": "=mr" (inout_2) : "0" (inout_1));

   back to

     inout_2 = inout_1;
     asm ("": "=mr" (inout_2) : "0" (inout_2));  */
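/* A source-level sketch of the trigger (illustrative only, not taken
   from this file): user code such as

     int x = f ();
     asm ("" : "+mr" (x));

   is what produces the "+mr" in/out constraint discussed above; the
   front end rewrites it into the matched "=mr"/"0" operand pair that
   this pass re-links once SSA optimizations have separated the two
   operands.  */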
static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      if (*constraint == '%')
	constraint++;

      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || !(REG_P (input) || SUBREG_P (input)
	       || MEM_P (input) || CONSTANT_P (input))
	  || !general_operand (input, GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change in once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      start_sequence ();
      emit_move_insn (output, copy_rtx (input));
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we would replace only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  if (changed)
    df_insn_rescan (insn);
}
/* Add the decl D to the local_decls list of FUN.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (VAR_P (d));
  vec_safe_push (fun->local_decls, d);
}
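/* Example (hypothetical, for illustration only): code that materializes
   a new function-local temporary might do

     tree tmp = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    get_identifier ("tmp"), integer_type_node);
     add_local_decl (cfun, tmp);

   The assert above enforces that only VAR_DECLs ever appear in
   local_decls.  */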
namespace {

const pass_data pass_data_match_asm_constraints =
{
  RTL_PASS, /* type */
  "asmcons", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_match_asm_constraints : public rtl_opt_pass
{
public:
  pass_match_asm_constraints (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_match_asm_constraints

unsigned int
pass_match_asm_constraints::execute (function *fun)
{
  basic_block bb;
  rtx_insn *insn;

  if (!crtl->has_asm_statement)
    return 0;

  df_set_flags (DF_DEFER_INSN_RESCAN);
  FOR_EACH_BB_FN (bb, fun)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  rtx pat, *p_sets;
	  int noutputs;

	  if (!INSN_P (insn))
	    continue;

	  pat = PATTERN (insn);
	  if (GET_CODE (pat) == PARALLEL)
	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
	  else if (GET_CODE (pat) == SET)
	    p_sets = &PATTERN (insn), noutputs = 1;
	  else
	    continue;

	  if (GET_CODE (*p_sets) == SET
	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
	    match_asm_constraints_1 (insn, p_sets, noutputs);
	}
    }

  return TODO_df_finish;
}

} // anon namespace

rtl_opt_pass *
make_pass_match_asm_constraints (gcc::context *ctxt)
{
  return new pass_match_asm_constraints (ctxt);
}

#include "gt-function.h"