1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-95, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
47 #include "insn-flags.h"
49 #include "insn-codes.h"
51 #include "hard-reg-set.h"
52 #include "insn-config.h"
55 #include "basic-block.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
88 /* Number of bytes of args popped by function being compiled on its return.
89 Zero if no bytes are to be popped.
90 May affect compilation of return insn or of function epilogue. */
92 int current_function_pops_args
;
94 /* Nonzero if function being compiled needs to be given an address
95 where the value should be stored. */
97 int current_function_returns_struct
;
99 /* Nonzero if function being compiled needs to
100 return the address of where it has put a structure value. */
102 int current_function_returns_pcc_struct
;
104 /* Nonzero if function being compiled needs to be passed a static chain. */
106 int current_function_needs_context
;
108 /* Nonzero if function being compiled can call setjmp. */
110 int current_function_calls_setjmp
;
112 /* Nonzero if function being compiled can call longjmp. */
114 int current_function_calls_longjmp
;
116 /* Nonzero if function being compiled receives nonlocal gotos
117 from nested functions. */
119 int current_function_has_nonlocal_label
;
121 /* Nonzero if function being compiled has nonlocal gotos to parent
124 int current_function_has_nonlocal_goto
;
126 /* Nonzero if function being compiled contains nested functions. */
128 int current_function_contains_functions
;
130 /* Nonzero if function being compiled can call alloca,
131 either as a subroutine or builtin. */
133 int current_function_calls_alloca
;
135 /* Nonzero if the current function returns a pointer type */
137 int current_function_returns_pointer
;
139 /* If some insns can be deferred to the delay slots of the epilogue, the
140 delay list for them is recorded here. */
142 rtx current_function_epilogue_delay_list
;
144 /* If function's args have a fixed size, this is that size, in bytes.
146 May affect compilation of return insn or of function epilogue. */
148 int current_function_args_size
;
150 /* # bytes the prologue should push and pretend that the caller pushed them.
151 The prologue must do this, but only if parms can be passed in registers. */
153 int current_function_pretend_args_size
;
155 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
156 defined, the needed space is pushed by the prologue. */
158 int current_function_outgoing_args_size
;
160 /* This is the offset from the arg pointer to the place where the first
161 anonymous arg can be found, if there is one. */
163 rtx current_function_arg_offset_rtx
;
165 /* Nonzero if current function uses varargs.h or equivalent.
166 Zero for functions that use stdarg.h. */
168 int current_function_varargs
;
170 /* Nonzero if current function uses stdarg.h or equivalent.
171 Zero for functions that use varargs.h. */
173 int current_function_stdarg
;
175 /* Quantities of various kinds of registers
176 used for the current function's args. */
178 CUMULATIVE_ARGS current_function_args_info
;
180 /* Name of function now being compiled. */
182 char *current_function_name
;
184 /* If non-zero, an RTL expression for that location at which the current
185 function returns its result. Always equal to
186 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
187 independently of the tree structures. */
189 rtx current_function_return_rtx
;
191 /* Nonzero if the current function uses the constant pool. */
193 int current_function_uses_const_pool
;
195 /* Nonzero if the current function uses pic_offset_table_rtx. */
196 int current_function_uses_pic_offset_table
;
198 /* The arg pointer hard register, or the pseudo into which it was copied. */
199 rtx current_function_internal_arg_pointer
;
201 /* The FUNCTION_DECL for an inline function currently being expanded. */
202 tree inline_function_decl
;
204 /* Number of function calls seen so far in current function. */
206 int function_call_count
;
208 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
209 (labels to which there can be nonlocal gotos from nested functions)
212 tree nonlocal_labels
;
214 /* RTX for stack slot that holds the current handler for nonlocal gotos.
215 Zero when function does not have nonlocal labels. */
217 rtx nonlocal_goto_handler_slot
;
219 /* RTX for stack slot that holds the stack pointer value to restore
221 Zero when function does not have nonlocal labels. */
223 rtx nonlocal_goto_stack_level
;
225 /* Label that will go on parm cleanup code, if any.
226 Jumping to this label runs cleanup code for parameters, if
227 such code must be run. Following this code is the logical return label. */
231 /* Label that will go on function epilogue.
232 Jumping to this label serves as a "return" instruction
233 on machines which require execution of the epilogue on all returns. */
237 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
238 So we can mark them all live at the end of the function, if nonopt. */
241 /* List (chain of EXPR_LISTs) of all stack slots in this function.
242 Made for the sake of unshare_all_rtl. */
245 /* Chain of all RTL_EXPRs that have insns in them. */
248 /* Label to jump back to for tail recursion, or 0 if we have
249 not yet needed one for this function. */
250 rtx tail_recursion_label
;
252 /* Place after which to insert the tail_recursion_label if we need one. */
253 rtx tail_recursion_reentry
;
255 /* Location at which to save the argument pointer if it will need to be
256 referenced. There are two cases where this is done: if nonlocal gotos
257 exist, or if vars stored at an offset from the argument pointer will be
258 needed by inner routines. */
260 rtx arg_pointer_save_area
;
262 /* Offset to end of allocated area of stack frame.
263 If stack grows down, this is the address of the last stack slot allocated.
264 If stack grows up, this is the address for the next slot. */
267 /* List (chain of TREE_LISTs) of static chains for containing functions.
268 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
269 in an RTL_EXPR in the TREE_VALUE. */
270 static tree context_display
;
272 /* List (chain of TREE_LISTs) of trampolines for nested functions.
273 The trampoline sets up the static chain and jumps to the function.
274 We supply the trampoline's address when the function's address is requested.
276 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
277 in an RTL_EXPR in the TREE_VALUE. */
278 static tree trampoline_list
;
280 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
281 static rtx parm_birth_insn
;
284 /* Nonzero if a stack slot has been generated whose address is not
285 actually valid. It means that the generated rtl must all be scanned
286 to detect and correct the invalid addresses where they occur. */
287 static int invalid_stack_slot
;
290 /* Last insn of those whose job was to put parms into their nominal homes. */
291 static rtx last_parm_insn
;
293 /* 1 + last pseudo register number used for loading a copy
294 of a parameter of this function. */
295 static int max_parm_reg
;
297 /* Vector indexed by REGNO, containing location on stack in which
298 to put the parm which is nominally in pseudo register REGNO,
299 if we discover that that parm must go in the stack. */
300 static rtx
*parm_reg_stack_loc
;
#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */

static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */

static int virtuals_instantiated;
313 /* These variables hold pointers to functions to
314 save and restore machine-specific data,
315 in push_function_context and pop_function_context. */
316 void (*save_machine_status
) PROTO((struct function
*));
317 void (*restore_machine_status
) PROTO((struct function
*));
319 /* Nonzero if we need to distinguish between the return value of this function
320 and the return value of a function called by this function. This helps
323 extern int rtx_equal_function_value_matters
;
324 extern tree sequence_rtl_expr
;
326 /* In order to evaluate some expressions, such as function calls returning
327 structures in memory, we need to temporarily allocate stack locations.
328 We record each allocated temporary in the following structure.
330 Associated with each temporary slot is a nesting level. When we pop up
331 one level, all temporaries associated with the previous level are freed.
332 Normally, all temporaries are freed after the execution of the statement
333 in which they were created. However, if we are inside a ({...}) grouping,
334 the result may be in a temporary and hence must be preserved. If the
335 result could be in a temporary, we preserve it if we can determine which
336 one it is in. If we cannot determine which temporary may contain the
337 result, all temporaries are preserved. A temporary is preserved by
338 pretending it was allocated at the previous nesting level.
340 Automatic variables are also assigned temporary slots, at the nesting
341 level where they are defined. They are marked a "kept" so that
342 free_temp_slots will not free them. */
346 /* Points to next temporary slot. */
347 struct temp_slot
*next
;
348 /* The rtx to used to reference the slot. */
350 /* The rtx used to represent the address if not the address of the
351 slot above. May be an EXPR_LIST if multiple addresses exist. */
353 /* The size, in units, of the slot. */
355 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
357 /* Non-zero if this temporary is currently in use. */
359 /* Non-zero if this temporary has its address taken. */
361 /* Nesting level at which this slot is being used. */
363 /* Non-zero if this should survive a call to free_temp_slots. */
365 /* The offset of the slot from the frame_pointer, including extra space
366 for alignment. This info is for combine_temp_slots. */
368 /* The size of the slot, including extra space for alignment. This
369 info is for combine_temp_slots. */
373 /* List of all temporaries allocated, both available and in use. */
375 struct temp_slot
*temp_slots
;
377 /* Current nesting level for temporaries. */
381 /* The FUNCTION_DECL node for the current function. */
382 static tree this_function_decl
;
384 /* Callinfo pointer for the current function. */
385 static rtx this_function_callinfo
;
387 /* The label in the bytecode file of this function's actual bytecode.
389 static char *this_function_bytecode
;
391 /* The call description vector for the current function. */
392 static rtx this_function_calldesc
;
394 /* Size of the local variables allocated for the current function. */
397 /* Current depth of the bytecode evaluation stack. */
400 /* Maximum depth of the evaluation stack in this function. */
403 /* Current depth in statement expressions. */
404 static int stmt_expr_depth
;
406 /* This structure is used to record MEMs or pseudos used to replace VAR, any
407 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
408 maintain this list in case two operands of an insn were required to match;
409 in that case we must ensure we use the same replacement. */
411 struct fixup_replacement
415 struct fixup_replacement
*next
;
418 /* Forward declarations. */
420 static struct temp_slot
*find_temp_slot_from_address
PROTO((rtx
));
421 static void put_reg_into_stack
PROTO((struct function
*, rtx
, tree
,
422 enum machine_mode
, enum machine_mode
,
424 static void fixup_var_refs
PROTO((rtx
, enum machine_mode
, int));
425 static struct fixup_replacement
426 *find_fixup_replacement
PROTO((struct fixup_replacement
**, rtx
));
427 static void fixup_var_refs_insns
PROTO((rtx
, enum machine_mode
, int,
429 static void fixup_var_refs_1
PROTO((rtx
, enum machine_mode
, rtx
*, rtx
,
430 struct fixup_replacement
**));
431 static rtx fixup_memory_subreg
PROTO((rtx
, rtx
, int));
432 static rtx walk_fixup_memory_subreg
PROTO((rtx
, rtx
, int));
433 static rtx fixup_stack_1
PROTO((rtx
, rtx
));
434 static void optimize_bit_field
PROTO((rtx
, rtx
, rtx
*));
435 static void instantiate_decls
PROTO((tree
, int));
436 static void instantiate_decls_1
PROTO((tree
, int));
437 static void instantiate_decl
PROTO((rtx
, int, int));
438 static int instantiate_virtual_regs_1
PROTO((rtx
*, rtx
, int));
439 static void delete_handlers
PROTO((void));
440 static void pad_to_arg_alignment
PROTO((struct args_size
*, int));
441 static void pad_below
PROTO((struct args_size
*, enum machine_mode
,
443 static tree round_down
PROTO((tree
, int));
444 static rtx round_trampoline_addr
PROTO((rtx
));
445 static tree blocks_nreverse
PROTO((tree
));
446 static int all_blocks
PROTO((tree
, tree
*));
447 static int *record_insns
PROTO((rtx
));
448 static int contains
PROTO((rtx
, int *));
450 /* Pointer to chain of `struct function' for containing functions. */
451 struct function
*outer_function_chain
;
453 /* Given a function decl for a containing function,
454 return the `struct function' for it. */
457 find_function_data (decl
)
461 for (p
= outer_function_chain
; p
; p
= p
->next
)
467 /* Save the current context for compilation of a nested function.
468 This is called from language-specific code.
469 The caller is responsible for saving any language-specific status,
470 since this function knows only about language-independent variables. */
473 push_function_context_to (context
)
476 struct function
*p
= (struct function
*) xmalloc (sizeof (struct function
));
478 p
->next
= outer_function_chain
;
479 outer_function_chain
= p
;
481 p
->name
= current_function_name
;
482 p
->decl
= current_function_decl
;
483 p
->pops_args
= current_function_pops_args
;
484 p
->returns_struct
= current_function_returns_struct
;
485 p
->returns_pcc_struct
= current_function_returns_pcc_struct
;
486 p
->returns_pointer
= current_function_returns_pointer
;
487 p
->needs_context
= current_function_needs_context
;
488 p
->calls_setjmp
= current_function_calls_setjmp
;
489 p
->calls_longjmp
= current_function_calls_longjmp
;
490 p
->calls_alloca
= current_function_calls_alloca
;
491 p
->has_nonlocal_label
= current_function_has_nonlocal_label
;
492 p
->has_nonlocal_goto
= current_function_has_nonlocal_goto
;
493 p
->contains_functions
= current_function_contains_functions
;
494 p
->args_size
= current_function_args_size
;
495 p
->pretend_args_size
= current_function_pretend_args_size
;
496 p
->arg_offset_rtx
= current_function_arg_offset_rtx
;
497 p
->varargs
= current_function_varargs
;
498 p
->stdarg
= current_function_stdarg
;
499 p
->uses_const_pool
= current_function_uses_const_pool
;
500 p
->uses_pic_offset_table
= current_function_uses_pic_offset_table
;
501 p
->internal_arg_pointer
= current_function_internal_arg_pointer
;
502 p
->max_parm_reg
= max_parm_reg
;
503 p
->parm_reg_stack_loc
= parm_reg_stack_loc
;
504 p
->outgoing_args_size
= current_function_outgoing_args_size
;
505 p
->return_rtx
= current_function_return_rtx
;
506 p
->nonlocal_goto_handler_slot
= nonlocal_goto_handler_slot
;
507 p
->nonlocal_goto_stack_level
= nonlocal_goto_stack_level
;
508 p
->nonlocal_labels
= nonlocal_labels
;
509 p
->cleanup_label
= cleanup_label
;
510 p
->return_label
= return_label
;
511 p
->save_expr_regs
= save_expr_regs
;
512 p
->stack_slot_list
= stack_slot_list
;
513 p
->parm_birth_insn
= parm_birth_insn
;
514 p
->frame_offset
= frame_offset
;
515 p
->tail_recursion_label
= tail_recursion_label
;
516 p
->tail_recursion_reentry
= tail_recursion_reentry
;
517 p
->arg_pointer_save_area
= arg_pointer_save_area
;
518 p
->rtl_expr_chain
= rtl_expr_chain
;
519 p
->last_parm_insn
= last_parm_insn
;
520 p
->context_display
= context_display
;
521 p
->trampoline_list
= trampoline_list
;
522 p
->function_call_count
= function_call_count
;
523 p
->temp_slots
= temp_slots
;
524 p
->temp_slot_level
= temp_slot_level
;
525 p
->fixup_var_refs_queue
= 0;
526 p
->epilogue_delay_list
= current_function_epilogue_delay_list
;
528 save_tree_status (p
, context
);
529 save_storage_status (p
);
530 save_emit_status (p
);
532 save_expr_status (p
);
533 save_stmt_status (p
);
534 save_varasm_status (p
);
536 if (save_machine_status
)
537 (*save_machine_status
) (p
);
541 push_function_context ()
543 push_function_context_to (current_function_decl
);
546 /* Restore the last saved context, at the end of a nested function.
547 This function is called from language-specific code. */
550 pop_function_context_from (context
)
553 struct function
*p
= outer_function_chain
;
555 outer_function_chain
= p
->next
;
557 current_function_contains_functions
558 = p
->contains_functions
|| p
->inline_obstacks
559 || context
== current_function_decl
;
560 current_function_name
= p
->name
;
561 current_function_decl
= p
->decl
;
562 current_function_pops_args
= p
->pops_args
;
563 current_function_returns_struct
= p
->returns_struct
;
564 current_function_returns_pcc_struct
= p
->returns_pcc_struct
;
565 current_function_returns_pointer
= p
->returns_pointer
;
566 current_function_needs_context
= p
->needs_context
;
567 current_function_calls_setjmp
= p
->calls_setjmp
;
568 current_function_calls_longjmp
= p
->calls_longjmp
;
569 current_function_calls_alloca
= p
->calls_alloca
;
570 current_function_has_nonlocal_label
= p
->has_nonlocal_label
;
571 current_function_has_nonlocal_goto
= p
->has_nonlocal_goto
;
572 current_function_args_size
= p
->args_size
;
573 current_function_pretend_args_size
= p
->pretend_args_size
;
574 current_function_arg_offset_rtx
= p
->arg_offset_rtx
;
575 current_function_varargs
= p
->varargs
;
576 current_function_stdarg
= p
->stdarg
;
577 current_function_uses_const_pool
= p
->uses_const_pool
;
578 current_function_uses_pic_offset_table
= p
->uses_pic_offset_table
;
579 current_function_internal_arg_pointer
= p
->internal_arg_pointer
;
580 max_parm_reg
= p
->max_parm_reg
;
581 parm_reg_stack_loc
= p
->parm_reg_stack_loc
;
582 current_function_outgoing_args_size
= p
->outgoing_args_size
;
583 current_function_return_rtx
= p
->return_rtx
;
584 nonlocal_goto_handler_slot
= p
->nonlocal_goto_handler_slot
;
585 nonlocal_goto_stack_level
= p
->nonlocal_goto_stack_level
;
586 nonlocal_labels
= p
->nonlocal_labels
;
587 cleanup_label
= p
->cleanup_label
;
588 return_label
= p
->return_label
;
589 save_expr_regs
= p
->save_expr_regs
;
590 stack_slot_list
= p
->stack_slot_list
;
591 parm_birth_insn
= p
->parm_birth_insn
;
592 frame_offset
= p
->frame_offset
;
593 tail_recursion_label
= p
->tail_recursion_label
;
594 tail_recursion_reentry
= p
->tail_recursion_reentry
;
595 arg_pointer_save_area
= p
->arg_pointer_save_area
;
596 rtl_expr_chain
= p
->rtl_expr_chain
;
597 last_parm_insn
= p
->last_parm_insn
;
598 context_display
= p
->context_display
;
599 trampoline_list
= p
->trampoline_list
;
600 function_call_count
= p
->function_call_count
;
601 temp_slots
= p
->temp_slots
;
602 temp_slot_level
= p
->temp_slot_level
;
603 current_function_epilogue_delay_list
= p
->epilogue_delay_list
;
606 restore_tree_status (p
);
607 restore_storage_status (p
);
608 restore_expr_status (p
);
609 restore_emit_status (p
);
610 restore_stmt_status (p
);
611 restore_varasm_status (p
);
613 if (restore_machine_status
)
614 (*restore_machine_status
) (p
);
616 /* Finish doing put_var_into_stack for any of our variables
617 which became addressable during the nested function. */
619 struct var_refs_queue
*queue
= p
->fixup_var_refs_queue
;
620 for (; queue
; queue
= queue
->next
)
621 fixup_var_refs (queue
->modified
, queue
->promoted_mode
, queue
->unsignedp
);
626 /* Reset variables that have known state during rtx generation. */
627 rtx_equal_function_value_matters
= 1;
628 virtuals_instantiated
= 0;
631 void pop_function_context ()
633 pop_function_context_from (current_function_decl
);
636 /* Allocate fixed slots in the stack frame of the current function. */
638 /* Return size needed for stack frame based on slots so far allocated.
639 This size counts from zero. It is not rounded to STACK_BOUNDARY;
640 the caller may have to do that. */
645 #ifdef FRAME_GROWS_DOWNWARD
646 return -frame_offset
;
652 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
653 with machine mode MODE.
655 ALIGN controls the amount of alignment for the address of the slot:
656 0 means according to MODE,
657 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
658 positive specifies alignment boundary in bits.
660 We do not round to stack_boundary here. */
663 assign_stack_local (mode
, size
, align
)
664 enum machine_mode mode
;
668 register rtx x
, addr
;
669 int bigend_correction
= 0;
674 alignment
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
676 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
678 else if (align
== -1)
680 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
681 size
= CEIL_ROUND (size
, alignment
);
684 alignment
= align
/ BITS_PER_UNIT
;
686 /* Round frame offset to that alignment.
687 We must be careful here, since FRAME_OFFSET might be negative and
688 division with a negative dividend isn't as well defined as we might
689 like. So we instead assume that ALIGNMENT is a power of two and
690 use logical operations which are unambiguous. */
691 #ifdef FRAME_GROWS_DOWNWARD
692 frame_offset
= FLOOR_ROUND (frame_offset
, alignment
);
694 frame_offset
= CEIL_ROUND (frame_offset
, alignment
);
697 /* On a big-endian machine, if we are allocating more space than we will use,
698 use the least significant bytes of those that are allocated. */
699 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
700 bigend_correction
= size
- GET_MODE_SIZE (mode
);
702 #ifdef FRAME_GROWS_DOWNWARD
703 frame_offset
-= size
;
706 /* If we have already instantiated virtual registers, return the actual
707 address relative to the frame pointer. */
708 if (virtuals_instantiated
)
709 addr
= plus_constant (frame_pointer_rtx
,
710 (frame_offset
+ bigend_correction
711 + STARTING_FRAME_OFFSET
));
713 addr
= plus_constant (virtual_stack_vars_rtx
,
714 frame_offset
+ bigend_correction
);
716 #ifndef FRAME_GROWS_DOWNWARD
717 frame_offset
+= size
;
720 x
= gen_rtx (MEM
, mode
, addr
);
722 stack_slot_list
= gen_rtx (EXPR_LIST
, VOIDmode
, x
, stack_slot_list
);
727 /* Assign a stack slot in a containing function.
728 First three arguments are same as in preceding function.
729 The last argument specifies the function to allocate in. */
732 assign_outer_stack_local (mode
, size
, align
, function
)
733 enum machine_mode mode
;
736 struct function
*function
;
738 register rtx x
, addr
;
739 int bigend_correction
= 0;
742 /* Allocate in the memory associated with the function in whose frame
744 push_obstacks (function
->function_obstack
,
745 function
->function_maybepermanent_obstack
);
749 alignment
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
751 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
753 else if (align
== -1)
755 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
756 size
= CEIL_ROUND (size
, alignment
);
759 alignment
= align
/ BITS_PER_UNIT
;
761 /* Round frame offset to that alignment. */
762 #ifdef FRAME_GROWS_DOWNWARD
763 function
->frame_offset
= FLOOR_ROUND (function
->frame_offset
, alignment
);
765 function
->frame_offset
= CEIL_ROUND (function
->frame_offset
, alignment
);
768 /* On a big-endian machine, if we are allocating more space than we will use,
769 use the least significant bytes of those that are allocated. */
770 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
771 bigend_correction
= size
- GET_MODE_SIZE (mode
);
773 #ifdef FRAME_GROWS_DOWNWARD
774 function
->frame_offset
-= size
;
776 addr
= plus_constant (virtual_stack_vars_rtx
,
777 function
->frame_offset
+ bigend_correction
);
778 #ifndef FRAME_GROWS_DOWNWARD
779 function
->frame_offset
+= size
;
782 x
= gen_rtx (MEM
, mode
, addr
);
784 function
->stack_slot_list
785 = gen_rtx (EXPR_LIST
, VOIDmode
, x
, function
->stack_slot_list
);
792 /* Allocate a temporary stack slot and record it for possible later
795 MODE is the machine mode to be given to the returned rtx.
797 SIZE is the size in units of the space required. We do no rounding here
798 since assign_stack_local will do any required rounding.
800 KEEP is 1 if this slot is to be retained after a call to
801 free_temp_slots. Automatic variables for a block are allocated
802 with this flag. KEEP is 2, if we allocate a longer term temporary,
803 whose lifetime is controlled by CLEANUP_POINT_EXPRs. */
806 assign_stack_temp (mode
, size
, keep
)
807 enum machine_mode mode
;
811 struct temp_slot
*p
, *best_p
= 0;
813 /* If SIZE is -1 it means that somebody tried to allocate a temporary
814 of a variable size. */
818 /* First try to find an available, already-allocated temporary that is the
819 exact size we require. */
820 for (p
= temp_slots
; p
; p
= p
->next
)
821 if (p
->size
== size
&& GET_MODE (p
->slot
) == mode
&& ! p
->in_use
)
824 /* If we didn't find, one, try one that is larger than what we want. We
825 find the smallest such. */
827 for (p
= temp_slots
; p
; p
= p
->next
)
828 if (p
->size
> size
&& GET_MODE (p
->slot
) == mode
&& ! p
->in_use
829 && (best_p
== 0 || best_p
->size
> p
->size
))
832 /* Make our best, if any, the one to use. */
835 /* If there are enough aligned bytes left over, make them into a new
836 temp_slot so that the extra bytes don't get wasted. Do this only
837 for BLKmode slots, so that we can be sure of the alignment. */
838 if (GET_MODE (best_p
->slot
) == BLKmode
)
840 int alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
841 int rounded_size
= CEIL_ROUND (size
, alignment
);
843 if (best_p
->size
- rounded_size
>= alignment
)
845 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
846 p
->in_use
= p
->addr_taken
= 0;
847 p
->size
= best_p
->size
- rounded_size
;
848 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
849 p
->full_size
= best_p
->full_size
- rounded_size
;
850 p
->slot
= gen_rtx (MEM
, BLKmode
,
851 plus_constant (XEXP (best_p
->slot
, 0),
855 p
->next
= temp_slots
;
858 stack_slot_list
= gen_rtx (EXPR_LIST
, VOIDmode
, p
->slot
,
861 best_p
->size
= rounded_size
;
862 best_p
->full_size
= rounded_size
;
869 /* If we still didn't find one, make a new temporary. */
872 int frame_offset_old
= frame_offset
;
873 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
874 /* If the temp slot mode doesn't indicate the alignment,
875 use the largest possible, so no one will be disappointed. */
876 p
->slot
= assign_stack_local (mode
, size
, mode
== BLKmode
? -1 : 0);
877 /* The following slot size computation is necessary because we don't
878 know the actual size of the temporary slot until assign_stack_local
879 has performed all the frame alignment and size rounding for the
880 requested temporary. Note that extra space added for alignment
881 can be either above or below this stack slot depending on which
882 way the frame grows. We include the extra space if and only if it
883 is above this slot. */
884 #ifdef FRAME_GROWS_DOWNWARD
885 p
->size
= frame_offset_old
- frame_offset
;
889 /* Now define the fields used by combine_temp_slots. */
890 #ifdef FRAME_GROWS_DOWNWARD
891 p
->base_offset
= frame_offset
;
892 p
->full_size
= frame_offset_old
- frame_offset
;
894 p
->base_offset
= frame_offset_old
;
895 p
->full_size
= frame_offset
- frame_offset_old
;
898 p
->next
= temp_slots
;
904 p
->rtl_expr
= sequence_rtl_expr
;
908 p
->level
= target_temp_slot_level
;
913 p
->level
= temp_slot_level
;
919 /* Assign a temporary of given TYPE.
920 KEEP is as for assign_stack_temp.
921 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
922 it is 0 if a register is OK.
923 DONT_PROMOTE is 1 if we should not promote values in register
927 assign_temp (type
, keep
, memory_required
, dont_promote
)
933 enum machine_mode mode
= TYPE_MODE (type
);
934 int unsignedp
= TREE_UNSIGNED (type
);
936 if (mode
== BLKmode
|| memory_required
)
938 int size
= int_size_in_bytes (type
);
941 /* Unfortunately, we don't yet know how to allocate variable-sized
942 temporaries. However, sometimes we have a fixed upper limit on
943 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
944 instead. This is the case for Chill variable-sized strings. */
945 if (size
== -1 && TREE_CODE (type
) == ARRAY_TYPE
946 && TYPE_ARRAY_MAX_SIZE (type
) != NULL_TREE
947 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type
)) == INTEGER_CST
)
948 size
= TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type
));
950 tmp
= assign_stack_temp (mode
, size
, keep
);
951 MEM_IN_STRUCT_P (tmp
) = AGGREGATE_TYPE_P (type
);
955 #ifndef PROMOTE_FOR_CALL_ONLY
957 mode
= promote_mode (type
, mode
, &unsignedp
, 0);
960 return gen_reg_rtx (mode
);
963 /* Combine temporary stack slots which are adjacent on the stack.
965 This allows for better use of already allocated stack space. This is only
966 done for BLKmode slots because we can be sure that we won't have alignment
967 problems in this case. */
970 combine_temp_slots ()
972 struct temp_slot
*p
, *q
;
973 struct temp_slot
*prev_p
, *prev_q
;
974 /* Determine where to free back to after this function. */
975 rtx free_pointer
= rtx_alloc (CONST_INT
);
977 for (p
= temp_slots
, prev_p
= 0; p
; p
= prev_p
? prev_p
->next
: temp_slots
)
980 if (! p
->in_use
&& GET_MODE (p
->slot
) == BLKmode
)
981 for (q
= p
->next
, prev_q
= p
; q
; q
= prev_q
->next
)
984 if (! q
->in_use
&& GET_MODE (q
->slot
) == BLKmode
)
986 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
988 /* Q comes after P; combine Q into P. */
990 p
->full_size
+= q
->full_size
;
993 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
995 /* P comes after Q; combine P into Q. */
997 q
->full_size
+= p
->full_size
;
1002 /* Either delete Q or advance past it. */
1004 prev_q
->next
= q
->next
;
1008 /* Either delete P or advance past it. */
1012 prev_p
->next
= p
->next
;
1014 temp_slots
= p
->next
;
1020 /* Free all the RTL made by plus_constant. */
1021 rtx_free (free_pointer
);
1024 /* Find the temp slot corresponding to the object at address X. */
1026 static struct temp_slot
*
1027 find_temp_slot_from_address (x
)
1030 struct temp_slot
*p
;
1033 for (p
= temp_slots
; p
; p
= p
->next
)
1037 else if (XEXP (p
->slot
, 0) == x
1041 else if (p
->address
!= 0 && GET_CODE (p
->address
) == EXPR_LIST
)
1042 for (next
= p
->address
; next
; next
= XEXP (next
, 1))
1043 if (XEXP (next
, 0) == x
)
1050 /* Indicate that NEW is an alternate way of referring to the temp slot
1051 that previous was known by OLD. */
1054 update_temp_slot_address (old
, new)
1057 struct temp_slot
*p
= find_temp_slot_from_address (old
);
1059 /* If none, return. Else add NEW as an alias. */
1062 else if (p
->address
== 0)
1066 if (GET_CODE (p
->address
) != EXPR_LIST
)
1067 p
->address
= gen_rtx (EXPR_LIST
, VOIDmode
, p
->address
, NULL_RTX
);
1069 p
->address
= gen_rtx (EXPR_LIST
, VOIDmode
, new, p
->address
);
1073 /* If X could be a reference to a temporary slot, mark the fact that its
1074 address was taken. */
1077 mark_temp_addr_taken (x
)
1080 struct temp_slot
*p
;
1085 /* If X is not in memory or is at a constant address, it cannot be in
1086 a temporary slot. */
1087 if (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1090 p
= find_temp_slot_from_address (XEXP (x
, 0));
1095 /* If X could be a reference to a temporary slot, mark that slot as
1096 belonging to the to one level higher than the current level. If X
1097 matched one of our slots, just mark that one. Otherwise, we can't
1098 easily predict which it is, so upgrade all of them. Kept slots
1099 need not be touched.
1101 This is called when an ({...}) construct occurs and a statement
1102 returns a value in memory. */
1105 preserve_temp_slots (x
)
1108 struct temp_slot
*p
= 0;
1110 /* If there is no result, we still might have some objects whose address
1111 were taken, so we need to make sure they stay around. */
1114 for (p
= temp_slots
; p
; p
= p
->next
)
1115 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1121 /* If X is a register that is being used as a pointer, see if we have
1122 a temporary slot we know it points to. To be consistent with
1123 the code below, we really should preserve all non-kept slots
1124 if we can't find a match, but that seems to be much too costly. */
1125 if (GET_CODE (x
) == REG
&& REGNO_POINTER_FLAG (REGNO (x
)))
1126 p
= find_temp_slot_from_address (x
);
1128 /* If X is not in memory or is at a constant address, it cannot be in
1129 a temporary slot, but it can contain something whose address was
1131 if (p
== 0 && (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0))))
1133 for (p
= temp_slots
; p
; p
= p
->next
)
1134 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1140 /* First see if we can find a match. */
1142 p
= find_temp_slot_from_address (XEXP (x
, 0));
1146 /* Move everything at our level whose address was taken to our new
1147 level in case we used its address. */
1148 struct temp_slot
*q
;
1150 if (p
->level
== temp_slot_level
)
1152 for (q
= temp_slots
; q
; q
= q
->next
)
1153 if (q
!= p
&& q
->addr_taken
&& q
->level
== p
->level
)
1162 /* Otherwise, preserve all non-kept slots at this level. */
1163 for (p
= temp_slots
; p
; p
= p
->next
)
1164 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
)
1168 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1169 with that RTL_EXPR, promote it into a temporary slot at the present
1170 level so it will not be freed when we free slots made in the
1174 preserve_rtl_expr_result (x
)
1177 struct temp_slot
*p
;
1179 /* If X is not in memory or is at a constant address, it cannot be in
1180 a temporary slot. */
1181 if (x
== 0 || GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1184 /* If we can find a match, move it to our level unless it is already at
1186 p
= find_temp_slot_from_address (XEXP (x
, 0));
1189 p
->level
= MIN (p
->level
, temp_slot_level
);
1196 /* Free all temporaries used so far. This is normally called at the end
1197 of generating code for a statement. Don't free any temporaries
1198 currently in use for an RTL_EXPR that hasn't yet been emitted.
1199 We could eventually do better than this since it can be reused while
1200 generating the same RTL_EXPR, but this is complex and probably not
1206 struct temp_slot
*p
;
1208 for (p
= temp_slots
; p
; p
= p
->next
)
1209 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
1210 && p
->rtl_expr
== 0)
1213 combine_temp_slots ();
1216 /* Free all temporary slots used in T, an RTL_EXPR node. */
1219 free_temps_for_rtl_expr (t
)
1222 struct temp_slot
*p
;
1224 for (p
= temp_slots
; p
; p
= p
->next
)
1225 if (p
->rtl_expr
== t
)
1228 combine_temp_slots ();
1231 /* Push deeper into the nesting level for stack temporaries. */
1239 /* Pop a temporary nesting level. All slots in use in the current level
1245 struct temp_slot
*p
;
1247 for (p
= temp_slots
; p
; p
= p
->next
)
1248 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->rtl_expr
== 0)
1251 combine_temp_slots ();
1256 /* Initialize temporary slots. */
1261 /* We have not allocated any temporaries yet. */
1263 temp_slot_level
= 0;
1264 target_temp_slot_level
= 0;
1267 /* Retroactively move an auto variable from a register to a stack slot.
1268 This is done when an address-reference to the variable is seen. */
1271 put_var_into_stack (decl
)
1275 enum machine_mode promoted_mode
, decl_mode
;
1276 struct function
*function
= 0;
1279 if (output_bytecode
)
1282 context
= decl_function_context (decl
);
1284 /* Get the current rtl used for this object and it's original mode. */
1285 reg
= TREE_CODE (decl
) == SAVE_EXPR
? SAVE_EXPR_RTL (decl
) : DECL_RTL (decl
);
1287 /* No need to do anything if decl has no rtx yet
1288 since in that case caller is setting TREE_ADDRESSABLE
1289 and a stack slot will be assigned when the rtl is made. */
1293 /* Get the declared mode for this object. */
1294 decl_mode
= (TREE_CODE (decl
) == SAVE_EXPR
? TYPE_MODE (TREE_TYPE (decl
))
1295 : DECL_MODE (decl
));
1296 /* Get the mode it's actually stored in. */
1297 promoted_mode
= GET_MODE (reg
);
1299 /* If this variable comes from an outer function,
1300 find that function's saved context. */
1301 if (context
!= current_function_decl
)
1302 for (function
= outer_function_chain
; function
; function
= function
->next
)
1303 if (function
->decl
== context
)
1306 /* If this is a variable-size object with a pseudo to address it,
1307 put that pseudo into the stack, if the var is nonlocal. */
1308 if (DECL_NONLOCAL (decl
)
1309 && GET_CODE (reg
) == MEM
1310 && GET_CODE (XEXP (reg
, 0)) == REG
1311 && REGNO (XEXP (reg
, 0)) > LAST_VIRTUAL_REGISTER
)
1313 reg
= XEXP (reg
, 0);
1314 decl_mode
= promoted_mode
= GET_MODE (reg
);
1317 /* Now we should have a value that resides in one or more pseudo regs. */
1319 if (GET_CODE (reg
) == REG
)
1320 put_reg_into_stack (function
, reg
, TREE_TYPE (decl
),
1321 promoted_mode
, decl_mode
, TREE_SIDE_EFFECTS (decl
));
1322 else if (GET_CODE (reg
) == CONCAT
)
1324 /* A CONCAT contains two pseudos; put them both in the stack.
1325 We do it so they end up consecutive. */
1326 enum machine_mode part_mode
= GET_MODE (XEXP (reg
, 0));
1327 tree part_type
= TREE_TYPE (TREE_TYPE (decl
));
1328 #ifdef FRAME_GROWS_DOWNWARD
1329 /* Since part 0 should have a lower address, do it second. */
1330 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1331 part_mode
, TREE_SIDE_EFFECTS (decl
));
1332 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1333 part_mode
, TREE_SIDE_EFFECTS (decl
));
1335 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1336 part_mode
, TREE_SIDE_EFFECTS (decl
));
1337 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1338 part_mode
, TREE_SIDE_EFFECTS (decl
));
1341 /* Change the CONCAT into a combined MEM for both parts. */
1342 PUT_CODE (reg
, MEM
);
1343 MEM_VOLATILE_P (reg
) = MEM_VOLATILE_P (XEXP (reg
, 0));
1345 /* The two parts are in memory order already.
1346 Use the lower parts address as ours. */
1347 XEXP (reg
, 0) = XEXP (XEXP (reg
, 0), 0);
1348 /* Prevent sharing of rtl that might lose. */
1349 if (GET_CODE (XEXP (reg
, 0)) == PLUS
)
1350 XEXP (reg
, 0) = copy_rtx (XEXP (reg
, 0));
1354 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1355 into the stack frame of FUNCTION (0 means the current function).
1356 DECL_MODE is the machine mode of the user-level data type.
1357 PROMOTED_MODE is the machine mode of the register.
1358 VOLATILE_P is nonzero if this is for a "volatile" decl. */
1361 put_reg_into_stack (function
, reg
, type
, promoted_mode
, decl_mode
, volatile_p
)
1362 struct function
*function
;
1365 enum machine_mode promoted_mode
, decl_mode
;
1372 if (REGNO (reg
) < function
->max_parm_reg
)
1373 new = function
->parm_reg_stack_loc
[REGNO (reg
)];
1375 new = assign_outer_stack_local (decl_mode
, GET_MODE_SIZE (decl_mode
),
1380 if (REGNO (reg
) < max_parm_reg
)
1381 new = parm_reg_stack_loc
[REGNO (reg
)];
1383 new = assign_stack_local (decl_mode
, GET_MODE_SIZE (decl_mode
), 0);
1386 PUT_MODE (reg
, decl_mode
);
1387 XEXP (reg
, 0) = XEXP (new, 0);
1388 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1389 MEM_VOLATILE_P (reg
) = volatile_p
;
1390 PUT_CODE (reg
, MEM
);
1392 /* If this is a memory ref that contains aggregate components,
1393 mark it as such for cse and loop optimize. */
1394 MEM_IN_STRUCT_P (reg
) = AGGREGATE_TYPE_P (type
);
1396 /* Now make sure that all refs to the variable, previously made
1397 when it was a register, are fixed up to be valid again. */
1400 struct var_refs_queue
*temp
;
1402 /* Variable is inherited; fix it up when we get back to its function. */
1403 push_obstacks (function
->function_obstack
,
1404 function
->function_maybepermanent_obstack
);
1406 /* See comment in restore_tree_status in tree.c for why this needs to be
1407 on saveable obstack. */
1409 = (struct var_refs_queue
*) savealloc (sizeof (struct var_refs_queue
));
1410 temp
->modified
= reg
;
1411 temp
->promoted_mode
= promoted_mode
;
1412 temp
->unsignedp
= TREE_UNSIGNED (type
);
1413 temp
->next
= function
->fixup_var_refs_queue
;
1414 function
->fixup_var_refs_queue
= temp
;
1418 /* Variable is local; fix it up now. */
1419 fixup_var_refs (reg
, promoted_mode
, TREE_UNSIGNED (type
));
1423 fixup_var_refs (var
, promoted_mode
, unsignedp
)
1425 enum machine_mode promoted_mode
;
1429 rtx first_insn
= get_insns ();
1430 struct sequence_stack
*stack
= sequence_stack
;
1431 tree rtl_exps
= rtl_expr_chain
;
1433 /* Must scan all insns for stack-refs that exceed the limit. */
1434 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, first_insn
, stack
== 0);
1436 /* Scan all pending sequences too. */
1437 for (; stack
; stack
= stack
->next
)
1439 push_to_sequence (stack
->first
);
1440 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
,
1441 stack
->first
, stack
->next
!= 0);
1442 /* Update remembered end of sequence
1443 in case we added an insn at the end. */
1444 stack
->last
= get_last_insn ();
1448 /* Scan all waiting RTL_EXPRs too. */
1449 for (pending
= rtl_exps
; pending
; pending
= TREE_CHAIN (pending
))
1451 rtx seq
= RTL_EXPR_SEQUENCE (TREE_VALUE (pending
));
1452 if (seq
!= const0_rtx
&& seq
!= 0)
1454 push_to_sequence (seq
);
1455 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, seq
, 0);
1461 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1462 some part of an insn. Return a struct fixup_replacement whose OLD
1463 value is equal to X. Allocate a new structure if no such entry exists. */
1465 static struct fixup_replacement
*
1466 find_fixup_replacement (replacements
, x
)
1467 struct fixup_replacement
**replacements
;
1470 struct fixup_replacement
*p
;
1472 /* See if we have already replaced this. */
1473 for (p
= *replacements
; p
&& p
->old
!= x
; p
= p
->next
)
1478 p
= (struct fixup_replacement
*) oballoc (sizeof (struct fixup_replacement
));
1481 p
->next
= *replacements
;
1488 /* Scan the insn-chain starting with INSN for refs to VAR
1489 and fix them up. TOPLEVEL is nonzero if this chain is the
1490 main chain of insns for the current function. */
1493 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, insn
, toplevel
)
1495 enum machine_mode promoted_mode
;
1504 rtx next
= NEXT_INSN (insn
);
1506 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1508 /* If this is a CLOBBER of VAR, delete it.
1510 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1511 and REG_RETVAL notes too. */
1512 if (GET_CODE (PATTERN (insn
)) == CLOBBER
1513 && XEXP (PATTERN (insn
), 0) == var
)
1515 if ((note
= find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
)) != 0)
1516 /* The REG_LIBCALL note will go away since we are going to
1517 turn INSN into a NOTE, so just delete the
1518 corresponding REG_RETVAL note. */
1519 remove_note (XEXP (note
, 0),
1520 find_reg_note (XEXP (note
, 0), REG_RETVAL
,
1523 /* In unoptimized compilation, we shouldn't call delete_insn
1524 except in jump.c doing warnings. */
1525 PUT_CODE (insn
, NOTE
);
1526 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1527 NOTE_SOURCE_FILE (insn
) = 0;
1530 /* The insn to load VAR from a home in the arglist
1531 is now a no-op. When we see it, just delete it. */
1533 && GET_CODE (PATTERN (insn
)) == SET
1534 && SET_DEST (PATTERN (insn
)) == var
1535 /* If this represents the result of an insn group,
1536 don't delete the insn. */
1537 && find_reg_note (insn
, REG_RETVAL
, NULL_RTX
) == 0
1538 && rtx_equal_p (SET_SRC (PATTERN (insn
)), var
))
1540 /* In unoptimized compilation, we shouldn't call delete_insn
1541 except in jump.c doing warnings. */
1542 PUT_CODE (insn
, NOTE
);
1543 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1544 NOTE_SOURCE_FILE (insn
) = 0;
1545 if (insn
== last_parm_insn
)
1546 last_parm_insn
= PREV_INSN (next
);
1550 struct fixup_replacement
*replacements
= 0;
1551 rtx next_insn
= NEXT_INSN (insn
);
1553 #ifdef SMALL_REGISTER_CLASSES
1554 /* If the insn that copies the results of a CALL_INSN
1555 into a pseudo now references VAR, we have to use an
1556 intermediate pseudo since we want the life of the
1557 return value register to be only a single insn.
1559 If we don't use an intermediate pseudo, such things as
1560 address computations to make the address of VAR valid
1561 if it is not can be placed between the CALL_INSN and INSN.
1563 To make sure this doesn't happen, we record the destination
1564 of the CALL_INSN and see if the next insn uses both that
1567 if (call_dest
!= 0 && GET_CODE (insn
) == INSN
1568 && reg_mentioned_p (var
, PATTERN (insn
))
1569 && reg_mentioned_p (call_dest
, PATTERN (insn
)))
1571 rtx temp
= gen_reg_rtx (GET_MODE (call_dest
));
1573 emit_insn_before (gen_move_insn (temp
, call_dest
), insn
);
1575 PATTERN (insn
) = replace_rtx (PATTERN (insn
),
1579 if (GET_CODE (insn
) == CALL_INSN
1580 && GET_CODE (PATTERN (insn
)) == SET
)
1581 call_dest
= SET_DEST (PATTERN (insn
));
1582 else if (GET_CODE (insn
) == CALL_INSN
1583 && GET_CODE (PATTERN (insn
)) == PARALLEL
1584 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1585 call_dest
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 0));
1590 /* See if we have to do anything to INSN now that VAR is in
1591 memory. If it needs to be loaded into a pseudo, use a single
1592 pseudo for the entire insn in case there is a MATCH_DUP
1593 between two operands. We pass a pointer to the head of
1594 a list of struct fixup_replacements. If fixup_var_refs_1
1595 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1596 it will record them in this list.
1598 If it allocated a pseudo for any replacement, we copy into
1601 fixup_var_refs_1 (var
, promoted_mode
, &PATTERN (insn
), insn
,
1604 /* If this is last_parm_insn, and any instructions were output
1605 after it to fix it up, then we must set last_parm_insn to
1606 the last such instruction emitted. */
1607 if (insn
== last_parm_insn
)
1608 last_parm_insn
= PREV_INSN (next_insn
);
1610 while (replacements
)
1612 if (GET_CODE (replacements
->new) == REG
)
1617 /* OLD might be a (subreg (mem)). */
1618 if (GET_CODE (replacements
->old
) == SUBREG
)
1620 = fixup_memory_subreg (replacements
->old
, insn
, 0);
1623 = fixup_stack_1 (replacements
->old
, insn
);
1625 insert_before
= insn
;
1627 /* If we are changing the mode, do a conversion.
1628 This might be wasteful, but combine.c will
1629 eliminate much of the waste. */
1631 if (GET_MODE (replacements
->new)
1632 != GET_MODE (replacements
->old
))
1635 convert_move (replacements
->new,
1636 replacements
->old
, unsignedp
);
1637 seq
= gen_sequence ();
1641 seq
= gen_move_insn (replacements
->new,
1644 emit_insn_before (seq
, insert_before
);
1647 replacements
= replacements
->next
;
1651 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1652 But don't touch other insns referred to by reg-notes;
1653 we will get them elsewhere. */
1654 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1655 if (GET_CODE (note
) != INSN_LIST
)
1657 = walk_fixup_memory_subreg (XEXP (note
, 0), insn
, 1);
1663 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1664 See if the rtx expression at *LOC in INSN needs to be changed.
1666 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1667 contain a list of original rtx's and replacements. If we find that we need
1668 to modify this insn by replacing a memory reference with a pseudo or by
1669 making a new MEM to implement a SUBREG, we consult that list to see if
1670 we have already chosen a replacement. If none has already been allocated,
1671 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1672 or the SUBREG, as appropriate, to the pseudo. */
1675 fixup_var_refs_1 (var
, promoted_mode
, loc
, insn
, replacements
)
1677 enum machine_mode promoted_mode
;
1680 struct fixup_replacement
**replacements
;
1683 register rtx x
= *loc
;
1684 RTX_CODE code
= GET_CODE (x
);
1686 register rtx tem
, tem1
;
1687 struct fixup_replacement
*replacement
;
1694 /* If we already have a replacement, use it. Otherwise,
1695 try to fix up this address in case it is invalid. */
1697 replacement
= find_fixup_replacement (replacements
, var
);
1698 if (replacement
->new)
1700 *loc
= replacement
->new;
1704 *loc
= replacement
->new = x
= fixup_stack_1 (x
, insn
);
1706 /* Unless we are forcing memory to register or we changed the mode,
1707 we can leave things the way they are if the insn is valid. */
1709 INSN_CODE (insn
) = -1;
1710 if (! flag_force_mem
&& GET_MODE (x
) == promoted_mode
1711 && recog_memoized (insn
) >= 0)
1714 *loc
= replacement
->new = gen_reg_rtx (promoted_mode
);
1718 /* If X contains VAR, we need to unshare it here so that we update
1719 each occurrence separately. But all identical MEMs in one insn
1720 must be replaced with the same rtx because of the possibility of
1723 if (reg_mentioned_p (var
, x
))
1725 replacement
= find_fixup_replacement (replacements
, x
);
1726 if (replacement
->new == 0)
1727 replacement
->new = copy_most_rtx (x
, var
);
1729 *loc
= x
= replacement
->new;
1745 /* Note that in some cases those types of expressions are altered
1746 by optimize_bit_field, and do not survive to get here. */
1747 if (XEXP (x
, 0) == var
1748 || (GET_CODE (XEXP (x
, 0)) == SUBREG
1749 && SUBREG_REG (XEXP (x
, 0)) == var
))
1751 /* Get TEM as a valid MEM in the mode presently in the insn.
1753 We don't worry about the possibility of MATCH_DUP here; it
1754 is highly unlikely and would be tricky to handle. */
1757 if (GET_CODE (tem
) == SUBREG
)
1758 tem
= fixup_memory_subreg (tem
, insn
, 1);
1759 tem
= fixup_stack_1 (tem
, insn
);
1761 /* Unless we want to load from memory, get TEM into the proper mode
1762 for an extract from memory. This can only be done if the
1763 extract is at a constant position and length. */
1765 if (! flag_force_mem
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
1766 && GET_CODE (XEXP (x
, 2)) == CONST_INT
1767 && ! mode_dependent_address_p (XEXP (tem
, 0))
1768 && ! MEM_VOLATILE_P (tem
))
1770 enum machine_mode wanted_mode
= VOIDmode
;
1771 enum machine_mode is_mode
= GET_MODE (tem
);
1772 int width
= INTVAL (XEXP (x
, 1));
1773 int pos
= INTVAL (XEXP (x
, 2));
1776 if (GET_CODE (x
) == ZERO_EXTRACT
)
1777 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][1];
1780 if (GET_CODE (x
) == SIGN_EXTRACT
)
1781 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][1];
1783 /* If we have a narrower mode, we can do something. */
1784 if (wanted_mode
!= VOIDmode
1785 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1787 int offset
= pos
/ BITS_PER_UNIT
;
1788 rtx old_pos
= XEXP (x
, 2);
1791 /* If the bytes and bits are counted differently, we
1792 must adjust the offset. */
1793 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
1794 offset
= (GET_MODE_SIZE (is_mode
)
1795 - GET_MODE_SIZE (wanted_mode
) - offset
);
1797 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1799 newmem
= gen_rtx (MEM
, wanted_mode
,
1800 plus_constant (XEXP (tem
, 0), offset
));
1801 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
1802 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (tem
);
1803 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (tem
);
1805 /* Make the change and see if the insn remains valid. */
1806 INSN_CODE (insn
) = -1;
1807 XEXP (x
, 0) = newmem
;
1808 XEXP (x
, 2) = GEN_INT (pos
);
1810 if (recog_memoized (insn
) >= 0)
1813 /* Otherwise, restore old position. XEXP (x, 0) will be
1815 XEXP (x
, 2) = old_pos
;
1819 /* If we get here, the bitfield extract insn can't accept a memory
1820 reference. Copy the input into a register. */
1822 tem1
= gen_reg_rtx (GET_MODE (tem
));
1823 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
1830 if (SUBREG_REG (x
) == var
)
1832 /* If this is a special SUBREG made because VAR was promoted
1833 from a wider mode, replace it with VAR and call ourself
1834 recursively, this time saying that the object previously
1835 had its current mode (by virtue of the SUBREG). */
1837 if (SUBREG_PROMOTED_VAR_P (x
))
1840 fixup_var_refs_1 (var
, GET_MODE (var
), loc
, insn
, replacements
);
1844 /* If this SUBREG makes VAR wider, it has become a paradoxical
1845 SUBREG with VAR in memory, but these aren't allowed at this
1846 stage of the compilation. So load VAR into a pseudo and take
1847 a SUBREG of that pseudo. */
1848 if (GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (var
)))
1850 replacement
= find_fixup_replacement (replacements
, var
);
1851 if (replacement
->new == 0)
1852 replacement
->new = gen_reg_rtx (GET_MODE (var
));
1853 SUBREG_REG (x
) = replacement
->new;
1857 /* See if we have already found a replacement for this SUBREG.
1858 If so, use it. Otherwise, make a MEM and see if the insn
1859 is recognized. If not, or if we should force MEM into a register,
1860 make a pseudo for this SUBREG. */
1861 replacement
= find_fixup_replacement (replacements
, x
);
1862 if (replacement
->new)
1864 *loc
= replacement
->new;
1868 replacement
->new = *loc
= fixup_memory_subreg (x
, insn
, 0);
1870 INSN_CODE (insn
) = -1;
1871 if (! flag_force_mem
&& recog_memoized (insn
) >= 0)
1874 *loc
= replacement
->new = gen_reg_rtx (GET_MODE (x
));
1880 /* First do special simplification of bit-field references. */
1881 if (GET_CODE (SET_DEST (x
)) == SIGN_EXTRACT
1882 || GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
1883 optimize_bit_field (x
, insn
, 0);
1884 if (GET_CODE (SET_SRC (x
)) == SIGN_EXTRACT
1885 || GET_CODE (SET_SRC (x
)) == ZERO_EXTRACT
)
1886 optimize_bit_field (x
, insn
, NULL_PTR
);
1888 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1889 insn into a pseudo and store the low part of the pseudo into VAR. */
1890 if (GET_CODE (SET_DEST (x
)) == SUBREG
1891 && SUBREG_REG (SET_DEST (x
)) == var
1892 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
1893 > GET_MODE_SIZE (GET_MODE (var
))))
1895 SET_DEST (x
) = tem
= gen_reg_rtx (GET_MODE (SET_DEST (x
)));
1896 emit_insn_after (gen_move_insn (var
, gen_lowpart (GET_MODE (var
),
1903 rtx dest
= SET_DEST (x
);
1904 rtx src
= SET_SRC (x
);
1905 rtx outerdest
= dest
;
1907 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
1908 || GET_CODE (dest
) == SIGN_EXTRACT
1909 || GET_CODE (dest
) == ZERO_EXTRACT
)
1910 dest
= XEXP (dest
, 0);
1912 if (GET_CODE (src
) == SUBREG
)
1913 src
= XEXP (src
, 0);
1915 /* If VAR does not appear at the top level of the SET
1916 just scan the lower levels of the tree. */
1918 if (src
!= var
&& dest
!= var
)
1921 /* We will need to rerecognize this insn. */
1922 INSN_CODE (insn
) = -1;
1925 if (GET_CODE (outerdest
) == ZERO_EXTRACT
&& dest
== var
)
1927 /* Since this case will return, ensure we fixup all the
1929 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 1),
1930 insn
, replacements
);
1931 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 2),
1932 insn
, replacements
);
1933 fixup_var_refs_1 (var
, promoted_mode
, &SET_SRC (x
),
1934 insn
, replacements
);
1936 tem
= XEXP (outerdest
, 0);
1938 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1939 that may appear inside a ZERO_EXTRACT.
1940 This was legitimate when the MEM was a REG. */
1941 if (GET_CODE (tem
) == SUBREG
1942 && SUBREG_REG (tem
) == var
)
1943 tem
= fixup_memory_subreg (tem
, insn
, 1);
1945 tem
= fixup_stack_1 (tem
, insn
);
1947 if (GET_CODE (XEXP (outerdest
, 1)) == CONST_INT
1948 && GET_CODE (XEXP (outerdest
, 2)) == CONST_INT
1949 && ! mode_dependent_address_p (XEXP (tem
, 0))
1950 && ! MEM_VOLATILE_P (tem
))
1952 enum machine_mode wanted_mode
1953 = insn_operand_mode
[(int) CODE_FOR_insv
][0];
1954 enum machine_mode is_mode
= GET_MODE (tem
);
1955 int width
= INTVAL (XEXP (outerdest
, 1));
1956 int pos
= INTVAL (XEXP (outerdest
, 2));
1958 /* If we have a narrower mode, we can do something. */
1959 if (GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1961 int offset
= pos
/ BITS_PER_UNIT
;
1962 rtx old_pos
= XEXP (outerdest
, 2);
1965 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
1966 offset
= (GET_MODE_SIZE (is_mode
)
1967 - GET_MODE_SIZE (wanted_mode
) - offset
);
1969 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1971 newmem
= gen_rtx (MEM
, wanted_mode
,
1972 plus_constant (XEXP (tem
, 0), offset
));
1973 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
1974 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (tem
);
1975 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (tem
);
1977 /* Make the change and see if the insn remains valid. */
1978 INSN_CODE (insn
) = -1;
1979 XEXP (outerdest
, 0) = newmem
;
1980 XEXP (outerdest
, 2) = GEN_INT (pos
);
1982 if (recog_memoized (insn
) >= 0)
1985 /* Otherwise, restore old position. XEXP (x, 0) will be
1987 XEXP (outerdest
, 2) = old_pos
;
1991 /* If we get here, the bit-field store doesn't allow memory
1992 or isn't located at a constant position. Load the value into
1993 a register, do the store, and put it back into memory. */
1995 tem1
= gen_reg_rtx (GET_MODE (tem
));
1996 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
1997 emit_insn_after (gen_move_insn (tem
, tem1
), insn
);
1998 XEXP (outerdest
, 0) = tem1
;
2003 /* STRICT_LOW_PART is a no-op on memory references
2004 and it can cause combinations to be unrecognizable,
2007 if (dest
== var
&& GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2008 SET_DEST (x
) = XEXP (SET_DEST (x
), 0);
2010 /* A valid insn to copy VAR into or out of a register
2011 must be left alone, to avoid an infinite loop here.
2012 If the reference to VAR is by a subreg, fix that up,
2013 since SUBREG is not valid for a memref.
2014 Also fix up the address of the stack slot.
2016 Note that we must not try to recognize the insn until
2017 after we know that we have valid addresses and no
2018 (subreg (mem ...) ...) constructs, since these interfere
2019 with determining the validity of the insn. */
2021 if ((SET_SRC (x
) == var
2022 || (GET_CODE (SET_SRC (x
)) == SUBREG
2023 && SUBREG_REG (SET_SRC (x
)) == var
))
2024 && (GET_CODE (SET_DEST (x
)) == REG
2025 || (GET_CODE (SET_DEST (x
)) == SUBREG
2026 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
))
2027 && GET_MODE (var
) == promoted_mode
2028 && x
== single_set (insn
))
2032 replacement
= find_fixup_replacement (replacements
, SET_SRC (x
));
2033 if (replacement
->new)
2034 SET_SRC (x
) = replacement
->new;
2035 else if (GET_CODE (SET_SRC (x
)) == SUBREG
)
2036 SET_SRC (x
) = replacement
->new
2037 = fixup_memory_subreg (SET_SRC (x
), insn
, 0);
2039 SET_SRC (x
) = replacement
->new
2040 = fixup_stack_1 (SET_SRC (x
), insn
);
2042 if (recog_memoized (insn
) >= 0)
2045 /* INSN is not valid, but we know that we want to
2046 copy SET_SRC (x) to SET_DEST (x) in some way. So
2047 we generate the move and see whether it requires more
2048 than one insn. If it does, we emit those insns and
2049 delete INSN. Otherwise, we an just replace the pattern
2050 of INSN; we have already verified above that INSN has
2051 no other function that to do X. */
2053 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2054 if (GET_CODE (pat
) == SEQUENCE
)
2056 emit_insn_after (pat
, insn
);
2057 PUT_CODE (insn
, NOTE
);
2058 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2059 NOTE_SOURCE_FILE (insn
) = 0;
2062 PATTERN (insn
) = pat
;
2067 if ((SET_DEST (x
) == var
2068 || (GET_CODE (SET_DEST (x
)) == SUBREG
2069 && SUBREG_REG (SET_DEST (x
)) == var
))
2070 && (GET_CODE (SET_SRC (x
)) == REG
2071 || (GET_CODE (SET_SRC (x
)) == SUBREG
2072 && GET_CODE (SUBREG_REG (SET_SRC (x
))) == REG
))
2073 && GET_MODE (var
) == promoted_mode
2074 && x
== single_set (insn
))
2078 if (GET_CODE (SET_DEST (x
)) == SUBREG
)
2079 SET_DEST (x
) = fixup_memory_subreg (SET_DEST (x
), insn
, 0);
2081 SET_DEST (x
) = fixup_stack_1 (SET_DEST (x
), insn
);
2083 if (recog_memoized (insn
) >= 0)
2086 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2087 if (GET_CODE (pat
) == SEQUENCE
)
2089 emit_insn_after (pat
, insn
);
2090 PUT_CODE (insn
, NOTE
);
2091 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2092 NOTE_SOURCE_FILE (insn
) = 0;
2095 PATTERN (insn
) = pat
;
2100 /* Otherwise, storing into VAR must be handled specially
2101 by storing into a temporary and copying that into VAR
2102 with a new insn after this one. Note that this case
2103 will be used when storing into a promoted scalar since
2104 the insn will now have different modes on the input
2105 and output and hence will be invalid (except for the case
2106 of setting it to a constant, which does not need any
2107 change if it is valid). We generate extra code in that case,
2108 but combine.c will eliminate it. */
2113 rtx fixeddest
= SET_DEST (x
);
2115 /* STRICT_LOW_PART can be discarded, around a MEM. */
2116 if (GET_CODE (fixeddest
) == STRICT_LOW_PART
)
2117 fixeddest
= XEXP (fixeddest
, 0);
2118 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2119 if (GET_CODE (fixeddest
) == SUBREG
)
2121 fixeddest
= fixup_memory_subreg (fixeddest
, insn
, 0);
2122 promoted_mode
= GET_MODE (fixeddest
);
2125 fixeddest
= fixup_stack_1 (fixeddest
, insn
);
2127 temp
= gen_reg_rtx (promoted_mode
);
2129 emit_insn_after (gen_move_insn (fixeddest
,
2130 gen_lowpart (GET_MODE (fixeddest
),
2134 SET_DEST (x
) = temp
;
2139 /* Nothing special about this RTX; fix its operands. */
2141 fmt
= GET_RTX_FORMAT (code
);
2142 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2145 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (x
, i
), insn
, replacements
);
2149 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2150 fixup_var_refs_1 (var
, promoted_mode
, &XVECEXP (x
, i
, j
),
2151 insn
, replacements
);
2156 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2157 return an rtx (MEM:m1 newaddr) which is equivalent.
2158 If any insns must be emitted to compute NEWADDR, put them before INSN.
2160 UNCRITICAL nonzero means accept paradoxical subregs.
2161 This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES. */
2164 fixup_memory_subreg (x
, insn
, uncritical
)
2169 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
2170 rtx addr
= XEXP (SUBREG_REG (x
), 0);
2171 enum machine_mode mode
= GET_MODE (x
);
2174 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2175 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))
2179 if (BYTES_BIG_ENDIAN
)
2180 offset
+= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2181 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
2182 addr
= plus_constant (addr
, offset
);
2183 if (!flag_force_addr
&& memory_address_p (mode
, addr
))
2184 /* Shortcut if no insns need be emitted. */
2185 return change_address (SUBREG_REG (x
), mode
, addr
);
2187 result
= change_address (SUBREG_REG (x
), mode
, addr
);
2188 emit_insn_before (gen_sequence (), insn
);
2193 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2194 Replace subexpressions of X in place.
2195 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2196 Otherwise return X, with its contents possibly altered.
2198 If any insns must be emitted to compute NEWADDR, put them before INSN.
2200 UNCRITICAL is as in fixup_memory_subreg. */
2203 walk_fixup_memory_subreg (x
, insn
, uncritical
)
2208 register enum rtx_code code
;
2215 code
= GET_CODE (x
);
2217 if (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == MEM
)
2218 return fixup_memory_subreg (x
, insn
, uncritical
);
2220 /* Nothing special about this RTX; fix its operands. */
2222 fmt
= GET_RTX_FORMAT (code
);
2223 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2226 XEXP (x
, i
) = walk_fixup_memory_subreg (XEXP (x
, i
), insn
, uncritical
);
2230 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2232 = walk_fixup_memory_subreg (XVECEXP (x
, i
, j
), insn
, uncritical
);
2238 /* For each memory ref within X, if it refers to a stack slot
2239 with an out of range displacement, put the address in a temp register
2240 (emitting new insns before INSN to load these registers)
2241 and alter the memory ref to use that register.
2242 Replace each such MEM rtx with a copy, to avoid clobberage. */
2245 fixup_stack_1 (x
, insn
)
2250 register RTX_CODE code
= GET_CODE (x
);
2255 register rtx ad
= XEXP (x
, 0);
2256 /* If we have address of a stack slot but it's not valid
2257 (displacement is too large), compute the sum in a register. */
2258 if (GET_CODE (ad
) == PLUS
2259 && GET_CODE (XEXP (ad
, 0)) == REG
2260 && ((REGNO (XEXP (ad
, 0)) >= FIRST_VIRTUAL_REGISTER
2261 && REGNO (XEXP (ad
, 0)) <= LAST_VIRTUAL_REGISTER
)
2262 || XEXP (ad
, 0) == current_function_internal_arg_pointer
)
2263 && GET_CODE (XEXP (ad
, 1)) == CONST_INT
)
2266 if (memory_address_p (GET_MODE (x
), ad
))
2270 temp
= copy_to_reg (ad
);
2271 seq
= gen_sequence ();
2273 emit_insn_before (seq
, insn
);
2274 return change_address (x
, VOIDmode
, temp
);
2279 fmt
= GET_RTX_FORMAT (code
);
2280 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2283 XEXP (x
, i
) = fixup_stack_1 (XEXP (x
, i
), insn
);
2287 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2288 XVECEXP (x
, i
, j
) = fixup_stack_1 (XVECEXP (x
, i
, j
), insn
);
2294 /* Optimization: a bit-field instruction whose field
2295 happens to be a byte or halfword in memory
2296 can be changed to a move instruction.
2298 We call here when INSN is an insn to examine or store into a bit-field.
2299 BODY is the SET-rtx to be altered.
2301 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2302 (Currently this is called only from function.c, and EQUIV_MEM
2306 optimize_bit_field (body
, insn
, equiv_mem
)
2311 register rtx bitfield
;
2314 enum machine_mode mode
;
2316 if (GET_CODE (SET_DEST (body
)) == SIGN_EXTRACT
2317 || GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
)
2318 bitfield
= SET_DEST (body
), destflag
= 1;
2320 bitfield
= SET_SRC (body
), destflag
= 0;
2322 /* First check that the field being stored has constant size and position
2323 and is in fact a byte or halfword suitably aligned. */
2325 if (GET_CODE (XEXP (bitfield
, 1)) == CONST_INT
2326 && GET_CODE (XEXP (bitfield
, 2)) == CONST_INT
2327 && ((mode
= mode_for_size (INTVAL (XEXP (bitfield
, 1)), MODE_INT
, 1))
2329 && INTVAL (XEXP (bitfield
, 2)) % INTVAL (XEXP (bitfield
, 1)) == 0)
2331 register rtx memref
= 0;
2333 /* Now check that the containing word is memory, not a register,
2334 and that it is safe to change the machine mode. */
2336 if (GET_CODE (XEXP (bitfield
, 0)) == MEM
)
2337 memref
= XEXP (bitfield
, 0);
2338 else if (GET_CODE (XEXP (bitfield
, 0)) == REG
2340 memref
= equiv_mem
[REGNO (XEXP (bitfield
, 0))];
2341 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2342 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == MEM
)
2343 memref
= SUBREG_REG (XEXP (bitfield
, 0));
2344 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2346 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == REG
)
2347 memref
= equiv_mem
[REGNO (SUBREG_REG (XEXP (bitfield
, 0)))];
2350 && ! mode_dependent_address_p (XEXP (memref
, 0))
2351 && ! MEM_VOLATILE_P (memref
))
2353 /* Now adjust the address, first for any subreg'ing
2354 that we are now getting rid of,
2355 and then for which byte of the word is wanted. */
2357 register int offset
= INTVAL (XEXP (bitfield
, 2));
2360 /* Adjust OFFSET to count bits from low-address byte. */
2361 if (BITS_BIG_ENDIAN
!= BYTES_BIG_ENDIAN
)
2362 offset
= (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield
, 0)))
2363 - offset
- INTVAL (XEXP (bitfield
, 1)));
2365 /* Adjust OFFSET to count bytes from low-address byte. */
2366 offset
/= BITS_PER_UNIT
;
2367 if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
)
2369 offset
+= SUBREG_WORD (XEXP (bitfield
, 0)) * UNITS_PER_WORD
;
2370 if (BYTES_BIG_ENDIAN
)
2371 offset
-= (MIN (UNITS_PER_WORD
,
2372 GET_MODE_SIZE (GET_MODE (XEXP (bitfield
, 0))))
2373 - MIN (UNITS_PER_WORD
,
2374 GET_MODE_SIZE (GET_MODE (memref
))));
2378 memref
= change_address (memref
, mode
,
2379 plus_constant (XEXP (memref
, 0), offset
));
2380 insns
= get_insns ();
2382 emit_insns_before (insns
, insn
);
2384 /* Store this memory reference where
2385 we found the bit field reference. */
2389 validate_change (insn
, &SET_DEST (body
), memref
, 1);
2390 if (! CONSTANT_ADDRESS_P (SET_SRC (body
)))
2392 rtx src
= SET_SRC (body
);
2393 while (GET_CODE (src
) == SUBREG
2394 && SUBREG_WORD (src
) == 0)
2395 src
= SUBREG_REG (src
);
2396 if (GET_MODE (src
) != GET_MODE (memref
))
2397 src
= gen_lowpart (GET_MODE (memref
), SET_SRC (body
));
2398 validate_change (insn
, &SET_SRC (body
), src
, 1);
2400 else if (GET_MODE (SET_SRC (body
)) != VOIDmode
2401 && GET_MODE (SET_SRC (body
)) != GET_MODE (memref
))
2402 /* This shouldn't happen because anything that didn't have
2403 one of these modes should have got converted explicitly
2404 and then referenced through a subreg.
2405 This is so because the original bit-field was
2406 handled by agg_mode and so its tree structure had
2407 the same mode that memref now has. */
2412 rtx dest
= SET_DEST (body
);
2414 while (GET_CODE (dest
) == SUBREG
2415 && SUBREG_WORD (dest
) == 0
2416 && (GET_MODE_CLASS (GET_MODE (dest
))
2417 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest
)))))
2418 dest
= SUBREG_REG (dest
);
2420 validate_change (insn
, &SET_DEST (body
), dest
, 1);
2422 if (GET_MODE (dest
) == GET_MODE (memref
))
2423 validate_change (insn
, &SET_SRC (body
), memref
, 1);
2426 /* Convert the mem ref to the destination mode. */
2427 rtx newreg
= gen_reg_rtx (GET_MODE (dest
));
2430 convert_move (newreg
, memref
,
2431 GET_CODE (SET_SRC (body
)) == ZERO_EXTRACT
);
2435 validate_change (insn
, &SET_SRC (body
), newreg
, 1);
2439 /* See if we can convert this extraction or insertion into
2440 a simple move insn. We might not be able to do so if this
2441 was, for example, part of a PARALLEL.
2443 If we succeed, write out any needed conversions. If we fail,
2444 it is hard to guess why we failed, so don't do anything
2445 special; just let the optimization be suppressed. */
2447 if (apply_change_group () && seq
)
2448 emit_insns_before (seq
, insn
);
2453 /* These routines are responsible for converting virtual register references
2454 to the actual hard register references once RTL generation is complete.
2456 The following four variables are used for communication between the
2457 routines. They contain the offsets of the virtual registers from their
2458 respective hard registers. */
2460 static int in_arg_offset
;
2461 static int var_offset
;
2462 static int dynamic_offset
;
2463 static int out_arg_offset
;
2465 /* In most machines, the stack pointer register is equivalent to the bottom
2468 #ifndef STACK_POINTER_OFFSET
2469 #define STACK_POINTER_OFFSET 0
2472 /* If not defined, pick an appropriate default for the offset of dynamically
2473 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2474 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2476 #ifndef STACK_DYNAMIC_OFFSET
2478 #ifdef ACCUMULATE_OUTGOING_ARGS
2479 /* The bottom of the stack points to the actual arguments. If
2480 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2481 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
2482 stack space for register parameters is not pushed by the caller, but
2483 rather part of the fixed stack areas and hence not included in
2484 `current_function_outgoing_args_size'. Nevertheless, we must allow
2485 for it when allocating stack dynamic objects. */
2487 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2488 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2489 (current_function_outgoing_args_size \
2490 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2493 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2494 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2498 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2502 /* Pass through the INSNS of function FNDECL and convert virtual register
2503 references to hard register references. */
2506 instantiate_virtual_regs (fndecl
, insns
)
2512 /* Compute the offsets to use for this function. */
2513 in_arg_offset
= FIRST_PARM_OFFSET (fndecl
);
2514 var_offset
= STARTING_FRAME_OFFSET
;
2515 dynamic_offset
= STACK_DYNAMIC_OFFSET (fndecl
);
2516 out_arg_offset
= STACK_POINTER_OFFSET
;
2518 /* Scan all variables and parameters of this function. For each that is
2519 in memory, instantiate all virtual registers if the result is a valid
2520 address. If not, we do it later. That will handle most uses of virtual
2521 regs on many machines. */
2522 instantiate_decls (fndecl
, 1);
2524 /* Initialize recognition, indicating that volatile is OK. */
2527 /* Scan through all the insns, instantiating every virtual register still
2529 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
2530 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
2531 || GET_CODE (insn
) == CALL_INSN
)
2533 instantiate_virtual_regs_1 (&PATTERN (insn
), insn
, 1);
2534 instantiate_virtual_regs_1 (®_NOTES (insn
), NULL_RTX
, 0);
2537 /* Now instantiate the remaining register equivalences for debugging info.
2538 These will not be valid addresses. */
2539 instantiate_decls (fndecl
, 0);
2541 /* Indicate that, from now on, assign_stack_local should use
2542 frame_pointer_rtx. */
2543 virtuals_instantiated
= 1;
2546 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2547 all virtual registers in their DECL_RTL's.
2549 If VALID_ONLY, do this only if the resulting address is still valid.
2550 Otherwise, always do it. */
2553 instantiate_decls (fndecl
, valid_only
)
2559 if (DECL_SAVED_INSNS (fndecl
))
2560 /* When compiling an inline function, the obstack used for
2561 rtl allocation is the maybepermanent_obstack. Calling
2562 `resume_temporary_allocation' switches us back to that
2563 obstack while we process this function's parameters. */
2564 resume_temporary_allocation ();
2566 /* Process all parameters of the function. */
2567 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= TREE_CHAIN (decl
))
2569 instantiate_decl (DECL_RTL (decl
), int_size_in_bytes (TREE_TYPE (decl
)),
2571 instantiate_decl (DECL_INCOMING_RTL (decl
),
2572 int_size_in_bytes (TREE_TYPE (decl
)), valid_only
);
2575 /* Now process all variables defined in the function or its subblocks. */
2576 instantiate_decls_1 (DECL_INITIAL (fndecl
), valid_only
);
2578 if (DECL_INLINE (fndecl
) || DECL_DEFER_OUTPUT (fndecl
))
2580 /* Save all rtl allocated for this function by raising the
2581 high-water mark on the maybepermanent_obstack. */
2583 /* All further rtl allocation is now done in the current_obstack. */
2584 rtl_in_current_obstack ();
2588 /* Subroutine of instantiate_decls: Process all decls in the given
2589 BLOCK node and all its subblocks. */
2592 instantiate_decls_1 (let
, valid_only
)
2598 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
2599 instantiate_decl (DECL_RTL (t
), int_size_in_bytes (TREE_TYPE (t
)),
2602 /* Process all subblocks. */
2603 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= TREE_CHAIN (t
))
2604 instantiate_decls_1 (t
, valid_only
);
2607 /* Subroutine of the preceding procedures: Given RTL representing a
2608 decl and the size of the object, do any instantiation required.
2610 If VALID_ONLY is non-zero, it means that the RTL should only be
2611 changed if the new address is valid. */
2614 instantiate_decl (x
, size
, valid_only
)
2619 enum machine_mode mode
;
2622 /* If this is not a MEM, no need to do anything. Similarly if the
2623 address is a constant or a register that is not a virtual register. */
2625 if (x
== 0 || GET_CODE (x
) != MEM
)
2629 if (CONSTANT_P (addr
)
2630 || (GET_CODE (addr
) == REG
2631 && (REGNO (addr
) < FIRST_VIRTUAL_REGISTER
2632 || REGNO (addr
) > LAST_VIRTUAL_REGISTER
)))
2635 /* If we should only do this if the address is valid, copy the address.
2636 We need to do this so we can undo any changes that might make the
2637 address invalid. This copy is unfortunate, but probably can't be
2641 addr
= copy_rtx (addr
);
2643 instantiate_virtual_regs_1 (&addr
, NULL_RTX
, 0);
2648 /* Now verify that the resulting address is valid for every integer or
2649 floating-point mode up to and including SIZE bytes long. We do this
2650 since the object might be accessed in any mode and frame addresses
2653 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2654 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
2655 mode
= GET_MODE_WIDER_MODE (mode
))
2656 if (! memory_address_p (mode
, addr
))
2659 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
2660 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
2661 mode
= GET_MODE_WIDER_MODE (mode
))
2662 if (! memory_address_p (mode
, addr
))
2665 /* Otherwise, put back the address, now that we have updated it and we
2666 know it is valid. */
2671 /* Given a pointer to a piece of rtx and an optional pointer to the
2672 containing object, instantiate any virtual registers present in it.
2674 If EXTRA_INSNS, we always do the replacement and generate
2675 any extra insns before OBJECT. If it zero, we do nothing if replacement
2678 Return 1 if we either had nothing to do or if we were able to do the
2679 needed replacement. Return 0 otherwise; we only return zero if
2680 EXTRA_INSNS is zero.
2682 We first try some simple transformations to avoid the creation of extra
2686 instantiate_virtual_regs_1 (loc
, object
, extra_insns
)
2700 /* Re-start here to avoid recursion in common cases. */
2707 code
= GET_CODE (x
);
2709 /* Check for some special cases. */
2726 /* We are allowed to set the virtual registers. This means that
2727 that the actual register should receive the source minus the
2728 appropriate offset. This is used, for example, in the handling
2729 of non-local gotos. */
2730 if (SET_DEST (x
) == virtual_incoming_args_rtx
)
2731 new = arg_pointer_rtx
, offset
= - in_arg_offset
;
2732 else if (SET_DEST (x
) == virtual_stack_vars_rtx
)
2733 new = frame_pointer_rtx
, offset
= - var_offset
;
2734 else if (SET_DEST (x
) == virtual_stack_dynamic_rtx
)
2735 new = stack_pointer_rtx
, offset
= - dynamic_offset
;
2736 else if (SET_DEST (x
) == virtual_outgoing_args_rtx
)
2737 new = stack_pointer_rtx
, offset
= - out_arg_offset
;
2741 /* The only valid sources here are PLUS or REG. Just do
2742 the simplest possible thing to handle them. */
2743 if (GET_CODE (SET_SRC (x
)) != REG
2744 && GET_CODE (SET_SRC (x
)) != PLUS
)
2748 if (GET_CODE (SET_SRC (x
)) != REG
)
2749 temp
= force_operand (SET_SRC (x
), NULL_RTX
);
2752 temp
= force_operand (plus_constant (temp
, offset
), NULL_RTX
);
2756 emit_insns_before (seq
, object
);
2759 if (!validate_change (object
, &SET_SRC (x
), temp
, 0)
2766 instantiate_virtual_regs_1 (&SET_DEST (x
), object
, extra_insns
);
2771 /* Handle special case of virtual register plus constant. */
2772 if (CONSTANT_P (XEXP (x
, 1)))
2774 rtx old
, new_offset
;
2776 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2777 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
2779 rtx inner
= XEXP (XEXP (x
, 0), 0);
2781 if (inner
== virtual_incoming_args_rtx
)
2782 new = arg_pointer_rtx
, offset
= in_arg_offset
;
2783 else if (inner
== virtual_stack_vars_rtx
)
2784 new = frame_pointer_rtx
, offset
= var_offset
;
2785 else if (inner
== virtual_stack_dynamic_rtx
)
2786 new = stack_pointer_rtx
, offset
= dynamic_offset
;
2787 else if (inner
== virtual_outgoing_args_rtx
)
2788 new = stack_pointer_rtx
, offset
= out_arg_offset
;
2795 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 1), object
,
2797 new = gen_rtx (PLUS
, Pmode
, new, XEXP (XEXP (x
, 0), 1));
2800 else if (XEXP (x
, 0) == virtual_incoming_args_rtx
)
2801 new = arg_pointer_rtx
, offset
= in_arg_offset
;
2802 else if (XEXP (x
, 0) == virtual_stack_vars_rtx
)
2803 new = frame_pointer_rtx
, offset
= var_offset
;
2804 else if (XEXP (x
, 0) == virtual_stack_dynamic_rtx
)
2805 new = stack_pointer_rtx
, offset
= dynamic_offset
;
2806 else if (XEXP (x
, 0) == virtual_outgoing_args_rtx
)
2807 new = stack_pointer_rtx
, offset
= out_arg_offset
;
2810 /* We know the second operand is a constant. Unless the
2811 first operand is a REG (which has been already checked),
2812 it needs to be checked. */
2813 if (GET_CODE (XEXP (x
, 0)) != REG
)
2821 new_offset
= plus_constant (XEXP (x
, 1), offset
);
2823 /* If the new constant is zero, try to replace the sum with just
2825 if (new_offset
== const0_rtx
2826 && validate_change (object
, loc
, new, 0))
2829 /* Next try to replace the register and new offset.
2830 There are two changes to validate here and we can't assume that
2831 in the case of old offset equals new just changing the register
2832 will yield a valid insn. In the interests of a little efficiency,
2833 however, we only call validate change once (we don't queue up the
2834 changes and then call apply_change_group). */
2838 ? ! validate_change (object
, &XEXP (x
, 0), new, 0)
2839 : (XEXP (x
, 0) = new,
2840 ! validate_change (object
, &XEXP (x
, 1), new_offset
, 0)))
2848 /* Otherwise copy the new constant into a register and replace
2849 constant with that register. */
2850 temp
= gen_reg_rtx (Pmode
);
2852 if (validate_change (object
, &XEXP (x
, 1), temp
, 0))
2853 emit_insn_before (gen_move_insn (temp
, new_offset
), object
);
2856 /* If that didn't work, replace this expression with a
2857 register containing the sum. */
2860 new = gen_rtx (PLUS
, Pmode
, new, new_offset
);
2863 temp
= force_operand (new, NULL_RTX
);
2867 emit_insns_before (seq
, object
);
2868 if (! validate_change (object
, loc
, temp
, 0)
2869 && ! validate_replace_rtx (x
, temp
, object
))
2877 /* Fall through to generic two-operand expression case. */
2883 case DIV
: case UDIV
:
2884 case MOD
: case UMOD
:
2885 case AND
: case IOR
: case XOR
:
2886 case ROTATERT
: case ROTATE
:
2887 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
2889 case GE
: case GT
: case GEU
: case GTU
:
2890 case LE
: case LT
: case LEU
: case LTU
:
2891 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
2892 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
2897 /* Most cases of MEM that convert to valid addresses have already been
2898 handled by our scan of regno_reg_rtx. The only special handling we
2899 need here is to make a copy of the rtx to ensure it isn't being
2900 shared if we have to change it to a pseudo.
2902 If the rtx is a simple reference to an address via a virtual register,
2903 it can potentially be shared. In such cases, first try to make it
2904 a valid address, which can also be shared. Otherwise, copy it and
2907 First check for common cases that need no processing. These are
2908 usually due to instantiation already being done on a previous instance
2912 if (CONSTANT_ADDRESS_P (temp
)
2913 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2914 || temp
== arg_pointer_rtx
2916 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2917 || temp
== hard_frame_pointer_rtx
2919 || temp
== frame_pointer_rtx
)
2922 if (GET_CODE (temp
) == PLUS
2923 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
2924 && (XEXP (temp
, 0) == frame_pointer_rtx
2925 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2926 || XEXP (temp
, 0) == hard_frame_pointer_rtx
2928 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2929 || XEXP (temp
, 0) == arg_pointer_rtx
2934 if (temp
== virtual_stack_vars_rtx
2935 || temp
== virtual_incoming_args_rtx
2936 || (GET_CODE (temp
) == PLUS
2937 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
2938 && (XEXP (temp
, 0) == virtual_stack_vars_rtx
2939 || XEXP (temp
, 0) == virtual_incoming_args_rtx
)))
2941 /* This MEM may be shared. If the substitution can be done without
2942 the need to generate new pseudos, we want to do it in place
2943 so all copies of the shared rtx benefit. The call below will
2944 only make substitutions if the resulting address is still
2947 Note that we cannot pass X as the object in the recursive call
2948 since the insn being processed may not allow all valid
2949 addresses. However, if we were not passed on object, we can
2950 only modify X without copying it if X will have a valid
2953 ??? Also note that this can still lose if OBJECT is an insn that
2954 has less restrictions on an address that some other insn.
2955 In that case, we will modify the shared address. This case
2956 doesn't seem very likely, though. */
2958 if (instantiate_virtual_regs_1 (&XEXP (x
, 0),
2959 object
? object
: x
, 0))
2962 /* Otherwise make a copy and process that copy. We copy the entire
2963 RTL expression since it might be a PLUS which could also be
2965 *loc
= x
= copy_rtx (x
);
2968 /* Fall through to generic unary operation case. */
2972 case STRICT_LOW_PART
:
2974 case PRE_DEC
: case PRE_INC
: case POST_DEC
: case POST_INC
:
2975 case SIGN_EXTEND
: case ZERO_EXTEND
:
2976 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
2977 case FLOAT
: case FIX
:
2978 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
2982 /* These case either have just one operand or we know that we need not
2983 check the rest of the operands. */
2988 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2989 in front of this insn and substitute the temporary. */
2990 if (x
== virtual_incoming_args_rtx
)
2991 new = arg_pointer_rtx
, offset
= in_arg_offset
;
2992 else if (x
== virtual_stack_vars_rtx
)
2993 new = frame_pointer_rtx
, offset
= var_offset
;
2994 else if (x
== virtual_stack_dynamic_rtx
)
2995 new = stack_pointer_rtx
, offset
= dynamic_offset
;
2996 else if (x
== virtual_outgoing_args_rtx
)
2997 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3001 temp
= plus_constant (new, offset
);
3002 if (!validate_change (object
, loc
, temp
, 0))
3008 temp
= force_operand (temp
, NULL_RTX
);
3012 emit_insns_before (seq
, object
);
3013 if (! validate_change (object
, loc
, temp
, 0)
3014 && ! validate_replace_rtx (x
, temp
, object
))
3022 /* Scan all subexpressions. */
3023 fmt
= GET_RTX_FORMAT (code
);
3024 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3027 if (!instantiate_virtual_regs_1 (&XEXP (x
, i
), object
, extra_insns
))
3030 else if (*fmt
== 'E')
3031 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3032 if (! instantiate_virtual_regs_1 (&XVECEXP (x
, i
, j
), object
,
3039 /* Optimization: assuming this function does not receive nonlocal gotos,
3040 delete the handlers for such, as well as the insns to establish
3041 and disestablish them. */
3047 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3049 /* Delete the handler by turning off the flag that would
3050 prevent jump_optimize from deleting it.
3051 Also permit deletion of the nonlocal labels themselves
3052 if nothing local refers to them. */
3053 if (GET_CODE (insn
) == CODE_LABEL
)
3057 LABEL_PRESERVE_P (insn
) = 0;
3059 /* Remove it from the nonlocal_label list, to avoid confusing
3061 for (t
= nonlocal_labels
, last_t
= 0; t
;
3062 last_t
= t
, t
= TREE_CHAIN (t
))
3063 if (DECL_RTL (TREE_VALUE (t
)) == insn
)
3068 nonlocal_labels
= TREE_CHAIN (nonlocal_labels
);
3070 TREE_CHAIN (last_t
) = TREE_CHAIN (t
);
3073 if (GET_CODE (insn
) == INSN
3074 && ((nonlocal_goto_handler_slot
!= 0
3075 && reg_mentioned_p (nonlocal_goto_handler_slot
, PATTERN (insn
)))
3076 || (nonlocal_goto_stack_level
!= 0
3077 && reg_mentioned_p (nonlocal_goto_stack_level
,
3083 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3084 of the current function. */
3087 nonlocal_label_rtx_list ()
3092 for (t
= nonlocal_labels
; t
; t
= TREE_CHAIN (t
))
3093 x
= gen_rtx (EXPR_LIST
, VOIDmode
, label_rtx (TREE_VALUE (t
)), x
);
3098 /* Output a USE for any register use in RTL.
3099 This is used with -noreg to mark the extent of lifespan
3100 of any registers used in a user-visible variable's DECL_RTL. */
3106 if (GET_CODE (rtl
) == REG
)
3107 /* This is a register variable. */
3108 emit_insn (gen_rtx (USE
, VOIDmode
, rtl
));
3109 else if (GET_CODE (rtl
) == MEM
3110 && GET_CODE (XEXP (rtl
, 0)) == REG
3111 && (REGNO (XEXP (rtl
, 0)) < FIRST_VIRTUAL_REGISTER
3112 || REGNO (XEXP (rtl
, 0)) > LAST_VIRTUAL_REGISTER
)
3113 && XEXP (rtl
, 0) != current_function_internal_arg_pointer
)
3114 /* This is a variable-sized structure. */
3115 emit_insn (gen_rtx (USE
, VOIDmode
, XEXP (rtl
, 0)));
3118 /* Like use_variable except that it outputs the USEs after INSN
3119 instead of at the end of the insn-chain. */
3122 use_variable_after (rtl
, insn
)
3125 if (GET_CODE (rtl
) == REG
)
3126 /* This is a register variable. */
3127 emit_insn_after (gen_rtx (USE
, VOIDmode
, rtl
), insn
);
3128 else if (GET_CODE (rtl
) == MEM
3129 && GET_CODE (XEXP (rtl
, 0)) == REG
3130 && (REGNO (XEXP (rtl
, 0)) < FIRST_VIRTUAL_REGISTER
3131 || REGNO (XEXP (rtl
, 0)) > LAST_VIRTUAL_REGISTER
)
3132 && XEXP (rtl
, 0) != current_function_internal_arg_pointer
)
3133 /* This is a variable-sized structure. */
3134 emit_insn_after (gen_rtx (USE
, VOIDmode
, XEXP (rtl
, 0)), insn
);
3140 return max_parm_reg
;
3143 /* Return the first insn following those generated by `assign_parms'. */
3146 get_first_nonparm_insn ()
3149 return NEXT_INSN (last_parm_insn
);
3150 return get_insns ();
3153 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3154 Crash if there is none. */
3157 get_first_block_beg ()
3159 register rtx searcher
;
3160 register rtx insn
= get_first_nonparm_insn ();
3162 for (searcher
= insn
; searcher
; searcher
= NEXT_INSN (searcher
))
3163 if (GET_CODE (searcher
) == NOTE
3164 && NOTE_LINE_NUMBER (searcher
) == NOTE_INSN_BLOCK_BEG
)
3167 abort (); /* Invalid call to this function. (See comments above.) */
3171 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3172 This means a type for which function calls must pass an address to the
3173 function or get an address back from the function.
3174 EXP may be a type node or an expression (whose type is tested). */
3177 aggregate_value_p (exp
)
3180 int i
, regno
, nregs
;
3183 if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 't')
3186 type
= TREE_TYPE (exp
);
3188 if (RETURN_IN_MEMORY (type
))
3190 /* Types that are TREE_ADDRESSABLE must be contructed in memory,
3191 and thus can't be returned in registers. */
3192 if (TREE_ADDRESSABLE (type
))
3194 if (flag_pcc_struct_return
&& AGGREGATE_TYPE_P (type
))
3196 /* Make sure we have suitable call-clobbered regs to return
3197 the value in; if not, we must return it in memory. */
3198 reg
= hard_function_value (type
, 0);
3199 regno
= REGNO (reg
);
3200 nregs
= HARD_REGNO_NREGS (regno
, TYPE_MODE (type
));
3201 for (i
= 0; i
< nregs
; i
++)
3202 if (! call_used_regs
[regno
+ i
])
3207 /* Assign RTL expressions to the function's parameters.
3208 This may involve copying them into registers and using
3209 those registers as the RTL for them.
3211 If SECOND_TIME is non-zero it means that this function is being
3212 called a second time. This is done by integrate.c when a function's
3213 compilation is deferred. We need to come back here in case the
3214 FUNCTION_ARG macro computes items needed for the rest of the compilation
3215 (such as changing which registers are fixed or caller-saved). But suppress
3216 writing any insns or setting DECL_RTL of anything in this case. */
3219 assign_parms (fndecl
, second_time
)
3224 register rtx entry_parm
= 0;
3225 register rtx stack_parm
= 0;
3226 CUMULATIVE_ARGS args_so_far
;
3227 enum machine_mode promoted_mode
, passed_mode
;
3228 enum machine_mode nominal_mode
, promoted_nominal_mode
;
3230 /* Total space needed so far for args on the stack,
3231 given as a constant and a tree-expression. */
3232 struct args_size stack_args_size
;
3233 tree fntype
= TREE_TYPE (fndecl
);
3234 tree fnargs
= DECL_ARGUMENTS (fndecl
);
3235 /* This is used for the arg pointer when referring to stack args. */
3236 rtx internal_arg_pointer
;
3237 /* This is a dummy PARM_DECL that we used for the function result if
3238 the function returns a structure. */
3239 tree function_result_decl
= 0;
3240 int nparmregs
= list_length (fnargs
) + LAST_VIRTUAL_REGISTER
+ 1;
3241 int varargs_setup
= 0;
3242 rtx conversion_insns
= 0;
3244 /* Nonzero if the last arg is named `__builtin_va_alist',
3245 which is used on some machines for old-fashioned non-ANSI varargs.h;
3246 this should be stuck onto the stack as if it had arrived there. */
3248 = (current_function_varargs
3250 && (parm
= tree_last (fnargs
)) != 0
3252 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm
)),
3253 "__builtin_va_alist")));
3255 /* Nonzero if function takes extra anonymous args.
3256 This means the last named arg must be on the stack
3257 right before the anonymous ones. */
3259 = (TYPE_ARG_TYPES (fntype
) != 0
3260 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3261 != void_type_node
));
3263 current_function_stdarg
= stdarg
;
3265 /* If the reg that the virtual arg pointer will be translated into is
3266 not a fixed reg or is the stack pointer, make a copy of the virtual
3267 arg pointer, and address parms via the copy. The frame pointer is
3268 considered fixed even though it is not marked as such.
3270 The second time through, simply use ap to avoid generating rtx. */
3272 if ((ARG_POINTER_REGNUM
== STACK_POINTER_REGNUM
3273 || ! (fixed_regs
[ARG_POINTER_REGNUM
]
3274 || ARG_POINTER_REGNUM
== FRAME_POINTER_REGNUM
))
3276 internal_arg_pointer
= copy_to_reg (virtual_incoming_args_rtx
);
3278 internal_arg_pointer
= virtual_incoming_args_rtx
;
3279 current_function_internal_arg_pointer
= internal_arg_pointer
;
3281 stack_args_size
.constant
= 0;
3282 stack_args_size
.var
= 0;
3284 /* If struct value address is treated as the first argument, make it so. */
3285 if (aggregate_value_p (DECL_RESULT (fndecl
))
3286 && ! current_function_returns_pcc_struct
3287 && struct_value_incoming_rtx
== 0)
3289 tree type
= build_pointer_type (TREE_TYPE (fntype
));
3291 function_result_decl
= build_decl (PARM_DECL
, NULL_TREE
, type
);
3293 DECL_ARG_TYPE (function_result_decl
) = type
;
3294 TREE_CHAIN (function_result_decl
) = fnargs
;
3295 fnargs
= function_result_decl
;
3298 parm_reg_stack_loc
= (rtx
*) oballoc (nparmregs
* sizeof (rtx
));
3299 bzero ((char *) parm_reg_stack_loc
, nparmregs
* sizeof (rtx
));
3301 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3302 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far
, fntype
, NULL_RTX
);
3304 INIT_CUMULATIVE_ARGS (args_so_far
, fntype
, NULL_RTX
, 0);
3307 /* We haven't yet found an argument that we must push and pretend the
3309 current_function_pretend_args_size
= 0;
3311 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
3313 int aggregate
= AGGREGATE_TYPE_P (TREE_TYPE (parm
));
3314 struct args_size stack_offset
;
3315 struct args_size arg_size
;
3316 int passed_pointer
= 0;
3317 int did_conversion
= 0;
3318 tree passed_type
= DECL_ARG_TYPE (parm
);
3319 tree nominal_type
= TREE_TYPE (parm
);
3321 /* Set LAST_NAMED if this is last named arg before some
3322 anonymous args. We treat it as if it were anonymous too. */
3323 int last_named
= ((TREE_CHAIN (parm
) == 0
3324 || DECL_NAME (TREE_CHAIN (parm
)) == 0)
3325 && (stdarg
|| current_function_varargs
));
3327 if (TREE_TYPE (parm
) == error_mark_node
3328 /* This can happen after weird syntax errors
3329 or if an enum type is defined among the parms. */
3330 || TREE_CODE (parm
) != PARM_DECL
3331 || passed_type
== NULL
)
3333 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
) = gen_rtx (MEM
, BLKmode
,
3335 TREE_USED (parm
) = 1;
3339 /* For varargs.h function, save info about regs and stack space
3340 used by the individual args, not including the va_alist arg. */
3341 if (hide_last_arg
&& last_named
)
3342 current_function_args_info
= args_so_far
;
3344 /* Find mode of arg as it is passed, and mode of arg
3345 as it should be during execution of this function. */
3346 passed_mode
= TYPE_MODE (passed_type
);
3347 nominal_mode
= TYPE_MODE (nominal_type
);
3349 /* If the parm's mode is VOID, its value doesn't matter,
3350 and avoid the usual things like emit_move_insn that could crash. */
3351 if (nominal_mode
== VOIDmode
)
3353 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
) = const0_rtx
;
3357 /* If the parm is to be passed as a transparent union, use the
3358 type of the first field for the tests below. We have already
3359 verified that the modes are the same. */
3360 if (DECL_TRANSPARENT_UNION (parm
)
3361 || TYPE_TRANSPARENT_UNION (passed_type
))
3362 passed_type
= TREE_TYPE (TYPE_FIELDS (passed_type
));
3364 /* See if this arg was passed by invisible reference. It is if
3365 it is an object whose size depends on the contents of the
3366 object itself or if the machine requires these objects be passed
3369 if ((TREE_CODE (TYPE_SIZE (passed_type
)) != INTEGER_CST
3370 && contains_placeholder_p (TYPE_SIZE (passed_type
)))
3371 || TREE_ADDRESSABLE (passed_type
)
3372 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3373 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, passed_mode
,
3374 passed_type
, ! last_named
)
3378 passed_type
= nominal_type
= build_pointer_type (passed_type
);
3380 passed_mode
= nominal_mode
= Pmode
;
3383 promoted_mode
= passed_mode
;
3385 #ifdef PROMOTE_FUNCTION_ARGS
3386 /* Compute the mode in which the arg is actually extended to. */
3387 promoted_mode
= promote_mode (passed_type
, promoted_mode
, &unsignedp
, 1);
3390 /* Let machine desc say which reg (if any) the parm arrives in.
3391 0 means it arrives on the stack. */
3392 #ifdef FUNCTION_INCOMING_ARG
3393 entry_parm
= FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
3394 passed_type
, ! last_named
);
3396 entry_parm
= FUNCTION_ARG (args_so_far
, promoted_mode
,
3397 passed_type
, ! last_named
);
3400 if (entry_parm
== 0)
3401 promoted_mode
= passed_mode
;
3403 #ifdef SETUP_INCOMING_VARARGS
3404 /* If this is the last named parameter, do any required setup for
3405 varargs or stdargs. We need to know about the case of this being an
3406 addressable type, in which case we skip the registers it
3407 would have arrived in.
3409 For stdargs, LAST_NAMED will be set for two parameters, the one that
3410 is actually the last named, and the dummy parameter. We only
3411 want to do this action once.
3413 Also, indicate when RTL generation is to be suppressed. */
3414 if (last_named
&& !varargs_setup
)
3416 SETUP_INCOMING_VARARGS (args_so_far
, promoted_mode
, passed_type
,
3417 current_function_pretend_args_size
,
3423 /* Determine parm's home in the stack,
3424 in case it arrives in the stack or we should pretend it did.
3426 Compute the stack position and rtx where the argument arrives
3429 There is one complexity here: If this was a parameter that would
3430 have been passed in registers, but wasn't only because it is
3431 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3432 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3433 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3434 0 as it was the previous time. */
3436 locate_and_pad_parm (promoted_mode
, passed_type
,
3437 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3440 #ifdef FUNCTION_INCOMING_ARG
3441 FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
3444 || varargs_setup
)) != 0,
3446 FUNCTION_ARG (args_so_far
, promoted_mode
,
3448 ! last_named
|| varargs_setup
) != 0,
3451 fndecl
, &stack_args_size
, &stack_offset
, &arg_size
);
3455 rtx offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
3457 if (offset_rtx
== const0_rtx
)
3458 stack_parm
= gen_rtx (MEM
, promoted_mode
, internal_arg_pointer
);
3460 stack_parm
= gen_rtx (MEM
, promoted_mode
,
3461 gen_rtx (PLUS
, Pmode
,
3462 internal_arg_pointer
, offset_rtx
));
3464 /* If this is a memory ref that contains aggregate components,
3465 mark it as such for cse and loop optimize. Likewise if it
3467 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
3468 RTX_UNCHANGING_P (stack_parm
) = TREE_READONLY (parm
);
3471 /* If this parameter was passed both in registers and in the stack,
3472 use the copy on the stack. */
3473 if (MUST_PASS_IN_STACK (promoted_mode
, passed_type
))
3476 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3477 /* If this parm was passed part in regs and part in memory,
3478 pretend it arrived entirely in memory
3479 by pushing the register-part onto the stack.
3481 In the special case of a DImode or DFmode that is split,
3482 we could put it together in a pseudoreg directly,
3483 but for now that's not worth bothering with. */
3487 int nregs
= FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, promoted_mode
,
3488 passed_type
, ! last_named
);
3492 current_function_pretend_args_size
3493 = (((nregs
* UNITS_PER_WORD
) + (PARM_BOUNDARY
/ BITS_PER_UNIT
) - 1)
3494 / (PARM_BOUNDARY
/ BITS_PER_UNIT
)
3495 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
3499 /* Handle calls that pass values in multiple non-contiguous
3500 locations. The Irix 6 ABI has examples of this. */
3501 if (GET_CODE (entry_parm
) == PARALLEL
)
3502 emit_group_store (validize_mem (stack_parm
),
3505 move_block_from_reg (REGNO (entry_parm
),
3506 validize_mem (stack_parm
), nregs
,
3507 int_size_in_bytes (TREE_TYPE (parm
)));
3509 entry_parm
= stack_parm
;
3514 /* If we didn't decide this parm came in a register,
3515 by default it came on the stack. */
3516 if (entry_parm
== 0)
3517 entry_parm
= stack_parm
;
3519 /* Record permanently how this parm was passed. */
3521 DECL_INCOMING_RTL (parm
) = entry_parm
;
3523 /* If there is actually space on the stack for this parm,
3524 count it in stack_args_size; otherwise set stack_parm to 0
3525 to indicate there is no preallocated stack slot for the parm. */
3527 if (entry_parm
== stack_parm
3528 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3529 /* On some machines, even if a parm value arrives in a register
3530 there is still an (uninitialized) stack slot allocated for it.
3532 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3533 whether this parameter already has a stack slot allocated,
3534 because an arg block exists only if current_function_args_size
3535 is larger than some threshold, and we haven't calculated that
3536 yet. So, for now, we just assume that stack slots never exist
3538 || REG_PARM_STACK_SPACE (fndecl
) > 0
3542 stack_args_size
.constant
+= arg_size
.constant
;
3544 ADD_PARM_SIZE (stack_args_size
, arg_size
.var
);
3547 /* No stack slot was pushed for this parm. */
3550 /* Update info on where next arg arrives in registers. */
3552 FUNCTION_ARG_ADVANCE (args_so_far
, promoted_mode
,
3553 passed_type
, ! last_named
);
3555 /* If this is our second time through, we are done with this parm. */
3559 /* If we can't trust the parm stack slot to be aligned enough
3560 for its ultimate type, don't use that slot after entry.
3561 We'll make another stack slot, if we need one. */
3563 int thisparm_boundary
3564 = FUNCTION_ARG_BOUNDARY (promoted_mode
, passed_type
);
3566 if (GET_MODE_ALIGNMENT (nominal_mode
) > thisparm_boundary
)
3570 /* If parm was passed in memory, and we need to convert it on entry,
3571 don't store it back in that same slot. */
3573 && nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
)
3577 /* Now adjust STACK_PARM to the mode and precise location
3578 where this parameter should live during execution,
3579 if we discover that it must live in the stack during execution.
3580 To make debuggers happier on big-endian machines, we store
3581 the value in the last bytes of the space available. */
3583 if (nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
3588 if (BYTES_BIG_ENDIAN
3589 && GET_MODE_SIZE (nominal_mode
) < UNITS_PER_WORD
)
3590 stack_offset
.constant
+= (GET_MODE_SIZE (passed_mode
)
3591 - GET_MODE_SIZE (nominal_mode
));
3593 offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
3594 if (offset_rtx
== const0_rtx
)
3595 stack_parm
= gen_rtx (MEM
, nominal_mode
, internal_arg_pointer
);
3597 stack_parm
= gen_rtx (MEM
, nominal_mode
,
3598 gen_rtx (PLUS
, Pmode
,
3599 internal_arg_pointer
, offset_rtx
));
3601 /* If this is a memory ref that contains aggregate components,
3602 mark it as such for cse and loop optimize. */
3603 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
3608 /* We need this "use" info, because the gcc-register->stack-register
3609 converter in reg-stack.c needs to know which registers are active
3610 at the start of the function call. The actual parameter loading
3611 instructions are not always available then anymore, since they might
3612 have been optimised away. */
3614 if (GET_CODE (entry_parm
) == REG
&& !(hide_last_arg
&& last_named
))
3615 emit_insn (gen_rtx (USE
, GET_MODE (entry_parm
), entry_parm
));
3618 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3619 in the mode in which it arrives.
3620 STACK_PARM is an RTX for a stack slot where the parameter can live
3621 during the function (in case we want to put it there).
3622 STACK_PARM is 0 if no stack slot was pushed for it.
3624 Now output code if necessary to convert ENTRY_PARM to
3625 the type in which this function declares it,
3626 and store that result in an appropriate place,
3627 which may be a pseudo reg, may be STACK_PARM,
3628 or may be a local stack slot if STACK_PARM is 0.
3630 Set DECL_RTL to that place. */
3632 if (nominal_mode
== BLKmode
|| GET_CODE (entry_parm
) == PARALLEL
)
3634 /* If a BLKmode arrives in registers, copy it to a stack slot.
3635 Handle calls that pass values in multiple non-contiguous
3636 locations. The Irix 6 ABI has examples of this. */
3637 if (GET_CODE (entry_parm
) == REG
3638 || GET_CODE (entry_parm
) == PARALLEL
)
3641 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm
)),
3644 /* Note that we will be storing an integral number of words.
3645 So we have to be careful to ensure that we allocate an
3646 integral number of words. We do this below in the
3647 assign_stack_local if space was not allocated in the argument
3648 list. If it was, this will not work if PARM_BOUNDARY is not
3649 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3650 if it becomes a problem. */
3652 if (stack_parm
== 0)
3655 = assign_stack_local (GET_MODE (entry_parm
),
3658 /* If this is a memory ref that contains aggregate
3659 components, mark it as such for cse and loop optimize. */
3660 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
3663 else if (PARM_BOUNDARY
% BITS_PER_WORD
!= 0)
3666 if (TREE_READONLY (parm
))
3667 RTX_UNCHANGING_P (stack_parm
) = 1;
3669 /* Handle calls that pass values in multiple non-contiguous
3670 locations. The Irix 6 ABI has examples of this. */
3671 if (GET_CODE (entry_parm
) == PARALLEL
)
3672 emit_group_store (validize_mem (stack_parm
), entry_parm
);
3674 move_block_from_reg (REGNO (entry_parm
),
3675 validize_mem (stack_parm
),
3676 size_stored
/ UNITS_PER_WORD
,
3677 int_size_in_bytes (TREE_TYPE (parm
)));
3679 DECL_RTL (parm
) = stack_parm
;
3681 else if (! ((obey_regdecls
&& ! DECL_REGISTER (parm
)
3682 && ! DECL_INLINE (fndecl
))
3683 /* layout_decl may set this. */
3684 || TREE_ADDRESSABLE (parm
)
3685 || TREE_SIDE_EFFECTS (parm
)
3686 /* If -ffloat-store specified, don't put explicit
3687 float variables into registers. */
3688 || (flag_float_store
3689 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
))
3690 /* Always assign pseudo to structure return or item passed
3691 by invisible reference. */
3692 || passed_pointer
|| parm
== function_result_decl
)
3694 /* Store the parm in a pseudoregister during the function, but we
3695 may need to do it in a wider mode. */
3697 register rtx parmreg
;
3698 int regno
, regnoi
, regnor
;
3700 unsignedp
= TREE_UNSIGNED (TREE_TYPE (parm
));
3702 promoted_nominal_mode
3703 = promote_mode (TREE_TYPE (parm
), nominal_mode
, &unsignedp
, 0);
3705 parmreg
= gen_reg_rtx (promoted_nominal_mode
);
3706 REG_USERVAR_P (parmreg
) = 1;
3708 /* If this was an item that we received a pointer to, set DECL_RTL
3713 = gen_rtx (MEM
, TYPE_MODE (TREE_TYPE (passed_type
)), parmreg
);
3714 MEM_IN_STRUCT_P (DECL_RTL (parm
)) = aggregate
;
3717 DECL_RTL (parm
) = parmreg
;
3719 /* Copy the value into the register. */
3720 if (nominal_mode
!= passed_mode
3721 || promoted_nominal_mode
!= promoted_mode
)
3723 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3724 mode, by the caller. We now have to convert it to
3725 NOMINAL_MODE, if different. However, PARMREG may be in
3726 a diffent mode than NOMINAL_MODE if it is being stored
3729 If ENTRY_PARM is a hard register, it might be in a register
3730 not valid for operating in its mode (e.g., an odd-numbered
3731 register for a DFmode). In that case, moves are the only
3732 thing valid, so we can't do a convert from there. This
3733 occurs when the calling sequence allow such misaligned
3736 In addition, the conversion may involve a call, which could
3737 clobber parameters which haven't been copied to pseudo
3738 registers yet. Therefore, we must first copy the parm to
3739 a pseudo reg here, and save the conversion until after all
3740 parameters have been moved. */
3742 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
3744 emit_move_insn (tempreg
, validize_mem (entry_parm
));
3746 push_to_sequence (conversion_insns
);
3747 tempreg
= convert_to_mode (nominal_mode
, tempreg
, unsignedp
);
3749 expand_assignment (parm
,
3750 make_tree (nominal_type
, tempreg
), 0, 0);
3751 conversion_insns
= get_insns ();
3756 emit_move_insn (parmreg
, validize_mem (entry_parm
));
3758 /* If we were passed a pointer but the actual value
3759 can safely live in a register, put it in one. */
3760 if (passed_pointer
&& TYPE_MODE (TREE_TYPE (parm
)) != BLKmode
3761 && ! ((obey_regdecls
&& ! DECL_REGISTER (parm
)
3762 && ! DECL_INLINE (fndecl
))
3763 /* layout_decl may set this. */
3764 || TREE_ADDRESSABLE (parm
)
3765 || TREE_SIDE_EFFECTS (parm
)
3766 /* If -ffloat-store specified, don't put explicit
3767 float variables into registers. */
3768 || (flag_float_store
3769 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
)))
3771 /* We can't use nominal_mode, because it will have been set to
3772 Pmode above. We must use the actual mode of the parm. */
3773 parmreg
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm
)));
3774 REG_USERVAR_P (parmreg
) = 1;
3775 emit_move_insn (parmreg
, DECL_RTL (parm
));
3776 DECL_RTL (parm
) = parmreg
;
3777 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3781 #ifdef FUNCTION_ARG_CALLEE_COPIES
3782 /* If we are passed an arg by reference and it is our responsibility
3783 to make a copy, do it now.
3784 PASSED_TYPE and PASSED mode now refer to the pointer, not the
3785 original argument, so we must recreate them in the call to
3786 FUNCTION_ARG_CALLEE_COPIES. */
3787 /* ??? Later add code to handle the case that if the argument isn't
3788 modified, don't do the copy. */
3790 else if (passed_pointer
3791 && FUNCTION_ARG_CALLEE_COPIES (args_so_far
,
3792 TYPE_MODE (DECL_ARG_TYPE (parm
)),
3793 DECL_ARG_TYPE (parm
),
3795 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm
)))
3798 tree type
= DECL_ARG_TYPE (parm
);
3800 /* This sequence may involve a library call perhaps clobbering
3801 registers that haven't been copied to pseudos yet. */
3803 push_to_sequence (conversion_insns
);
3805 if (TYPE_SIZE (type
) == 0
3806 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3807 /* This is a variable sized object. */
3808 copy
= gen_rtx (MEM
, BLKmode
,
3809 allocate_dynamic_stack_space
3810 (expr_size (parm
), NULL_RTX
,
3811 TYPE_ALIGN (type
)));
3813 copy
= assign_stack_temp (TYPE_MODE (type
),
3814 int_size_in_bytes (type
), 1);
3815 MEM_IN_STRUCT_P (copy
) = AGGREGATE_TYPE_P (type
);
3817 store_expr (parm
, copy
, 0);
3818 emit_move_insn (parmreg
, XEXP (copy
, 0));
3819 conversion_insns
= get_insns ();
3823 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3825 /* In any case, record the parm's desired stack location
3826 in case we later discover it must live in the stack.
3828 If it is a COMPLEX value, store the stack location for both
3831 if (GET_CODE (parmreg
) == CONCAT
)
3832 regno
= MAX (REGNO (XEXP (parmreg
, 0)), REGNO (XEXP (parmreg
, 1)));
3834 regno
= REGNO (parmreg
);
3836 if (regno
>= nparmregs
)
3839 int old_nparmregs
= nparmregs
;
3841 nparmregs
= regno
+ 5;
3842 new = (rtx
*) oballoc (nparmregs
* sizeof (rtx
));
3843 bcopy ((char *) parm_reg_stack_loc
, (char *) new,
3844 old_nparmregs
* sizeof (rtx
));
3845 bzero ((char *) (new + old_nparmregs
),
3846 (nparmregs
- old_nparmregs
) * sizeof (rtx
));
3847 parm_reg_stack_loc
= new;
3850 if (GET_CODE (parmreg
) == CONCAT
)
3852 enum machine_mode submode
= GET_MODE (XEXP (parmreg
, 0));
3854 regnor
= REGNO (gen_realpart (submode
, parmreg
));
3855 regnoi
= REGNO (gen_imagpart (submode
, parmreg
));
3857 if (stack_parm
!= 0)
3859 parm_reg_stack_loc
[regnor
]
3860 = gen_realpart (submode
, stack_parm
);
3861 parm_reg_stack_loc
[regnoi
]
3862 = gen_imagpart (submode
, stack_parm
);
3866 parm_reg_stack_loc
[regnor
] = 0;
3867 parm_reg_stack_loc
[regnoi
] = 0;
3871 parm_reg_stack_loc
[REGNO (parmreg
)] = stack_parm
;
3873 /* Mark the register as eliminable if we did no conversion
3874 and it was copied from memory at a fixed offset,
3875 and the arg pointer was not copied to a pseudo-reg.
3876 If the arg pointer is a pseudo reg or the offset formed
3877 an invalid address, such memory-equivalences
3878 as we make here would screw up life analysis for it. */
3879 if (nominal_mode
== passed_mode
3881 && GET_CODE (entry_parm
) == MEM
3882 && entry_parm
== stack_parm
3883 && stack_offset
.var
== 0
3884 && reg_mentioned_p (virtual_incoming_args_rtx
,
3885 XEXP (entry_parm
, 0)))
3887 rtx linsn
= get_last_insn ();
3890 /* Mark complex types separately. */
3891 if (GET_CODE (parmreg
) == CONCAT
)
3892 /* Scan backwards for the set of the real and
3894 for (sinsn
= linsn
; sinsn
!= 0;
3895 sinsn
= prev_nonnote_insn (sinsn
))
3897 set
= single_set (sinsn
);
3899 && SET_DEST (set
) == regno_reg_rtx
[regnoi
])
3901 = gen_rtx (EXPR_LIST
, REG_EQUIV
,
3902 parm_reg_stack_loc
[regnoi
],
3905 && SET_DEST (set
) == regno_reg_rtx
[regnor
])
3907 = gen_rtx (EXPR_LIST
, REG_EQUIV
,
3908 parm_reg_stack_loc
[regnor
],
3911 else if ((set
= single_set (linsn
)) != 0
3912 && SET_DEST (set
) == parmreg
)
3914 = gen_rtx (EXPR_LIST
, REG_EQUIV
,
3915 entry_parm
, REG_NOTES (linsn
));
3918 /* For pointer data type, suggest pointer register. */
3919 if (TREE_CODE (TREE_TYPE (parm
)) == POINTER_TYPE
)
3920 mark_reg_pointer (parmreg
,
3921 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
)))
3926 /* Value must be stored in the stack slot STACK_PARM
3927 during function execution. */
3929 if (promoted_mode
!= nominal_mode
)
3931 /* Conversion is required. */
3932 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
3934 emit_move_insn (tempreg
, validize_mem (entry_parm
));
3936 push_to_sequence (conversion_insns
);
3937 entry_parm
= convert_to_mode (nominal_mode
, tempreg
,
3938 TREE_UNSIGNED (TREE_TYPE (parm
)));
3939 conversion_insns
= get_insns ();
3944 if (entry_parm
!= stack_parm
)
3946 if (stack_parm
== 0)
3949 = assign_stack_local (GET_MODE (entry_parm
),
3950 GET_MODE_SIZE (GET_MODE (entry_parm
)), 0);
3951 /* If this is a memory ref that contains aggregate components,
3952 mark it as such for cse and loop optimize. */
3953 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
3956 if (promoted_mode
!= nominal_mode
)
3958 push_to_sequence (conversion_insns
);
3959 emit_move_insn (validize_mem (stack_parm
),
3960 validize_mem (entry_parm
));
3961 conversion_insns
= get_insns ();
3965 emit_move_insn (validize_mem (stack_parm
),
3966 validize_mem (entry_parm
));
3969 DECL_RTL (parm
) = stack_parm
;
3972 /* If this "parameter" was the place where we are receiving the
3973 function's incoming structure pointer, set up the result. */
3974 if (parm
== function_result_decl
)
3976 tree result
= DECL_RESULT (fndecl
);
3977 tree restype
= TREE_TYPE (result
);
3980 = gen_rtx (MEM
, DECL_MODE (result
), DECL_RTL (parm
));
3982 MEM_IN_STRUCT_P (DECL_RTL (result
)) = AGGREGATE_TYPE_P (restype
);
3985 if (TREE_THIS_VOLATILE (parm
))
3986 MEM_VOLATILE_P (DECL_RTL (parm
)) = 1;
3987 if (TREE_READONLY (parm
))
3988 RTX_UNCHANGING_P (DECL_RTL (parm
)) = 1;
3991 /* Output all parameter conversion instructions (possibly including calls)
3992 now that all parameters have been copied out of hard registers. */
3993 emit_insns (conversion_insns
);
3995 max_parm_reg
= max_reg_num ();
3996 last_parm_insn
= get_last_insn ();
3998 current_function_args_size
= stack_args_size
.constant
;
4000 /* Adjust function incoming argument size for alignment and
4003 #ifdef REG_PARM_STACK_SPACE
4004 #ifndef MAYBE_REG_PARM_STACK_SPACE
4005 current_function_args_size
= MAX (current_function_args_size
,
4006 REG_PARM_STACK_SPACE (fndecl
));
4010 #ifdef STACK_BOUNDARY
4011 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4013 current_function_args_size
4014 = ((current_function_args_size
+ STACK_BYTES
- 1)
4015 / STACK_BYTES
) * STACK_BYTES
;
4018 #ifdef ARGS_GROW_DOWNWARD
4019 current_function_arg_offset_rtx
4020 = (stack_args_size
.var
== 0 ? GEN_INT (-stack_args_size
.constant
)
4021 : expand_expr (size_binop (MINUS_EXPR
, stack_args_size
.var
,
4022 size_int (-stack_args_size
.constant
)),
4023 NULL_RTX
, VOIDmode
, 0));
4025 current_function_arg_offset_rtx
= ARGS_SIZE_RTX (stack_args_size
);
4028 /* See how many bytes, if any, of its args a function should try to pop
4031 current_function_pops_args
= RETURN_POPS_ARGS (fndecl
, TREE_TYPE (fndecl
),
4032 current_function_args_size
);
4034 /* For stdarg.h function, save info about
4035 regs and stack space used by the named args. */
4038 current_function_args_info
= args_so_far
;
4040 /* Set the rtx used for the function return value. Put this in its
4041 own variable so any optimizers that need this information don't have
4042 to include tree.h. Do this here so it gets done when an inlined
4043 function gets output. */
4045 current_function_return_rtx
= DECL_RTL (DECL_RESULT (fndecl
));
4048 /* Indicate whether REGNO is an incoming argument to the current function
4049 that was promoted to a wider mode. If so, return the RTX for the
4050 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4051 that REGNO is promoted from and whether the promotion was signed or
4054 #ifdef PROMOTE_FUNCTION_ARGS
4057 promoted_input_arg (regno
, pmode
, punsignedp
)
4059 enum machine_mode
*pmode
;
4064 for (arg
= DECL_ARGUMENTS (current_function_decl
); arg
;
4065 arg
= TREE_CHAIN (arg
))
4066 if (GET_CODE (DECL_INCOMING_RTL (arg
)) == REG
4067 && REGNO (DECL_INCOMING_RTL (arg
)) == regno
4068 && TYPE_MODE (DECL_ARG_TYPE (arg
)) == TYPE_MODE (TREE_TYPE (arg
)))
4070 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg
));
4071 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (arg
));
4073 mode
= promote_mode (TREE_TYPE (arg
), mode
, &unsignedp
, 1);
4074 if (mode
== GET_MODE (DECL_INCOMING_RTL (arg
))
4075 && mode
!= DECL_MODE (arg
))
4077 *pmode
= DECL_MODE (arg
);
4078 *punsignedp
= unsignedp
;
4079 return DECL_INCOMING_RTL (arg
);
4088 /* Compute the size and offset from the start of the stacked arguments for a
4089 parm passed in mode PASSED_MODE and with type TYPE.
4091 INITIAL_OFFSET_PTR points to the current offset into the stacked
4094 The starting offset and size for this parm are returned in *OFFSET_PTR
4095 and *ARG_SIZE_PTR, respectively.
4097 IN_REGS is non-zero if the argument will be passed in registers. It will
4098 never be set if REG_PARM_STACK_SPACE is not defined.
4100 FNDECL is the function in which the argument was defined.
4102 There are two types of rounding that are done. The first, controlled by
4103 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4104 list to be aligned to the specific boundary (in bits). This rounding
4105 affects the initial and starting offsets, but not the argument size.
4107 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4108 optionally rounds the size of the parm to PARM_BOUNDARY. The
4109 initial offset is not affected by this rounding, while the size always
4110 is and the starting offset may be. */
4112 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4113 initial_offset_ptr is positive because locate_and_pad_parm's
4114 callers pass in the total size of args so far as
4115 initial_offset_ptr. arg_size_ptr is always positive.*/
4118 locate_and_pad_parm (passed_mode
, type
, in_regs
, fndecl
,
4119 initial_offset_ptr
, offset_ptr
, arg_size_ptr
)
4120 enum machine_mode passed_mode
;
4124 struct args_size
*initial_offset_ptr
;
4125 struct args_size
*offset_ptr
;
4126 struct args_size
*arg_size_ptr
;
4129 = type
? size_in_bytes (type
) : size_int (GET_MODE_SIZE (passed_mode
));
4130 enum direction where_pad
= FUNCTION_ARG_PADDING (passed_mode
, type
);
4131 int boundary
= FUNCTION_ARG_BOUNDARY (passed_mode
, type
);
4132 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
4133 int reg_parm_stack_space
= 0;
4135 #ifdef REG_PARM_STACK_SPACE
4136 /* If we have found a stack parm before we reach the end of the
4137 area reserved for registers, skip that area. */
4140 #ifdef MAYBE_REG_PARM_STACK_SPACE
4141 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
4143 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
4145 if (reg_parm_stack_space
> 0)
4147 if (initial_offset_ptr
->var
)
4149 initial_offset_ptr
->var
4150 = size_binop (MAX_EXPR
, ARGS_SIZE_TREE (*initial_offset_ptr
),
4151 size_int (reg_parm_stack_space
));
4152 initial_offset_ptr
->constant
= 0;
4154 else if (initial_offset_ptr
->constant
< reg_parm_stack_space
)
4155 initial_offset_ptr
->constant
= reg_parm_stack_space
;
4158 #endif /* REG_PARM_STACK_SPACE */
4160 arg_size_ptr
->var
= 0;
4161 arg_size_ptr
->constant
= 0;
4163 #ifdef ARGS_GROW_DOWNWARD
4164 if (initial_offset_ptr
->var
)
4166 offset_ptr
->constant
= 0;
4167 offset_ptr
->var
= size_binop (MINUS_EXPR
, integer_zero_node
,
4168 initial_offset_ptr
->var
);
4172 offset_ptr
->constant
= - initial_offset_ptr
->constant
;
4173 offset_ptr
->var
= 0;
4175 if (where_pad
!= none
4176 && (TREE_CODE (sizetree
) != INTEGER_CST
4177 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
4178 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4179 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
4180 if (where_pad
!= downward
)
4181 pad_to_arg_alignment (offset_ptr
, boundary
);
4182 if (initial_offset_ptr
->var
)
4184 arg_size_ptr
->var
= size_binop (MINUS_EXPR
,
4185 size_binop (MINUS_EXPR
,
4187 initial_offset_ptr
->var
),
4192 arg_size_ptr
->constant
= (- initial_offset_ptr
->constant
-
4193 offset_ptr
->constant
);
4195 #else /* !ARGS_GROW_DOWNWARD */
4196 pad_to_arg_alignment (initial_offset_ptr
, boundary
);
4197 *offset_ptr
= *initial_offset_ptr
;
4199 #ifdef PUSH_ROUNDING
4200 if (passed_mode
!= BLKmode
)
4201 sizetree
= size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree
)));
4204 /* Pad_below needs the pre-rounded size to know how much to pad below
4205 so this must be done before rounding up. */
4206 if (where_pad
== downward
4207 /* However, BLKmode args passed in regs have their padding done elsewhere.
4208 The stack slot must be able to hold the entire register. */
4209 && !(in_regs
&& passed_mode
== BLKmode
))
4210 pad_below (offset_ptr
, passed_mode
, sizetree
);
4212 if (where_pad
!= none
4213 && (TREE_CODE (sizetree
) != INTEGER_CST
4214 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
4215 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4217 ADD_PARM_SIZE (*arg_size_ptr
, sizetree
);
4218 #endif /* ARGS_GROW_DOWNWARD */
4221 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4222 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4225 pad_to_arg_alignment (offset_ptr
, boundary
)
4226 struct args_size
*offset_ptr
;
4229 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
4231 if (boundary
> BITS_PER_UNIT
)
4233 if (offset_ptr
->var
)
4236 #ifdef ARGS_GROW_DOWNWARD
4241 (ARGS_SIZE_TREE (*offset_ptr
),
4242 boundary
/ BITS_PER_UNIT
);
4243 offset_ptr
->constant
= 0; /*?*/
4246 offset_ptr
->constant
=
4247 #ifdef ARGS_GROW_DOWNWARD
4248 FLOOR_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
4250 CEIL_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
4256 pad_below (offset_ptr
, passed_mode
, sizetree
)
4257 struct args_size
*offset_ptr
;
4258 enum machine_mode passed_mode
;
4261 if (passed_mode
!= BLKmode
)
4263 if (GET_MODE_BITSIZE (passed_mode
) % PARM_BOUNDARY
)
4264 offset_ptr
->constant
4265 += (((GET_MODE_BITSIZE (passed_mode
) + PARM_BOUNDARY
- 1)
4266 / PARM_BOUNDARY
* PARM_BOUNDARY
/ BITS_PER_UNIT
)
4267 - GET_MODE_SIZE (passed_mode
));
4271 if (TREE_CODE (sizetree
) != INTEGER_CST
4272 || (TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)
4274 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4275 tree s2
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4277 ADD_PARM_SIZE (*offset_ptr
, s2
);
4278 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
4284 round_down (value
, divisor
)
4288 return size_binop (MULT_EXPR
,
4289 size_binop (FLOOR_DIV_EXPR
, value
, size_int (divisor
)),
4290 size_int (divisor
));
4293 /* Walk the tree of blocks describing the binding levels within a function
4294 and warn about uninitialized variables.
4295 This is done after calling flow_analysis and before global_alloc
4296 clobbers the pseudo-regs to hard regs. */
4299 uninitialized_vars_warning (block
)
4302 register tree decl
, sub
;
4303 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
4305 if (TREE_CODE (decl
) == VAR_DECL
4306 /* These warnings are unreliable for and aggregates
4307 because assigning the fields one by one can fail to convince
4308 flow.c that the entire aggregate was initialized.
4309 Unions are troublesome because members may be shorter. */
4310 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl
))
4311 && DECL_RTL (decl
) != 0
4312 && GET_CODE (DECL_RTL (decl
)) == REG
4313 && regno_uninitialized (REGNO (DECL_RTL (decl
))))
4314 warning_with_decl (decl
,
4315 "`%s' might be used uninitialized in this function");
4316 if (TREE_CODE (decl
) == VAR_DECL
4317 && DECL_RTL (decl
) != 0
4318 && GET_CODE (DECL_RTL (decl
)) == REG
4319 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
4320 warning_with_decl (decl
,
4321 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4323 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
4324 uninitialized_vars_warning (sub
);
4327 /* Do the appropriate part of uninitialized_vars_warning
4328 but for arguments instead of local variables. */
4331 setjmp_args_warning ()
4334 for (decl
= DECL_ARGUMENTS (current_function_decl
);
4335 decl
; decl
= TREE_CHAIN (decl
))
4336 if (DECL_RTL (decl
) != 0
4337 && GET_CODE (DECL_RTL (decl
)) == REG
4338 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
4339 warning_with_decl (decl
, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4342 /* If this function call setjmp, put all vars into the stack
4343 unless they were declared `register'. */
4346 setjmp_protect (block
)
4349 register tree decl
, sub
;
4350 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
4351 if ((TREE_CODE (decl
) == VAR_DECL
4352 || TREE_CODE (decl
) == PARM_DECL
)
4353 && DECL_RTL (decl
) != 0
4354 && GET_CODE (DECL_RTL (decl
)) == REG
4355 /* If this variable came from an inline function, it must be
4356 that it's life doesn't overlap the setjmp. If there was a
4357 setjmp in the function, it would already be in memory. We
4358 must exclude such variable because their DECL_RTL might be
4359 set to strange things such as virtual_stack_vars_rtx. */
4360 && ! DECL_FROM_INLINE (decl
)
4362 #ifdef NON_SAVING_SETJMP
4363 /* If longjmp doesn't restore the registers,
4364 don't put anything in them. */
4368 ! DECL_REGISTER (decl
)))
4369 put_var_into_stack (decl
);
4370 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
4371 setjmp_protect (sub
);
4374 /* Like the previous function, but for args instead of local variables. */
4377 setjmp_protect_args ()
4379 register tree decl
, sub
;
4380 for (decl
= DECL_ARGUMENTS (current_function_decl
);
4381 decl
; decl
= TREE_CHAIN (decl
))
4382 if ((TREE_CODE (decl
) == VAR_DECL
4383 || TREE_CODE (decl
) == PARM_DECL
)
4384 && DECL_RTL (decl
) != 0
4385 && GET_CODE (DECL_RTL (decl
)) == REG
4387 /* If longjmp doesn't restore the registers,
4388 don't put anything in them. */
4389 #ifdef NON_SAVING_SETJMP
4393 ! DECL_REGISTER (decl
)))
4394 put_var_into_stack (decl
);
4397 /* Return the context-pointer register corresponding to DECL,
4398 or 0 if it does not need one. */
4401 lookup_static_chain (decl
)
4404 tree context
= decl_function_context (decl
);
4408 || (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_NO_STATIC_CHAIN (decl
)))
4411 /* We treat inline_function_decl as an alias for the current function
4412 because that is the inline function whose vars, types, etc.
4413 are being merged into the current function.
4414 See expand_inline_function. */
4415 if (context
== current_function_decl
|| context
== inline_function_decl
)
4416 return virtual_stack_vars_rtx
;
4418 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
4419 if (TREE_PURPOSE (link
) == context
)
4420 return RTL_EXPR_RTL (TREE_VALUE (link
));
4425 /* Convert a stack slot address ADDR for variable VAR
4426 (from a containing function)
4427 into an address valid in this function (using a static chain). */
4430 fix_lexical_addr (addr
, var
)
4436 tree context
= decl_function_context (var
);
4437 struct function
*fp
;
4440 /* If this is the present function, we need not do anything. */
4441 if (context
== current_function_decl
|| context
== inline_function_decl
)
4444 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
4445 if (fp
->decl
== context
)
4451 /* Decode given address as base reg plus displacement. */
4452 if (GET_CODE (addr
) == REG
)
4453 basereg
= addr
, displacement
= 0;
4454 else if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
4455 basereg
= XEXP (addr
, 0), displacement
= INTVAL (XEXP (addr
, 1));
4459 /* We accept vars reached via the containing function's
4460 incoming arg pointer and via its stack variables pointer. */
4461 if (basereg
== fp
->internal_arg_pointer
)
4463 /* If reached via arg pointer, get the arg pointer value
4464 out of that function's stack frame.
4466 There are two cases: If a separate ap is needed, allocate a
4467 slot in the outer function for it and dereference it that way.
4468 This is correct even if the real ap is actually a pseudo.
4469 Otherwise, just adjust the offset from the frame pointer to
4472 #ifdef NEED_SEPARATE_AP
4475 if (fp
->arg_pointer_save_area
== 0)
4476 fp
->arg_pointer_save_area
4477 = assign_outer_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0, fp
);
4479 addr
= fix_lexical_addr (XEXP (fp
->arg_pointer_save_area
, 0), var
);
4480 addr
= memory_address (Pmode
, addr
);
4482 base
= copy_to_reg (gen_rtx (MEM
, Pmode
, addr
));
4484 displacement
+= (FIRST_PARM_OFFSET (context
) - STARTING_FRAME_OFFSET
);
4485 base
= lookup_static_chain (var
);
4489 else if (basereg
== virtual_stack_vars_rtx
)
4491 /* This is the same code as lookup_static_chain, duplicated here to
4492 avoid an extra call to decl_function_context. */
4495 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
4496 if (TREE_PURPOSE (link
) == context
)
4498 base
= RTL_EXPR_RTL (TREE_VALUE (link
));
4506 /* Use same offset, relative to appropriate static chain or argument
4508 return plus_constant (base
, displacement
);
4511 /* Return the address of the trampoline for entering nested fn FUNCTION.
4512 If necessary, allocate a trampoline (in the stack frame)
4513 and emit rtl to initialize its contents (at entry to this function). */
4516 trampoline_address (function
)
4522 struct function
*fp
;
4525 /* Find an existing trampoline and return it. */
4526 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
4527 if (TREE_PURPOSE (link
) == function
)
4529 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0));
4531 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
4532 for (link
= fp
->trampoline_list
; link
; link
= TREE_CHAIN (link
))
4533 if (TREE_PURPOSE (link
) == function
)
4535 tramp
= fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0),
4537 return round_trampoline_addr (tramp
);
4540 /* None exists; we must make one. */
4542 /* Find the `struct function' for the function containing FUNCTION. */
4544 fn_context
= decl_function_context (function
);
4545 if (fn_context
!= current_function_decl
)
4546 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
4547 if (fp
->decl
== fn_context
)
4550 /* Allocate run-time space for this trampoline
4551 (usually in the defining function's stack frame). */
4552 #ifdef ALLOCATE_TRAMPOLINE
4553 tramp
= ALLOCATE_TRAMPOLINE (fp
);
4555 /* If rounding needed, allocate extra space
4556 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4557 #ifdef TRAMPOLINE_ALIGNMENT
4558 #define TRAMPOLINE_REAL_SIZE \
4559 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4561 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4564 tramp
= assign_outer_stack_local (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0, fp
);
4566 tramp
= assign_stack_local (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0);
4569 /* Record the trampoline for reuse and note it for later initialization
4570 by expand_function_end. */
4573 push_obstacks (fp
->function_maybepermanent_obstack
,
4574 fp
->function_maybepermanent_obstack
);
4575 rtlexp
= make_node (RTL_EXPR
);
4576 RTL_EXPR_RTL (rtlexp
) = tramp
;
4577 fp
->trampoline_list
= tree_cons (function
, rtlexp
, fp
->trampoline_list
);
4582 /* Make the RTL_EXPR node temporary, not momentary, so that the
4583 trampoline_list doesn't become garbage. */
4584 int momentary
= suspend_momentary ();
4585 rtlexp
= make_node (RTL_EXPR
);
4586 resume_momentary (momentary
);
4588 RTL_EXPR_RTL (rtlexp
) = tramp
;
4589 trampoline_list
= tree_cons (function
, rtlexp
, trampoline_list
);
4592 tramp
= fix_lexical_addr (XEXP (tramp
, 0), function
);
4593 return round_trampoline_addr (tramp
);
4596 /* Given a trampoline address,
4597 round it to multiple of TRAMPOLINE_ALIGNMENT. */
4600 round_trampoline_addr (tramp
)
4603 #ifdef TRAMPOLINE_ALIGNMENT
4604 /* Round address up to desired boundary. */
4605 rtx temp
= gen_reg_rtx (Pmode
);
4606 temp
= expand_binop (Pmode
, add_optab
, tramp
,
4607 GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1),
4608 temp
, 0, OPTAB_LIB_WIDEN
);
4609 tramp
= expand_binop (Pmode
, and_optab
, temp
,
4610 GEN_INT (- TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
),
4611 temp
, 0, OPTAB_LIB_WIDEN
);
4616 /* The functions identify_blocks and reorder_blocks provide a way to
4617 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4618 duplicate portions of the RTL code. Call identify_blocks before
4619 changing the RTL, and call reorder_blocks after. */
4621 /* Put all this function's BLOCK nodes including those that are chained
4622 onto the first block into a vector, and return it.
4623 Also store in each NOTE for the beginning or end of a block
4624 the index of that block in the vector.
4625 The arguments are BLOCK, the chain of top-level blocks of the function,
4626 and INSNS, the insn chain of the function. */
4629 identify_blocks (block
, insns
)
4637 int next_block_number
= 1;
4638 int current_block_number
= 1;
4644 n_blocks
= all_blocks (block
, 0);
4645 block_vector
= (tree
*) xmalloc (n_blocks
* sizeof (tree
));
4646 block_stack
= (int *) alloca (n_blocks
* sizeof (int));
4648 all_blocks (block
, block_vector
);
4650 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4651 if (GET_CODE (insn
) == NOTE
)
4653 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
4655 block_stack
[depth
++] = current_block_number
;
4656 current_block_number
= next_block_number
;
4657 NOTE_BLOCK_NUMBER (insn
) = next_block_number
++;
4659 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
4661 current_block_number
= block_stack
[--depth
];
4662 NOTE_BLOCK_NUMBER (insn
) = current_block_number
;
4666 if (n_blocks
!= next_block_number
)
4669 return block_vector
;
4672 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4673 and a revised instruction chain, rebuild the tree structure
4674 of BLOCK nodes to correspond to the new order of RTL.
4675 The new block tree is inserted below TOP_BLOCK.
4676 Returns the current top-level block. */
4679 reorder_blocks (block_vector
, block
, insns
)
4684 tree current_block
= block
;
4687 if (block_vector
== 0)
4690 /* Prune the old trees away, so that it doesn't get in the way. */
4691 BLOCK_SUBBLOCKS (current_block
) = 0;
4692 BLOCK_CHAIN (current_block
) = 0;
4694 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4695 if (GET_CODE (insn
) == NOTE
)
4697 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
4699 tree block
= block_vector
[NOTE_BLOCK_NUMBER (insn
)];
4700 /* If we have seen this block before, copy it. */
4701 if (TREE_ASM_WRITTEN (block
))
4702 block
= copy_node (block
);
4703 BLOCK_SUBBLOCKS (block
) = 0;
4704 TREE_ASM_WRITTEN (block
) = 1;
4705 BLOCK_SUPERCONTEXT (block
) = current_block
;
4706 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (current_block
);
4707 BLOCK_SUBBLOCKS (current_block
) = block
;
4708 current_block
= block
;
4709 NOTE_SOURCE_FILE (insn
) = 0;
4711 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
4713 BLOCK_SUBBLOCKS (current_block
)
4714 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
4715 current_block
= BLOCK_SUPERCONTEXT (current_block
);
4716 NOTE_SOURCE_FILE (insn
) = 0;
4720 BLOCK_SUBBLOCKS (current_block
)
4721 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
4722 return current_block
;
4725 /* Reverse the order of elements in the chain T of blocks,
4726 and return the new head of the chain (old last element). */
4732 register tree prev
= 0, decl
, next
;
4733 for (decl
= t
; decl
; decl
= next
)
4735 next
= BLOCK_CHAIN (decl
);
4736 BLOCK_CHAIN (decl
) = prev
;
4742 /* Count the subblocks of the list starting with BLOCK, and list them
4743 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
4747 all_blocks (block
, vector
)
4755 TREE_ASM_WRITTEN (block
) = 0;
4757 /* Record this block. */
4759 vector
[n_blocks
] = block
;
4763 /* Record the subblocks, and their subblocks... */
4764 n_blocks
+= all_blocks (BLOCK_SUBBLOCKS (block
),
4765 vector
? vector
+ n_blocks
: 0);
4766 block
= BLOCK_CHAIN (block
);
4772 /* Build bytecode call descriptor for function SUBR. */
4775 bc_build_calldesc (subr
)
4778 tree calldesc
= 0, arg
;
4781 /* Build the argument description vector in reverse order. */
4782 DECL_ARGUMENTS (subr
) = nreverse (DECL_ARGUMENTS (subr
));
4785 for (arg
= DECL_ARGUMENTS (subr
); arg
; arg
= TREE_CHAIN (arg
))
4789 calldesc
= tree_cons ((tree
) 0, size_in_bytes (TREE_TYPE (arg
)), calldesc
);
4790 calldesc
= tree_cons ((tree
) 0, bc_runtime_type_code (TREE_TYPE (arg
)), calldesc
);
4793 DECL_ARGUMENTS (subr
) = nreverse (DECL_ARGUMENTS (subr
));
4795 /* Prepend the function's return type. */
4796 calldesc
= tree_cons ((tree
) 0,
4797 size_in_bytes (TREE_TYPE (TREE_TYPE (subr
))),
4800 calldesc
= tree_cons ((tree
) 0,
4801 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr
))),
4804 /* Prepend the arg count. */
4805 calldesc
= tree_cons ((tree
) 0, build_int_2 (nargs
, 0), calldesc
);
4807 /* Output the call description vector and get its address. */
4808 calldesc
= build_nt (CONSTRUCTOR
, (tree
) 0, calldesc
);
4809 TREE_TYPE (calldesc
) = build_array_type (integer_type_node
,
4810 build_index_type (build_int_2 (nargs
* 2, 0)));
4812 return output_constant_def (calldesc
);
4816 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4817 and initialize static variables for generating RTL for the statements
4821 init_function_start (subr
, filename
, line
)
4828 if (output_bytecode
)
4830 this_function_decl
= subr
;
4831 this_function_calldesc
= bc_build_calldesc (subr
);
4832 local_vars_size
= 0;
4834 max_stack_depth
= 0;
4835 stmt_expr_depth
= 0;
4839 init_stmt_for_function ();
4841 cse_not_expected
= ! optimize
;
4843 /* Caller save not needed yet. */
4844 caller_save_needed
= 0;
4846 /* No stack slots have been made yet. */
4847 stack_slot_list
= 0;
4849 /* There is no stack slot for handling nonlocal gotos. */
4850 nonlocal_goto_handler_slot
= 0;
4851 nonlocal_goto_stack_level
= 0;
4853 /* No labels have been declared for nonlocal use. */
4854 nonlocal_labels
= 0;
4856 /* No function calls so far in this function. */
4857 function_call_count
= 0;
4859 /* No parm regs have been allocated.
4860 (This is important for output_inline_function.) */
4861 max_parm_reg
= LAST_VIRTUAL_REGISTER
+ 1;
4863 /* Initialize the RTL mechanism. */
4866 /* Initialize the queue of pending postincrement and postdecrements,
4867 and some other info in expr.c. */
4870 /* We haven't done register allocation yet. */
4873 init_const_rtx_hash_table ();
4875 current_function_name
= (*decl_printable_name
) (subr
, &junk
);
4877 /* Nonzero if this is a nested function that uses a static chain. */
4879 current_function_needs_context
4880 = (decl_function_context (current_function_decl
) != 0
4881 && ! DECL_NO_STATIC_CHAIN (current_function_decl
));
4883 /* Set if a call to setjmp is seen. */
4884 current_function_calls_setjmp
= 0;
4886 /* Set if a call to longjmp is seen. */
4887 current_function_calls_longjmp
= 0;
4889 current_function_calls_alloca
= 0;
4890 current_function_has_nonlocal_label
= 0;
4891 current_function_has_nonlocal_goto
= 0;
4892 current_function_contains_functions
= 0;
4894 current_function_returns_pcc_struct
= 0;
4895 current_function_returns_struct
= 0;
4896 current_function_epilogue_delay_list
= 0;
4897 current_function_uses_const_pool
= 0;
4898 current_function_uses_pic_offset_table
= 0;
4900 /* We have not yet needed to make a label to jump to for tail-recursion. */
4901 tail_recursion_label
= 0;
4903 /* We haven't had a need to make a save area for ap yet. */
4905 arg_pointer_save_area
= 0;
4907 /* No stack slots allocated yet. */
4910 /* No SAVE_EXPRs in this function yet. */
4913 /* No RTL_EXPRs in this function yet. */
4916 /* Set up to allocate temporaries. */
4919 /* Within function body, compute a type's size as soon it is laid out. */
4920 immediate_size_expand
++;
4922 /* We haven't made any trampolines for this function yet. */
4923 trampoline_list
= 0;
4925 init_pending_stack_adjust ();
4926 inhibit_defer_pop
= 0;
4928 current_function_outgoing_args_size
= 0;
4930 /* Prevent ever trying to delete the first instruction of a function.
4931 Also tell final how to output a linenum before the function prologue. */
4932 emit_line_note (filename
, line
);
4934 /* Make sure first insn is a note even if we don't want linenums.
4935 This makes sure the first insn will never be deleted.
4936 Also, final expects a note to appear there. */
4937 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
4939 /* Set flags used by final.c. */
4940 if (aggregate_value_p (DECL_RESULT (subr
)))
4942 #ifdef PCC_STATIC_STRUCT_RETURN
4943 current_function_returns_pcc_struct
= 1;
4945 current_function_returns_struct
= 1;
4948 /* Warn if this value is an aggregate type,
4949 regardless of which calling convention we are using for it. */
4950 if (warn_aggregate_return
4951 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
))))
4952 warning ("function returns an aggregate");
4954 current_function_returns_pointer
4955 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr
)));
4957 /* Indicate that we need to distinguish between the return value of the
4958 present function and the return value of a function being called. */
4959 rtx_equal_function_value_matters
= 1;
4961 /* Indicate that we have not instantiated virtual registers yet. */
4962 virtuals_instantiated
= 0;
4964 /* Indicate we have no need of a frame pointer yet. */
4965 frame_pointer_needed
= 0;
4967 /* By default assume not varargs or stdarg. */
4968 current_function_varargs
= 0;
4969 current_function_stdarg
= 0;
4972 /* Indicate that the current function uses extra args
4973 not explicitly mentioned in the argument list in any fashion. */
4978 current_function_varargs
= 1;
4981 /* Expand a call to __main at the beginning of a possible main function. */
4983 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
4984 #undef HAS_INIT_SECTION
4985 #define HAS_INIT_SECTION
4989 expand_main_function ()
4991 if (!output_bytecode
)
4993 /* The zero below avoids a possible parse error */
4995 #if !defined (HAS_INIT_SECTION)
4996 emit_library_call (gen_rtx (SYMBOL_REF
, Pmode
, NAME__MAIN
), 0,
4998 #endif /* not HAS_INIT_SECTION */
5002 extern struct obstack permanent_obstack
;
5004 /* Expand start of bytecode function. See comment at
5005 expand_function_start below for details. */
5008 bc_expand_function_start (subr
, parms_have_cleanups
)
5010 int parms_have_cleanups
;
5012 char label
[20], *name
;
5017 if (TREE_PUBLIC (subr
))
5018 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr
)));
5020 #ifdef DEBUG_PRINT_CODE
5021 fprintf (stderr
, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr
)));
5024 for (argsz
= 0, thisarg
= DECL_ARGUMENTS (subr
); thisarg
; thisarg
= TREE_CHAIN (thisarg
))
5026 if (DECL_RTL (thisarg
))
5027 abort (); /* Should be NULL here I think. */
5028 else if (TREE_CONSTANT (DECL_SIZE (thisarg
)))
5030 DECL_RTL (thisarg
) = bc_gen_rtx ((char *) 0, argsz
, (struct bc_label
*) 0);
5031 argsz
+= TREE_INT_CST_LOW (DECL_SIZE (thisarg
));
5035 /* Variable-sized objects are pointers to their storage. */
5036 DECL_RTL (thisarg
) = bc_gen_rtx ((char *) 0, argsz
, (struct bc_label
*) 0);
5037 argsz
+= POINTER_SIZE
;
5041 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr
))));
5043 ASM_GENERATE_INTERNAL_LABEL (label
, "LX", nlab
);
5046 name
= (char *) obstack_copy0 (&permanent_obstack
, label
, strlen (label
));
5047 this_function_callinfo
= bc_gen_rtx (name
, 0, (struct bc_label
*) 0);
5048 this_function_bytecode
=
5049 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo
));
5053 /* Expand end of bytecode function. See details the comment of
5054 expand_function_end(), below. */
5057 bc_expand_function_end ()
5061 expand_null_return ();
5063 /* Emit any fixup code. This must be done before the call to
5064 to BC_END_FUNCTION (), since that will cause the bytecode
5065 segment to be finished off and closed. */
5067 expand_fixups (NULL_RTX
);
5069 ptrconsts
= bc_end_function ();
5071 bc_align_const (2 /* INT_ALIGN */);
5073 /* If this changes also make sure to change bc-interp.h! */
5075 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo
));
5076 bc_emit_const ((char *) &max_stack_depth
, sizeof max_stack_depth
);
5077 bc_emit_const ((char *) &local_vars_size
, sizeof local_vars_size
);
5078 bc_emit_const_labelref (this_function_bytecode
, 0);
5079 bc_emit_const_labelref (ptrconsts
, 0);
5080 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc
), 0);
5084 /* Start the RTL for a new function, and set variables used for
5086 SUBR is the FUNCTION_DECL node.
5087 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5088 the function's parameters, which must be run at any return statement. */
5091 expand_function_start (subr
, parms_have_cleanups
)
5093 int parms_have_cleanups
;
5099 if (output_bytecode
)
5101 bc_expand_function_start (subr
, parms_have_cleanups
);
5105 /* Make sure volatile mem refs aren't considered
5106 valid operands of arithmetic insns. */
5107 init_recog_no_volatile ();
5109 /* If function gets a static chain arg, store it in the stack frame.
5110 Do this first, so it gets the first stack slot offset. */
5111 if (current_function_needs_context
)
5113 last_ptr
= assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
5115 #ifdef SMALL_REGISTER_CLASSES
5116 /* Delay copying static chain if it is not a register to avoid
5117 conflicts with regs used for parameters. */
5118 if (GET_CODE (static_chain_incoming_rtx
) == REG
)
5120 emit_move_insn (last_ptr
, static_chain_incoming_rtx
);
5123 /* If the parameters of this function need cleaning up, get a label
5124 for the beginning of the code which executes those cleanups. This must
5125 be done before doing anything with return_label. */
5126 if (parms_have_cleanups
)
5127 cleanup_label
= gen_label_rtx ();
5131 /* Make the label for return statements to jump to, if this machine
5132 does not have a one-instruction return and uses an epilogue,
5133 or if it returns a structure, or if it has parm cleanups. */
5135 if (cleanup_label
== 0 && HAVE_return
5136 && ! current_function_returns_pcc_struct
5137 && ! (current_function_returns_struct
&& ! optimize
))
5140 return_label
= gen_label_rtx ();
5142 return_label
= gen_label_rtx ();
5145 /* Initialize rtx used to return the value. */
5146 /* Do this before assign_parms so that we copy the struct value address
5147 before any library calls that assign parms might generate. */
5149 /* Decide whether to return the value in memory or in a register. */
5150 if (aggregate_value_p (DECL_RESULT (subr
)))
5152 /* Returning something that won't go in a register. */
5153 register rtx value_address
= 0;
5155 #ifdef PCC_STATIC_STRUCT_RETURN
5156 if (current_function_returns_pcc_struct
)
5158 int size
= int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr
)));
5159 value_address
= assemble_static_space (size
);
5164 /* Expect to be passed the address of a place to store the value.
5165 If it is passed as an argument, assign_parms will take care of
5167 if (struct_value_incoming_rtx
)
5169 value_address
= gen_reg_rtx (Pmode
);
5170 emit_move_insn (value_address
, struct_value_incoming_rtx
);
5175 DECL_RTL (DECL_RESULT (subr
))
5176 = gen_rtx (MEM
, DECL_MODE (DECL_RESULT (subr
)), value_address
);
5177 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr
)))
5178 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
)));
5181 else if (DECL_MODE (DECL_RESULT (subr
)) == VOIDmode
)
5182 /* If return mode is void, this decl rtl should not be used. */
5183 DECL_RTL (DECL_RESULT (subr
)) = 0;
5184 else if (parms_have_cleanups
)
5186 /* If function will end with cleanup code for parms,
5187 compute the return values into a pseudo reg,
5188 which we will copy into the true return register
5189 after the cleanups are done. */
5191 enum machine_mode mode
= DECL_MODE (DECL_RESULT (subr
));
5193 #ifdef PROMOTE_FUNCTION_RETURN
5194 tree type
= TREE_TYPE (DECL_RESULT (subr
));
5195 int unsignedp
= TREE_UNSIGNED (type
);
5197 mode
= promote_mode (type
, mode
, &unsignedp
, 1);
5200 DECL_RTL (DECL_RESULT (subr
)) = gen_reg_rtx (mode
);
5203 /* Scalar, returned in a register. */
5205 #ifdef FUNCTION_OUTGOING_VALUE
5206 DECL_RTL (DECL_RESULT (subr
))
5207 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
5209 DECL_RTL (DECL_RESULT (subr
))
5210 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
5213 /* Mark this reg as the function's return value. */
5214 if (GET_CODE (DECL_RTL (DECL_RESULT (subr
))) == REG
)
5216 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr
))) = 1;
5217 /* Needed because we may need to move this to memory
5218 in case it's a named return value whose address is taken. */
5219 DECL_REGISTER (DECL_RESULT (subr
)) = 1;
5223 /* Initialize rtx for parameters and local variables.
5224 In some cases this requires emitting insns. */
5226 assign_parms (subr
, 0);
5228 #ifdef SMALL_REGISTER_CLASSES
5229 /* Copy the static chain now if it wasn't a register. The delay is to
5230 avoid conflicts with the parameter passing registers. */
5232 if (current_function_needs_context
)
5233 if (GET_CODE (static_chain_incoming_rtx
) != REG
)
5234 emit_move_insn (last_ptr
, static_chain_incoming_rtx
);
5237 /* The following was moved from init_function_start.
5238 The move is supposed to make sdb output more accurate. */
5239 /* Indicate the beginning of the function body,
5240 as opposed to parm setup. */
5241 emit_note (NULL_PTR
, NOTE_INSN_FUNCTION_BEG
);
5243 /* If doing stupid allocation, mark parms as born here. */
5245 if (GET_CODE (get_last_insn ()) != NOTE
)
5246 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5247 parm_birth_insn
= get_last_insn ();
5251 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_parm_reg
; i
++)
5252 use_variable (regno_reg_rtx
[i
]);
5254 if (current_function_internal_arg_pointer
!= virtual_incoming_args_rtx
)
5255 use_variable (current_function_internal_arg_pointer
);
5258 context_display
= 0;
5259 if (current_function_needs_context
)
5261 /* Fetch static chain values for containing functions. */
5262 tem
= decl_function_context (current_function_decl
);
5263 /* If not doing stupid register allocation copy the static chain
5264 pointer into a pseudo. If we have small register classes, copy
5265 the value from memory if static_chain_incoming_rtx is a REG. If
5266 we do stupid register allocation, we use the stack address
5268 if (tem
&& ! obey_regdecls
)
5270 #ifdef SMALL_REGISTER_CLASSES
5271 /* If the static chain originally came in a register, put it back
5272 there, then move it out in the next insn. The reason for
5273 this peculiar code is to satisfy function integration. */
5274 if (GET_CODE (static_chain_incoming_rtx
) == REG
)
5275 emit_move_insn (static_chain_incoming_rtx
, last_ptr
);
5278 last_ptr
= copy_to_reg (static_chain_incoming_rtx
);
5283 tree rtlexp
= make_node (RTL_EXPR
);
5285 RTL_EXPR_RTL (rtlexp
) = last_ptr
;
5286 context_display
= tree_cons (tem
, rtlexp
, context_display
);
5287 tem
= decl_function_context (tem
);
5290 /* Chain thru stack frames, assuming pointer to next lexical frame
5291 is found at the place we always store it. */
5292 #ifdef FRAME_GROWS_DOWNWARD
5293 last_ptr
= plus_constant (last_ptr
, - GET_MODE_SIZE (Pmode
));
5295 last_ptr
= copy_to_reg (gen_rtx (MEM
, Pmode
,
5296 memory_address (Pmode
, last_ptr
)));
5298 /* If we are not optimizing, ensure that we know that this
5299 piece of context is live over the entire function. */
5301 save_expr_regs
= gen_rtx (EXPR_LIST
, VOIDmode
, last_ptr
,
5306 /* After the display initializations is where the tail-recursion label
5307 should go, if we end up needing one. Ensure we have a NOTE here
5308 since some things (like trampolines) get placed before this. */
5309 tail_recursion_reentry
= emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5311 /* Evaluate now the sizes of any types declared among the arguments. */
5312 for (tem
= nreverse (get_pending_sizes ()); tem
; tem
= TREE_CHAIN (tem
))
5313 expand_expr (TREE_VALUE (tem
), const0_rtx
, VOIDmode
, 0);
5315 /* Make sure there is a line number after the function entry setup code. */
5316 force_next_line_note ();
5319 /* Generate RTL for the end of the current function.
5320 FILENAME and LINE are the current position in the source file.
5322 It is up to language-specific callers to do cleanups for parameters--
5323 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5326 expand_function_end (filename
, line
, end_bindings
)
5334 #ifdef TRAMPOLINE_TEMPLATE
5335 static rtx initial_trampoline
;
5338 if (output_bytecode
)
5340 bc_expand_function_end ();
5344 #ifdef NON_SAVING_SETJMP
5345 /* Don't put any variables in registers if we call setjmp
5346 on a machine that fails to restore the registers. */
5347 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
5349 if (DECL_INITIAL (current_function_decl
) != error_mark_node
)
5350 setjmp_protect (DECL_INITIAL (current_function_decl
));
5352 setjmp_protect_args ();
5356 /* Save the argument pointer if a save area was made for it. */
5357 if (arg_pointer_save_area
)
5359 rtx x
= gen_move_insn (arg_pointer_save_area
, virtual_incoming_args_rtx
);
5360 emit_insn_before (x
, tail_recursion_reentry
);
5363 /* Initialize any trampolines required by this function. */
5364 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
5366 tree function
= TREE_PURPOSE (link
);
5367 rtx context
= lookup_static_chain (function
);
5368 rtx tramp
= RTL_EXPR_RTL (TREE_VALUE (link
));
5372 #ifdef TRAMPOLINE_TEMPLATE
5373 /* First make sure this compilation has a template for
5374 initializing trampolines. */
5375 if (initial_trampoline
== 0)
5377 end_temporary_allocation ();
5379 = gen_rtx (MEM
, BLKmode
, assemble_trampoline_template ());
5380 resume_temporary_allocation ();
5384 /* Generate insns to initialize the trampoline. */
5386 tramp
= round_trampoline_addr (XEXP (tramp
, 0));
5387 #ifdef TRAMPOLINE_TEMPLATE
5388 blktramp
= change_address (initial_trampoline
, BLKmode
, tramp
);
5389 emit_block_move (blktramp
, initial_trampoline
,
5390 GEN_INT (TRAMPOLINE_SIZE
),
5391 FUNCTION_BOUNDARY
/ BITS_PER_UNIT
);
5393 INITIALIZE_TRAMPOLINE (tramp
, XEXP (DECL_RTL (function
), 0), context
);
5397 /* Put those insns at entry to the containing function (this one). */
5398 emit_insns_before (seq
, tail_recursion_reentry
);
5401 /* Warn about unused parms if extra warnings were specified. */
5402 if (warn_unused
&& extra_warnings
)
5406 for (decl
= DECL_ARGUMENTS (current_function_decl
);
5407 decl
; decl
= TREE_CHAIN (decl
))
5408 if (! TREE_USED (decl
) && TREE_CODE (decl
) == PARM_DECL
5409 && DECL_NAME (decl
) && ! DECL_ARTIFICIAL (decl
))
5410 warning_with_decl (decl
, "unused parameter `%s'");
5413 /* Delete handlers for nonlocal gotos if nothing uses them. */
5414 if (nonlocal_goto_handler_slot
!= 0 && !current_function_has_nonlocal_label
)
5417 /* End any sequences that failed to be closed due to syntax errors. */
5418 while (in_sequence_p ())
5421 /* Outside function body, can't compute type's actual size
5422 until next function's body starts. */
5423 immediate_size_expand
--;
5425 /* If doing stupid register allocation,
5426 mark register parms as dying here. */
5431 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_parm_reg
; i
++)
5432 use_variable (regno_reg_rtx
[i
]);
5434 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5436 for (tem
= save_expr_regs
; tem
; tem
= XEXP (tem
, 1))
5438 use_variable (XEXP (tem
, 0));
5439 use_variable_after (XEXP (tem
, 0), parm_birth_insn
);
5442 if (current_function_internal_arg_pointer
!= virtual_incoming_args_rtx
)
5443 use_variable (current_function_internal_arg_pointer
);
5446 clear_pending_stack_adjust ();
5447 do_pending_stack_adjust ();
5449 /* Mark the end of the function body.
5450 If control reaches this insn, the function can drop through
5451 without returning a value. */
5452 emit_note (NULL_PTR
, NOTE_INSN_FUNCTION_END
);
5454 /* Output a linenumber for the end of the function.
5455 SDB depends on this. */
5456 emit_line_note_force (filename
, line
);
5458 /* Output the label for the actual return from the function,
5459 if one is expected. This happens either because a function epilogue
5460 is used instead of a return instruction, or because a return was done
5461 with a goto in order to run local cleanups, or because of pcc-style
5462 structure returning. */
5465 emit_label (return_label
);
5467 /* C++ uses this. */
5469 expand_end_bindings (0, 0, 0);
5471 /* If we had calls to alloca, and this machine needs
5472 an accurate stack pointer to exit the function,
5473 insert some code to save and restore the stack pointer. */
5474 #ifdef EXIT_IGNORE_STACK
5475 if (! EXIT_IGNORE_STACK
)
5477 if (current_function_calls_alloca
)
5481 emit_stack_save (SAVE_FUNCTION
, &tem
, parm_birth_insn
);
5482 emit_stack_restore (SAVE_FUNCTION
, tem
, NULL_RTX
);
5485 /* If scalar return value was computed in a pseudo-reg,
5486 copy that to the hard return register. */
5487 if (DECL_RTL (DECL_RESULT (current_function_decl
)) != 0
5488 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl
))) == REG
5489 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl
)))
5490 >= FIRST_PSEUDO_REGISTER
))
5492 rtx real_decl_result
;
5494 #ifdef FUNCTION_OUTGOING_VALUE
5496 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
5497 current_function_decl
);
5500 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
5501 current_function_decl
);
5503 REG_FUNCTION_VALUE_P (real_decl_result
) = 1;
5504 emit_move_insn (real_decl_result
,
5505 DECL_RTL (DECL_RESULT (current_function_decl
)));
5506 emit_insn (gen_rtx (USE
, VOIDmode
, real_decl_result
));
5509 /* If returning a structure, arrange to return the address of the value
5510 in a place where debuggers expect to find it.
5512 If returning a structure PCC style,
5513 the caller also depends on this value.
5514 And current_function_returns_pcc_struct is not necessarily set. */
5515 if (current_function_returns_struct
5516 || current_function_returns_pcc_struct
)
5518 rtx value_address
= XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
5519 tree type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
5520 #ifdef FUNCTION_OUTGOING_VALUE
5522 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type
),
5523 current_function_decl
);
5526 = FUNCTION_VALUE (build_pointer_type (type
),
5527 current_function_decl
);
5530 /* Mark this as a function return value so integrate will delete the
5531 assignment and USE below when inlining this function. */
5532 REG_FUNCTION_VALUE_P (outgoing
) = 1;
5534 emit_move_insn (outgoing
, value_address
);
5535 use_variable (outgoing
);
5538 /* Output a return insn if we are using one.
5539 Otherwise, let the rtl chain end here, to drop through
5540 into the epilogue. */
5545 emit_jump_insn (gen_return ());
5550 /* Fix up any gotos that jumped out to the outermost
5551 binding level of the function.
5552 Must follow emitting RETURN_LABEL. */
5554 /* If you have any cleanups to do at this point,
5555 and they need to create temporary variables,
5556 then you will lose. */
5557 expand_fixups (get_insns ());
/* NOTE(review): this whole chunk is extraction-mangled -- statements are
   split across lines and stale source line numbers (e.g. "5560") are fused
   into the text.  Tokens below are preserved byte-for-byte.  */
5560 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
/* Both arrays are zero-terminated (the consumers `contains' and
   `reposition_prologue_and_epilogue_notes' loop with `vec[j]' / `prologue[len]'
   as the termination test).  They are filled in by record_insns from
   thread_prologue_and_epilogue_insns.  */
5562 static int *prologue
;
5563 static int *epilogue
;
5565 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5566 or a single insn). */
/* Builds a zero-terminated int array of INSN_UIDs on the obstack (oballoc);
   callers store the result in the file-scope `prologue'/`epilogue' maps.
   NOTE(review): the extraction dropped lines here (the fused numbering jumps
   5577 -> 5580 and 5585 -> 5591), so the declaration of `vec', the loop that
   copies each SEQUENCE element's UID, the terminating-zero stores, and the
   return statement are missing from this view -- tokens kept byte-identical.  */
5569 record_insns (insns
)
/* SEQUENCE case: allocate one slot per element plus a terminator slot.  */
5574 if (GET_CODE (insns
) == SEQUENCE
)
5576 int len
= XVECLEN (insns
, 0);
5577 vec
= (int *) oballoc ((len
+ 1) * sizeof (int));
5580 vec
[len
] = INSN_UID (XVECEXP (insns
, 0, len
));
/* Single-insn case: one UID plus a terminator slot.  */
5584 vec
= (int *) oballoc (2 * sizeof (int));
5585 vec
[0] = INSN_UID (insns
);
5591 /* Determine how many INSN_UIDs in VEC are part of INSN. */
/* VEC is a zero-terminated UID array produced by record_insns.  The caller
   (reposition_prologue_and_epilogue_notes) subtracts the result from a
   remaining-count, so this returns how many matches were found.
   NOTE(review): extraction dropped lines (fused numbering jumps 5606 -> 5612),
   so the declarations of the loop counters and the return statements are not
   visible here -- tokens kept byte-identical.  */
5594 contains (insn
, vec
)
/* An INSN whose PATTERN is a SEQUENCE (a filled delay-slot group) is
   checked element by element against VEC.  */
5600 if (GET_CODE (insn
) == INSN
5601 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
5604 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
5605 for (j
= 0; vec
[j
]; j
++)
5606 if (INSN_UID (XVECEXP (PATTERN (insn
), 0, i
)) == vec
[j
])
/* Ordinary insn: a single UID lookup in VEC.  */
5612 for (j
= 0; vec
[j
]; j
++)
5613 if (INSN_UID (insn
) == vec
[j
])
5619 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5620 this into place with notes indicating where the prologue ends and where
5621 the epilogue begins. Update the basic block information when possible. */
/* NOTE(review): extraction-mangled -- statements split across lines, stale
   source numbers fused in, and several lines dropped (numbering jumps such
   as 5645 -> 5651, 5658 -> 5664, 5681 -> 5688).  Braces, the loop header
   that walks the trailing USE insns, and the declarations of `tail',
   `first_use'/`last_use' and `tem' are missing from this view.  Tokens
   below are kept byte-identical.  */
5624 thread_prologue_and_epilogue_insns (f
)
5627 #ifdef HAVE_prologue
5630 rtx head
, seq
, insn
;
5632 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5633 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5634 emit_note_after (NOTE_INSN_PROLOGUE_END
, f
);
/* Generate the machine's prologue and splice it in right after F.  */
5635 seq
= gen_prologue ();
5636 head
= emit_insn_after (seq
, f
);
5638 /* Include the new prologue insns in the first block. Ignore them
5639 if they form a basic block unto themselves. */
5640 if (basic_block_head
&& n_basic_blocks
5641 && GET_CODE (basic_block_head
[0]) != CODE_LABEL
)
5642 basic_block_head
[0] = NEXT_INSN (f
);
5644 /* Retain a map of the prologue insns. */
5645 prologue
= record_insns (GET_CODE (seq
) == SEQUENCE
? seq
: head
);
5651 #ifdef HAVE_epilogue
5654 rtx insn
= get_last_insn ();
5655 rtx prev
= prev_nonnote_insn (insn
);
5657 /* If we end with a BARRIER, we don't need an epilogue. */
5658 if (! (prev
&& GET_CODE (prev
) == BARRIER
))
5664 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5665 epilogue insns, the USE insns at the end of a function,
5666 the jump insn that returns, and then a BARRIER. */
5668 /* Move the USE insns at the end of a function onto a list. */
/* NOTE(review): the loop header controlling this walk was dropped by the
   extraction; what remains is its continuation condition and body.  Each
   USE insn is unlinked from the insn chain and prepended to a list.  */
5670 && GET_CODE (prev
) == INSN
5671 && GET_CODE (PATTERN (prev
)) == USE
)
5674 prev
= prev_nonnote_insn (prev
);
/* Unlink TEM from the doubly-linked insn chain.  */
5676 NEXT_INSN (PREV_INSN (tem
)) = NEXT_INSN (tem
);
5677 PREV_INSN (NEXT_INSN (tem
)) = PREV_INSN (tem
);
/* Prepend TEM to the saved USE list headed by first_use.  */
5680 NEXT_INSN (tem
) = first_use
;
5681 PREV_INSN (first_use
) = tem
;
5688 emit_barrier_after (insn
);
/* Generate the machine's epilogue and splice it in after INSN.  */
5690 seq
= gen_epilogue ();
5691 tail
= emit_jump_insn_after (seq
, insn
);
5693 /* Insert the USE insns immediately before the return insn, which
5694 must be the first instruction before the final barrier. */
5697 tem
= prev_nonnote_insn (get_last_insn ());
/* Splice the saved USE list (first_use..last_use) back into the chain
   just before TEM.  */
5698 NEXT_INSN (PREV_INSN (tem
)) = first_use
;
5699 PREV_INSN (first_use
) = PREV_INSN (tem
);
5700 PREV_INSN (tem
) = last_use
;
5701 NEXT_INSN (last_use
) = tem
;
5704 emit_note_after (NOTE_INSN_EPILOGUE_BEG
, insn
);
5706 /* Include the new epilogue insns in the last block. Ignore
5707 them if they form a basic block unto themselves. */
5708 if (basic_block_end
&& n_basic_blocks
5709 && GET_CODE (basic_block_end
[n_basic_blocks
- 1]) != JUMP_INSN
)
5710 basic_block_end
[n_basic_blocks
- 1] = tail
;
5712 /* Retain a map of the epilogue insns. */
5713 epilogue
= record_insns (GET_CODE (seq
) == SEQUENCE
? seq
: tail
);
5721 /* Reposition the prologue-end and epilogue-begin notes after instruction
5722 scheduling and delayed branch scheduling. */
/* NOTE(review): extraction-mangled like the rest of this chunk; dropped
   lines (numbering jumps e.g. 5729 -> 5737, 5768 -> 5775) hide the
   declarations of `len', `next' and `prev', the braces, and the breaks.
   The visible comments at 5740/5778 are also truncated mid-sentence.
   Tokens below are kept byte-identical.  */
5725 reposition_prologue_and_epilogue_notes (f
)
5728 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5729 /* Reposition the prologue and epilogue notes. */
5737 register rtx insn
, note
= 0;
5739 /* Scan from the beginning until we reach the last prologue insn.
5740 We apparently can't depend on basic_block_{head,end} after
/* Count how many prologue UIDs remain to be seen; the array is
   zero-terminated (see record_insns).  */
5742 for (len
= 0; prologue
[len
]; len
++)
/* Walk forward from F, decrementing LEN by the number of prologue UIDs
   each insn accounts for (contains handles delay-slot SEQUENCEs).  */
5744 for (insn
= f
; len
&& insn
; insn
= NEXT_INSN (insn
))
5746 if (GET_CODE (insn
) == NOTE
)
/* Remember the prologue-end note if we pass it during the scan.  */
5748 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_PROLOGUE_END
)
5751 else if ((len
-= contains (insn
, prologue
)) == 0)
5753 /* Find the prologue-end note if we haven't already, and
5754 move it to just after the last prologue insn. */
5757 for (note
= insn
; note
= NEXT_INSN (note
);)
5758 if (GET_CODE (note
) == NOTE
5759 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_PROLOGUE_END
)
/* Unlink the note from its current position...  */
5762 next
= NEXT_INSN (note
);
5763 prev
= PREV_INSN (note
);
5765 NEXT_INSN (prev
) = next
;
5767 PREV_INSN (next
) = prev
;
/* ...and relink it immediately after the last prologue insn.  */
5768 add_insn_after (note
, insn
);
5775 register rtx insn
, note
= 0;
5777 /* Scan from the end until we reach the first epilogue insn.
5778 We apparently can't depend on basic_block_{head,end} after
/* Mirror of the prologue pass: count the epilogue UIDs, then walk
   backward from the last insn.  */
5780 for (len
= 0; epilogue
[len
]; len
++)
5782 for (insn
= get_last_insn (); len
&& insn
; insn
= PREV_INSN (insn
))
5784 if (GET_CODE (insn
) == NOTE
)
5786 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EPILOGUE_BEG
)
5789 else if ((len
-= contains (insn
, epilogue
)) == 0)
5791 /* Find the epilogue-begin note if we haven't already, and
5792 move it to just before the first epilogue insn. */
5795 for (note
= insn
; note
= PREV_INSN (note
);)
5796 if (GET_CODE (note
) == NOTE
5797 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_EPILOGUE_BEG
)
/* Unlink the note...  */
5800 next
= NEXT_INSN (note
);
5801 prev
= PREV_INSN (note
);
5803 NEXT_INSN (prev
) = next
;
5805 PREV_INSN (next
) = prev
;
/* ...and relink it immediately before the first epilogue insn.  */
5806 add_insn_after (note
, PREV_INSN (insn
));
5811 #endif /* HAVE_prologue or HAVE_epilogue */