1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-95, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
47 #include "insn-flags.h"
49 #include "insn-codes.h"
51 #include "hard-reg-set.h"
52 #include "insn-config.h"
55 #include "basic-block.h"
60 /* Some systems use __main in a way incompatible with its use in gcc, in these
61 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
62 give the same symbol without quotes for an alternative entry point. You
63 must define both, or neither. */
65 #define NAME__MAIN "__main"
66 #define SYMBOL__MAIN __main
69 /* Round a value to the lowest integer less than it that is a multiple of
70 the required alignment. Avoid using division in case the value is
71 negative. Assume the alignment is a power of two. */
72 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
74 /* Similar, but round to the next highest integer that meets the
   required alignment.  */
76 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
78 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
79 during rtl generation. If they are different register numbers, this is
80 always true. It may also be true if
81 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
82 generation. See fix_lexical_addr for details. */
84 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
85 #define NEED_SEPARATE_AP
88 /* Number of bytes of args popped by function being compiled on its return.
89 Zero if no bytes are to be popped.
90 May affect compilation of return insn or of function epilogue. */
92 int current_function_pops_args
;
94 /* Nonzero if function being compiled needs to be given an address
95 where the value should be stored. */
97 int current_function_returns_struct
;
99 /* Nonzero if function being compiled needs to
100 return the address of where it has put a structure value. */
102 int current_function_returns_pcc_struct
;
104 /* Nonzero if function being compiled needs to be passed a static chain. */
106 int current_function_needs_context
;
108 /* Nonzero if function being compiled can call setjmp. */
110 int current_function_calls_setjmp
;
112 /* Nonzero if function being compiled can call longjmp. */
114 int current_function_calls_longjmp
;
116 /* Nonzero if function being compiled receives nonlocal gotos
117 from nested functions. */
119 int current_function_has_nonlocal_label
;
121 /* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */
124 int current_function_has_nonlocal_goto
;
126 /* Nonzero if function being compiled contains nested functions. */
128 int current_function_contains_functions
;
130 /* Nonzero if function being compiled can call alloca,
131 either as a subroutine or builtin. */
133 int current_function_calls_alloca
;
135 /* Nonzero if the current function returns a pointer type */
137 int current_function_returns_pointer
;
139 /* If some insns can be deferred to the delay slots of the epilogue, the
140 delay list for them is recorded here. */
142 rtx current_function_epilogue_delay_list
;
144 /* If function's args have a fixed size, this is that size, in bytes.
146 May affect compilation of return insn or of function epilogue. */
148 int current_function_args_size
;
150 /* # bytes the prologue should push and pretend that the caller pushed them.
151 The prologue must do this, but only if parms can be passed in registers. */
153 int current_function_pretend_args_size
;
155 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
156 defined, the needed space is pushed by the prologue. */
158 int current_function_outgoing_args_size
;
160 /* This is the offset from the arg pointer to the place where the first
161 anonymous arg can be found, if there is one. */
163 rtx current_function_arg_offset_rtx
;
165 /* Nonzero if current function uses varargs.h or equivalent.
166 Zero for functions that use stdarg.h. */
168 int current_function_varargs
;
170 /* Nonzero if current function uses stdarg.h or equivalent.
171 Zero for functions that use varargs.h. */
173 int current_function_stdarg
;
175 /* Quantities of various kinds of registers
176 used for the current function's args. */
178 CUMULATIVE_ARGS current_function_args_info
;
180 /* Name of function now being compiled. */
182 char *current_function_name
;
184 /* If non-zero, an RTL expression for that location at which the current
185 function returns its result. Always equal to
186 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
187 independently of the tree structures. */
189 rtx current_function_return_rtx
;
191 /* Nonzero if the current function uses the constant pool. */
193 int current_function_uses_const_pool
;
195 /* Nonzero if the current function uses pic_offset_table_rtx. */
196 int current_function_uses_pic_offset_table
;
198 /* The arg pointer hard register, or the pseudo into which it was copied. */
199 rtx current_function_internal_arg_pointer
;
201 /* The FUNCTION_DECL for an inline function currently being expanded. */
202 tree inline_function_decl
;
204 /* Number of function calls seen so far in current function. */
206 int function_call_count
;
208 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
209 (labels to which there can be nonlocal gotos from nested functions)
   in the current function.  */
212 tree nonlocal_labels
;
214 /* RTX for stack slot that holds the current handler for nonlocal gotos.
215 Zero when function does not have nonlocal labels. */
217 rtx nonlocal_goto_handler_slot
;
219 /* RTX for stack slot that holds the stack pointer value to restore
221 Zero when function does not have nonlocal labels. */
223 rtx nonlocal_goto_stack_level
;
225 /* Label that will go on parm cleanup code, if any.
226 Jumping to this label runs cleanup code for parameters, if
227 such code must be run. Following this code is the logical return label. */
231 /* Label that will go on function epilogue.
232 Jumping to this label serves as a "return" instruction
233 on machines which require execution of the epilogue on all returns. */
237 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
238 So we can mark them all live at the end of the function, if nonopt. */
241 /* List (chain of EXPR_LISTs) of all stack slots in this function.
242 Made for the sake of unshare_all_rtl. */
245 /* Chain of all RTL_EXPRs that have insns in them. */
248 /* Label to jump back to for tail recursion, or 0 if we have
249 not yet needed one for this function. */
250 rtx tail_recursion_label
;
252 /* Place after which to insert the tail_recursion_label if we need one. */
253 rtx tail_recursion_reentry
;
255 /* Location at which to save the argument pointer if it will need to be
256 referenced. There are two cases where this is done: if nonlocal gotos
257 exist, or if vars stored at an offset from the argument pointer will be
258 needed by inner routines. */
260 rtx arg_pointer_save_area
;
262 /* Offset to end of allocated area of stack frame.
263 If stack grows down, this is the address of the last stack slot allocated.
264 If stack grows up, this is the address for the next slot. */
267 /* List (chain of TREE_LISTs) of static chains for containing functions.
268 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
269 in an RTL_EXPR in the TREE_VALUE. */
270 static tree context_display
;
272 /* List (chain of TREE_LISTs) of trampolines for nested functions.
273 The trampoline sets up the static chain and jumps to the function.
274 We supply the trampoline's address when the function's address is requested.
276 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
277 in an RTL_EXPR in the TREE_VALUE. */
278 static tree trampoline_list
;
280 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
281 static rtx parm_birth_insn
;
284 /* Nonzero if a stack slot has been generated whose address is not
285 actually valid. It means that the generated rtl must all be scanned
286 to detect and correct the invalid addresses where they occur. */
287 static int invalid_stack_slot
;
290 /* Last insn of those whose job was to put parms into their nominal homes. */
291 static rtx last_parm_insn
;
293 /* 1 + last pseudo register number used for loading a copy
294 of a parameter of this function. */
295 static int max_parm_reg
;
297 /* Vector indexed by REGNO, containing location on stack in which
298 to put the parm which is nominally in pseudo register REGNO,
299 if we discover that that parm must go in the stack. */
300 static rtx
*parm_reg_stack_loc
;
302 #if 0 /* Turned off because 0 seems to work just as well. */
303 /* Cleanup lists are required for binding levels regardless of whether
304 that binding level has cleanups or not. This node serves as the
305 cleanup list whenever an empty list is required. */
306 static tree empty_cleanup_list
;
309 /* Nonzero once virtual register instantiation has been done.
310 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
311 static int virtuals_instantiated
;
313 /* These variables hold pointers to functions to
314 save and restore machine-specific data,
315 in push_function_context and pop_function_context. */
316 void (*save_machine_status
) PROTO((struct function
*));
317 void (*restore_machine_status
) PROTO((struct function
*));
319 /* Nonzero if we need to distinguish between the return value of this function
320 and the return value of a function called by this function. This helps
323 extern int rtx_equal_function_value_matters
;
324 extern tree sequence_rtl_expr
;
326 /* In order to evaluate some expressions, such as function calls returning
327 structures in memory, we need to temporarily allocate stack locations.
328 We record each allocated temporary in the following structure.
330 Associated with each temporary slot is a nesting level. When we pop up
331 one level, all temporaries associated with the previous level are freed.
332 Normally, all temporaries are freed after the execution of the statement
333 in which they were created. However, if we are inside a ({...}) grouping,
334 the result may be in a temporary and hence must be preserved. If the
335 result could be in a temporary, we preserve it if we can determine which
336 one it is in. If we cannot determine which temporary may contain the
337 result, all temporaries are preserved. A temporary is preserved by
338 pretending it was allocated at the previous nesting level.
340 Automatic variables are also assigned temporary slots, at the nesting
341 level where they are defined. They are marked a "kept" so that
342 free_temp_slots will not free them. */
346 /* Points to next temporary slot. */
347 struct temp_slot
*next
;
348 /* The rtx to used to reference the slot. */
350 /* The rtx used to represent the address if not the address of the
351 slot above. May be an EXPR_LIST if multiple addresses exist. */
353 /* The size, in units, of the slot. */
355 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
357 /* Non-zero if this temporary is currently in use. */
359 /* Non-zero if this temporary has its address taken. */
361 /* Nesting level at which this slot is being used. */
363 /* Non-zero if this should survive a call to free_temp_slots. */
365 /* The offset of the slot from the frame_pointer, including extra space
366 for alignment. This info is for combine_temp_slots. */
368 /* The size of the slot, including extra space for alignment. This
369 info is for combine_temp_slots. */
373 /* List of all temporaries allocated, both available and in use. */
375 struct temp_slot
*temp_slots
;
377 /* Current nesting level for temporaries. */
381 /* The FUNCTION_DECL node for the current function. */
382 static tree this_function_decl
;
384 /* Callinfo pointer for the current function. */
385 static rtx this_function_callinfo
;
387 /* The label in the bytecode file of this function's actual bytecode.  */
389 static char *this_function_bytecode
;
391 /* The call description vector for the current function. */
392 static rtx this_function_calldesc
;
394 /* Size of the local variables allocated for the current function. */
397 /* Current depth of the bytecode evaluation stack. */
400 /* Maximum depth of the evaluation stack in this function. */
403 /* Current depth in statement expressions. */
404 static int stmt_expr_depth
;
406 /* This structure is used to record MEMs or pseudos used to replace VAR, any
407 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
408 maintain this list in case two operands of an insn were required to match;
409 in that case we must ensure we use the same replacement. */
411 struct fixup_replacement
415 struct fixup_replacement
*next
;
418 /* Forward declarations. */
420 static struct temp_slot
*find_temp_slot_from_address
PROTO((rtx
));
421 static void put_reg_into_stack
PROTO((struct function
*, rtx
, tree
,
422 enum machine_mode
, enum machine_mode
,
424 static void fixup_var_refs
PROTO((rtx
, enum machine_mode
, int));
425 static struct fixup_replacement
426 *find_fixup_replacement
PROTO((struct fixup_replacement
**, rtx
));
427 static void fixup_var_refs_insns
PROTO((rtx
, enum machine_mode
, int,
429 static void fixup_var_refs_1
PROTO((rtx
, enum machine_mode
, rtx
*, rtx
,
430 struct fixup_replacement
**));
431 static rtx fixup_memory_subreg
PROTO((rtx
, rtx
, int));
432 static rtx walk_fixup_memory_subreg
PROTO((rtx
, rtx
, int));
433 static rtx fixup_stack_1
PROTO((rtx
, rtx
));
434 static void optimize_bit_field
PROTO((rtx
, rtx
, rtx
*));
435 static void instantiate_decls
PROTO((tree
, int));
436 static void instantiate_decls_1
PROTO((tree
, int));
437 static void instantiate_decl
PROTO((rtx
, int, int));
438 static int instantiate_virtual_regs_1
PROTO((rtx
*, rtx
, int));
439 static void delete_handlers
PROTO((void));
440 static void pad_to_arg_alignment
PROTO((struct args_size
*, int));
441 static void pad_below
PROTO((struct args_size
*, enum machine_mode
,
443 static tree round_down
PROTO((tree
, int));
444 static rtx round_trampoline_addr
PROTO((rtx
));
445 static tree blocks_nreverse
PROTO((tree
));
446 static int all_blocks
PROTO((tree
, tree
*));
447 static int *record_insns
PROTO((rtx
));
448 static int contains
PROTO((rtx
, int *));
450 /* Pointer to chain of `struct function' for containing functions. */
451 struct function
*outer_function_chain
;
453 /* Given a function decl for a containing function,
454 return the `struct function' for it. */
457 find_function_data (decl
)
461 for (p
= outer_function_chain
; p
; p
= p
->next
)
467 /* Save the current context for compilation of a nested function.
468 This is called from language-specific code.
469 The caller is responsible for saving any language-specific status,
470 since this function knows only about language-independent variables. */
473 push_function_context_to (context
)
476 struct function
*p
= (struct function
*) xmalloc (sizeof (struct function
));
478 p
->next
= outer_function_chain
;
479 outer_function_chain
= p
;
481 p
->name
= current_function_name
;
482 p
->decl
= current_function_decl
;
483 p
->pops_args
= current_function_pops_args
;
484 p
->returns_struct
= current_function_returns_struct
;
485 p
->returns_pcc_struct
= current_function_returns_pcc_struct
;
486 p
->returns_pointer
= current_function_returns_pointer
;
487 p
->needs_context
= current_function_needs_context
;
488 p
->calls_setjmp
= current_function_calls_setjmp
;
489 p
->calls_longjmp
= current_function_calls_longjmp
;
490 p
->calls_alloca
= current_function_calls_alloca
;
491 p
->has_nonlocal_label
= current_function_has_nonlocal_label
;
492 p
->has_nonlocal_goto
= current_function_has_nonlocal_goto
;
493 p
->contains_functions
= current_function_contains_functions
;
494 p
->args_size
= current_function_args_size
;
495 p
->pretend_args_size
= current_function_pretend_args_size
;
496 p
->arg_offset_rtx
= current_function_arg_offset_rtx
;
497 p
->varargs
= current_function_varargs
;
498 p
->stdarg
= current_function_stdarg
;
499 p
->uses_const_pool
= current_function_uses_const_pool
;
500 p
->uses_pic_offset_table
= current_function_uses_pic_offset_table
;
501 p
->internal_arg_pointer
= current_function_internal_arg_pointer
;
502 p
->max_parm_reg
= max_parm_reg
;
503 p
->parm_reg_stack_loc
= parm_reg_stack_loc
;
504 p
->outgoing_args_size
= current_function_outgoing_args_size
;
505 p
->return_rtx
= current_function_return_rtx
;
506 p
->nonlocal_goto_handler_slot
= nonlocal_goto_handler_slot
;
507 p
->nonlocal_goto_stack_level
= nonlocal_goto_stack_level
;
508 p
->nonlocal_labels
= nonlocal_labels
;
509 p
->cleanup_label
= cleanup_label
;
510 p
->return_label
= return_label
;
511 p
->save_expr_regs
= save_expr_regs
;
512 p
->stack_slot_list
= stack_slot_list
;
513 p
->parm_birth_insn
= parm_birth_insn
;
514 p
->frame_offset
= frame_offset
;
515 p
->tail_recursion_label
= tail_recursion_label
;
516 p
->tail_recursion_reentry
= tail_recursion_reentry
;
517 p
->arg_pointer_save_area
= arg_pointer_save_area
;
518 p
->rtl_expr_chain
= rtl_expr_chain
;
519 p
->last_parm_insn
= last_parm_insn
;
520 p
->context_display
= context_display
;
521 p
->trampoline_list
= trampoline_list
;
522 p
->function_call_count
= function_call_count
;
523 p
->temp_slots
= temp_slots
;
524 p
->temp_slot_level
= temp_slot_level
;
525 p
->fixup_var_refs_queue
= 0;
526 p
->epilogue_delay_list
= current_function_epilogue_delay_list
;
528 save_tree_status (p
, context
);
529 save_storage_status (p
);
530 save_emit_status (p
);
532 save_expr_status (p
);
533 save_stmt_status (p
);
534 save_varasm_status (p
);
536 if (save_machine_status
)
537 (*save_machine_status
) (p
);
541 push_function_context ()
543 push_function_context_to (current_function_decl
);
546 /* Restore the last saved context, at the end of a nested function.
547 This function is called from language-specific code. */
550 pop_function_context_from (context
)
553 struct function
*p
= outer_function_chain
;
555 outer_function_chain
= p
->next
;
557 current_function_contains_functions
558 = p
->contains_functions
|| p
->inline_obstacks
559 || context
== current_function_decl
;
560 current_function_name
= p
->name
;
561 current_function_decl
= p
->decl
;
562 current_function_pops_args
= p
->pops_args
;
563 current_function_returns_struct
= p
->returns_struct
;
564 current_function_returns_pcc_struct
= p
->returns_pcc_struct
;
565 current_function_returns_pointer
= p
->returns_pointer
;
566 current_function_needs_context
= p
->needs_context
;
567 current_function_calls_setjmp
= p
->calls_setjmp
;
568 current_function_calls_longjmp
= p
->calls_longjmp
;
569 current_function_calls_alloca
= p
->calls_alloca
;
570 current_function_has_nonlocal_label
= p
->has_nonlocal_label
;
571 current_function_has_nonlocal_goto
= p
->has_nonlocal_goto
;
572 current_function_args_size
= p
->args_size
;
573 current_function_pretend_args_size
= p
->pretend_args_size
;
574 current_function_arg_offset_rtx
= p
->arg_offset_rtx
;
575 current_function_varargs
= p
->varargs
;
576 current_function_stdarg
= p
->stdarg
;
577 current_function_uses_const_pool
= p
->uses_const_pool
;
578 current_function_uses_pic_offset_table
= p
->uses_pic_offset_table
;
579 current_function_internal_arg_pointer
= p
->internal_arg_pointer
;
580 max_parm_reg
= p
->max_parm_reg
;
581 parm_reg_stack_loc
= p
->parm_reg_stack_loc
;
582 current_function_outgoing_args_size
= p
->outgoing_args_size
;
583 current_function_return_rtx
= p
->return_rtx
;
584 nonlocal_goto_handler_slot
= p
->nonlocal_goto_handler_slot
;
585 nonlocal_goto_stack_level
= p
->nonlocal_goto_stack_level
;
586 nonlocal_labels
= p
->nonlocal_labels
;
587 cleanup_label
= p
->cleanup_label
;
588 return_label
= p
->return_label
;
589 save_expr_regs
= p
->save_expr_regs
;
590 stack_slot_list
= p
->stack_slot_list
;
591 parm_birth_insn
= p
->parm_birth_insn
;
592 frame_offset
= p
->frame_offset
;
593 tail_recursion_label
= p
->tail_recursion_label
;
594 tail_recursion_reentry
= p
->tail_recursion_reentry
;
595 arg_pointer_save_area
= p
->arg_pointer_save_area
;
596 rtl_expr_chain
= p
->rtl_expr_chain
;
597 last_parm_insn
= p
->last_parm_insn
;
598 context_display
= p
->context_display
;
599 trampoline_list
= p
->trampoline_list
;
600 function_call_count
= p
->function_call_count
;
601 temp_slots
= p
->temp_slots
;
602 temp_slot_level
= p
->temp_slot_level
;
603 current_function_epilogue_delay_list
= p
->epilogue_delay_list
;
606 restore_tree_status (p
);
607 restore_storage_status (p
);
608 restore_expr_status (p
);
609 restore_emit_status (p
);
610 restore_stmt_status (p
);
611 restore_varasm_status (p
);
613 if (restore_machine_status
)
614 (*restore_machine_status
) (p
);
616 /* Finish doing put_var_into_stack for any of our variables
617 which became addressable during the nested function. */
619 struct var_refs_queue
*queue
= p
->fixup_var_refs_queue
;
620 for (; queue
; queue
= queue
->next
)
621 fixup_var_refs (queue
->modified
, queue
->promoted_mode
, queue
->unsignedp
);
626 /* Reset variables that have known state during rtx generation. */
627 rtx_equal_function_value_matters
= 1;
628 virtuals_instantiated
= 0;
631 void pop_function_context ()
633 pop_function_context_from (current_function_decl
);
636 /* Allocate fixed slots in the stack frame of the current function. */
638 /* Return size needed for stack frame based on slots so far allocated.
639 This size counts from zero. It is not rounded to STACK_BOUNDARY;
640 the caller may have to do that. */
645 #ifdef FRAME_GROWS_DOWNWARD
646 return -frame_offset
;
652 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
653 with machine mode MODE.
655 ALIGN controls the amount of alignment for the address of the slot:
656 0 means according to MODE,
657 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
658 positive specifies alignment boundary in bits.
660 We do not round to stack_boundary here. */
663 assign_stack_local (mode
, size
, align
)
664 enum machine_mode mode
;
668 register rtx x
, addr
;
669 int bigend_correction
= 0;
674 alignment
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
676 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
678 else if (align
== -1)
680 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
681 size
= CEIL_ROUND (size
, alignment
);
684 alignment
= align
/ BITS_PER_UNIT
;
686 /* Round frame offset to that alignment.
687 We must be careful here, since FRAME_OFFSET might be negative and
688 division with a negative dividend isn't as well defined as we might
689 like. So we instead assume that ALIGNMENT is a power of two and
690 use logical operations which are unambiguous. */
691 #ifdef FRAME_GROWS_DOWNWARD
692 frame_offset
= FLOOR_ROUND (frame_offset
, alignment
);
694 frame_offset
= CEIL_ROUND (frame_offset
, alignment
);
697 /* On a big-endian machine, if we are allocating more space than we will use,
698 use the least significant bytes of those that are allocated. */
699 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
700 bigend_correction
= size
- GET_MODE_SIZE (mode
);
702 #ifdef FRAME_GROWS_DOWNWARD
703 frame_offset
-= size
;
706 /* If we have already instantiated virtual registers, return the actual
707 address relative to the frame pointer. */
708 if (virtuals_instantiated
)
709 addr
= plus_constant (frame_pointer_rtx
,
710 (frame_offset
+ bigend_correction
711 + STARTING_FRAME_OFFSET
));
713 addr
= plus_constant (virtual_stack_vars_rtx
,
714 frame_offset
+ bigend_correction
);
716 #ifndef FRAME_GROWS_DOWNWARD
717 frame_offset
+= size
;
720 x
= gen_rtx (MEM
, mode
, addr
);
722 stack_slot_list
= gen_rtx (EXPR_LIST
, VOIDmode
, x
, stack_slot_list
);
727 /* Assign a stack slot in a containing function.
728 First three arguments are same as in preceding function.
729 The last argument specifies the function to allocate in. */
732 assign_outer_stack_local (mode
, size
, align
, function
)
733 enum machine_mode mode
;
736 struct function
*function
;
738 register rtx x
, addr
;
739 int bigend_correction
= 0;
742 /* Allocate in the memory associated with the function in whose frame
744 push_obstacks (function
->function_obstack
,
745 function
->function_maybepermanent_obstack
);
749 alignment
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
751 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
753 else if (align
== -1)
755 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
756 size
= CEIL_ROUND (size
, alignment
);
759 alignment
= align
/ BITS_PER_UNIT
;
761 /* Round frame offset to that alignment. */
762 #ifdef FRAME_GROWS_DOWNWARD
763 function
->frame_offset
= FLOOR_ROUND (function
->frame_offset
, alignment
);
765 function
->frame_offset
= CEIL_ROUND (function
->frame_offset
, alignment
);
768 /* On a big-endian machine, if we are allocating more space than we will use,
769 use the least significant bytes of those that are allocated. */
770 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
771 bigend_correction
= size
- GET_MODE_SIZE (mode
);
773 #ifdef FRAME_GROWS_DOWNWARD
774 function
->frame_offset
-= size
;
776 addr
= plus_constant (virtual_stack_vars_rtx
,
777 function
->frame_offset
+ bigend_correction
);
778 #ifndef FRAME_GROWS_DOWNWARD
779 function
->frame_offset
+= size
;
782 x
= gen_rtx (MEM
, mode
, addr
);
784 function
->stack_slot_list
785 = gen_rtx (EXPR_LIST
, VOIDmode
, x
, function
->stack_slot_list
);
792 /* Allocate a temporary stack slot and record it for possible later
795 MODE is the machine mode to be given to the returned rtx.
797 SIZE is the size in units of the space required. We do no rounding here
798 since assign_stack_local will do any required rounding.
800 KEEP is 1 if this slot is to be retained after a call to
801 free_temp_slots. Automatic variables for a block are allocated
802 with this flag. KEEP is 2, if we allocate a longer term temporary,
803 whose lifetime is controlled by CLEANUP_POINT_EXPRs. */
806 assign_stack_temp (mode
, size
, keep
)
807 enum machine_mode mode
;
811 struct temp_slot
*p
, *best_p
= 0;
813 /* If SIZE is -1 it means that somebody tried to allocate a temporary
814 of a variable size. */
818 /* First try to find an available, already-allocated temporary that is the
819 exact size we require. */
820 for (p
= temp_slots
; p
; p
= p
->next
)
821 if (p
->size
== size
&& GET_MODE (p
->slot
) == mode
&& ! p
->in_use
)
824 /* If we didn't find, one, try one that is larger than what we want. We
825 find the smallest such. */
827 for (p
= temp_slots
; p
; p
= p
->next
)
828 if (p
->size
> size
&& GET_MODE (p
->slot
) == mode
&& ! p
->in_use
829 && (best_p
== 0 || best_p
->size
> p
->size
))
832 /* Make our best, if any, the one to use. */
835 /* If there are enough aligned bytes left over, make them into a new
836 temp_slot so that the extra bytes don't get wasted. Do this only
837 for BLKmode slots, so that we can be sure of the alignment. */
838 if (GET_MODE (best_p
->slot
) == BLKmode
)
840 int alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
841 int rounded_size
= CEIL_ROUND (size
, alignment
);
843 if (best_p
->size
- rounded_size
>= alignment
)
845 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
846 p
->in_use
= p
->addr_taken
= 0;
847 p
->size
= best_p
->size
- rounded_size
;
848 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
849 p
->full_size
= best_p
->full_size
- rounded_size
;
850 p
->slot
= gen_rtx (MEM
, BLKmode
,
851 plus_constant (XEXP (best_p
->slot
, 0),
855 p
->next
= temp_slots
;
858 stack_slot_list
= gen_rtx (EXPR_LIST
, VOIDmode
, p
->slot
,
861 best_p
->size
= rounded_size
;
862 best_p
->full_size
= rounded_size
;
869 /* If we still didn't find one, make a new temporary. */
872 int frame_offset_old
= frame_offset
;
873 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
874 /* If the temp slot mode doesn't indicate the alignment,
875 use the largest possible, so no one will be disappointed. */
876 p
->slot
= assign_stack_local (mode
, size
, mode
== BLKmode
? -1 : 0);
877 /* The following slot size computation is necessary because we don't
878 know the actual size of the temporary slot until assign_stack_local
879 has performed all the frame alignment and size rounding for the
880 requested temporary. Note that extra space added for alignment
881 can be either above or below this stack slot depending on which
882 way the frame grows. We include the extra space if and only if it
883 is above this slot. */
884 #ifdef FRAME_GROWS_DOWNWARD
885 p
->size
= frame_offset_old
- frame_offset
;
889 /* Now define the fields used by combine_temp_slots. */
890 #ifdef FRAME_GROWS_DOWNWARD
891 p
->base_offset
= frame_offset
;
892 p
->full_size
= frame_offset_old
- frame_offset
;
894 p
->base_offset
= frame_offset_old
;
895 p
->full_size
= frame_offset
- frame_offset_old
;
898 p
->next
= temp_slots
;
904 p
->rtl_expr
= sequence_rtl_expr
;
908 p
->level
= target_temp_slot_level
;
913 p
->level
= temp_slot_level
;
919 /* Assign a temporary of given TYPE.
920 KEEP is as for assign_stack_temp.
921 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
922 it is 0 if a register is OK.
923 DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */
927 assign_temp (type
, keep
, memory_required
, dont_promote
)
933 enum machine_mode mode
= TYPE_MODE (type
);
934 int unsignedp
= TREE_UNSIGNED (type
);
936 if (mode
== BLKmode
|| memory_required
)
938 int size
= int_size_in_bytes (type
);
941 /* Unfortunately, we don't yet know how to allocate variable-sized
942 temporaries. However, sometimes we have a fixed upper limit on
943 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
944 instead. This is the case for Chill variable-sized strings. */
945 if (size
== -1 && TREE_CODE (type
) == ARRAY_TYPE
946 && TYPE_ARRAY_MAX_SIZE (type
) != NULL_TREE
947 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type
)) == INTEGER_CST
)
948 size
= TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type
));
950 tmp
= assign_stack_temp (mode
, size
, keep
);
951 MEM_IN_STRUCT_P (tmp
) = AGGREGATE_TYPE_P (type
);
955 #ifndef PROMOTE_FOR_CALL_ONLY
957 mode
= promote_mode (type
, mode
, &unsignedp
, 0);
960 return gen_reg_rtx (mode
);
963 /* Combine temporary stack slots which are adjacent on the stack.
965 This allows for better use of already allocated stack space. This is only
966 done for BLKmode slots because we can be sure that we won't have alignment
967 problems in this case. */
970 combine_temp_slots ()
972 struct temp_slot
*p
, *q
;
973 struct temp_slot
*prev_p
, *prev_q
;
974 /* Determine where to free back to after this function. */
975 rtx free_pointer
= rtx_alloc (CONST_INT
);
977 for (p
= temp_slots
, prev_p
= 0; p
; p
= prev_p
? prev_p
->next
: temp_slots
)
980 if (! p
->in_use
&& GET_MODE (p
->slot
) == BLKmode
)
981 for (q
= p
->next
, prev_q
= p
; q
; q
= prev_q
->next
)
984 if (! q
->in_use
&& GET_MODE (q
->slot
) == BLKmode
)
986 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
988 /* Q comes after P; combine Q into P. */
990 p
->full_size
+= q
->full_size
;
993 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
995 /* P comes after Q; combine P into Q. */
997 q
->full_size
+= p
->full_size
;
1002 /* Either delete Q or advance past it. */
1004 prev_q
->next
= q
->next
;
1008 /* Either delete P or advance past it. */
1012 prev_p
->next
= p
->next
;
1014 temp_slots
= p
->next
;
1020 /* Free all the RTL made by plus_constant. */
1021 rtx_free (free_pointer
);
1024 /* Find the temp slot corresponding to the object at address X. */
1026 static struct temp_slot
*
1027 find_temp_slot_from_address (x
)
1030 struct temp_slot
*p
;
1033 for (p
= temp_slots
; p
; p
= p
->next
)
1037 else if (XEXP (p
->slot
, 0) == x
1041 else if (p
->address
!= 0 && GET_CODE (p
->address
) == EXPR_LIST
)
1042 for (next
= p
->address
; next
; next
= XEXP (next
, 1))
1043 if (XEXP (next
, 0) == x
)
1050 /* Indicate that NEW is an alternate way of referring to the temp slot
1051 that previous was known by OLD. */
1054 update_temp_slot_address (old
, new)
1057 struct temp_slot
*p
= find_temp_slot_from_address (old
);
1059 /* If none, return. Else add NEW as an alias. */
1062 else if (p
->address
== 0)
1066 if (GET_CODE (p
->address
) != EXPR_LIST
)
1067 p
->address
= gen_rtx (EXPR_LIST
, VOIDmode
, p
->address
, NULL_RTX
);
1069 p
->address
= gen_rtx (EXPR_LIST
, VOIDmode
, new, p
->address
);
1073 /* If X could be a reference to a temporary slot, mark the fact that its
1074 address was taken. */
1077 mark_temp_addr_taken (x
)
1080 struct temp_slot
*p
;
1085 /* If X is not in memory or is at a constant address, it cannot be in
1086 a temporary slot. */
1087 if (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1090 p
= find_temp_slot_from_address (XEXP (x
, 0));
1095 /* If X could be a reference to a temporary slot, mark that slot as
1096 belonging to the to one level higher than the current level. If X
1097 matched one of our slots, just mark that one. Otherwise, we can't
1098 easily predict which it is, so upgrade all of them. Kept slots
1099 need not be touched.
1101 This is called when an ({...}) construct occurs and a statement
1102 returns a value in memory. */
1105 preserve_temp_slots (x
)
1108 struct temp_slot
*p
= 0;
1110 /* If there is no result, we still might have some objects whose address
1111 were taken, so we need to make sure they stay around. */
1114 for (p
= temp_slots
; p
; p
= p
->next
)
1115 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1121 /* If X is a register that is being used as a pointer, see if we have
1122 a temporary slot we know it points to. To be consistent with
1123 the code below, we really should preserve all non-kept slots
1124 if we can't find a match, but that seems to be much too costly. */
1125 if (GET_CODE (x
) == REG
&& REGNO_POINTER_FLAG (REGNO (x
)))
1126 p
= find_temp_slot_from_address (x
);
1128 /* If X is not in memory or is at a constant address, it cannot be in
1129 a temporary slot, but it can contain something whose address was
1131 if (p
== 0 && (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0))))
1133 for (p
= temp_slots
; p
; p
= p
->next
)
1134 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1140 /* First see if we can find a match. */
1142 p
= find_temp_slot_from_address (XEXP (x
, 0));
1146 /* Move everything at our level whose address was taken to our new
1147 level in case we used its address. */
1148 struct temp_slot
*q
;
1150 if (p
->level
== temp_slot_level
)
1152 for (q
= temp_slots
; q
; q
= q
->next
)
1153 if (q
!= p
&& q
->addr_taken
&& q
->level
== p
->level
)
1162 /* Otherwise, preserve all non-kept slots at this level. */
1163 for (p
= temp_slots
; p
; p
= p
->next
)
1164 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
)
1168 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1169 with that RTL_EXPR, promote it into a temporary slot at the present
1170 level so it will not be freed when we free slots made in the
1174 preserve_rtl_expr_result (x
)
1177 struct temp_slot
*p
;
1179 /* If X is not in memory or is at a constant address, it cannot be in
1180 a temporary slot. */
1181 if (x
== 0 || GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1184 /* If we can find a match, move it to our level unless it is already at
1186 p
= find_temp_slot_from_address (XEXP (x
, 0));
1189 p
->level
= MIN (p
->level
, temp_slot_level
);
1196 /* Free all temporaries used so far. This is normally called at the end
1197 of generating code for a statement. Don't free any temporaries
1198 currently in use for an RTL_EXPR that hasn't yet been emitted.
1199 We could eventually do better than this since it can be reused while
1200 generating the same RTL_EXPR, but this is complex and probably not
1206 struct temp_slot
*p
;
1208 for (p
= temp_slots
; p
; p
= p
->next
)
1209 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
1210 && p
->rtl_expr
== 0)
1213 combine_temp_slots ();
1216 /* Free all temporary slots used in T, an RTL_EXPR node. */
1219 free_temps_for_rtl_expr (t
)
1222 struct temp_slot
*p
;
1224 for (p
= temp_slots
; p
; p
= p
->next
)
1225 if (p
->rtl_expr
== t
)
1228 combine_temp_slots ();
1231 /* Push deeper into the nesting level for stack temporaries. */
1239 /* Pop a temporary nesting level. All slots in use in the current level
1245 struct temp_slot
*p
;
1247 for (p
= temp_slots
; p
; p
= p
->next
)
1248 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->rtl_expr
== 0)
1251 combine_temp_slots ();
1256 /* Initialize temporary slots. */
1261 /* We have not allocated any temporaries yet. */
1263 temp_slot_level
= 0;
1264 target_temp_slot_level
= 0;
1267 /* Retroactively move an auto variable from a register to a stack slot.
1268 This is done when an address-reference to the variable is seen. */
1271 put_var_into_stack (decl
)
1275 enum machine_mode promoted_mode
, decl_mode
;
1276 struct function
*function
= 0;
1279 if (output_bytecode
)
1282 context
= decl_function_context (decl
);
1284 /* Get the current rtl used for this object and it's original mode. */
1285 reg
= TREE_CODE (decl
) == SAVE_EXPR
? SAVE_EXPR_RTL (decl
) : DECL_RTL (decl
);
1287 /* No need to do anything if decl has no rtx yet
1288 since in that case caller is setting TREE_ADDRESSABLE
1289 and a stack slot will be assigned when the rtl is made. */
1293 /* Get the declared mode for this object. */
1294 decl_mode
= (TREE_CODE (decl
) == SAVE_EXPR
? TYPE_MODE (TREE_TYPE (decl
))
1295 : DECL_MODE (decl
));
1296 /* Get the mode it's actually stored in. */
1297 promoted_mode
= GET_MODE (reg
);
1299 /* If this variable comes from an outer function,
1300 find that function's saved context. */
1301 if (context
!= current_function_decl
)
1302 for (function
= outer_function_chain
; function
; function
= function
->next
)
1303 if (function
->decl
== context
)
1306 /* If this is a variable-size object with a pseudo to address it,
1307 put that pseudo into the stack, if the var is nonlocal. */
1308 if (DECL_NONLOCAL (decl
)
1309 && GET_CODE (reg
) == MEM
1310 && GET_CODE (XEXP (reg
, 0)) == REG
1311 && REGNO (XEXP (reg
, 0)) > LAST_VIRTUAL_REGISTER
)
1313 reg
= XEXP (reg
, 0);
1314 decl_mode
= promoted_mode
= GET_MODE (reg
);
1317 /* Now we should have a value that resides in one or more pseudo regs. */
1319 if (GET_CODE (reg
) == REG
)
1320 put_reg_into_stack (function
, reg
, TREE_TYPE (decl
),
1321 promoted_mode
, decl_mode
, TREE_SIDE_EFFECTS (decl
));
1322 else if (GET_CODE (reg
) == CONCAT
)
1324 /* A CONCAT contains two pseudos; put them both in the stack.
1325 We do it so they end up consecutive. */
1326 enum machine_mode part_mode
= GET_MODE (XEXP (reg
, 0));
1327 tree part_type
= TREE_TYPE (TREE_TYPE (decl
));
1328 #ifdef FRAME_GROWS_DOWNWARD
1329 /* Since part 0 should have a lower address, do it second. */
1330 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1331 part_mode
, TREE_SIDE_EFFECTS (decl
));
1332 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1333 part_mode
, TREE_SIDE_EFFECTS (decl
));
1335 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1336 part_mode
, TREE_SIDE_EFFECTS (decl
));
1337 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1338 part_mode
, TREE_SIDE_EFFECTS (decl
));
1341 /* Change the CONCAT into a combined MEM for both parts. */
1342 PUT_CODE (reg
, MEM
);
1343 MEM_VOLATILE_P (reg
) = MEM_VOLATILE_P (XEXP (reg
, 0));
1345 /* The two parts are in memory order already.
1346 Use the lower parts address as ours. */
1347 XEXP (reg
, 0) = XEXP (XEXP (reg
, 0), 0);
1348 /* Prevent sharing of rtl that might lose. */
1349 if (GET_CODE (XEXP (reg
, 0)) == PLUS
)
1350 XEXP (reg
, 0) = copy_rtx (XEXP (reg
, 0));
1354 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1355 into the stack frame of FUNCTION (0 means the current function).
1356 DECL_MODE is the machine mode of the user-level data type.
1357 PROMOTED_MODE is the machine mode of the register.
1358 VOLATILE_P is nonzero if this is for a "volatile" decl. */
1361 put_reg_into_stack (function
, reg
, type
, promoted_mode
, decl_mode
, volatile_p
)
1362 struct function
*function
;
1365 enum machine_mode promoted_mode
, decl_mode
;
1372 if (REGNO (reg
) < function
->max_parm_reg
)
1373 new = function
->parm_reg_stack_loc
[REGNO (reg
)];
1375 new = assign_outer_stack_local (decl_mode
, GET_MODE_SIZE (decl_mode
),
1380 if (REGNO (reg
) < max_parm_reg
)
1381 new = parm_reg_stack_loc
[REGNO (reg
)];
1383 new = assign_stack_local (decl_mode
, GET_MODE_SIZE (decl_mode
), 0);
1386 PUT_MODE (reg
, decl_mode
);
1387 XEXP (reg
, 0) = XEXP (new, 0);
1388 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1389 MEM_VOLATILE_P (reg
) = volatile_p
;
1390 PUT_CODE (reg
, MEM
);
1392 /* If this is a memory ref that contains aggregate components,
1393 mark it as such for cse and loop optimize. */
1394 MEM_IN_STRUCT_P (reg
) = AGGREGATE_TYPE_P (type
);
1396 /* Now make sure that all refs to the variable, previously made
1397 when it was a register, are fixed up to be valid again. */
1400 struct var_refs_queue
*temp
;
1402 /* Variable is inherited; fix it up when we get back to its function. */
1403 push_obstacks (function
->function_obstack
,
1404 function
->function_maybepermanent_obstack
);
1406 /* See comment in restore_tree_status in tree.c for why this needs to be
1407 on saveable obstack. */
1409 = (struct var_refs_queue
*) savealloc (sizeof (struct var_refs_queue
));
1410 temp
->modified
= reg
;
1411 temp
->promoted_mode
= promoted_mode
;
1412 temp
->unsignedp
= TREE_UNSIGNED (type
);
1413 temp
->next
= function
->fixup_var_refs_queue
;
1414 function
->fixup_var_refs_queue
= temp
;
1418 /* Variable is local; fix it up now. */
1419 fixup_var_refs (reg
, promoted_mode
, TREE_UNSIGNED (type
));
1423 fixup_var_refs (var
, promoted_mode
, unsignedp
)
1425 enum machine_mode promoted_mode
;
1429 rtx first_insn
= get_insns ();
1430 struct sequence_stack
*stack
= sequence_stack
;
1431 tree rtl_exps
= rtl_expr_chain
;
1433 /* Must scan all insns for stack-refs that exceed the limit. */
1434 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, first_insn
, stack
== 0);
1436 /* Scan all pending sequences too. */
1437 for (; stack
; stack
= stack
->next
)
1439 push_to_sequence (stack
->first
);
1440 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
,
1441 stack
->first
, stack
->next
!= 0);
1442 /* Update remembered end of sequence
1443 in case we added an insn at the end. */
1444 stack
->last
= get_last_insn ();
1448 /* Scan all waiting RTL_EXPRs too. */
1449 for (pending
= rtl_exps
; pending
; pending
= TREE_CHAIN (pending
))
1451 rtx seq
= RTL_EXPR_SEQUENCE (TREE_VALUE (pending
));
1452 if (seq
!= const0_rtx
&& seq
!= 0)
1454 push_to_sequence (seq
);
1455 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, seq
, 0);
1461 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1462 some part of an insn. Return a struct fixup_replacement whose OLD
1463 value is equal to X. Allocate a new structure if no such entry exists. */
1465 static struct fixup_replacement
*
1466 find_fixup_replacement (replacements
, x
)
1467 struct fixup_replacement
**replacements
;
1470 struct fixup_replacement
*p
;
1472 /* See if we have already replaced this. */
1473 for (p
= *replacements
; p
&& p
->old
!= x
; p
= p
->next
)
1478 p
= (struct fixup_replacement
*) oballoc (sizeof (struct fixup_replacement
));
1481 p
->next
= *replacements
;
1488 /* Scan the insn-chain starting with INSN for refs to VAR
1489 and fix them up. TOPLEVEL is nonzero if this chain is the
1490 main chain of insns for the current function. */
1493 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, insn
, toplevel
)
1495 enum machine_mode promoted_mode
;
1504 rtx next
= NEXT_INSN (insn
);
1506 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1508 /* If this is a CLOBBER of VAR, delete it.
1510 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1511 and REG_RETVAL notes too. */
1512 if (GET_CODE (PATTERN (insn
)) == CLOBBER
1513 && XEXP (PATTERN (insn
), 0) == var
)
1515 if ((note
= find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
)) != 0)
1516 /* The REG_LIBCALL note will go away since we are going to
1517 turn INSN into a NOTE, so just delete the
1518 corresponding REG_RETVAL note. */
1519 remove_note (XEXP (note
, 0),
1520 find_reg_note (XEXP (note
, 0), REG_RETVAL
,
1523 /* In unoptimized compilation, we shouldn't call delete_insn
1524 except in jump.c doing warnings. */
1525 PUT_CODE (insn
, NOTE
);
1526 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1527 NOTE_SOURCE_FILE (insn
) = 0;
1530 /* The insn to load VAR from a home in the arglist
1531 is now a no-op. When we see it, just delete it. */
1533 && GET_CODE (PATTERN (insn
)) == SET
1534 && SET_DEST (PATTERN (insn
)) == var
1535 /* If this represents the result of an insn group,
1536 don't delete the insn. */
1537 && find_reg_note (insn
, REG_RETVAL
, NULL_RTX
) == 0
1538 && rtx_equal_p (SET_SRC (PATTERN (insn
)), var
))
1540 /* In unoptimized compilation, we shouldn't call delete_insn
1541 except in jump.c doing warnings. */
1542 PUT_CODE (insn
, NOTE
);
1543 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1544 NOTE_SOURCE_FILE (insn
) = 0;
1545 if (insn
== last_parm_insn
)
1546 last_parm_insn
= PREV_INSN (next
);
1550 struct fixup_replacement
*replacements
= 0;
1551 rtx next_insn
= NEXT_INSN (insn
);
1553 #ifdef SMALL_REGISTER_CLASSES
1554 /* If the insn that copies the results of a CALL_INSN
1555 into a pseudo now references VAR, we have to use an
1556 intermediate pseudo since we want the life of the
1557 return value register to be only a single insn.
1559 If we don't use an intermediate pseudo, such things as
1560 address computations to make the address of VAR valid
1561 if it is not can be placed between the CALL_INSN and INSN.
1563 To make sure this doesn't happen, we record the destination
1564 of the CALL_INSN and see if the next insn uses both that
1567 if (call_dest
!= 0 && GET_CODE (insn
) == INSN
1568 && reg_mentioned_p (var
, PATTERN (insn
))
1569 && reg_mentioned_p (call_dest
, PATTERN (insn
)))
1571 rtx temp
= gen_reg_rtx (GET_MODE (call_dest
));
1573 emit_insn_before (gen_move_insn (temp
, call_dest
), insn
);
1575 PATTERN (insn
) = replace_rtx (PATTERN (insn
),
1579 if (GET_CODE (insn
) == CALL_INSN
1580 && GET_CODE (PATTERN (insn
)) == SET
)
1581 call_dest
= SET_DEST (PATTERN (insn
));
1582 else if (GET_CODE (insn
) == CALL_INSN
1583 && GET_CODE (PATTERN (insn
)) == PARALLEL
1584 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1585 call_dest
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 0));
1590 /* See if we have to do anything to INSN now that VAR is in
1591 memory. If it needs to be loaded into a pseudo, use a single
1592 pseudo for the entire insn in case there is a MATCH_DUP
1593 between two operands. We pass a pointer to the head of
1594 a list of struct fixup_replacements. If fixup_var_refs_1
1595 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1596 it will record them in this list.
1598 If it allocated a pseudo for any replacement, we copy into
1601 fixup_var_refs_1 (var
, promoted_mode
, &PATTERN (insn
), insn
,
1604 /* If this is last_parm_insn, and any instructions were output
1605 after it to fix it up, then we must set last_parm_insn to
1606 the last such instruction emitted. */
1607 if (insn
== last_parm_insn
)
1608 last_parm_insn
= PREV_INSN (next_insn
);
1610 while (replacements
)
1612 if (GET_CODE (replacements
->new) == REG
)
1617 /* OLD might be a (subreg (mem)). */
1618 if (GET_CODE (replacements
->old
) == SUBREG
)
1620 = fixup_memory_subreg (replacements
->old
, insn
, 0);
1623 = fixup_stack_1 (replacements
->old
, insn
);
1625 insert_before
= insn
;
1627 /* If we are changing the mode, do a conversion.
1628 This might be wasteful, but combine.c will
1629 eliminate much of the waste. */
1631 if (GET_MODE (replacements
->new)
1632 != GET_MODE (replacements
->old
))
1635 convert_move (replacements
->new,
1636 replacements
->old
, unsignedp
);
1637 seq
= gen_sequence ();
1641 seq
= gen_move_insn (replacements
->new,
1644 emit_insn_before (seq
, insert_before
);
1647 replacements
= replacements
->next
;
1651 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1652 But don't touch other insns referred to by reg-notes;
1653 we will get them elsewhere. */
1654 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1655 if (GET_CODE (note
) != INSN_LIST
)
1657 = walk_fixup_memory_subreg (XEXP (note
, 0), insn
, 1);
1663 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1664 See if the rtx expression at *LOC in INSN needs to be changed.
1666 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1667 contain a list of original rtx's and replacements. If we find that we need
1668 to modify this insn by replacing a memory reference with a pseudo or by
1669 making a new MEM to implement a SUBREG, we consult that list to see if
1670 we have already chosen a replacement. If none has already been allocated,
1671 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1672 or the SUBREG, as appropriate, to the pseudo. */
1675 fixup_var_refs_1 (var
, promoted_mode
, loc
, insn
, replacements
)
1677 enum machine_mode promoted_mode
;
1680 struct fixup_replacement
**replacements
;
1683 register rtx x
= *loc
;
1684 RTX_CODE code
= GET_CODE (x
);
1686 register rtx tem
, tem1
;
1687 struct fixup_replacement
*replacement
;
1694 /* If we already have a replacement, use it. Otherwise,
1695 try to fix up this address in case it is invalid. */
1697 replacement
= find_fixup_replacement (replacements
, var
);
1698 if (replacement
->new)
1700 *loc
= replacement
->new;
1704 *loc
= replacement
->new = x
= fixup_stack_1 (x
, insn
);
1706 /* Unless we are forcing memory to register or we changed the mode,
1707 we can leave things the way they are if the insn is valid. */
1709 INSN_CODE (insn
) = -1;
1710 if (! flag_force_mem
&& GET_MODE (x
) == promoted_mode
1711 && recog_memoized (insn
) >= 0)
1714 *loc
= replacement
->new = gen_reg_rtx (promoted_mode
);
1718 /* If X contains VAR, we need to unshare it here so that we update
1719 each occurrence separately. But all identical MEMs in one insn
1720 must be replaced with the same rtx because of the possibility of
1723 if (reg_mentioned_p (var
, x
))
1725 replacement
= find_fixup_replacement (replacements
, x
);
1726 if (replacement
->new == 0)
1727 replacement
->new = copy_most_rtx (x
, var
);
1729 *loc
= x
= replacement
->new;
1745 /* Note that in some cases those types of expressions are altered
1746 by optimize_bit_field, and do not survive to get here. */
1747 if (XEXP (x
, 0) == var
1748 || (GET_CODE (XEXP (x
, 0)) == SUBREG
1749 && SUBREG_REG (XEXP (x
, 0)) == var
))
1751 /* Get TEM as a valid MEM in the mode presently in the insn.
1753 We don't worry about the possibility of MATCH_DUP here; it
1754 is highly unlikely and would be tricky to handle. */
1757 if (GET_CODE (tem
) == SUBREG
)
1758 tem
= fixup_memory_subreg (tem
, insn
, 1);
1759 tem
= fixup_stack_1 (tem
, insn
);
1761 /* Unless we want to load from memory, get TEM into the proper mode
1762 for an extract from memory. This can only be done if the
1763 extract is at a constant position and length. */
1765 if (! flag_force_mem
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
1766 && GET_CODE (XEXP (x
, 2)) == CONST_INT
1767 && ! mode_dependent_address_p (XEXP (tem
, 0))
1768 && ! MEM_VOLATILE_P (tem
))
1770 enum machine_mode wanted_mode
= VOIDmode
;
1771 enum machine_mode is_mode
= GET_MODE (tem
);
1772 int width
= INTVAL (XEXP (x
, 1));
1773 int pos
= INTVAL (XEXP (x
, 2));
1776 if (GET_CODE (x
) == ZERO_EXTRACT
)
1777 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][1];
1780 if (GET_CODE (x
) == SIGN_EXTRACT
)
1781 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][1];
1783 /* If we have a narrower mode, we can do something. */
1784 if (wanted_mode
!= VOIDmode
1785 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1787 int offset
= pos
/ BITS_PER_UNIT
;
1788 rtx old_pos
= XEXP (x
, 2);
1791 /* If the bytes and bits are counted differently, we
1792 must adjust the offset. */
1793 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
1794 offset
= (GET_MODE_SIZE (is_mode
)
1795 - GET_MODE_SIZE (wanted_mode
) - offset
);
1797 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1799 newmem
= gen_rtx (MEM
, wanted_mode
,
1800 plus_constant (XEXP (tem
, 0), offset
));
1801 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
1802 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (tem
);
1803 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (tem
);
1805 /* Make the change and see if the insn remains valid. */
1806 INSN_CODE (insn
) = -1;
1807 XEXP (x
, 0) = newmem
;
1808 XEXP (x
, 2) = GEN_INT (pos
);
1810 if (recog_memoized (insn
) >= 0)
1813 /* Otherwise, restore old position. XEXP (x, 0) will be
1815 XEXP (x
, 2) = old_pos
;
1819 /* If we get here, the bitfield extract insn can't accept a memory
1820 reference. Copy the input into a register. */
1822 tem1
= gen_reg_rtx (GET_MODE (tem
));
1823 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
1830 if (SUBREG_REG (x
) == var
)
1832 /* If this is a special SUBREG made because VAR was promoted
1833 from a wider mode, replace it with VAR and call ourself
1834 recursively, this time saying that the object previously
1835 had its current mode (by virtue of the SUBREG). */
1837 if (SUBREG_PROMOTED_VAR_P (x
))
1840 fixup_var_refs_1 (var
, GET_MODE (var
), loc
, insn
, replacements
);
1844 /* If this SUBREG makes VAR wider, it has become a paradoxical
1845 SUBREG with VAR in memory, but these aren't allowed at this
1846 stage of the compilation. So load VAR into a pseudo and take
1847 a SUBREG of that pseudo. */
1848 if (GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (var
)))
1850 replacement
= find_fixup_replacement (replacements
, var
);
1851 if (replacement
->new == 0)
1852 replacement
->new = gen_reg_rtx (GET_MODE (var
));
1853 SUBREG_REG (x
) = replacement
->new;
1857 /* See if we have already found a replacement for this SUBREG.
1858 If so, use it. Otherwise, make a MEM and see if the insn
1859 is recognized. If not, or if we should force MEM into a register,
1860 make a pseudo for this SUBREG. */
1861 replacement
= find_fixup_replacement (replacements
, x
);
1862 if (replacement
->new)
1864 *loc
= replacement
->new;
1868 replacement
->new = *loc
= fixup_memory_subreg (x
, insn
, 0);
1870 INSN_CODE (insn
) = -1;
1871 if (! flag_force_mem
&& recog_memoized (insn
) >= 0)
1874 *loc
= replacement
->new = gen_reg_rtx (GET_MODE (x
));
1880 /* First do special simplification of bit-field references. */
1881 if (GET_CODE (SET_DEST (x
)) == SIGN_EXTRACT
1882 || GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
1883 optimize_bit_field (x
, insn
, 0);
1884 if (GET_CODE (SET_SRC (x
)) == SIGN_EXTRACT
1885 || GET_CODE (SET_SRC (x
)) == ZERO_EXTRACT
)
1886 optimize_bit_field (x
, insn
, NULL_PTR
);
1888 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1889 insn into a pseudo and store the low part of the pseudo into VAR. */
1890 if (GET_CODE (SET_DEST (x
)) == SUBREG
1891 && SUBREG_REG (SET_DEST (x
)) == var
1892 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
1893 > GET_MODE_SIZE (GET_MODE (var
))))
1895 SET_DEST (x
) = tem
= gen_reg_rtx (GET_MODE (SET_DEST (x
)));
1896 emit_insn_after (gen_move_insn (var
, gen_lowpart (GET_MODE (var
),
1903 rtx dest
= SET_DEST (x
);
1904 rtx src
= SET_SRC (x
);
1905 rtx outerdest
= dest
;
1907 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
1908 || GET_CODE (dest
) == SIGN_EXTRACT
1909 || GET_CODE (dest
) == ZERO_EXTRACT
)
1910 dest
= XEXP (dest
, 0);
1912 if (GET_CODE (src
) == SUBREG
)
1913 src
= XEXP (src
, 0);
1915 /* If VAR does not appear at the top level of the SET
1916 just scan the lower levels of the tree. */
1918 if (src
!= var
&& dest
!= var
)
1921 /* We will need to rerecognize this insn. */
1922 INSN_CODE (insn
) = -1;
1925 if (GET_CODE (outerdest
) == ZERO_EXTRACT
&& dest
== var
)
1927 /* Since this case will return, ensure we fixup all the
1929 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 1),
1930 insn
, replacements
);
1931 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 2),
1932 insn
, replacements
);
1933 fixup_var_refs_1 (var
, promoted_mode
, &SET_SRC (x
),
1934 insn
, replacements
);
1936 tem
= XEXP (outerdest
, 0);
1938 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1939 that may appear inside a ZERO_EXTRACT.
1940 This was legitimate when the MEM was a REG. */
1941 if (GET_CODE (tem
) == SUBREG
1942 && SUBREG_REG (tem
) == var
)
1943 tem
= fixup_memory_subreg (tem
, insn
, 1);
1945 tem
= fixup_stack_1 (tem
, insn
);
1947 if (GET_CODE (XEXP (outerdest
, 1)) == CONST_INT
1948 && GET_CODE (XEXP (outerdest
, 2)) == CONST_INT
1949 && ! mode_dependent_address_p (XEXP (tem
, 0))
1950 && ! MEM_VOLATILE_P (tem
))
1952 enum machine_mode wanted_mode
1953 = insn_operand_mode
[(int) CODE_FOR_insv
][0];
1954 enum machine_mode is_mode
= GET_MODE (tem
);
1955 int width
= INTVAL (XEXP (outerdest
, 1));
1956 int pos
= INTVAL (XEXP (outerdest
, 2));
1958 /* If we have a narrower mode, we can do something. */
1959 if (GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1961 int offset
= pos
/ BITS_PER_UNIT
;
1962 rtx old_pos
= XEXP (outerdest
, 2);
1965 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
1966 offset
= (GET_MODE_SIZE (is_mode
)
1967 - GET_MODE_SIZE (wanted_mode
) - offset
);
1969 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1971 newmem
= gen_rtx (MEM
, wanted_mode
,
1972 plus_constant (XEXP (tem
, 0), offset
));
1973 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
1974 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (tem
);
1975 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (tem
);
1977 /* Make the change and see if the insn remains valid. */
1978 INSN_CODE (insn
) = -1;
1979 XEXP (outerdest
, 0) = newmem
;
1980 XEXP (outerdest
, 2) = GEN_INT (pos
);
1982 if (recog_memoized (insn
) >= 0)
1985 /* Otherwise, restore old position. XEXP (x, 0) will be
1987 XEXP (outerdest
, 2) = old_pos
;
1991 /* If we get here, the bit-field store doesn't allow memory
1992 or isn't located at a constant position. Load the value into
1993 a register, do the store, and put it back into memory. */
1995 tem1
= gen_reg_rtx (GET_MODE (tem
));
1996 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
1997 emit_insn_after (gen_move_insn (tem
, tem1
), insn
);
1998 XEXP (outerdest
, 0) = tem1
;
2003 /* STRICT_LOW_PART is a no-op on memory references
2004 and it can cause combinations to be unrecognizable,
2007 if (dest
== var
&& GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2008 SET_DEST (x
) = XEXP (SET_DEST (x
), 0);
2010 /* A valid insn to copy VAR into or out of a register
2011 must be left alone, to avoid an infinite loop here.
2012 If the reference to VAR is by a subreg, fix that up,
2013 since SUBREG is not valid for a memref.
2014 Also fix up the address of the stack slot.
2016 Note that we must not try to recognize the insn until
2017 after we know that we have valid addresses and no
2018 (subreg (mem ...) ...) constructs, since these interfere
2019 with determining the validity of the insn. */
2021 if ((SET_SRC (x
) == var
2022 || (GET_CODE (SET_SRC (x
)) == SUBREG
2023 && SUBREG_REG (SET_SRC (x
)) == var
))
2024 && (GET_CODE (SET_DEST (x
)) == REG
2025 || (GET_CODE (SET_DEST (x
)) == SUBREG
2026 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
))
2027 && GET_MODE (var
) == promoted_mode
2028 && x
== single_set (insn
))
2032 replacement
= find_fixup_replacement (replacements
, SET_SRC (x
));
2033 if (replacement
->new)
2034 SET_SRC (x
) = replacement
->new;
2035 else if (GET_CODE (SET_SRC (x
)) == SUBREG
)
2036 SET_SRC (x
) = replacement
->new
2037 = fixup_memory_subreg (SET_SRC (x
), insn
, 0);
2039 SET_SRC (x
) = replacement
->new
2040 = fixup_stack_1 (SET_SRC (x
), insn
);
2042 if (recog_memoized (insn
) >= 0)
2045 /* INSN is not valid, but we know that we want to
2046 copy SET_SRC (x) to SET_DEST (x) in some way. So
2047 we generate the move and see whether it requires more
2048 than one insn. If it does, we emit those insns and
2049 delete INSN. Otherwise, we an just replace the pattern
2050 of INSN; we have already verified above that INSN has
2051 no other function that to do X. */
2053 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2054 if (GET_CODE (pat
) == SEQUENCE
)
2056 emit_insn_after (pat
, insn
);
2057 PUT_CODE (insn
, NOTE
);
2058 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2059 NOTE_SOURCE_FILE (insn
) = 0;
2062 PATTERN (insn
) = pat
;
2067 if ((SET_DEST (x
) == var
2068 || (GET_CODE (SET_DEST (x
)) == SUBREG
2069 && SUBREG_REG (SET_DEST (x
)) == var
))
2070 && (GET_CODE (SET_SRC (x
)) == REG
2071 || (GET_CODE (SET_SRC (x
)) == SUBREG
2072 && GET_CODE (SUBREG_REG (SET_SRC (x
))) == REG
))
2073 && GET_MODE (var
) == promoted_mode
2074 && x
== single_set (insn
))
2078 if (GET_CODE (SET_DEST (x
)) == SUBREG
)
2079 SET_DEST (x
) = fixup_memory_subreg (SET_DEST (x
), insn
, 0);
2081 SET_DEST (x
) = fixup_stack_1 (SET_DEST (x
), insn
);
2083 if (recog_memoized (insn
) >= 0)
2086 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2087 if (GET_CODE (pat
) == SEQUENCE
)
2089 emit_insn_after (pat
, insn
);
2090 PUT_CODE (insn
, NOTE
);
2091 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2092 NOTE_SOURCE_FILE (insn
) = 0;
2095 PATTERN (insn
) = pat
;
2100 /* Otherwise, storing into VAR must be handled specially
2101 by storing into a temporary and copying that into VAR
2102 with a new insn after this one. Note that this case
2103 will be used when storing into a promoted scalar since
2104 the insn will now have different modes on the input
2105 and output and hence will be invalid (except for the case
2106 of setting it to a constant, which does not need any
2107 change if it is valid). We generate extra code in that case,
2108 but combine.c will eliminate it. */
2113 rtx fixeddest
= SET_DEST (x
);
2115 /* STRICT_LOW_PART can be discarded, around a MEM. */
2116 if (GET_CODE (fixeddest
) == STRICT_LOW_PART
)
2117 fixeddest
= XEXP (fixeddest
, 0);
2118 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2119 if (GET_CODE (fixeddest
) == SUBREG
)
2121 fixeddest
= fixup_memory_subreg (fixeddest
, insn
, 0);
2122 promoted_mode
= GET_MODE (fixeddest
);
2125 fixeddest
= fixup_stack_1 (fixeddest
, insn
);
2127 temp
= gen_reg_rtx (promoted_mode
);
2129 emit_insn_after (gen_move_insn (fixeddest
,
2130 gen_lowpart (GET_MODE (fixeddest
),
2134 SET_DEST (x
) = temp
;
2139 /* Nothing special about this RTX; fix its operands. */
2141 fmt
= GET_RTX_FORMAT (code
);
2142 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2145 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (x
, i
), insn
, replacements
);
2149 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2150 fixup_var_refs_1 (var
, promoted_mode
, &XVECEXP (x
, i
, j
),
2151 insn
, replacements
);
2156 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2157 return an rtx (MEM:m1 newaddr) which is equivalent.
2158 If any insns must be emitted to compute NEWADDR, put them before INSN.
2160 UNCRITICAL nonzero means accept paradoxical subregs.
2161 This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES. */
2164 fixup_memory_subreg (x
, insn
, uncritical
)
2169 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
2170 rtx addr
= XEXP (SUBREG_REG (x
), 0);
2171 enum machine_mode mode
= GET_MODE (x
);
2174 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2175 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))
2179 if (BYTES_BIG_ENDIAN
)
2180 offset
+= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2181 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
2182 addr
= plus_constant (addr
, offset
);
2183 if (!flag_force_addr
&& memory_address_p (mode
, addr
))
2184 /* Shortcut if no insns need be emitted. */
2185 return change_address (SUBREG_REG (x
), mode
, addr
);
2187 result
= change_address (SUBREG_REG (x
), mode
, addr
);
2188 emit_insn_before (gen_sequence (), insn
);
2193 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2194 Replace subexpressions of X in place.
2195 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2196 Otherwise return X, with its contents possibly altered.
2198 If any insns must be emitted to compute NEWADDR, put them before INSN.
2200 UNCRITICAL is as in fixup_memory_subreg. */
2203 walk_fixup_memory_subreg (x
, insn
, uncritical
)
2208 register enum rtx_code code
;
2215 code
= GET_CODE (x
);
2217 if (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == MEM
)
2218 return fixup_memory_subreg (x
, insn
, uncritical
);
2220 /* Nothing special about this RTX; fix its operands. */
2222 fmt
= GET_RTX_FORMAT (code
);
2223 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2226 XEXP (x
, i
) = walk_fixup_memory_subreg (XEXP (x
, i
), insn
, uncritical
);
2230 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2232 = walk_fixup_memory_subreg (XVECEXP (x
, i
, j
), insn
, uncritical
);
2238 /* For each memory ref within X, if it refers to a stack slot
2239 with an out of range displacement, put the address in a temp register
2240 (emitting new insns before INSN to load these registers)
2241 and alter the memory ref to use that register.
2242 Replace each such MEM rtx with a copy, to avoid clobberage. */
2245 fixup_stack_1 (x
, insn
)
2250 register RTX_CODE code
= GET_CODE (x
);
2255 register rtx ad
= XEXP (x
, 0);
2256 /* If we have address of a stack slot but it's not valid
2257 (displacement is too large), compute the sum in a register. */
2258 if (GET_CODE (ad
) == PLUS
2259 && GET_CODE (XEXP (ad
, 0)) == REG
2260 && ((REGNO (XEXP (ad
, 0)) >= FIRST_VIRTUAL_REGISTER
2261 && REGNO (XEXP (ad
, 0)) <= LAST_VIRTUAL_REGISTER
)
2262 || XEXP (ad
, 0) == current_function_internal_arg_pointer
)
2263 && GET_CODE (XEXP (ad
, 1)) == CONST_INT
)
2266 if (memory_address_p (GET_MODE (x
), ad
))
2270 temp
= copy_to_reg (ad
);
2271 seq
= gen_sequence ();
2273 emit_insn_before (seq
, insn
);
2274 return change_address (x
, VOIDmode
, temp
);
2279 fmt
= GET_RTX_FORMAT (code
);
2280 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2283 XEXP (x
, i
) = fixup_stack_1 (XEXP (x
, i
), insn
);
2287 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2288 XVECEXP (x
, i
, j
) = fixup_stack_1 (XVECEXP (x
, i
, j
), insn
);
2294 /* Optimization: a bit-field instruction whose field
2295 happens to be a byte or halfword in memory
2296 can be changed to a move instruction.
2298 We call here when INSN is an insn to examine or store into a bit-field.
2299 BODY is the SET-rtx to be altered.
2301 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2302 (Currently this is called only from function.c, and EQUIV_MEM
2306 optimize_bit_field (body
, insn
, equiv_mem
)
2311 register rtx bitfield
;
2314 enum machine_mode mode
;
2316 if (GET_CODE (SET_DEST (body
)) == SIGN_EXTRACT
2317 || GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
)
2318 bitfield
= SET_DEST (body
), destflag
= 1;
2320 bitfield
= SET_SRC (body
), destflag
= 0;
2322 /* First check that the field being stored has constant size and position
2323 and is in fact a byte or halfword suitably aligned. */
2325 if (GET_CODE (XEXP (bitfield
, 1)) == CONST_INT
2326 && GET_CODE (XEXP (bitfield
, 2)) == CONST_INT
2327 && ((mode
= mode_for_size (INTVAL (XEXP (bitfield
, 1)), MODE_INT
, 1))
2329 && INTVAL (XEXP (bitfield
, 2)) % INTVAL (XEXP (bitfield
, 1)) == 0)
2331 register rtx memref
= 0;
2333 /* Now check that the containing word is memory, not a register,
2334 and that it is safe to change the machine mode. */
2336 if (GET_CODE (XEXP (bitfield
, 0)) == MEM
)
2337 memref
= XEXP (bitfield
, 0);
2338 else if (GET_CODE (XEXP (bitfield
, 0)) == REG
2340 memref
= equiv_mem
[REGNO (XEXP (bitfield
, 0))];
2341 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2342 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == MEM
)
2343 memref
= SUBREG_REG (XEXP (bitfield
, 0));
2344 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2346 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == REG
)
2347 memref
= equiv_mem
[REGNO (SUBREG_REG (XEXP (bitfield
, 0)))];
2350 && ! mode_dependent_address_p (XEXP (memref
, 0))
2351 && ! MEM_VOLATILE_P (memref
))
2353 /* Now adjust the address, first for any subreg'ing
2354 that we are now getting rid of,
2355 and then for which byte of the word is wanted. */
2357 register int offset
= INTVAL (XEXP (bitfield
, 2));
2360 /* Adjust OFFSET to count bits from low-address byte. */
2361 if (BITS_BIG_ENDIAN
!= BYTES_BIG_ENDIAN
)
2362 offset
= (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield
, 0)))
2363 - offset
- INTVAL (XEXP (bitfield
, 1)));
2365 /* Adjust OFFSET to count bytes from low-address byte. */
2366 offset
/= BITS_PER_UNIT
;
2367 if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
)
2369 offset
+= SUBREG_WORD (XEXP (bitfield
, 0)) * UNITS_PER_WORD
;
2370 if (BYTES_BIG_ENDIAN
)
2371 offset
-= (MIN (UNITS_PER_WORD
,
2372 GET_MODE_SIZE (GET_MODE (XEXP (bitfield
, 0))))
2373 - MIN (UNITS_PER_WORD
,
2374 GET_MODE_SIZE (GET_MODE (memref
))));
2378 memref
= change_address (memref
, mode
,
2379 plus_constant (XEXP (memref
, 0), offset
));
2380 insns
= get_insns ();
2382 emit_insns_before (insns
, insn
);
2384 /* Store this memory reference where
2385 we found the bit field reference. */
2389 validate_change (insn
, &SET_DEST (body
), memref
, 1);
2390 if (! CONSTANT_ADDRESS_P (SET_SRC (body
)))
2392 rtx src
= SET_SRC (body
);
2393 while (GET_CODE (src
) == SUBREG
2394 && SUBREG_WORD (src
) == 0)
2395 src
= SUBREG_REG (src
);
2396 if (GET_MODE (src
) != GET_MODE (memref
))
2397 src
= gen_lowpart (GET_MODE (memref
), SET_SRC (body
));
2398 validate_change (insn
, &SET_SRC (body
), src
, 1);
2400 else if (GET_MODE (SET_SRC (body
)) != VOIDmode
2401 && GET_MODE (SET_SRC (body
)) != GET_MODE (memref
))
2402 /* This shouldn't happen because anything that didn't have
2403 one of these modes should have got converted explicitly
2404 and then referenced through a subreg.
2405 This is so because the original bit-field was
2406 handled by agg_mode and so its tree structure had
2407 the same mode that memref now has. */
2412 rtx dest
= SET_DEST (body
);
2414 while (GET_CODE (dest
) == SUBREG
2415 && SUBREG_WORD (dest
) == 0
2416 && (GET_MODE_CLASS (GET_MODE (dest
))
2417 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest
)))))
2418 dest
= SUBREG_REG (dest
);
2420 validate_change (insn
, &SET_DEST (body
), dest
, 1);
2422 if (GET_MODE (dest
) == GET_MODE (memref
))
2423 validate_change (insn
, &SET_SRC (body
), memref
, 1);
2426 /* Convert the mem ref to the destination mode. */
2427 rtx newreg
= gen_reg_rtx (GET_MODE (dest
));
2430 convert_move (newreg
, memref
,
2431 GET_CODE (SET_SRC (body
)) == ZERO_EXTRACT
);
2435 validate_change (insn
, &SET_SRC (body
), newreg
, 1);
2439 /* See if we can convert this extraction or insertion into
2440 a simple move insn. We might not be able to do so if this
2441 was, for example, part of a PARALLEL.
2443 If we succeed, write out any needed conversions. If we fail,
2444 it is hard to guess why we failed, so don't do anything
2445 special; just let the optimization be suppressed. */
2447 if (apply_change_group () && seq
)
2448 emit_insns_before (seq
, insn
);
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif
2502 /* Pass through the INSNS of function FNDECL and convert virtual register
2503 references to hard register references. */
2506 instantiate_virtual_regs (fndecl
, insns
)
2512 /* Compute the offsets to use for this function. */
2513 in_arg_offset
= FIRST_PARM_OFFSET (fndecl
);
2514 var_offset
= STARTING_FRAME_OFFSET
;
2515 dynamic_offset
= STACK_DYNAMIC_OFFSET (fndecl
);
2516 out_arg_offset
= STACK_POINTER_OFFSET
;
2518 /* Scan all variables and parameters of this function. For each that is
2519 in memory, instantiate all virtual registers if the result is a valid
2520 address. If not, we do it later. That will handle most uses of virtual
2521 regs on many machines. */
2522 instantiate_decls (fndecl
, 1);
2524 /* Initialize recognition, indicating that volatile is OK. */
2527 /* Scan through all the insns, instantiating every virtual register still
2529 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
2530 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
2531 || GET_CODE (insn
) == CALL_INSN
)
2533 instantiate_virtual_regs_1 (&PATTERN (insn
), insn
, 1);
2534 instantiate_virtual_regs_1 (®_NOTES (insn
), NULL_RTX
, 0);
2537 /* Now instantiate the remaining register equivalences for debugging info.
2538 These will not be valid addresses. */
2539 instantiate_decls (fndecl
, 0);
2541 /* Indicate that, from now on, assign_stack_local should use
2542 frame_pointer_rtx. */
2543 virtuals_instantiated
= 1;
2546 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2547 all virtual registers in their DECL_RTL's.
2549 If VALID_ONLY, do this only if the resulting address is still valid.
2550 Otherwise, always do it. */
2553 instantiate_decls (fndecl
, valid_only
)
2559 if (DECL_SAVED_INSNS (fndecl
))
2560 /* When compiling an inline function, the obstack used for
2561 rtl allocation is the maybepermanent_obstack. Calling
2562 `resume_temporary_allocation' switches us back to that
2563 obstack while we process this function's parameters. */
2564 resume_temporary_allocation ();
2566 /* Process all parameters of the function. */
2567 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= TREE_CHAIN (decl
))
2569 instantiate_decl (DECL_RTL (decl
), int_size_in_bytes (TREE_TYPE (decl
)),
2571 instantiate_decl (DECL_INCOMING_RTL (decl
),
2572 int_size_in_bytes (TREE_TYPE (decl
)), valid_only
);
2575 /* Now process all variables defined in the function or its subblocks. */
2576 instantiate_decls_1 (DECL_INITIAL (fndecl
), valid_only
);
2578 if (DECL_INLINE (fndecl
) || DECL_DEFER_OUTPUT (fndecl
))
2580 /* Save all rtl allocated for this function by raising the
2581 high-water mark on the maybepermanent_obstack. */
2583 /* All further rtl allocation is now done in the current_obstack. */
2584 rtl_in_current_obstack ();
2588 /* Subroutine of instantiate_decls: Process all decls in the given
2589 BLOCK node and all its subblocks. */
2592 instantiate_decls_1 (let
, valid_only
)
2598 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
2599 instantiate_decl (DECL_RTL (t
), int_size_in_bytes (TREE_TYPE (t
)),
2602 /* Process all subblocks. */
2603 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= TREE_CHAIN (t
))
2604 instantiate_decls_1 (t
, valid_only
);
2607 /* Subroutine of the preceding procedures: Given RTL representing a
2608 decl and the size of the object, do any instantiation required.
2610 If VALID_ONLY is non-zero, it means that the RTL should only be
2611 changed if the new address is valid. */
2614 instantiate_decl (x
, size
, valid_only
)
2619 enum machine_mode mode
;
2622 /* If this is not a MEM, no need to do anything. Similarly if the
2623 address is a constant or a register that is not a virtual register. */
2625 if (x
== 0 || GET_CODE (x
) != MEM
)
2629 if (CONSTANT_P (addr
)
2630 || (GET_CODE (addr
) == REG
2631 && (REGNO (addr
) < FIRST_VIRTUAL_REGISTER
2632 || REGNO (addr
) > LAST_VIRTUAL_REGISTER
)))
2635 /* If we should only do this if the address is valid, copy the address.
2636 We need to do this so we can undo any changes that might make the
2637 address invalid. This copy is unfortunate, but probably can't be
2641 addr
= copy_rtx (addr
);
2643 instantiate_virtual_regs_1 (&addr
, NULL_RTX
, 0);
2648 /* Now verify that the resulting address is valid for every integer or
2649 floating-point mode up to and including SIZE bytes long. We do this
2650 since the object might be accessed in any mode and frame addresses
2653 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2654 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
2655 mode
= GET_MODE_WIDER_MODE (mode
))
2656 if (! memory_address_p (mode
, addr
))
2659 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
2660 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
2661 mode
= GET_MODE_WIDER_MODE (mode
))
2662 if (! memory_address_p (mode
, addr
))
2665 /* Otherwise, put back the address, now that we have updated it and we
2666 know it is valid. */
2671 /* Given a pointer to a piece of rtx and an optional pointer to the
2672 containing object, instantiate any virtual registers present in it.
2674 If EXTRA_INSNS, we always do the replacement and generate
2675 any extra insns before OBJECT. If it zero, we do nothing if replacement
2678 Return 1 if we either had nothing to do or if we were able to do the
2679 needed replacement. Return 0 otherwise; we only return zero if
2680 EXTRA_INSNS is zero.
2682 We first try some simple transformations to avoid the creation of extra
2686 instantiate_virtual_regs_1 (loc
, object
, extra_insns
)
2700 /* Re-start here to avoid recursion in common cases. */
2707 code
= GET_CODE (x
);
2709 /* Check for some special cases. */
2726 /* We are allowed to set the virtual registers. This means that
2727 that the actual register should receive the source minus the
2728 appropriate offset. This is used, for example, in the handling
2729 of non-local gotos. */
2730 if (SET_DEST (x
) == virtual_incoming_args_rtx
)
2731 new = arg_pointer_rtx
, offset
= - in_arg_offset
;
2732 else if (SET_DEST (x
) == virtual_stack_vars_rtx
)
2733 new = frame_pointer_rtx
, offset
= - var_offset
;
2734 else if (SET_DEST (x
) == virtual_stack_dynamic_rtx
)
2735 new = stack_pointer_rtx
, offset
= - dynamic_offset
;
2736 else if (SET_DEST (x
) == virtual_outgoing_args_rtx
)
2737 new = stack_pointer_rtx
, offset
= - out_arg_offset
;
2741 /* The only valid sources here are PLUS or REG. Just do
2742 the simplest possible thing to handle them. */
2743 if (GET_CODE (SET_SRC (x
)) != REG
2744 && GET_CODE (SET_SRC (x
)) != PLUS
)
2748 if (GET_CODE (SET_SRC (x
)) != REG
)
2749 temp
= force_operand (SET_SRC (x
), NULL_RTX
);
2752 temp
= force_operand (plus_constant (temp
, offset
), NULL_RTX
);
2756 emit_insns_before (seq
, object
);
2759 if (!validate_change (object
, &SET_SRC (x
), temp
, 0)
2766 instantiate_virtual_regs_1 (&SET_DEST (x
), object
, extra_insns
);
2771 /* Handle special case of virtual register plus constant. */
2772 if (CONSTANT_P (XEXP (x
, 1)))
2774 rtx old
, new_offset
;
2776 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2777 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
2779 rtx inner
= XEXP (XEXP (x
, 0), 0);
2781 if (inner
== virtual_incoming_args_rtx
)
2782 new = arg_pointer_rtx
, offset
= in_arg_offset
;
2783 else if (inner
== virtual_stack_vars_rtx
)
2784 new = frame_pointer_rtx
, offset
= var_offset
;
2785 else if (inner
== virtual_stack_dynamic_rtx
)
2786 new = stack_pointer_rtx
, offset
= dynamic_offset
;
2787 else if (inner
== virtual_outgoing_args_rtx
)
2788 new = stack_pointer_rtx
, offset
= out_arg_offset
;
2795 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 1), object
,
2797 new = gen_rtx (PLUS
, Pmode
, new, XEXP (XEXP (x
, 0), 1));
2800 else if (XEXP (x
, 0) == virtual_incoming_args_rtx
)
2801 new = arg_pointer_rtx
, offset
= in_arg_offset
;
2802 else if (XEXP (x
, 0) == virtual_stack_vars_rtx
)
2803 new = frame_pointer_rtx
, offset
= var_offset
;
2804 else if (XEXP (x
, 0) == virtual_stack_dynamic_rtx
)
2805 new = stack_pointer_rtx
, offset
= dynamic_offset
;
2806 else if (XEXP (x
, 0) == virtual_outgoing_args_rtx
)
2807 new = stack_pointer_rtx
, offset
= out_arg_offset
;
2810 /* We know the second operand is a constant. Unless the
2811 first operand is a REG (which has been already checked),
2812 it needs to be checked. */
2813 if (GET_CODE (XEXP (x
, 0)) != REG
)
2821 new_offset
= plus_constant (XEXP (x
, 1), offset
);
2823 /* If the new constant is zero, try to replace the sum with just
2825 if (new_offset
== const0_rtx
2826 && validate_change (object
, loc
, new, 0))
2829 /* Next try to replace the register and new offset.
2830 There are two changes to validate here and we can't assume that
2831 in the case of old offset equals new just changing the register
2832 will yield a valid insn. In the interests of a little efficiency,
2833 however, we only call validate change once (we don't queue up the
2834 changes and then call apply_change_group). */
2838 ? ! validate_change (object
, &XEXP (x
, 0), new, 0)
2839 : (XEXP (x
, 0) = new,
2840 ! validate_change (object
, &XEXP (x
, 1), new_offset
, 0)))
2848 /* Otherwise copy the new constant into a register and replace
2849 constant with that register. */
2850 temp
= gen_reg_rtx (Pmode
);
2852 if (validate_change (object
, &XEXP (x
, 1), temp
, 0))
2853 emit_insn_before (gen_move_insn (temp
, new_offset
), object
);
2856 /* If that didn't work, replace this expression with a
2857 register containing the sum. */
2860 new = gen_rtx (PLUS
, Pmode
, new, new_offset
);
2863 temp
= force_operand (new, NULL_RTX
);
2867 emit_insns_before (seq
, object
);
2868 if (! validate_change (object
, loc
, temp
, 0)
2869 && ! validate_replace_rtx (x
, temp
, object
))
2877 /* Fall through to generic two-operand expression case. */
2883 case DIV
: case UDIV
:
2884 case MOD
: case UMOD
:
2885 case AND
: case IOR
: case XOR
:
2886 case ROTATERT
: case ROTATE
:
2887 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
2889 case GE
: case GT
: case GEU
: case GTU
:
2890 case LE
: case LT
: case LEU
: case LTU
:
2891 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
2892 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
2897 /* Most cases of MEM that convert to valid addresses have already been
2898 handled by our scan of regno_reg_rtx. The only special handling we
2899 need here is to make a copy of the rtx to ensure it isn't being
2900 shared if we have to change it to a pseudo.
2902 If the rtx is a simple reference to an address via a virtual register,
2903 it can potentially be shared. In such cases, first try to make it
2904 a valid address, which can also be shared. Otherwise, copy it and
2907 First check for common cases that need no processing. These are
2908 usually due to instantiation already being done on a previous instance
2912 if (CONSTANT_ADDRESS_P (temp
)
2913 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2914 || temp
== arg_pointer_rtx
2916 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2917 || temp
== hard_frame_pointer_rtx
2919 || temp
== frame_pointer_rtx
)
2922 if (GET_CODE (temp
) == PLUS
2923 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
2924 && (XEXP (temp
, 0) == frame_pointer_rtx
2925 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2926 || XEXP (temp
, 0) == hard_frame_pointer_rtx
2928 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2929 || XEXP (temp
, 0) == arg_pointer_rtx
2934 if (temp
== virtual_stack_vars_rtx
2935 || temp
== virtual_incoming_args_rtx
2936 || (GET_CODE (temp
) == PLUS
2937 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
2938 && (XEXP (temp
, 0) == virtual_stack_vars_rtx
2939 || XEXP (temp
, 0) == virtual_incoming_args_rtx
)))
2941 /* This MEM may be shared. If the substitution can be done without
2942 the need to generate new pseudos, we want to do it in place
2943 so all copies of the shared rtx benefit. The call below will
2944 only make substitutions if the resulting address is still
2947 Note that we cannot pass X as the object in the recursive call
2948 since the insn being processed may not allow all valid
2949 addresses. However, if we were not passed on object, we can
2950 only modify X without copying it if X will have a valid
2953 ??? Also note that this can still lose if OBJECT is an insn that
2954 has less restrictions on an address that some other insn.
2955 In that case, we will modify the shared address. This case
2956 doesn't seem very likely, though. */
2958 if (instantiate_virtual_regs_1 (&XEXP (x
, 0),
2959 object
? object
: x
, 0))
2962 /* Otherwise make a copy and process that copy. We copy the entire
2963 RTL expression since it might be a PLUS which could also be
2965 *loc
= x
= copy_rtx (x
);
2968 /* Fall through to generic unary operation case. */
2972 case STRICT_LOW_PART
:
2974 case PRE_DEC
: case PRE_INC
: case POST_DEC
: case POST_INC
:
2975 case SIGN_EXTEND
: case ZERO_EXTEND
:
2976 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
2977 case FLOAT
: case FIX
:
2978 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
2982 /* These case either have just one operand or we know that we need not
2983 check the rest of the operands. */
2988 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2989 in front of this insn and substitute the temporary. */
2990 if (x
== virtual_incoming_args_rtx
)
2991 new = arg_pointer_rtx
, offset
= in_arg_offset
;
2992 else if (x
== virtual_stack_vars_rtx
)
2993 new = frame_pointer_rtx
, offset
= var_offset
;
2994 else if (x
== virtual_stack_dynamic_rtx
)
2995 new = stack_pointer_rtx
, offset
= dynamic_offset
;
2996 else if (x
== virtual_outgoing_args_rtx
)
2997 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3001 temp
= plus_constant (new, offset
);
3002 if (!validate_change (object
, loc
, temp
, 0))
3008 temp
= force_operand (temp
, NULL_RTX
);
3012 emit_insns_before (seq
, object
);
3013 if (! validate_change (object
, loc
, temp
, 0)
3014 && ! validate_replace_rtx (x
, temp
, object
))
3022 /* Scan all subexpressions. */
3023 fmt
= GET_RTX_FORMAT (code
);
3024 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3027 if (!instantiate_virtual_regs_1 (&XEXP (x
, i
), object
, extra_insns
))
3030 else if (*fmt
== 'E')
3031 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3032 if (! instantiate_virtual_regs_1 (&XVECEXP (x
, i
, j
), object
,
3039 /* Optimization: assuming this function does not receive nonlocal gotos,
3040 delete the handlers for such, as well as the insns to establish
3041 and disestablish them. */
3047 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3049 /* Delete the handler by turning off the flag that would
3050 prevent jump_optimize from deleting it.
3051 Also permit deletion of the nonlocal labels themselves
3052 if nothing local refers to them. */
3053 if (GET_CODE (insn
) == CODE_LABEL
)
3057 LABEL_PRESERVE_P (insn
) = 0;
3059 /* Remove it from the nonlocal_label list, to avoid confusing
3061 for (t
= nonlocal_labels
, last_t
= 0; t
;
3062 last_t
= t
, t
= TREE_CHAIN (t
))
3063 if (DECL_RTL (TREE_VALUE (t
)) == insn
)
3068 nonlocal_labels
= TREE_CHAIN (nonlocal_labels
);
3070 TREE_CHAIN (last_t
) = TREE_CHAIN (t
);
3073 if (GET_CODE (insn
) == INSN
3074 && ((nonlocal_goto_handler_slot
!= 0
3075 && reg_mentioned_p (nonlocal_goto_handler_slot
, PATTERN (insn
)))
3076 || (nonlocal_goto_stack_level
!= 0
3077 && reg_mentioned_p (nonlocal_goto_stack_level
,
3083 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3084 of the current function. */
3087 nonlocal_label_rtx_list ()
3092 for (t
= nonlocal_labels
; t
; t
= TREE_CHAIN (t
))
3093 x
= gen_rtx (EXPR_LIST
, VOIDmode
, label_rtx (TREE_VALUE (t
)), x
);
3098 /* Output a USE for any register use in RTL.
3099 This is used with -noreg to mark the extent of lifespan
3100 of any registers used in a user-visible variable's DECL_RTL. */
3106 if (GET_CODE (rtl
) == REG
)
3107 /* This is a register variable. */
3108 emit_insn (gen_rtx (USE
, VOIDmode
, rtl
));
3109 else if (GET_CODE (rtl
) == MEM
3110 && GET_CODE (XEXP (rtl
, 0)) == REG
3111 && (REGNO (XEXP (rtl
, 0)) < FIRST_VIRTUAL_REGISTER
3112 || REGNO (XEXP (rtl
, 0)) > LAST_VIRTUAL_REGISTER
)
3113 && XEXP (rtl
, 0) != current_function_internal_arg_pointer
)
3114 /* This is a variable-sized structure. */
3115 emit_insn (gen_rtx (USE
, VOIDmode
, XEXP (rtl
, 0)));
3118 /* Like use_variable except that it outputs the USEs after INSN
3119 instead of at the end of the insn-chain. */
3122 use_variable_after (rtl
, insn
)
3125 if (GET_CODE (rtl
) == REG
)
3126 /* This is a register variable. */
3127 emit_insn_after (gen_rtx (USE
, VOIDmode
, rtl
), insn
);
3128 else if (GET_CODE (rtl
) == MEM
3129 && GET_CODE (XEXP (rtl
, 0)) == REG
3130 && (REGNO (XEXP (rtl
, 0)) < FIRST_VIRTUAL_REGISTER
3131 || REGNO (XEXP (rtl
, 0)) > LAST_VIRTUAL_REGISTER
)
3132 && XEXP (rtl
, 0) != current_function_internal_arg_pointer
)
3133 /* This is a variable-sized structure. */
3134 emit_insn_after (gen_rtx (USE
, VOIDmode
, XEXP (rtl
, 0)), insn
);
3140 return max_parm_reg
;
3143 /* Return the first insn following those generated by `assign_parms'. */
3146 get_first_nonparm_insn ()
3149 return NEXT_INSN (last_parm_insn
);
3150 return get_insns ();
3153 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3154 Crash if there is none. */
3157 get_first_block_beg ()
3159 register rtx searcher
;
3160 register rtx insn
= get_first_nonparm_insn ();
3162 for (searcher
= insn
; searcher
; searcher
= NEXT_INSN (searcher
))
3163 if (GET_CODE (searcher
) == NOTE
3164 && NOTE_LINE_NUMBER (searcher
) == NOTE_INSN_BLOCK_BEG
)
3167 abort (); /* Invalid call to this function. (See comments above.) */
3171 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3172 This means a type for which function calls must pass an address to the
3173 function or get an address back from the function.
3174 EXP may be a type node or an expression (whose type is tested). */
3177 aggregate_value_p (exp
)
3180 int i
, regno
, nregs
;
3183 if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 't')
3186 type
= TREE_TYPE (exp
);
3188 if (RETURN_IN_MEMORY (type
))
3190 /* Types that are TREE_ADDRESSABLE must be contructed in memory,
3191 and thus can't be returned in registers. */
3192 if (TREE_ADDRESSABLE (type
))
3194 if (flag_pcc_struct_return
&& AGGREGATE_TYPE_P (type
))
3196 /* Make sure we have suitable call-clobbered regs to return
3197 the value in; if not, we must return it in memory. */
3198 reg
= hard_function_value (type
, 0);
3199 regno
= REGNO (reg
);
3200 nregs
= HARD_REGNO_NREGS (regno
, TYPE_MODE (type
));
3201 for (i
= 0; i
< nregs
; i
++)
3202 if (! call_used_regs
[regno
+ i
])
3207 /* Assign RTL expressions to the function's parameters.
3208 This may involve copying them into registers and using
3209 those registers as the RTL for them.
3211 If SECOND_TIME is non-zero it means that this function is being
3212 called a second time. This is done by integrate.c when a function's
3213 compilation is deferred. We need to come back here in case the
3214 FUNCTION_ARG macro computes items needed for the rest of the compilation
3215 (such as changing which registers are fixed or caller-saved). But suppress
3216 writing any insns or setting DECL_RTL of anything in this case. */
3219 assign_parms (fndecl
, second_time
)
3224 register rtx entry_parm
= 0;
3225 register rtx stack_parm
= 0;
3226 CUMULATIVE_ARGS args_so_far
;
3227 enum machine_mode promoted_mode
, passed_mode
;
3228 enum machine_mode nominal_mode
, promoted_nominal_mode
;
3230 /* Total space needed so far for args on the stack,
3231 given as a constant and a tree-expression. */
3232 struct args_size stack_args_size
;
3233 tree fntype
= TREE_TYPE (fndecl
);
3234 tree fnargs
= DECL_ARGUMENTS (fndecl
);
3235 /* This is used for the arg pointer when referring to stack args. */
3236 rtx internal_arg_pointer
;
3237 /* This is a dummy PARM_DECL that we used for the function result if
3238 the function returns a structure. */
3239 tree function_result_decl
= 0;
3240 int nparmregs
= list_length (fnargs
) + LAST_VIRTUAL_REGISTER
+ 1;
3241 int varargs_setup
= 0;
3242 rtx conversion_insns
= 0;
3244 /* Nonzero if the last arg is named `__builtin_va_alist',
3245 which is used on some machines for old-fashioned non-ANSI varargs.h;
3246 this should be stuck onto the stack as if it had arrived there. */
3248 = (current_function_varargs
3250 && (parm
= tree_last (fnargs
)) != 0
3252 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm
)),
3253 "__builtin_va_alist")));
3255 /* Nonzero if function takes extra anonymous args.
3256 This means the last named arg must be on the stack
3257 right before the anonymous ones. */
3259 = (TYPE_ARG_TYPES (fntype
) != 0
3260 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3261 != void_type_node
));
3263 current_function_stdarg
= stdarg
;
3265 /* If the reg that the virtual arg pointer will be translated into is
3266 not a fixed reg or is the stack pointer, make a copy of the virtual
3267 arg pointer, and address parms via the copy. The frame pointer is
3268 considered fixed even though it is not marked as such.
3270 The second time through, simply use ap to avoid generating rtx. */
3272 if ((ARG_POINTER_REGNUM
== STACK_POINTER_REGNUM
3273 || ! (fixed_regs
[ARG_POINTER_REGNUM
]
3274 || ARG_POINTER_REGNUM
== FRAME_POINTER_REGNUM
))
3276 internal_arg_pointer
= copy_to_reg (virtual_incoming_args_rtx
);
3278 internal_arg_pointer
= virtual_incoming_args_rtx
;
3279 current_function_internal_arg_pointer
= internal_arg_pointer
;
3281 stack_args_size
.constant
= 0;
3282 stack_args_size
.var
= 0;
3284 /* If struct value address is treated as the first argument, make it so. */
3285 if (aggregate_value_p (DECL_RESULT (fndecl
))
3286 && ! current_function_returns_pcc_struct
3287 && struct_value_incoming_rtx
== 0)
3289 tree type
= build_pointer_type (TREE_TYPE (fntype
));
3291 function_result_decl
= build_decl (PARM_DECL
, NULL_TREE
, type
);
3293 DECL_ARG_TYPE (function_result_decl
) = type
;
3294 TREE_CHAIN (function_result_decl
) = fnargs
;
3295 fnargs
= function_result_decl
;
3298 parm_reg_stack_loc
= (rtx
*) oballoc (nparmregs
* sizeof (rtx
));
3299 bzero ((char *) parm_reg_stack_loc
, nparmregs
* sizeof (rtx
));
3301 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3302 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far
, fntype
, NULL_RTX
);
3304 INIT_CUMULATIVE_ARGS (args_so_far
, fntype
, NULL_RTX
, 0);
3307 /* We haven't yet found an argument that we must push and pretend the
3309 current_function_pretend_args_size
= 0;
3311 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
3313 int aggregate
= AGGREGATE_TYPE_P (TREE_TYPE (parm
));
3314 struct args_size stack_offset
;
3315 struct args_size arg_size
;
3316 int passed_pointer
= 0;
3317 int did_conversion
= 0;
3318 tree passed_type
= DECL_ARG_TYPE (parm
);
3319 tree nominal_type
= TREE_TYPE (parm
);
3321 /* Set LAST_NAMED if this is last named arg before some
3322 anonymous args. We treat it as if it were anonymous too. */
3323 int last_named
= ((TREE_CHAIN (parm
) == 0
3324 || DECL_NAME (TREE_CHAIN (parm
)) == 0)
3325 && (stdarg
|| current_function_varargs
));
3327 if (TREE_TYPE (parm
) == error_mark_node
3328 /* This can happen after weird syntax errors
3329 or if an enum type is defined among the parms. */
3330 || TREE_CODE (parm
) != PARM_DECL
3331 || passed_type
== NULL
)
3333 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
) = gen_rtx (MEM
, BLKmode
,
3335 TREE_USED (parm
) = 1;
3339 /* For varargs.h function, save info about regs and stack space
3340 used by the individual args, not including the va_alist arg. */
3341 if (hide_last_arg
&& last_named
)
3342 current_function_args_info
= args_so_far
;
3344 /* Find mode of arg as it is passed, and mode of arg
3345 as it should be during execution of this function. */
3346 passed_mode
= TYPE_MODE (passed_type
);
3347 nominal_mode
= TYPE_MODE (nominal_type
);
3349 /* If the parm's mode is VOID, its value doesn't matter,
3350 and avoid the usual things like emit_move_insn that could crash. */
3351 if (nominal_mode
== VOIDmode
)
3353 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
) = const0_rtx
;
3357 /* If the parm is to be passed as a transparent union, use the
3358 type of the first field for the tests below. We have already
3359 verified that the modes are the same. */
3360 if (DECL_TRANSPARENT_UNION (parm
)
3361 || TYPE_TRANSPARENT_UNION (passed_type
))
3362 passed_type
= TREE_TYPE (TYPE_FIELDS (passed_type
));
3364 /* See if this arg was passed by invisible reference. It is if
3365 it is an object whose size depends on the contents of the
3366 object itself or if the machine requires these objects be passed
3369 if ((TREE_CODE (TYPE_SIZE (passed_type
)) != INTEGER_CST
3370 && contains_placeholder_p (TYPE_SIZE (passed_type
)))
3371 || TREE_ADDRESSABLE (passed_type
)
3372 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3373 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, passed_mode
,
3374 passed_type
, ! last_named
)
3378 passed_type
= nominal_type
= build_pointer_type (passed_type
);
3380 passed_mode
= nominal_mode
= Pmode
;
3383 promoted_mode
= passed_mode
;
3385 #ifdef PROMOTE_FUNCTION_ARGS
3386 /* Compute the mode in which the arg is actually extended to. */
3387 promoted_mode
= promote_mode (passed_type
, promoted_mode
, &unsignedp
, 1);
3390 /* Let machine desc say which reg (if any) the parm arrives in.
3391 0 means it arrives on the stack. */
3392 #ifdef FUNCTION_INCOMING_ARG
3393 entry_parm
= FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
3394 passed_type
, ! last_named
);
3396 entry_parm
= FUNCTION_ARG (args_so_far
, promoted_mode
,
3397 passed_type
, ! last_named
);
3400 if (entry_parm
== 0)
3401 promoted_mode
= passed_mode
;
3403 #ifdef SETUP_INCOMING_VARARGS
3404 /* If this is the last named parameter, do any required setup for
3405 varargs or stdargs. We need to know about the case of this being an
3406 addressable type, in which case we skip the registers it
3407 would have arrived in.
3409 For stdargs, LAST_NAMED will be set for two parameters, the one that
3410 is actually the last named, and the dummy parameter. We only
3411 want to do this action once.
3413 Also, indicate when RTL generation is to be suppressed. */
3414 if (last_named
&& !varargs_setup
)
3416 SETUP_INCOMING_VARARGS (args_so_far
, promoted_mode
, passed_type
,
3417 current_function_pretend_args_size
,
3423 /* Determine parm's home in the stack,
3424 in case it arrives in the stack or we should pretend it did.
3426 Compute the stack position and rtx where the argument arrives
3429 There is one complexity here: If this was a parameter that would
3430 have been passed in registers, but wasn't only because it is
3431 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3432 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3433 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3434 0 as it was the previous time. */
3436 locate_and_pad_parm (promoted_mode
, passed_type
,
3437 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3440 #ifdef FUNCTION_INCOMING_ARG
3441 FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
3444 || varargs_setup
)) != 0,
3446 FUNCTION_ARG (args_so_far
, promoted_mode
,
3448 ! last_named
|| varargs_setup
) != 0,
3451 fndecl
, &stack_args_size
, &stack_offset
, &arg_size
);
3455 rtx offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
3457 if (offset_rtx
== const0_rtx
)
3458 stack_parm
= gen_rtx (MEM
, promoted_mode
, internal_arg_pointer
);
3460 stack_parm
= gen_rtx (MEM
, promoted_mode
,
3461 gen_rtx (PLUS
, Pmode
,
3462 internal_arg_pointer
, offset_rtx
));
3464 /* If this is a memory ref that contains aggregate components,
3465 mark it as such for cse and loop optimize. Likewise if it
3467 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
3468 RTX_UNCHANGING_P (stack_parm
) = TREE_READONLY (parm
);
3471 /* If this parameter was passed both in registers and in the stack,
3472 use the copy on the stack. */
3473 if (MUST_PASS_IN_STACK (promoted_mode
, passed_type
))
3476 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3477 /* If this parm was passed part in regs and part in memory,
3478 pretend it arrived entirely in memory
3479 by pushing the register-part onto the stack.
3481 In the special case of a DImode or DFmode that is split,
3482 we could put it together in a pseudoreg directly,
3483 but for now that's not worth bothering with. */
3487 int nregs
= FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, promoted_mode
,
3488 passed_type
, ! last_named
);
3492 current_function_pretend_args_size
3493 = (((nregs
* UNITS_PER_WORD
) + (PARM_BOUNDARY
/ BITS_PER_UNIT
) - 1)
3494 / (PARM_BOUNDARY
/ BITS_PER_UNIT
)
3495 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
3498 move_block_from_reg (REGNO (entry_parm
),
3499 validize_mem (stack_parm
), nregs
,
3500 int_size_in_bytes (TREE_TYPE (parm
)));
3501 entry_parm
= stack_parm
;
3506 /* If we didn't decide this parm came in a register,
3507 by default it came on the stack. */
3508 if (entry_parm
== 0)
3509 entry_parm
= stack_parm
;
3511 /* Record permanently how this parm was passed. */
3513 DECL_INCOMING_RTL (parm
) = entry_parm
;
3515 /* If there is actually space on the stack for this parm,
3516 count it in stack_args_size; otherwise set stack_parm to 0
3517 to indicate there is no preallocated stack slot for the parm. */
3519 if (entry_parm
== stack_parm
3520 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3521 /* On some machines, even if a parm value arrives in a register
3522 there is still an (uninitialized) stack slot allocated for it.
3524 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3525 whether this parameter already has a stack slot allocated,
3526 because an arg block exists only if current_function_args_size
3527 is larger than some threshold, and we haven't calculated that
3528 yet. So, for now, we just assume that stack slots never exist
3530 || REG_PARM_STACK_SPACE (fndecl
) > 0
3534 stack_args_size
.constant
+= arg_size
.constant
;
3536 ADD_PARM_SIZE (stack_args_size
, arg_size
.var
);
3539 /* No stack slot was pushed for this parm. */
3542 /* Update info on where next arg arrives in registers. */
3544 FUNCTION_ARG_ADVANCE (args_so_far
, promoted_mode
,
3545 passed_type
, ! last_named
);
3547 /* If this is our second time through, we are done with this parm. */
3551 /* If we can't trust the parm stack slot to be aligned enough
3552 for its ultimate type, don't use that slot after entry.
3553 We'll make another stack slot, if we need one. */
3555 int thisparm_boundary
3556 = FUNCTION_ARG_BOUNDARY (promoted_mode
, passed_type
);
3558 if (GET_MODE_ALIGNMENT (nominal_mode
) > thisparm_boundary
)
3562 /* If parm was passed in memory, and we need to convert it on entry,
3563 don't store it back in that same slot. */
3565 && nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
)
3569 /* Now adjust STACK_PARM to the mode and precise location
3570 where this parameter should live during execution,
3571 if we discover that it must live in the stack during execution.
3572 To make debuggers happier on big-endian machines, we store
3573 the value in the last bytes of the space available. */
3575 if (nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
3580 if (BYTES_BIG_ENDIAN
3581 && GET_MODE_SIZE (nominal_mode
) < UNITS_PER_WORD
)
3582 stack_offset
.constant
+= (GET_MODE_SIZE (passed_mode
)
3583 - GET_MODE_SIZE (nominal_mode
));
3585 offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
3586 if (offset_rtx
== const0_rtx
)
3587 stack_parm
= gen_rtx (MEM
, nominal_mode
, internal_arg_pointer
);
3589 stack_parm
= gen_rtx (MEM
, nominal_mode
,
3590 gen_rtx (PLUS
, Pmode
,
3591 internal_arg_pointer
, offset_rtx
));
3593 /* If this is a memory ref that contains aggregate components,
3594 mark it as such for cse and loop optimize. */
3595 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
3600 /* We need this "use" info, because the gcc-register->stack-register
3601 converter in reg-stack.c needs to know which registers are active
3602 at the start of the function call. The actual parameter loading
3603 instructions are not always available then anymore, since they might
3604 have been optimised away. */
3606 if (GET_CODE (entry_parm
) == REG
&& !(hide_last_arg
&& last_named
))
3607 emit_insn (gen_rtx (USE
, GET_MODE (entry_parm
), entry_parm
));
3610 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3611 in the mode in which it arrives.
3612 STACK_PARM is an RTX for a stack slot where the parameter can live
3613 during the function (in case we want to put it there).
3614 STACK_PARM is 0 if no stack slot was pushed for it.
3616 Now output code if necessary to convert ENTRY_PARM to
3617 the type in which this function declares it,
3618 and store that result in an appropriate place,
3619 which may be a pseudo reg, may be STACK_PARM,
3620 or may be a local stack slot if STACK_PARM is 0.
3622 Set DECL_RTL to that place. */
3624 if (nominal_mode
== BLKmode
)
3626 /* If a BLKmode arrives in registers, copy it to a stack slot. */
3627 if (GET_CODE (entry_parm
) == REG
)
3630 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm
)),
3633 /* Note that we will be storing an integral number of words.
3634 So we have to be careful to ensure that we allocate an
3635 integral number of words. We do this below in the
3636 assign_stack_local if space was not allocated in the argument
3637 list. If it was, this will not work if PARM_BOUNDARY is not
3638 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3639 if it becomes a problem. */
3641 if (stack_parm
== 0)
3644 = assign_stack_local (GET_MODE (entry_parm
),
3647 /* If this is a memory ref that contains aggregate
3648 components, mark it as such for cse and loop optimize. */
3649 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
3652 else if (PARM_BOUNDARY
% BITS_PER_WORD
!= 0)
3655 if (TREE_READONLY (parm
))
3656 RTX_UNCHANGING_P (stack_parm
) = 1;
3658 move_block_from_reg (REGNO (entry_parm
),
3659 validize_mem (stack_parm
),
3660 size_stored
/ UNITS_PER_WORD
,
3661 int_size_in_bytes (TREE_TYPE (parm
)));
3663 DECL_RTL (parm
) = stack_parm
;
3665 else if (! ((obey_regdecls
&& ! DECL_REGISTER (parm
)
3666 && ! DECL_INLINE (fndecl
))
3667 /* layout_decl may set this. */
3668 || TREE_ADDRESSABLE (parm
)
3669 || TREE_SIDE_EFFECTS (parm
)
3670 /* If -ffloat-store specified, don't put explicit
3671 float variables into registers. */
3672 || (flag_float_store
3673 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
))
3674 /* Always assign pseudo to structure return or item passed
3675 by invisible reference. */
3676 || passed_pointer
|| parm
== function_result_decl
)
3678 /* Store the parm in a pseudoregister during the function, but we
3679 may need to do it in a wider mode. */
3681 register rtx parmreg
;
3682 int regno
, regnoi
, regnor
;
3684 unsignedp
= TREE_UNSIGNED (TREE_TYPE (parm
));
3686 promoted_nominal_mode
3687 = promote_mode (TREE_TYPE (parm
), nominal_mode
, &unsignedp
, 0);
3689 parmreg
= gen_reg_rtx (promoted_nominal_mode
);
3690 REG_USERVAR_P (parmreg
) = 1;
3692 /* If this was an item that we received a pointer to, set DECL_RTL
3697 = gen_rtx (MEM
, TYPE_MODE (TREE_TYPE (passed_type
)), parmreg
);
3698 MEM_IN_STRUCT_P (DECL_RTL (parm
)) = aggregate
;
3701 DECL_RTL (parm
) = parmreg
;
3703 /* Copy the value into the register. */
3704 if (nominal_mode
!= passed_mode
3705 || promoted_nominal_mode
!= promoted_mode
)
3707 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3708 mode, by the caller. We now have to convert it to
3709 NOMINAL_MODE, if different. However, PARMREG may be in
3710 a different mode than NOMINAL_MODE if it is being stored
3713 If ENTRY_PARM is a hard register, it might be in a register
3714 not valid for operating in its mode (e.g., an odd-numbered
3715 register for a DFmode). In that case, moves are the only
3716 thing valid, so we can't do a convert from there. This
3717 occurs when the calling sequence allows such misaligned
3720 In addition, the conversion may involve a call, which could
3721 clobber parameters which haven't been copied to pseudo
3722 registers yet. Therefore, we must first copy the parm to
3723 a pseudo reg here, and save the conversion until after all
3724 parameters have been moved. */
3726 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
3728 emit_move_insn (tempreg
, validize_mem (entry_parm
));
3730 push_to_sequence (conversion_insns
);
3731 tempreg
= convert_to_mode (nominal_mode
, tempreg
, unsignedp
);
3733 expand_assignment (parm
,
3734 make_tree (nominal_type
, tempreg
), 0, 0);
3735 conversion_insns
= get_insns ();
3740 emit_move_insn (parmreg
, validize_mem (entry_parm
));
3742 /* If we were passed a pointer but the actual value
3743 can safely live in a register, put it in one. */
3744 if (passed_pointer
&& TYPE_MODE (TREE_TYPE (parm
)) != BLKmode
3745 && ! ((obey_regdecls
&& ! DECL_REGISTER (parm
)
3746 && ! DECL_INLINE (fndecl
))
3747 /* layout_decl may set this. */
3748 || TREE_ADDRESSABLE (parm
)
3749 || TREE_SIDE_EFFECTS (parm
)
3750 /* If -ffloat-store specified, don't put explicit
3751 float variables into registers. */
3752 || (flag_float_store
3753 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
)))
3755 /* We can't use nominal_mode, because it will have been set to
3756 Pmode above. We must use the actual mode of the parm. */
3757 parmreg
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm
)));
3758 REG_USERVAR_P (parmreg
) = 1;
3759 emit_move_insn (parmreg
, DECL_RTL (parm
));
3760 DECL_RTL (parm
) = parmreg
;
3761 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3765 #ifdef FUNCTION_ARG_CALLEE_COPIES
3766 /* If we are passed an arg by reference and it is our responsibility
3767 to make a copy, do it now.
3768 PASSED_TYPE and PASSED mode now refer to the pointer, not the
3769 original argument, so we must recreate them in the call to
3770 FUNCTION_ARG_CALLEE_COPIES. */
3771 /* ??? Later add code to handle the case that if the argument isn't
3772 modified, don't do the copy. */
3774 else if (passed_pointer
3775 && FUNCTION_ARG_CALLEE_COPIES (args_so_far
,
3776 TYPE_MODE (DECL_ARG_TYPE (parm
)),
3777 DECL_ARG_TYPE (parm
),
3779 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm
)))
3782 tree type
= DECL_ARG_TYPE (parm
);
3784 /* This sequence may involve a library call perhaps clobbering
3785 registers that haven't been copied to pseudos yet. */
3787 push_to_sequence (conversion_insns
);
3789 if (TYPE_SIZE (type
) == 0
3790 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
3791 /* This is a variable sized object. */
3792 copy
= gen_rtx (MEM
, BLKmode
,
3793 allocate_dynamic_stack_space
3794 (expr_size (parm
), NULL_RTX
,
3795 TYPE_ALIGN (type
)));
3797 copy
= assign_stack_temp (TYPE_MODE (type
),
3798 int_size_in_bytes (type
), 1);
3799 MEM_IN_STRUCT_P (copy
) = AGGREGATE_TYPE_P (type
);
3801 store_expr (parm
, copy
, 0);
3802 emit_move_insn (parmreg
, XEXP (copy
, 0));
3803 conversion_insns
= get_insns ();
3807 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3809 /* In any case, record the parm's desired stack location
3810 in case we later discover it must live in the stack.
3812 If it is a COMPLEX value, store the stack location for both
3815 if (GET_CODE (parmreg
) == CONCAT
)
3816 regno
= MAX (REGNO (XEXP (parmreg
, 0)), REGNO (XEXP (parmreg
, 1)));
3818 regno
= REGNO (parmreg
);
3820 if (regno
>= nparmregs
)
3823 int old_nparmregs
= nparmregs
;
3825 nparmregs
= regno
+ 5;
3826 new = (rtx
*) oballoc (nparmregs
* sizeof (rtx
));
3827 bcopy ((char *) parm_reg_stack_loc
, (char *) new,
3828 old_nparmregs
* sizeof (rtx
));
3829 bzero ((char *) (new + old_nparmregs
),
3830 (nparmregs
- old_nparmregs
) * sizeof (rtx
));
3831 parm_reg_stack_loc
= new;
3834 if (GET_CODE (parmreg
) == CONCAT
)
3836 enum machine_mode submode
= GET_MODE (XEXP (parmreg
, 0));
3838 regnor
= REGNO (gen_realpart (submode
, parmreg
));
3839 regnoi
= REGNO (gen_imagpart (submode
, parmreg
));
3841 if (stack_parm
!= 0)
3843 parm_reg_stack_loc
[regnor
]
3844 = gen_realpart (submode
, stack_parm
);
3845 parm_reg_stack_loc
[regnoi
]
3846 = gen_imagpart (submode
, stack_parm
);
3850 parm_reg_stack_loc
[regnor
] = 0;
3851 parm_reg_stack_loc
[regnoi
] = 0;
3855 parm_reg_stack_loc
[REGNO (parmreg
)] = stack_parm
;
3857 /* Mark the register as eliminable if we did no conversion
3858 and it was copied from memory at a fixed offset,
3859 and the arg pointer was not copied to a pseudo-reg.
3860 If the arg pointer is a pseudo reg or the offset formed
3861 an invalid address, such memory-equivalences
3862 as we make here would screw up life analysis for it. */
3863 if (nominal_mode
== passed_mode
3865 && GET_CODE (entry_parm
) == MEM
3866 && entry_parm
== stack_parm
3867 && stack_offset
.var
== 0
3868 && reg_mentioned_p (virtual_incoming_args_rtx
,
3869 XEXP (entry_parm
, 0)))
3871 rtx linsn
= get_last_insn ();
3874 /* Mark complex types separately. */
3875 if (GET_CODE (parmreg
) == CONCAT
)
3876 /* Scan backwards for the set of the real and
3878 for (sinsn
= linsn
; sinsn
!= 0;
3879 sinsn
= prev_nonnote_insn (sinsn
))
3881 set
= single_set (sinsn
);
3883 && SET_DEST (set
) == regno_reg_rtx
[regnoi
])
3885 = gen_rtx (EXPR_LIST
, REG_EQUIV
,
3886 parm_reg_stack_loc
[regnoi
],
3889 && SET_DEST (set
) == regno_reg_rtx
[regnor
])
3891 = gen_rtx (EXPR_LIST
, REG_EQUIV
,
3892 parm_reg_stack_loc
[regnor
],
3895 else if ((set
= single_set (linsn
)) != 0
3896 && SET_DEST (set
) == parmreg
)
3898 = gen_rtx (EXPR_LIST
, REG_EQUIV
,
3899 entry_parm
, REG_NOTES (linsn
));
3902 /* For pointer data type, suggest pointer register. */
3903 if (TREE_CODE (TREE_TYPE (parm
)) == POINTER_TYPE
)
3904 mark_reg_pointer (parmreg
,
3905 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
)))
3910 /* Value must be stored in the stack slot STACK_PARM
3911 during function execution. */
3913 if (promoted_mode
!= nominal_mode
)
3915 /* Conversion is required. */
3916 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
3918 emit_move_insn (tempreg
, validize_mem (entry_parm
));
3920 push_to_sequence (conversion_insns
);
3921 entry_parm
= convert_to_mode (nominal_mode
, tempreg
,
3922 TREE_UNSIGNED (TREE_TYPE (parm
)));
3923 conversion_insns
= get_insns ();
3928 if (entry_parm
!= stack_parm
)
3930 if (stack_parm
== 0)
3933 = assign_stack_local (GET_MODE (entry_parm
),
3934 GET_MODE_SIZE (GET_MODE (entry_parm
)), 0);
3935 /* If this is a memory ref that contains aggregate components,
3936 mark it as such for cse and loop optimize. */
3937 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
3940 if (promoted_mode
!= nominal_mode
)
3942 push_to_sequence (conversion_insns
);
3943 emit_move_insn (validize_mem (stack_parm
),
3944 validize_mem (entry_parm
));
3945 conversion_insns
= get_insns ();
3949 emit_move_insn (validize_mem (stack_parm
),
3950 validize_mem (entry_parm
));
3953 DECL_RTL (parm
) = stack_parm
;
3956 /* If this "parameter" was the place where we are receiving the
3957 function's incoming structure pointer, set up the result. */
3958 if (parm
== function_result_decl
)
3960 tree result
= DECL_RESULT (fndecl
);
3961 tree restype
= TREE_TYPE (result
);
3964 = gen_rtx (MEM
, DECL_MODE (result
), DECL_RTL (parm
));
3966 MEM_IN_STRUCT_P (DECL_RTL (result
)) = AGGREGATE_TYPE_P (restype
);
3969 if (TREE_THIS_VOLATILE (parm
))
3970 MEM_VOLATILE_P (DECL_RTL (parm
)) = 1;
3971 if (TREE_READONLY (parm
))
3972 RTX_UNCHANGING_P (DECL_RTL (parm
)) = 1;
3975 /* Output all parameter conversion instructions (possibly including calls)
3976 now that all parameters have been copied out of hard registers. */
3977 emit_insns (conversion_insns
);
3979 max_parm_reg
= max_reg_num ();
3980 last_parm_insn
= get_last_insn ();
3982 current_function_args_size
= stack_args_size
.constant
;
3984 /* Adjust function incoming argument size for alignment and
3987 #ifdef REG_PARM_STACK_SPACE
3988 #ifndef MAYBE_REG_PARM_STACK_SPACE
3989 current_function_args_size
= MAX (current_function_args_size
,
3990 REG_PARM_STACK_SPACE (fndecl
));
3994 #ifdef STACK_BOUNDARY
3995 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3997 current_function_args_size
3998 = ((current_function_args_size
+ STACK_BYTES
- 1)
3999 / STACK_BYTES
) * STACK_BYTES
;
4002 #ifdef ARGS_GROW_DOWNWARD
4003 current_function_arg_offset_rtx
4004 = (stack_args_size
.var
== 0 ? GEN_INT (-stack_args_size
.constant
)
4005 : expand_expr (size_binop (MINUS_EXPR
, stack_args_size
.var
,
4006 size_int (-stack_args_size
.constant
)),
4007 NULL_RTX
, VOIDmode
, 0));
4009 current_function_arg_offset_rtx
= ARGS_SIZE_RTX (stack_args_size
);
4012 /* See how many bytes, if any, of its args a function should try to pop
4015 current_function_pops_args
= RETURN_POPS_ARGS (fndecl
, TREE_TYPE (fndecl
),
4016 current_function_args_size
);
4018 /* For stdarg.h function, save info about
4019 regs and stack space used by the named args. */
4022 current_function_args_info
= args_so_far
;
4024 /* Set the rtx used for the function return value. Put this in its
4025 own variable so any optimizers that need this information don't have
4026 to include tree.h. Do this here so it gets done when an inlined
4027 function gets output. */
4029 current_function_return_rtx
= DECL_RTL (DECL_RESULT (fndecl
));
4032 /* Indicate whether REGNO is an incoming argument to the current function
4033 that was promoted to a wider mode. If so, return the RTX for the
4034 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4035 that REGNO is promoted from and whether the promotion was signed or
4038 #ifdef PROMOTE_FUNCTION_ARGS
4041 promoted_input_arg (regno
, pmode
, punsignedp
)
4043 enum machine_mode
*pmode
;
4048 for (arg
= DECL_ARGUMENTS (current_function_decl
); arg
;
4049 arg
= TREE_CHAIN (arg
))
4050 if (GET_CODE (DECL_INCOMING_RTL (arg
)) == REG
4051 && REGNO (DECL_INCOMING_RTL (arg
)) == regno
4052 && TYPE_MODE (DECL_ARG_TYPE (arg
)) == TYPE_MODE (TREE_TYPE (arg
)))
4054 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg
));
4055 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (arg
));
4057 mode
= promote_mode (TREE_TYPE (arg
), mode
, &unsignedp
, 1);
4058 if (mode
== GET_MODE (DECL_INCOMING_RTL (arg
))
4059 && mode
!= DECL_MODE (arg
))
4061 *pmode
= DECL_MODE (arg
);
4062 *punsignedp
= unsignedp
;
4063 return DECL_INCOMING_RTL (arg
);
4072 /* Compute the size and offset from the start of the stacked arguments for a
4073 parm passed in mode PASSED_MODE and with type TYPE.
4075 INITIAL_OFFSET_PTR points to the current offset into the stacked
4078 The starting offset and size for this parm are returned in *OFFSET_PTR
4079 and *ARG_SIZE_PTR, respectively.
4081 IN_REGS is non-zero if the argument will be passed in registers. It will
4082 never be set if REG_PARM_STACK_SPACE is not defined.
4084 FNDECL is the function in which the argument was defined.
4086 There are two types of rounding that are done. The first, controlled by
4087 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4088 list to be aligned to the specific boundary (in bits). This rounding
4089 affects the initial and starting offsets, but not the argument size.
4091 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4092 optionally rounds the size of the parm to PARM_BOUNDARY. The
4093 initial offset is not affected by this rounding, while the size always
4094 is and the starting offset may be. */
4096 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4097 initial_offset_ptr is positive because locate_and_pad_parm's
4098 callers pass in the total size of args so far as
4099 initial_offset_ptr. arg_size_ptr is always positive.*/
4102 locate_and_pad_parm (passed_mode
, type
, in_regs
, fndecl
,
4103 initial_offset_ptr
, offset_ptr
, arg_size_ptr
)
4104 enum machine_mode passed_mode
;
4108 struct args_size
*initial_offset_ptr
;
4109 struct args_size
*offset_ptr
;
4110 struct args_size
*arg_size_ptr
;
4113 = type
? size_in_bytes (type
) : size_int (GET_MODE_SIZE (passed_mode
));
4114 enum direction where_pad
= FUNCTION_ARG_PADDING (passed_mode
, type
);
4115 int boundary
= FUNCTION_ARG_BOUNDARY (passed_mode
, type
);
4116 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
4117 int reg_parm_stack_space
= 0;
4119 #ifdef REG_PARM_STACK_SPACE
4120 /* If we have found a stack parm before we reach the end of the
4121 area reserved for registers, skip that area. */
4124 #ifdef MAYBE_REG_PARM_STACK_SPACE
4125 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
4127 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
4129 if (reg_parm_stack_space
> 0)
4131 if (initial_offset_ptr
->var
)
4133 initial_offset_ptr
->var
4134 = size_binop (MAX_EXPR
, ARGS_SIZE_TREE (*initial_offset_ptr
),
4135 size_int (reg_parm_stack_space
));
4136 initial_offset_ptr
->constant
= 0;
4138 else if (initial_offset_ptr
->constant
< reg_parm_stack_space
)
4139 initial_offset_ptr
->constant
= reg_parm_stack_space
;
4142 #endif /* REG_PARM_STACK_SPACE */
4144 arg_size_ptr
->var
= 0;
4145 arg_size_ptr
->constant
= 0;
4147 #ifdef ARGS_GROW_DOWNWARD
4148 if (initial_offset_ptr
->var
)
4150 offset_ptr
->constant
= 0;
4151 offset_ptr
->var
= size_binop (MINUS_EXPR
, integer_zero_node
,
4152 initial_offset_ptr
->var
);
4156 offset_ptr
->constant
= - initial_offset_ptr
->constant
;
4157 offset_ptr
->var
= 0;
4159 if (where_pad
!= none
4160 && (TREE_CODE (sizetree
) != INTEGER_CST
4161 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
4162 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4163 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
4164 if (where_pad
!= downward
)
4165 pad_to_arg_alignment (offset_ptr
, boundary
);
4166 if (initial_offset_ptr
->var
)
4168 arg_size_ptr
->var
= size_binop (MINUS_EXPR
,
4169 size_binop (MINUS_EXPR
,
4171 initial_offset_ptr
->var
),
4176 arg_size_ptr
->constant
= (- initial_offset_ptr
->constant
-
4177 offset_ptr
->constant
);
4179 #else /* !ARGS_GROW_DOWNWARD */
4180 pad_to_arg_alignment (initial_offset_ptr
, boundary
);
4181 *offset_ptr
= *initial_offset_ptr
;
4183 #ifdef PUSH_ROUNDING
4184 if (passed_mode
!= BLKmode
)
4185 sizetree
= size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree
)));
4188 /* Pad_below needs the pre-rounded size to know how much to pad below
4189 so this must be done before rounding up. */
4190 if (where_pad
== downward
4191 /* However, BLKmode args passed in regs have their padding done elsewhere.
4192 The stack slot must be able to hold the entire register. */
4193 && !(in_regs
&& passed_mode
== BLKmode
))
4194 pad_below (offset_ptr
, passed_mode
, sizetree
);
4196 if (where_pad
!= none
4197 && (TREE_CODE (sizetree
) != INTEGER_CST
4198 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
4199 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4201 ADD_PARM_SIZE (*arg_size_ptr
, sizetree
);
4202 #endif /* ARGS_GROW_DOWNWARD */
4205 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4206 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4209 pad_to_arg_alignment (offset_ptr
, boundary
)
4210 struct args_size
*offset_ptr
;
4213 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
4215 if (boundary
> BITS_PER_UNIT
)
4217 if (offset_ptr
->var
)
4220 #ifdef ARGS_GROW_DOWNWARD
4225 (ARGS_SIZE_TREE (*offset_ptr
),
4226 boundary
/ BITS_PER_UNIT
);
4227 offset_ptr
->constant
= 0; /*?*/
4230 offset_ptr
->constant
=
4231 #ifdef ARGS_GROW_DOWNWARD
4232 FLOOR_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
4234 CEIL_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
4240 pad_below (offset_ptr
, passed_mode
, sizetree
)
4241 struct args_size
*offset_ptr
;
4242 enum machine_mode passed_mode
;
4245 if (passed_mode
!= BLKmode
)
4247 if (GET_MODE_BITSIZE (passed_mode
) % PARM_BOUNDARY
)
4248 offset_ptr
->constant
4249 += (((GET_MODE_BITSIZE (passed_mode
) + PARM_BOUNDARY
- 1)
4250 / PARM_BOUNDARY
* PARM_BOUNDARY
/ BITS_PER_UNIT
)
4251 - GET_MODE_SIZE (passed_mode
));
4255 if (TREE_CODE (sizetree
) != INTEGER_CST
4256 || (TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)
4258 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4259 tree s2
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4261 ADD_PARM_SIZE (*offset_ptr
, s2
);
4262 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
4268 round_down (value
, divisor
)
4272 return size_binop (MULT_EXPR
,
4273 size_binop (FLOOR_DIV_EXPR
, value
, size_int (divisor
)),
4274 size_int (divisor
));
4277 /* Walk the tree of blocks describing the binding levels within a function
4278 and warn about uninitialized variables.
4279 This is done after calling flow_analysis and before global_alloc
4280 clobbers the pseudo-regs to hard regs. */
4283 uninitialized_vars_warning (block
)
4286 register tree decl
, sub
;
4287 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
4289 if (TREE_CODE (decl
) == VAR_DECL
3290 /* These warnings are unreliable for aggregates
4291 because assigning the fields one by one can fail to convince
4292 flow.c that the entire aggregate was initialized.
4293 Unions are troublesome because members may be shorter. */
4294 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl
))
4295 && DECL_RTL (decl
) != 0
4296 && GET_CODE (DECL_RTL (decl
)) == REG
4297 && regno_uninitialized (REGNO (DECL_RTL (decl
))))
4298 warning_with_decl (decl
,
4299 "`%s' might be used uninitialized in this function");
4300 if (TREE_CODE (decl
) == VAR_DECL
4301 && DECL_RTL (decl
) != 0
4302 && GET_CODE (DECL_RTL (decl
)) == REG
4303 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
4304 warning_with_decl (decl
,
4305 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4307 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
4308 uninitialized_vars_warning (sub
);
4311 /* Do the appropriate part of uninitialized_vars_warning
4312 but for arguments instead of local variables. */
/* Like uninitialized_vars_warning, but for the current function's
   arguments: warn about parms kept in registers that may be clobbered
   by longjmp/vfork.
   NOTE(review): the `decl' declaration and braces appear to have been
   dropped by this extraction.  */
4315 setjmp_args_warning ()
4318 for (decl
= DECL_ARGUMENTS (current_function_decl
);
4319 decl
; decl
= TREE_CHAIN (decl
))
4320 if (DECL_RTL (decl
) != 0
4321 && GET_CODE (DECL_RTL (decl
)) == REG
4322 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
4323 warning_with_decl (decl
, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4326 /* If this function call setjmp, put all vars into the stack
4327 unless they were declared `register'. */
/* When the function calls setjmp: force every VAR_DECL/PARM_DECL in
   BLOCK (and its subblocks) that lives in a register into the stack,
   unless it was explicitly declared `register'.
   NOTE(review): some lines between the NON_SAVING_SETJMP comment and
   the DECL_REGISTER test were dropped by this extraction.  */
4330 setjmp_protect (block
)
4333 register tree decl
, sub
;
4334 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
4335 if ((TREE_CODE (decl
) == VAR_DECL
4336 || TREE_CODE (decl
) == PARM_DECL
)
4337 && DECL_RTL (decl
) != 0
4338 && GET_CODE (DECL_RTL (decl
)) == REG
4339 /* If this variable came from an inline function, it must be
4340 that it's life doesn't overlap the setjmp. If there was a
4341 setjmp in the function, it would already be in memory. We
4342 must exclude such variable because their DECL_RTL might be
4343 set to strange things such as virtual_stack_vars_rtx. */
4344 && ! DECL_FROM_INLINE (decl
)
4346 #ifdef NON_SAVING_SETJMP
4347 /* If longjmp doesn't restore the registers,
4348 don't put anything in them. */
4352 ! DECL_REGISTER (decl
)))
4353 put_var_into_stack (decl
);
/* Recurse into nested binding contours.  */
4354 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
4355 setjmp_protect (sub
);
4358 /* Like the previous function, but for args instead of local variables. */
/* Like setjmp_protect, but for the current function's arguments
   instead of local variables.
   NOTE(review): lines between the NON_SAVING_SETJMP ifdef and the
   DECL_REGISTER test were dropped by this extraction.  */
4361 setjmp_protect_args ()
4363 register tree decl
, sub
;
4364 for (decl
= DECL_ARGUMENTS (current_function_decl
);
4365 decl
; decl
= TREE_CHAIN (decl
))
4366 if ((TREE_CODE (decl
) == VAR_DECL
4367 || TREE_CODE (decl
) == PARM_DECL
)
4368 && DECL_RTL (decl
) != 0
4369 && GET_CODE (DECL_RTL (decl
)) == REG
4371 /* If longjmp doesn't restore the registers,
4372 don't put anything in them. */
4373 #ifdef NON_SAVING_SETJMP
4377 ! DECL_REGISTER (decl
)))
4378 put_var_into_stack (decl
);
4381 /* Return the context-pointer register corresponding to DECL,
4382 or 0 if it does not need one. */
/* Return the rtx of the context pointer (static chain) needed to
   reference DECL from the current function, or 0 if none is needed.
   NOTE(review): the `link' declaration, an early `return 0', and
   braces appear to have been dropped by this extraction.  */
4385 lookup_static_chain (decl
)
4388 tree context
= decl_function_context (decl
);
4392 || (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_NO_STATIC_CHAIN (decl
)))
4395 /* We treat inline_function_decl as an alias for the current function
4396 because that is the inline function whose vars, types, etc.
4397 are being merged into the current function.
4398 See expand_inline_function. */
4399 if (context
== current_function_decl
|| context
== inline_function_decl
)
4400 return virtual_stack_vars_rtx
;
/* Otherwise search the display of enclosing function contexts for
   the rtx recorded for CONTEXT.  */
4402 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
4403 if (TREE_PURPOSE (link
) == context
)
4404 return RTL_EXPR_RTL (TREE_VALUE (link
));
4409 /* Convert a stack slot address ADDR for variable VAR
4410 (from a containing function)
4411 into an address valid in this function (using a static chain). */
/* Convert ADDR, a stack-slot address for VAR in a containing
   function, into an address valid in the current function, by
   rebasing it on the containing function's saved arg pointer or on
   the static chain.
   NOTE(review): several declarations (basereg, displacement, base,
   link), some `else'/`abort' arms and braces were dropped by this
   extraction.  */
4414 fix_lexical_addr (addr
, var
)
4420 tree context
= decl_function_context (var
);
4421 struct function
*fp
;
4424 /* If this is the present function, we need not do anything. */
4425 if (context
== current_function_decl
|| context
== inline_function_decl
)
/* Find the `struct function' for the containing function.  */
4428 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
4429 if (fp
->decl
== context
)
4435 /* Decode given address as base reg plus displacement. */
4436 if (GET_CODE (addr
) == REG
)
4437 basereg
= addr
, displacement
= 0;
4438 else if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
4439 basereg
= XEXP (addr
, 0), displacement
= INTVAL (XEXP (addr
, 1));
4443 /* We accept vars reached via the containing function's
4444 incoming arg pointer and via its stack variables pointer. */
4445 if (basereg
== fp
->internal_arg_pointer
)
4447 /* If reached via arg pointer, get the arg pointer value
4448 out of that function's stack frame.
4450 There are two cases: If a separate ap is needed, allocate a
4451 slot in the outer function for it and dereference it that way.
4452 This is correct even if the real ap is actually a pseudo.
4453 Otherwise, just adjust the offset from the frame pointer to
4456 #ifdef NEED_SEPARATE_AP
4459 if (fp
->arg_pointer_save_area
== 0)
4460 fp
->arg_pointer_save_area
4461 = assign_outer_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0, fp
);
4463 addr
= fix_lexical_addr (XEXP (fp
->arg_pointer_save_area
, 0), var
);
4464 addr
= memory_address (Pmode
, addr
);
4466 base
= copy_to_reg (gen_rtx (MEM
, Pmode
, addr
));
/* No separate ap: adjust the displacement relative to the frame
   pointer instead and use the static chain as base.  */
4468 displacement
+= (FIRST_PARM_OFFSET (context
) - STARTING_FRAME_OFFSET
);
4469 base
= lookup_static_chain (var
);
4473 else if (basereg
== virtual_stack_vars_rtx
)
4475 /* This is the same code as lookup_static_chain, duplicated here to
4476 avoid an extra call to decl_function_context. */
4479 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
4480 if (TREE_PURPOSE (link
) == context
)
4482 base
= RTL_EXPR_RTL (TREE_VALUE (link
));
4490 /* Use same offset, relative to appropriate static chain or argument
4492 return plus_constant (base
, displacement
);
4495 /* Return the address of the trampoline for entering nested fn FUNCTION.
4496 If necessary, allocate a trampoline (in the stack frame)
4497 and emit rtl to initialize its contents (at entry to this function). */
/* Return the address of the trampoline for entering nested fn
   FUNCTION, reusing an existing trampoline (in this or a containing
   function) or allocating a new one in the defining function's stack
   frame and recording it for later initialization.
   NOTE(review): several declarations (link, tramp, rtlexp,
   fn_context), `return'/`else' lines, #else/#endif lines and braces
   were dropped by this extraction.  */
4500 trampoline_address (function
)
4506 struct function
*fp
;
4509 /* Find an existing trampoline and return it. */
4510 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
4511 if (TREE_PURPOSE (link
) == function
)
4513 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0));
/* Also search the trampoline lists of containing functions; such an
   address must be converted with fix_lexical_addr.  */
4515 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
4516 for (link
= fp
->trampoline_list
; link
; link
= TREE_CHAIN (link
))
4517 if (TREE_PURPOSE (link
) == function
)
4519 tramp
= fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0),
4521 return round_trampoline_addr (tramp
);
4524 /* None exists; we must make one. */
4526 /* Find the `struct function' for the function containing FUNCTION. */
4528 fn_context
= decl_function_context (function
);
4529 if (fn_context
!= current_function_decl
)
4530 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
4531 if (fp
->decl
== fn_context
)
4534 /* Allocate run-time space for this trampoline
4535 (usually in the defining function's stack frame). */
4536 #ifdef ALLOCATE_TRAMPOLINE
4537 tramp
= ALLOCATE_TRAMPOLINE (fp
);
4539 /* If rounding needed, allocate extra space
4540 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4541 #ifdef TRAMPOLINE_ALIGNMENT
4542 #define TRAMPOLINE_REAL_SIZE \
4543 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4545 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
/* fp nonzero means the containing function owns the slot.  */
4548 tramp
= assign_outer_stack_local (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0, fp
);
4550 tramp
= assign_stack_local (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0);
4553 /* Record the trampoline for reuse and note it for later initialization
4554 by expand_function_end. */
4557 push_obstacks (fp
->function_maybepermanent_obstack
,
4558 fp
->function_maybepermanent_obstack
);
4559 rtlexp
= make_node (RTL_EXPR
);
4560 RTL_EXPR_RTL (rtlexp
) = tramp
;
4561 fp
->trampoline_list
= tree_cons (function
, rtlexp
, fp
->trampoline_list
);
4566 /* Make the RTL_EXPR node temporary, not momentary, so that the
4567 trampoline_list doesn't become garbage. */
4568 int momentary
= suspend_momentary ();
4569 rtlexp
= make_node (RTL_EXPR
);
4570 resume_momentary (momentary
);
4572 RTL_EXPR_RTL (rtlexp
) = tramp
;
4573 trampoline_list
= tree_cons (function
, rtlexp
, trampoline_list
);
/* Convert the new trampoline's address into one valid here.  */
4576 tramp
= fix_lexical_addr (XEXP (tramp
, 0), function
);
4577 return round_trampoline_addr (tramp
);
4580 /* Given a trampoline address,
4581 round it to multiple of TRAMPOLINE_ALIGNMENT. */
/* Round TRAMP up to a multiple of TRAMPOLINE_ALIGNMENT by emitting
   add/and insns: tramp = (tramp + align-1) & -align.  When
   TRAMPOLINE_ALIGNMENT is not defined, TRAMP is presumably returned
   unchanged (the return and closing lines are missing from this
   extraction).  */
4584 round_trampoline_addr (tramp
)
4587 #ifdef TRAMPOLINE_ALIGNMENT
4588 /* Round address up to desired boundary. */
4589 rtx temp
= gen_reg_rtx (Pmode
);
4590 temp
= expand_binop (Pmode
, add_optab
, tramp
,
4591 GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1),
4592 temp
, 0, OPTAB_LIB_WIDEN
);
4593 tramp
= expand_binop (Pmode
, and_optab
, temp
,
4594 GEN_INT (- TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
),
4595 temp
, 0, OPTAB_LIB_WIDEN
);
4600 /* The functions identify_blocks and reorder_blocks provide a way to
4601 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4602 duplicate portions of the RTL code. Call identify_blocks before
4603 changing the RTL, and call reorder_blocks after. */
4605 /* Put all this function's BLOCK nodes including those that are chained
4606 onto the first block into a vector, and return it.
4607 Also store in each NOTE for the beginning or end of a block
4608 the index of that block in the vector.
4609 The arguments are BLOCK, the chain of top-level blocks of the function,
4610 and INSNS, the insn chain of the function. */
/* Collect all BLOCK nodes of the function into a malloc'd vector
   (returned), and store in each block-begin/end NOTE the index of its
   block in that vector.  BLOCK is the chain of top-level blocks;
   INSNS is the insn chain.
   NOTE(review): several declarations (n_blocks, block_vector,
   block_stack, insn, depth), braces and an abort/error arm were
   dropped by this extraction.  */
4613 identify_blocks (block
, insns
)
4621 int next_block_number
= 1;
4622 int current_block_number
= 1;
4628 n_blocks
= all_blocks (block
, 0);
4629 block_vector
= (tree
*) xmalloc (n_blocks
* sizeof (tree
));
4630 block_stack
= (int *) alloca (n_blocks
* sizeof (int));
4632 all_blocks (block
, block_vector
);
/* Walk the insns; number block-begin notes in depth-first order and
   make block-end notes refer back to the enclosing block.  */
4634 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4635 if (GET_CODE (insn
) == NOTE
)
4637 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
4639 block_stack
[depth
++] = current_block_number
;
4640 current_block_number
= next_block_number
;
4641 NOTE_BLOCK_NUMBER (insn
) = next_block_number
++;
4643 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
4645 current_block_number
= block_stack
[--depth
];
4646 NOTE_BLOCK_NUMBER (insn
) = current_block_number
;
/* Sanity check: every block should have been visited exactly once.  */
4650 if (n_blocks
!= next_block_number
)
4653 return block_vector
;
4656 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4657 and a revised instruction chain, rebuild the tree structure
4658 of BLOCK nodes to correspond to the new order of RTL.
4659 The new block tree is inserted below TOP_BLOCK.
4660 Returns the current top-level block. */
/* Rebuild the BLOCK tree under BLOCK from the (possibly reordered or
   duplicated) insn chain INSNS, using BLOCK_VECTOR produced earlier by
   identify_blocks.  Returns the current top-level block.
   NOTE(review): the `insn' declaration, an early return, and braces
   were dropped by this extraction.  */
4663 reorder_blocks (block_vector
, block
, insns
)
4668 tree current_block
= block
;
4671 if (block_vector
== 0)
4674 /* Prune the old trees away, so that it doesn't get in the way. */
4675 BLOCK_SUBBLOCKS (current_block
) = 0;
4676 BLOCK_CHAIN (current_block
) = 0;
4678 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
4679 if (GET_CODE (insn
) == NOTE
)
4681 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
4683 tree block
= block_vector
[NOTE_BLOCK_NUMBER (insn
)];
4684 /* If we have seen this block before, copy it. */
4685 if (TREE_ASM_WRITTEN (block
))
4686 block
= copy_node (block
);
4687 BLOCK_SUBBLOCKS (block
) = 0;
4688 TREE_ASM_WRITTEN (block
) = 1;
4689 BLOCK_SUPERCONTEXT (block
) = current_block
;
4690 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (current_block
);
4691 BLOCK_SUBBLOCKS (current_block
) = block
;
4692 current_block
= block
;
/* Zap the note's source file: it has served its purpose.  */
4693 NOTE_SOURCE_FILE (insn
) = 0;
4695 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
4697 BLOCK_SUBBLOCKS (current_block
)
4698 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
4699 current_block
= BLOCK_SUPERCONTEXT (current_block
);
4700 NOTE_SOURCE_FILE (insn
) = 0;
/* Subblocks were built in reverse; restore source order.  */
4704 BLOCK_SUBBLOCKS (current_block
)
4705 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
4706 return current_block
;
4709 /* Reverse the order of elements in the chain T of blocks,
4710 and return the new head of the chain (old last element). */
/* NOTE(review): the `blocks_nreverse (t)' header line, the
   `prev = decl;' step and the final `return prev;' appear to have
   been dropped by this extraction -- only the loop core remains.  */
4716 register tree prev
= 0, decl
, next
;
4717 for (decl
= t
; decl
; decl
= next
)
4719 next
= BLOCK_CHAIN (decl
);
4720 BLOCK_CHAIN (decl
) = prev
;
4726 /* Count the subblocks of the list starting with BLOCK, and list them
4727 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
/* Count BLOCK and all its subblocks, recording them into VECTOR when
   VECTOR is nonzero and clearing TREE_ASM_WRITTEN on each.  The
   return of the running count (`n_blocks'), its declaration, and the
   enclosing loop lines are missing from this extraction.  */
4731 all_blocks (block
, vector
)
4739 TREE_ASM_WRITTEN (block
) = 0;
4741 /* Record this block. */
4743 vector
[n_blocks
] = block
;
4747 /* Record the subblocks, and their subblocks... */
4748 n_blocks
+= all_blocks (BLOCK_SUBBLOCKS (block
),
4749 vector
? vector
+ n_blocks
: 0);
4750 block
= BLOCK_CHAIN (block
);
4756 /* Build bytecode call descriptor for function SUBR. */
/* Build the bytecode call descriptor for function SUBR: a constant
   integer array of [argcount, return-type code+size, per-arg
   type code+size ...], output via output_constant_def; its address
   rtx is returned.
   NOTE(review): the `nargs' declaration/counting lines and some
   trailing `calldesc' arguments were dropped by this extraction.  */
4759 bc_build_calldesc (subr
)
4762 tree calldesc
= 0, arg
;
4765 /* Build the argument description vector in reverse order. */
4766 DECL_ARGUMENTS (subr
) = nreverse (DECL_ARGUMENTS (subr
));
4769 for (arg
= DECL_ARGUMENTS (subr
); arg
; arg
= TREE_CHAIN (arg
))
4773 calldesc
= tree_cons ((tree
) 0, size_in_bytes (TREE_TYPE (arg
)), calldesc
);
4774 calldesc
= tree_cons ((tree
) 0, bc_runtime_type_code (TREE_TYPE (arg
)), calldesc
);
/* Restore the original argument order.  */
4777 DECL_ARGUMENTS (subr
) = nreverse (DECL_ARGUMENTS (subr
));
4779 /* Prepend the function's return type. */
4780 calldesc
= tree_cons ((tree
) 0,
4781 size_in_bytes (TREE_TYPE (TREE_TYPE (subr
))),
4784 calldesc
= tree_cons ((tree
) 0,
4785 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr
))),
4788 /* Prepend the arg count. */
4789 calldesc
= tree_cons ((tree
) 0, build_int_2 (nargs
, 0), calldesc
);
4791 /* Output the call description vector and get its address. */
4792 calldesc
= build_nt (CONSTRUCTOR
, (tree
) 0, calldesc
);
4793 TREE_TYPE (calldesc
) = build_array_type (integer_type_node
,
4794 build_index_type (build_int_2 (nargs
* 2, 0)));
4796 return output_constant_def (calldesc
);
4800 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4801 and initialize static variables for generating RTL for the statements
/* Prepare per-function state before the body of SUBR (a
   FUNCTION_DECL) is parsed: reset all current_function_* flags,
   counters, and lists, emit the initial line note for FILENAME/LINE,
   and set the struct-return / pointer-return flags from
   DECL_RESULT (subr).  For bytecode output, only the bytecode state
   is set up (the early return after that branch is missing from this
   extraction, as are several other statements and braces).  */
4805 init_function_start (subr
, filename
, line
)
4812 if (output_bytecode
)
4814 this_function_decl
= subr
;
4815 this_function_calldesc
= bc_build_calldesc (subr
);
4816 local_vars_size
= 0;
4818 max_stack_depth
= 0;
4819 stmt_expr_depth
= 0;
4823 init_stmt_for_function ();
4825 cse_not_expected
= ! optimize
;
4827 /* Caller save not needed yet. */
4828 caller_save_needed
= 0;
4830 /* No stack slots have been made yet. */
4831 stack_slot_list
= 0;
4833 /* There is no stack slot for handling nonlocal gotos. */
4834 nonlocal_goto_handler_slot
= 0;
4835 nonlocal_goto_stack_level
= 0;
4837 /* No labels have been declared for nonlocal use. */
4838 nonlocal_labels
= 0;
4840 /* No function calls so far in this function. */
4841 function_call_count
= 0;
4843 /* No parm regs have been allocated.
4844 (This is important for output_inline_function.) */
4845 max_parm_reg
= LAST_VIRTUAL_REGISTER
+ 1;
4847 /* Initialize the RTL mechanism. */
4850 /* Initialize the queue of pending postincrement and postdecrements,
4851 and some other info in expr.c. */
4854 /* We haven't done register allocation yet. */
4857 init_const_rtx_hash_table ();
4859 current_function_name
= (*decl_printable_name
) (subr
, &junk
);
4861 /* Nonzero if this is a nested function that uses a static chain. */
4863 current_function_needs_context
4864 = (decl_function_context (current_function_decl
) != 0
4865 && ! DECL_NO_STATIC_CHAIN (current_function_decl
));
4867 /* Set if a call to setjmp is seen. */
4868 current_function_calls_setjmp
= 0;
4870 /* Set if a call to longjmp is seen. */
4871 current_function_calls_longjmp
= 0;
4873 current_function_calls_alloca
= 0;
4874 current_function_has_nonlocal_label
= 0;
4875 current_function_has_nonlocal_goto
= 0;
4876 current_function_contains_functions
= 0;
4878 current_function_returns_pcc_struct
= 0;
4879 current_function_returns_struct
= 0;
4880 current_function_epilogue_delay_list
= 0;
4881 current_function_uses_const_pool
= 0;
4882 current_function_uses_pic_offset_table
= 0;
4884 /* We have not yet needed to make a label to jump to for tail-recursion. */
4885 tail_recursion_label
= 0;
4887 /* We haven't had a need to make a save area for ap yet. */
4889 arg_pointer_save_area
= 0;
4891 /* No stack slots allocated yet. */
4894 /* No SAVE_EXPRs in this function yet. */
4897 /* No RTL_EXPRs in this function yet. */
4900 /* Set up to allocate temporaries. */
4903 /* Within function body, compute a type's size as soon it is laid out. */
4904 immediate_size_expand
++;
4906 /* We haven't made any trampolines for this function yet. */
4907 trampoline_list
= 0;
4909 init_pending_stack_adjust ();
4910 inhibit_defer_pop
= 0;
4912 current_function_outgoing_args_size
= 0;
4914 /* Prevent ever trying to delete the first instruction of a function.
4915 Also tell final how to output a linenum before the function prologue. */
4916 emit_line_note (filename
, line
);
4918 /* Make sure first insn is a note even if we don't want linenums.
4919 This makes sure the first insn will never be deleted.
4920 Also, final expects a note to appear there. */
4921 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
4923 /* Set flags used by final.c. */
4924 if (aggregate_value_p (DECL_RESULT (subr
)))
4926 #ifdef PCC_STATIC_STRUCT_RETURN
4927 current_function_returns_pcc_struct
= 1;
4929 current_function_returns_struct
= 1;
4932 /* Warn if this value is an aggregate type,
4933 regardless of which calling convention we are using for it. */
4934 if (warn_aggregate_return
4935 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
))))
4936 warning ("function returns an aggregate");
4938 current_function_returns_pointer
4939 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr
)));
4941 /* Indicate that we need to distinguish between the return value of the
4942 present function and the return value of a function being called. */
4943 rtx_equal_function_value_matters
= 1;
4945 /* Indicate that we have not instantiated virtual registers yet. */
4946 virtuals_instantiated
= 0;
4948 /* Indicate we have no need of a frame pointer yet. */
4949 frame_pointer_needed
= 0;
4951 /* By default assume not varargs or stdarg. */
4952 current_function_varargs
= 0;
4953 current_function_stdarg
= 0;
4956 /* Indicate that the current function uses extra args
4957 not explicitly mentioned in the argument list in any fashion. */
4962 current_function_varargs
= 1;
4965 /* Expand a call to __main at the beginning of a possible main function. */
4967 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
4968 #undef HAS_INIT_SECTION
4969 #define HAS_INIT_SECTION
/* Emit a call to __main at the start of `main' (RTL path only),
   unless the target has an init section (HAS_INIT_SECTION) that
   handles constructors instead.  The tail of the emit_library_call
   argument list is missing from this extraction.  */
4973 expand_main_function ()
4975 if (!output_bytecode
)
4977 /* The zero below avoids a possible parse error */
4979 #if !defined (HAS_INIT_SECTION)
4980 emit_library_call (gen_rtx (SYMBOL_REF
, Pmode
, NAME__MAIN
), 0,
4982 #endif /* not HAS_INIT_SECTION */
4986 extern struct obstack permanent_obstack
;
4988 /* Expand start of bytecode function. See comment at
4989 expand_function_start below for details. */
/* Begin bytecode output for function SUBR: globalize its label if
   public, assign bytecode rtls (stack offsets) to each argument,
   open the bytecode function, and create the callinfo label and
   trampoline.  PARMS_HAVE_CLEANUPS is accepted for interface
   symmetry with expand_function_start (not referenced in the visible
   lines).
   NOTE(review): declarations of `argsz'/`thisarg'/`nlab', an `else'
   arm, and braces were dropped by this extraction.  */
4992 bc_expand_function_start (subr
, parms_have_cleanups
)
4994 int parms_have_cleanups
;
4996 char label
[20], *name
;
5001 if (TREE_PUBLIC (subr
))
5002 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr
)));
5004 #ifdef DEBUG_PRINT_CODE
5005 fprintf (stderr
, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr
)));
/* Lay out the arguments: fixed-size args get their cumulative byte
   offset; variable-sized args are passed as pointers.  */
5008 for (argsz
= 0, thisarg
= DECL_ARGUMENTS (subr
); thisarg
; thisarg
= TREE_CHAIN (thisarg
))
5010 if (DECL_RTL (thisarg
))
5011 abort (); /* Should be NULL here I think. */
5012 else if (TREE_CONSTANT (DECL_SIZE (thisarg
)))
5014 DECL_RTL (thisarg
) = bc_gen_rtx ((char *) 0, argsz
, (struct bc_label
*) 0);
5015 argsz
+= TREE_INT_CST_LOW (DECL_SIZE (thisarg
));
5019 /* Variable-sized objects are pointers to their storage. */
5020 DECL_RTL (thisarg
) = bc_gen_rtx ((char *) 0, argsz
, (struct bc_label
*) 0);
5021 argsz
+= POINTER_SIZE
;
5025 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr
))));
5027 ASM_GENERATE_INTERNAL_LABEL (label
, "LX", nlab
);
/* Copy the generated label onto the permanent obstack so the callinfo
   rtx outlives this function's temporary storage.  */
5030 name
= (char *) obstack_copy0 (&permanent_obstack
, label
, strlen (label
));
5031 this_function_callinfo
= bc_gen_rtx (name
, 0, (struct bc_label
*) 0);
5032 this_function_bytecode
=
5033 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo
));
5037 /* Expand end of bytecode function. See details the comment of
5038 expand_function_end(), below. */
/* Finish bytecode output for the current function: emit a null
   return and pending fixups, close the bytecode segment, then emit
   the callinfo record (stack depth, local size, code/constant/
   calldesc label references).
   NOTE(review): the `ptrconsts' declaration and braces are missing
   from this extraction.  */
5041 bc_expand_function_end ()
5045 expand_null_return ();
5047 /* Emit any fixup code. This must be done before the call to
5048 to BC_END_FUNCTION (), since that will cause the bytecode
5049 segment to be finished off and closed. */
5051 expand_fixups (NULL_RTX
);
5053 ptrconsts
= bc_end_function ();
5055 bc_align_const (2 /* INT_ALIGN */);
5057 /* If this changes also make sure to change bc-interp.h! */
5059 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo
));
5060 bc_emit_const ((char *) &max_stack_depth
, sizeof max_stack_depth
);
5061 bc_emit_const ((char *) &local_vars_size
, sizeof local_vars_size
);
5062 bc_emit_const_labelref (this_function_bytecode
, 0);
5063 bc_emit_const_labelref (ptrconsts
, 0);
5064 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc
), 0);
5068 /* Start the RTL for a new function, and set variables used for
5070 SUBR is the FUNCTION_DECL node.
5071 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5072 the function's parameters, which must be run at any return statement. */
/* Start RTL generation for the body of SUBR (a FUNCTION_DECL):
   store the static chain, create cleanup/return labels, set up
   DECL_RTL for the return value (memory, pseudo, or hard register),
   assign rtls to the parameters, record the parm-birth insn, build
   the context display for nested-function access, and place the
   tail-recursion reentry note.
   NOTE(review): declarations (tem, last_ptr, i), several `else'/
   `return'/`#else'/`#endif' lines and braces were dropped by this
   extraction.  */
5075 expand_function_start (subr
, parms_have_cleanups
)
5077 int parms_have_cleanups
;
5083 if (output_bytecode
)
5085 bc_expand_function_start (subr
, parms_have_cleanups
);
5089 /* Make sure volatile mem refs aren't considered
5090 valid operands of arithmetic insns. */
5091 init_recog_no_volatile ();
5093 /* If function gets a static chain arg, store it in the stack frame.
5094 Do this first, so it gets the first stack slot offset. */
5095 if (current_function_needs_context
)
5097 last_ptr
= assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
5099 #ifdef SMALL_REGISTER_CLASSES
5100 /* Delay copying static chain if it is not a register to avoid
5101 conflicts with regs used for parameters. */
5102 if (GET_CODE (static_chain_incoming_rtx
) == REG
)
5104 emit_move_insn (last_ptr
, static_chain_incoming_rtx
);
5107 /* If the parameters of this function need cleaning up, get a label
5108 for the beginning of the code which executes those cleanups. This must
5109 be done before doing anything with return_label. */
5110 if (parms_have_cleanups
)
5111 cleanup_label
= gen_label_rtx ();
5115 /* Make the label for return statements to jump to, if this machine
5116 does not have a one-instruction return and uses an epilogue,
5117 or if it returns a structure, or if it has parm cleanups. */
5119 if (cleanup_label
== 0 && HAVE_return
5120 && ! current_function_returns_pcc_struct
5121 && ! (current_function_returns_struct
&& ! optimize
))
5124 return_label
= gen_label_rtx ();
5126 return_label
= gen_label_rtx ();
5129 /* Initialize rtx used to return the value. */
5130 /* Do this before assign_parms so that we copy the struct value address
5131 before any library calls that assign parms might generate. */
5133 /* Decide whether to return the value in memory or in a register. */
5134 if (aggregate_value_p (DECL_RESULT (subr
)))
5136 /* Returning something that won't go in a register. */
5137 register rtx value_address
= 0;
5139 #ifdef PCC_STATIC_STRUCT_RETURN
5140 if (current_function_returns_pcc_struct
)
5142 int size
= int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr
)));
5143 value_address
= assemble_static_space (size
);
5148 /* Expect to be passed the address of a place to store the value.
5149 If it is passed as an argument, assign_parms will take care of
5151 if (struct_value_incoming_rtx
)
5153 value_address
= gen_reg_rtx (Pmode
);
5154 emit_move_insn (value_address
, struct_value_incoming_rtx
);
/* The return value lives in memory at value_address.  */
5159 DECL_RTL (DECL_RESULT (subr
))
5160 = gen_rtx (MEM
, DECL_MODE (DECL_RESULT (subr
)), value_address
);
5161 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr
)))
5162 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
)));
5165 else if (DECL_MODE (DECL_RESULT (subr
)) == VOIDmode
)
5166 /* If return mode is void, this decl rtl should not be used. */
5167 DECL_RTL (DECL_RESULT (subr
)) = 0;
5168 else if (parms_have_cleanups
)
5170 /* If function will end with cleanup code for parms,
5171 compute the return values into a pseudo reg,
5172 which we will copy into the true return register
5173 after the cleanups are done. */
5175 enum machine_mode mode
= DECL_MODE (DECL_RESULT (subr
));
5177 #ifdef PROMOTE_FUNCTION_RETURN
5178 tree type
= TREE_TYPE (DECL_RESULT (subr
));
5179 int unsignedp
= TREE_UNSIGNED (type
);
5181 mode
= promote_mode (type
, mode
, &unsignedp
, 1);
5184 DECL_RTL (DECL_RESULT (subr
)) = gen_reg_rtx (mode
);
5187 /* Scalar, returned in a register. */
5189 #ifdef FUNCTION_OUTGOING_VALUE
5190 DECL_RTL (DECL_RESULT (subr
))
5191 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
5193 DECL_RTL (DECL_RESULT (subr
))
5194 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
5197 /* Mark this reg as the function's return value. */
5198 if (GET_CODE (DECL_RTL (DECL_RESULT (subr
))) == REG
)
5200 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr
))) = 1;
5201 /* Needed because we may need to move this to memory
5202 in case it's a named return value whose address is taken. */
5203 DECL_REGISTER (DECL_RESULT (subr
)) = 1;
5207 /* Initialize rtx for parameters and local variables.
5208 In some cases this requires emitting insns. */
5210 assign_parms (subr
, 0);
5212 #ifdef SMALL_REGISTER_CLASSES
5213 /* Copy the static chain now if it wasn't a register. The delay is to
5214 avoid conflicts with the parameter passing registers. */
5216 if (current_function_needs_context
)
5217 if (GET_CODE (static_chain_incoming_rtx
) != REG
)
5218 emit_move_insn (last_ptr
, static_chain_incoming_rtx
);
5221 /* The following was moved from init_function_start.
5222 The move is supposed to make sdb output more accurate. */
5223 /* Indicate the beginning of the function body,
5224 as opposed to parm setup. */
5225 emit_note (NULL_PTR
, NOTE_INSN_FUNCTION_BEG
);
5227 /* If doing stupid allocation, mark parms as born here. */
5229 if (GET_CODE (get_last_insn ()) != NOTE
)
5230 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5231 parm_birth_insn
= get_last_insn ();
/* Mark all parm pseudos (and a non-default internal arg pointer)
   as used, for stupid register allocation.  */
5235 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_parm_reg
; i
++)
5236 use_variable (regno_reg_rtx
[i
]);
5238 if (current_function_internal_arg_pointer
!= virtual_incoming_args_rtx
)
5239 use_variable (current_function_internal_arg_pointer
);
5242 context_display
= 0;
5243 if (current_function_needs_context
)
5245 /* Fetch static chain values for containing functions. */
5246 tem
= decl_function_context (current_function_decl
);
5247 /* If not doing stupid register allocation copy the static chain
5248 pointer into a pseudo. If we have small register classes, copy
5249 the value from memory if static_chain_incoming_rtx is a REG. If
5250 we do stupid register allocation, we use the stack address
5252 if (tem
&& ! obey_regdecls
)
5254 #ifdef SMALL_REGISTER_CLASSES
5255 /* If the static chain originally came in a register, put it back
5256 there, then move it out in the next insn. The reason for
5257 this peculiar code is to satisfy function integration. */
5258 if (GET_CODE (static_chain_incoming_rtx
) == REG
)
5259 emit_move_insn (static_chain_incoming_rtx
, last_ptr
);
5262 last_ptr
= copy_to_reg (static_chain_incoming_rtx
);
/* Build the display: one (context, rtx) pair per enclosing level.  */
5267 tree rtlexp
= make_node (RTL_EXPR
);
5269 RTL_EXPR_RTL (rtlexp
) = last_ptr
;
5270 context_display
= tree_cons (tem
, rtlexp
, context_display
);
5271 tem
= decl_function_context (tem
);
5274 /* Chain thru stack frames, assuming pointer to next lexical frame
5275 is found at the place we always store it. */
5276 #ifdef FRAME_GROWS_DOWNWARD
5277 last_ptr
= plus_constant (last_ptr
, - GET_MODE_SIZE (Pmode
));
5279 last_ptr
= copy_to_reg (gen_rtx (MEM
, Pmode
,
5280 memory_address (Pmode
, last_ptr
)));
5282 /* If we are not optimizing, ensure that we know that this
5283 piece of context is live over the entire function. */
5285 save_expr_regs
= gen_rtx (EXPR_LIST
, VOIDmode
, last_ptr
,
5290 /* After the display initializations is where the tail-recursion label
5291 should go, if we end up needing one. Ensure we have a NOTE here
5292 since some things (like trampolines) get placed before this. */
5293 tail_recursion_reentry
= emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5295 /* Evaluate now the sizes of any types declared among the arguments. */
5296 for (tem
= nreverse (get_pending_sizes ()); tem
; tem
= TREE_CHAIN (tem
))
5297 expand_expr (TREE_VALUE (tem
), const0_rtx
, VOIDmode
, 0);
5299 /* Make sure there is a line number after the function entry setup code. */
5300 force_next_line_note ();
5303 /* Generate RTL for the end of the current function.
5304 FILENAME and LINE are the current position in the source file.
5306 It is up to language-specific callers to do cleanups for parameters--
5307 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5310 expand_function_end (filename
, line
, end_bindings
)
5318 #ifdef TRAMPOLINE_TEMPLATE
5319 static rtx initial_trampoline
;
5322 if (output_bytecode
)
5324 bc_expand_function_end ();
5328 #ifdef NON_SAVING_SETJMP
5329 /* Don't put any variables in registers if we call setjmp
5330 on a machine that fails to restore the registers. */
5331 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
5333 if (DECL_INITIAL (current_function_decl
) != error_mark_node
)
5334 setjmp_protect (DECL_INITIAL (current_function_decl
));
5336 setjmp_protect_args ();
5340 /* Save the argument pointer if a save area was made for it. */
5341 if (arg_pointer_save_area
)
5343 rtx x
= gen_move_insn (arg_pointer_save_area
, virtual_incoming_args_rtx
);
5344 emit_insn_before (x
, tail_recursion_reentry
);
5347 /* Initialize any trampolines required by this function. */
5348 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
5350 tree function
= TREE_PURPOSE (link
);
5351 rtx context
= lookup_static_chain (function
);
5352 rtx tramp
= RTL_EXPR_RTL (TREE_VALUE (link
));
5356 #ifdef TRAMPOLINE_TEMPLATE
5357 /* First make sure this compilation has a template for
5358 initializing trampolines. */
5359 if (initial_trampoline
== 0)
5361 end_temporary_allocation ();
5363 = gen_rtx (MEM
, BLKmode
, assemble_trampoline_template ());
5364 resume_temporary_allocation ();
5368 /* Generate insns to initialize the trampoline. */
5370 tramp
= round_trampoline_addr (XEXP (tramp
, 0));
5371 #ifdef TRAMPOLINE_TEMPLATE
5372 blktramp
= change_address (initial_trampoline
, BLKmode
, tramp
);
5373 emit_block_move (blktramp
, initial_trampoline
,
5374 GEN_INT (TRAMPOLINE_SIZE
),
5375 FUNCTION_BOUNDARY
/ BITS_PER_UNIT
);
5377 INITIALIZE_TRAMPOLINE (tramp
, XEXP (DECL_RTL (function
), 0), context
);
5381 /* Put those insns at entry to the containing function (this one). */
5382 emit_insns_before (seq
, tail_recursion_reentry
);
5385 /* Warn about unused parms if extra warnings were specified. */
5386 if (warn_unused
&& extra_warnings
)
5390 for (decl
= DECL_ARGUMENTS (current_function_decl
);
5391 decl
; decl
= TREE_CHAIN (decl
))
5392 if (! TREE_USED (decl
) && TREE_CODE (decl
) == PARM_DECL
5393 && DECL_NAME (decl
) && ! DECL_ARTIFICIAL (decl
))
5394 warning_with_decl (decl
, "unused parameter `%s'");
5397 /* Delete handlers for nonlocal gotos if nothing uses them. */
5398 if (nonlocal_goto_handler_slot
!= 0 && !current_function_has_nonlocal_label
)
5401 /* End any sequences that failed to be closed due to syntax errors. */
5402 while (in_sequence_p ())
5405 /* Outside function body, can't compute type's actual size
5406 until next function's body starts. */
5407 immediate_size_expand
--;
5409 /* If doing stupid register allocation,
5410 mark register parms as dying here. */
5415 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_parm_reg
; i
++)
5416 use_variable (regno_reg_rtx
[i
]);
5418 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5420 for (tem
= save_expr_regs
; tem
; tem
= XEXP (tem
, 1))
5422 use_variable (XEXP (tem
, 0));
5423 use_variable_after (XEXP (tem
, 0), parm_birth_insn
);
5426 if (current_function_internal_arg_pointer
!= virtual_incoming_args_rtx
)
5427 use_variable (current_function_internal_arg_pointer
);
5430 clear_pending_stack_adjust ();
5431 do_pending_stack_adjust ();
5433 /* Mark the end of the function body.
5434 If control reaches this insn, the function can drop through
5435 without returning a value. */
5436 emit_note (NULL_PTR
, NOTE_INSN_FUNCTION_END
);
5438 /* Output a linenumber for the end of the function.
5439 SDB depends on this. */
5440 emit_line_note_force (filename
, line
);
5442 /* Output the label for the actual return from the function,
5443 if one is expected. This happens either because a function epilogue
5444 is used instead of a return instruction, or because a return was done
5445 with a goto in order to run local cleanups, or because of pcc-style
5446 structure returning. */
5449 emit_label (return_label
);
5451 /* C++ uses this. */
5453 expand_end_bindings (0, 0, 0);
5455 /* If we had calls to alloca, and this machine needs
5456 an accurate stack pointer to exit the function,
5457 insert some code to save and restore the stack pointer. */
5458 #ifdef EXIT_IGNORE_STACK
5459 if (! EXIT_IGNORE_STACK
)
5461 if (current_function_calls_alloca
)
5465 emit_stack_save (SAVE_FUNCTION
, &tem
, parm_birth_insn
);
5466 emit_stack_restore (SAVE_FUNCTION
, tem
, NULL_RTX
);
5469 /* If scalar return value was computed in a pseudo-reg,
5470 copy that to the hard return register. */
5471 if (DECL_RTL (DECL_RESULT (current_function_decl
)) != 0
5472 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl
))) == REG
5473 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl
)))
5474 >= FIRST_PSEUDO_REGISTER
))
5476 rtx real_decl_result
;
5478 #ifdef FUNCTION_OUTGOING_VALUE
5480 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
5481 current_function_decl
);
5484 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
5485 current_function_decl
);
5487 REG_FUNCTION_VALUE_P (real_decl_result
) = 1;
5488 emit_move_insn (real_decl_result
,
5489 DECL_RTL (DECL_RESULT (current_function_decl
)));
5490 emit_insn (gen_rtx (USE
, VOIDmode
, real_decl_result
));
5493 /* If returning a structure, arrange to return the address of the value
5494 in a place where debuggers expect to find it.
5496 If returning a structure PCC style,
5497 the caller also depends on this value.
5498 And current_function_returns_pcc_struct is not necessarily set. */
5499 if (current_function_returns_struct
5500 || current_function_returns_pcc_struct
)
5502 rtx value_address
= XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
5503 tree type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
5504 #ifdef FUNCTION_OUTGOING_VALUE
5506 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type
),
5507 current_function_decl
);
5510 = FUNCTION_VALUE (build_pointer_type (type
),
5511 current_function_decl
);
5514 /* Mark this as a function return value so integrate will delete the
5515 assignment and USE below when inlining this function. */
5516 REG_FUNCTION_VALUE_P (outgoing
) = 1;
5518 emit_move_insn (outgoing
, value_address
);
5519 use_variable (outgoing
);
5522 /* Output a return insn if we are using one.
5523 Otherwise, let the rtl chain end here, to drop through
5524 into the epilogue. */
5529 emit_jump_insn (gen_return ());
5534 /* Fix up any gotos that jumped out to the outermost
5535 binding level of the function.
5536 Must follow emitting RETURN_LABEL. */
5538 /* If you have any cleanups to do at this point,
5539 and they need to create temporary variables,
5540 then you will lose. */
5541 expand_fixups (get_insns ());
5544 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
/* NOTE(review): each is filled in by record_insns () from
   thread_prologue_and_epilogue_insns () below; presumably a 0-terminated
   vector of UIDs, with a null pointer meaning "none recorded" -- confirm
   against the initialization code, which this extraction does not show.  */
5546 static int *prologue
;
5547 static int *epilogue
;
5549 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5550 or a single insn). */
/* NOTE(review): this region looks like a lossy extraction -- the return
   type, the parameter declaration, braces, the vector's terminating 0
   entry, the element-copy loop and the final `return vec;' appear to have
   been dropped.  Restore them from the pristine source before compiling;
   the comments below describe only what is still visible.  */
5553 record_insns (insns
)
/* SEQUENCE case: allocate one slot per element (plus one, presumably for
   a 0 terminator -- TODO confirm) and record each element's UID.  */
5558 if (GET_CODE (insns
) == SEQUENCE
)
5560 int len
= XVECLEN (insns
, 0);
5561 vec
= (int *) oballoc ((len
+ 1) * sizeof (int));
5564 vec
[len
] = INSN_UID (XVECEXP (insns
, 0, len
));
/* Single-insn case: a two-entry vector, the UID followed (presumably) by
   a 0 terminator.  */
5568 vec
= (int *) oballoc (2 * sizeof (int));
5569 vec
[0] = INSN_UID (insns
);
5575 /* Determine how many INSN_UIDs in VEC are part of INSN. */
/* NOTE(review): the function header (return type and parameter
   declarations), braces, the count accumulator and the return statements
   are missing from this extraction.  VEC is evidently a 0-terminated UID
   vector as built by record_insns above -- the inner loops stop on
   vec[j] == 0.  */
5578 contains (insn
, vec
)
/* If INSN's pattern is a SEQUENCE (a delay-slot group), test every
   element of the sequence against VEC.  */
5584 if (GET_CODE (insn
) == INSN
5585 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
5588 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
5589 for (j
= 0; vec
[j
]; j
++)
5590 if (INSN_UID (XVECEXP (PATTERN (insn
), 0, i
)) == vec
[j
])
/* Ordinary insn: compare its single UID against each entry of VEC.  */
5596 for (j
= 0; vec
[j
]; j
++)
5597 if (INSN_UID (insn
) == vec
[j
])
5603 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5604 this into place with notes indicating where the prologue ends and where
5605 the epilogue begins. Update the basic block information when possible. */
/* NOTE(review): lossy extraction -- the return type, the parameter
   declaration for F, braces, several loop/condition headers, the
   first_use/last_use list setup, and the #else/#endif lines (including
   any `prologue = 0' / `epilogue = 0' fallbacks) appear to have been
   dropped.  Restore them from the pristine source; the comments added
   below describe only what is visible.  */
5608 thread_prologue_and_epilogue_insns (f
)
5611 #ifdef HAVE_prologue
5614 rtx head
, seq
, insn
;
5616 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5617 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5618 emit_note_after (NOTE_INSN_PROLOGUE_END
, f
);
/* Emit the machine-generated prologue right after insn F.  */
5619 seq
= gen_prologue ();
5620 head
= emit_insn_after (seq
, f
);
5622 /* Include the new prologue insns in the first block. Ignore them
5623 if they form a basic block unto themselves. */
5624 if (basic_block_head
&& n_basic_blocks
5625 && GET_CODE (basic_block_head
[0]) != CODE_LABEL
)
5626 basic_block_head
[0] = NEXT_INSN (f
);
5628 /* Retain a map of the prologue insns. */
5629 prologue
= record_insns (GET_CODE (seq
) == SEQUENCE
? seq
: head
);
5635 #ifdef HAVE_epilogue
5638 rtx insn
= get_last_insn ();
5639 rtx prev
= prev_nonnote_insn (insn
);
5641 /* If we end with a BARRIER, we don't need an epilogue. */
5642 if (! (prev
&& GET_CODE (prev
) == BARRIER
))
5648 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5649 epilogue insns, the USE insns at the end of a function,
5650 the jump insn that returns, and then a BARRIER. */
5652 /* Move the USE insns at the end of a function onto a list. */
/* NOTE(review): the loop header and the declarations of tem, first_use
   and last_use are missing here; tem is presumably each trailing USE insn
   being unlinked from the chain and prepended to the first_use list --
   confirm against the pristine source.  */
&& GET_CODE (prev
) == INSN
5655 && GET_CODE (PATTERN (prev
)) == USE
)
5658 prev
= prev_nonnote_insn (prev
);
/* Unlink TEM from the insn chain...  */
5660 NEXT_INSN (PREV_INSN (tem
)) = NEXT_INSN (tem
);
5661 PREV_INSN (NEXT_INSN (tem
)) = PREV_INSN (tem
);
/* ...and push it on the front of the list headed by first_use.  */
5664 NEXT_INSN (tem
) = first_use
;
5665 PREV_INSN (first_use
) = tem
;
/* Terminate the function body with a barrier, then emit the
   machine-generated epilogue as a jump insn after INSN.  */
5672 emit_barrier_after (insn
);
5674 seq
= gen_epilogue ();
5675 tail
= emit_jump_insn_after (seq
, insn
);
5677 /* Insert the USE insns immediately before the return insn, which
5678 must be the first instruction before the final barrier. */
5681 tem
= prev_nonnote_insn (get_last_insn ());
5682 NEXT_INSN (PREV_INSN (tem
)) = first_use
;
5683 PREV_INSN (first_use
) = PREV_INSN (tem
);
5684 PREV_INSN (tem
) = last_use
;
5685 NEXT_INSN (last_use
) = tem
;
/* Mark where the epilogue begins, for reposition_...notes below.  */
5688 emit_note_after (NOTE_INSN_EPILOGUE_BEG
, insn
);
5690 /* Include the new epilogue insns in the last block. Ignore
5691 them if they form a basic block unto themselves. */
5692 if (basic_block_end
&& n_basic_blocks
5693 && GET_CODE (basic_block_end
[n_basic_blocks
- 1]) != JUMP_INSN
)
5694 basic_block_end
[n_basic_blocks
- 1] = tail
;
5696 /* Retain a map of the epilogue insns. */
5697 epilogue
= record_insns (GET_CODE (seq
) == SEQUENCE
? seq
: tail
);
5705 /* Reposition the prologue-end and epilogue-begin notes after instruction
5706 scheduling and delayed branch scheduling. */
/* NOTE(review): lossy extraction -- the return type, the parameter
   declaration for F, braces, the guards on prologue/epilogue being
   non-null, the declarations of len/next/prev, and several break/exit
   statements appear to have been dropped.  Restore them from the pristine
   source; comments added below describe only what is visible.  */
5709 reposition_prologue_and_epilogue_notes (f
)
5712 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5713 /* Reposition the prologue and epilogue notes. */
5721 register rtx insn
, note
= 0;
5723 /* Scan from the beginning until we reach the last prologue insn.
5724 We apparently can't depend on basic_block_{head,end} after
   (comment truncated by extraction -- presumably "after reload";
   confirm against the pristine source).  */
/* LEN counts the recorded prologue UIDs not yet seen; the scan stops
   when it reaches 0.  */
5726 for (len
= 0; prologue
[len
]; len
++)
5728 for (insn
= f
; len
&& insn
; insn
= NEXT_INSN (insn
))
5730 if (GET_CODE (insn
) == NOTE
)
/* Remember the prologue-end note if we pass it during the scan.  */
5732 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_PROLOGUE_END
)
5735 else if ((len
-= contains (insn
, prologue
)) == 0)
5737 /* Find the prologue-end note if we haven't already, and
5738 move it to just after the last prologue insn. */
5741 for (note
= insn
; note
= NEXT_INSN (note
);)
5742 if (GET_CODE (note
) == NOTE
5743 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_PROLOGUE_END
)
/* Splice NOTE out of its current position...  */
5746 next
= NEXT_INSN (note
);
5747 prev
= PREV_INSN (note
);
5749 NEXT_INSN (prev
) = next
;
5751 PREV_INSN (next
) = prev
;
/* ...and reinsert it right after the last prologue insn.  */
5752 add_insn_after (note
, insn
);
5759 register rtx insn
, note
= 0;
5761 /* Scan from the end until we reach the first epilogue insn.
5762 We apparently can't depend on basic_block_{head,end} after
   (comment truncated by extraction -- presumably "after reload";
   confirm against the pristine source).  */
5764 for (len
= 0; epilogue
[len
]; len
++)
5766 for (insn
= get_last_insn (); len
&& insn
; insn
= PREV_INSN (insn
))
5768 if (GET_CODE (insn
) == NOTE
)
/* Remember the epilogue-begin note if we pass it during the scan.  */
5770 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EPILOGUE_BEG
)
5773 else if ((len
-= contains (insn
, epilogue
)) == 0)
5775 /* Find the epilogue-begin note if we haven't already, and
5776 move it to just before the first epilogue insn. */
5779 for (note
= insn
; note
= PREV_INSN (note
);)
5780 if (GET_CODE (note
) == NOTE
5781 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_EPILOGUE_BEG
)
/* Splice NOTE out of its current position...  */
5784 next
= NEXT_INSN (note
);
5785 prev
= PREV_INSN (note
);
5787 NEXT_INSN (prev
) = next
;
5789 PREV_INSN (next
) = prev
;
/* ...and reinsert it just before the first epilogue insn.  */
5790 add_insn_after (note
, PREV_INSN (insn
));
5795 #endif /* HAVE_prologue or HAVE_epilogue */