1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
48 #include "insn-flags.h"
50 #include "insn-codes.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
56 #include "basic-block.h"
#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   required alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif
92 /* Number of bytes of args popped by function being compiled on its return.
93 Zero if no bytes are to be popped.
94 May affect compilation of return insn or of function epilogue. */
96 int current_function_pops_args
;
98 /* Nonzero if function being compiled needs to be given an address
99 where the value should be stored. */
101 int current_function_returns_struct
;
103 /* Nonzero if function being compiled needs to
104 return the address of where it has put a structure value. */
106 int current_function_returns_pcc_struct
;
108 /* Nonzero if function being compiled needs to be passed a static chain. */
110 int current_function_needs_context
;
112 /* Nonzero if function being compiled can call setjmp. */
114 int current_function_calls_setjmp
;
116 /* Nonzero if function being compiled can call longjmp. */
118 int current_function_calls_longjmp
;
120 /* Nonzero if function being compiled receives nonlocal gotos
121 from nested functions. */
123 int current_function_has_nonlocal_label
;
125 /* Nonzero if function being compiled has nonlocal gotos to parent
128 int current_function_has_nonlocal_goto
;
130 /* Nonzero if this function has a computed goto.
132 It is computed during find_basic_blocks or during stupid life
135 int current_function_has_computed_jump
;
137 /* Nonzero if function being compiled contains nested functions. */
139 int current_function_contains_functions
;
141 /* Nonzero if the current function is a thunk (a lightweight function that
142 just adjusts one of its arguments and forwards to another function), so
143 we should try to cut corners where we can. */
144 int current_function_is_thunk
;
146 /* Nonzero if function being compiled can call alloca,
147 either as a subroutine or builtin. */
149 int current_function_calls_alloca
;
151 /* Nonzero if the current function returns a pointer type */
153 int current_function_returns_pointer
;
155 /* If some insns can be deferred to the delay slots of the epilogue, the
156 delay list for them is recorded here. */
158 rtx current_function_epilogue_delay_list
;
160 /* If function's args have a fixed size, this is that size, in bytes.
162 May affect compilation of return insn or of function epilogue. */
164 int current_function_args_size
;
166 /* # bytes the prologue should push and pretend that the caller pushed them.
167 The prologue must do this, but only if parms can be passed in registers. */
169 int current_function_pretend_args_size
;
171 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
172 defined, the needed space is pushed by the prologue. */
174 int current_function_outgoing_args_size
;
176 /* This is the offset from the arg pointer to the place where the first
177 anonymous arg can be found, if there is one. */
179 rtx current_function_arg_offset_rtx
;
181 /* Nonzero if current function uses varargs.h or equivalent.
182 Zero for functions that use stdarg.h. */
184 int current_function_varargs
;
186 /* Nonzero if current function uses stdarg.h or equivalent.
187 Zero for functions that use varargs.h. */
189 int current_function_stdarg
;
191 /* Quantities of various kinds of registers
192 used for the current function's args. */
194 CUMULATIVE_ARGS current_function_args_info
;
196 /* Name of function now being compiled. */
198 char *current_function_name
;
200 /* If non-zero, an RTL expression for the location at which the current
201 function returns its result. If the current function returns its
202 result in a register, current_function_return_rtx will always be
203 the hard register containing the result. */
205 rtx current_function_return_rtx
;
207 /* Nonzero if the current function uses the constant pool. */
209 int current_function_uses_const_pool
;
211 /* Nonzero if the current function uses pic_offset_table_rtx. */
212 int current_function_uses_pic_offset_table
;
214 /* The arg pointer hard register, or the pseudo into which it was copied. */
215 rtx current_function_internal_arg_pointer
;
217 /* Language-specific reason why the current function cannot be made inline. */
218 char *current_function_cannot_inline
;
220 /* The FUNCTION_DECL for an inline function currently being expanded. */
221 tree inline_function_decl
;
223 /* Number of function calls seen so far in current function. */
225 int function_call_count
;
227 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
228 (labels to which there can be nonlocal gotos from nested functions)
231 tree nonlocal_labels
;
233 /* RTX for stack slot that holds the current handler for nonlocal gotos.
234 Zero when function does not have nonlocal labels. */
236 rtx nonlocal_goto_handler_slot
;
238 /* RTX for stack slot that holds the stack pointer value to restore
240 Zero when function does not have nonlocal labels. */
242 rtx nonlocal_goto_stack_level
;
244 /* Label that will go on parm cleanup code, if any.
245 Jumping to this label runs cleanup code for parameters, if
246 such code must be run. Following this code is the logical return label. */
250 /* Label that will go on function epilogue.
251 Jumping to this label serves as a "return" instruction
252 on machines which require execution of the epilogue on all returns. */
256 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
257 So we can mark them all live at the end of the function, if nonopt. */
260 /* List (chain of EXPR_LISTs) of all stack slots in this function.
261 Made for the sake of unshare_all_rtl. */
264 /* Chain of all RTL_EXPRs that have insns in them. */
267 /* Label to jump back to for tail recursion, or 0 if we have
268 not yet needed one for this function. */
269 rtx tail_recursion_label
;
271 /* Place after which to insert the tail_recursion_label if we need one. */
272 rtx tail_recursion_reentry
;
274 /* Location at which to save the argument pointer if it will need to be
275 referenced. There are two cases where this is done: if nonlocal gotos
276 exist, or if vars stored at an offset from the argument pointer will be
277 needed by inner routines. */
279 rtx arg_pointer_save_area
;
281 /* Offset to end of allocated area of stack frame.
282 If stack grows down, this is the address of the last stack slot allocated.
283 If stack grows up, this is the address for the next slot. */
284 HOST_WIDE_INT frame_offset
;
286 /* List (chain of TREE_LISTs) of static chains for containing functions.
287 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
288 in an RTL_EXPR in the TREE_VALUE. */
289 static tree context_display
;
291 /* List (chain of TREE_LISTs) of trampolines for nested functions.
292 The trampoline sets up the static chain and jumps to the function.
293 We supply the trampoline's address when the function's address is requested.
295 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
296 in an RTL_EXPR in the TREE_VALUE. */
297 static tree trampoline_list
;
299 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
300 static rtx parm_birth_insn
;
303 /* Nonzero if a stack slot has been generated whose address is not
304 actually valid. It means that the generated rtl must all be scanned
305 to detect and correct the invalid addresses where they occur. */
306 static int invalid_stack_slot
;
309 /* Last insn of those whose job was to put parms into their nominal homes. */
310 static rtx last_parm_insn
;
312 /* 1 + last pseudo register number possibly used for loading a copy
313 of a parameter of this function. */
316 /* Vector indexed by REGNO, containing location on stack in which
317 to put the parm which is nominally in pseudo register REGNO,
318 if we discover that that parm must go in the stack. The highest
319 element in this vector is one less than MAX_PARM_REG, above. */
320 rtx
*parm_reg_stack_loc
;
322 /* Nonzero once virtual register instantiation has been done.
323 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
324 static int virtuals_instantiated
;
326 /* These variables hold pointers to functions to
327 save and restore machine-specific data,
328 in push_function_context and pop_function_context. */
329 void (*save_machine_status
) PROTO((struct function
*));
330 void (*restore_machine_status
) PROTO((struct function
*));
332 /* Nonzero if we need to distinguish between the return value of this function
333 and the return value of a function called by this function. This helps
336 extern int rtx_equal_function_value_matters
;
337 extern tree sequence_rtl_expr
;
339 /* In order to evaluate some expressions, such as function calls returning
340 structures in memory, we need to temporarily allocate stack locations.
341 We record each allocated temporary in the following structure.
343 Associated with each temporary slot is a nesting level. When we pop up
344 one level, all temporaries associated with the previous level are freed.
345 Normally, all temporaries are freed after the execution of the statement
346 in which they were created. However, if we are inside a ({...}) grouping,
347 the result may be in a temporary and hence must be preserved. If the
348 result could be in a temporary, we preserve it if we can determine which
349 one it is in. If we cannot determine which temporary may contain the
350 result, all temporaries are preserved. A temporary is preserved by
351 pretending it was allocated at the previous nesting level.
353 Automatic variables are also assigned temporary slots, at the nesting
354 level where they are defined. They are marked a "kept" so that
355 free_temp_slots will not free them. */
359 /* Points to next temporary slot. */
360 struct temp_slot
*next
;
361 /* The rtx to used to reference the slot. */
363 /* The rtx used to represent the address if not the address of the
364 slot above. May be an EXPR_LIST if multiple addresses exist. */
366 /* The size, in units, of the slot. */
368 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
370 /* Non-zero if this temporary is currently in use. */
372 /* Non-zero if this temporary has its address taken. */
374 /* Nesting level at which this slot is being used. */
376 /* Non-zero if this should survive a call to free_temp_slots. */
378 /* The offset of the slot from the frame_pointer, including extra space
379 for alignment. This info is for combine_temp_slots. */
380 HOST_WIDE_INT base_offset
;
381 /* The size of the slot, including extra space for alignment. This
382 info is for combine_temp_slots. */
383 HOST_WIDE_INT full_size
;
386 /* List of all temporaries allocated, both available and in use. */
388 struct temp_slot
*temp_slots
;
390 /* Current nesting level for temporaries. */
394 /* Current nesting level for variables in a block. */
396 int var_temp_slot_level
;
398 /* When temporaries are created by TARGET_EXPRs, they are created at
399 this level of temp_slot_level, so that they can remain allocated
400 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
402 int target_temp_slot_level
;
404 /* This structure is used to record MEMs or pseudos used to replace VAR, any
405 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
406 maintain this list in case two operands of an insn were required to match;
407 in that case we must ensure we use the same replacement. */
409 struct fixup_replacement
413 struct fixup_replacement
*next
;
416 /* Forward declarations. */
418 static rtx assign_outer_stack_local
PROTO ((enum machine_mode
, HOST_WIDE_INT
,
419 int, struct function
*));
420 static struct temp_slot
*find_temp_slot_from_address
PROTO((rtx
));
421 static void put_reg_into_stack
PROTO((struct function
*, rtx
, tree
,
422 enum machine_mode
, enum machine_mode
,
424 static void fixup_var_refs
PROTO((rtx
, enum machine_mode
, int));
425 static struct fixup_replacement
426 *find_fixup_replacement
PROTO((struct fixup_replacement
**, rtx
));
427 static void fixup_var_refs_insns
PROTO((rtx
, enum machine_mode
, int,
429 static void fixup_var_refs_1
PROTO((rtx
, enum machine_mode
, rtx
*, rtx
,
430 struct fixup_replacement
**));
431 static rtx fixup_memory_subreg
PROTO((rtx
, rtx
, int));
432 static rtx walk_fixup_memory_subreg
PROTO((rtx
, rtx
, int));
433 static rtx fixup_stack_1
PROTO((rtx
, rtx
));
434 static void optimize_bit_field
PROTO((rtx
, rtx
, rtx
*));
435 static void instantiate_decls
PROTO((tree
, int));
436 static void instantiate_decls_1
PROTO((tree
, int));
437 static void instantiate_decl
PROTO((rtx
, int, int));
438 static int instantiate_virtual_regs_1
PROTO((rtx
*, rtx
, int));
439 static void delete_handlers
PROTO((void));
440 static void pad_to_arg_alignment
PROTO((struct args_size
*, int));
441 #ifndef ARGS_GROW_DOWNWARD
442 static void pad_below
PROTO((struct args_size
*, enum machine_mode
,
445 #ifdef ARGS_GROW_DOWNWARD
446 static tree round_down
PROTO((tree
, int));
448 static rtx round_trampoline_addr
PROTO((rtx
));
449 static tree blocks_nreverse
PROTO((tree
));
450 static int all_blocks
PROTO((tree
, tree
*));
451 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
452 static int *record_insns
PROTO((rtx
));
453 static int contains
PROTO((rtx
, int *));
454 #endif /* HAVE_prologue || HAVE_epilogue */
455 static void put_addressof_into_stack
PROTO((rtx
));
456 static void purge_addressof_1
PROTO((rtx
*, rtx
, int));
458 /* Pointer to chain of `struct function' for containing functions. */
459 struct function
*outer_function_chain
;
461 /* Given a function decl for a containing function,
462 return the `struct function' for it. */
465 find_function_data (decl
)
470 for (p
= outer_function_chain
; p
; p
= p
->next
)
477 /* Save the current context for compilation of a nested function.
478 This is called from language-specific code.
479 The caller is responsible for saving any language-specific status,
480 since this function knows only about language-independent variables. */
483 push_function_context_to (context
)
486 struct function
*p
= (struct function
*) xmalloc (sizeof (struct function
));
488 p
->next
= outer_function_chain
;
489 outer_function_chain
= p
;
491 p
->name
= current_function_name
;
492 p
->decl
= current_function_decl
;
493 p
->pops_args
= current_function_pops_args
;
494 p
->returns_struct
= current_function_returns_struct
;
495 p
->returns_pcc_struct
= current_function_returns_pcc_struct
;
496 p
->returns_pointer
= current_function_returns_pointer
;
497 p
->needs_context
= current_function_needs_context
;
498 p
->calls_setjmp
= current_function_calls_setjmp
;
499 p
->calls_longjmp
= current_function_calls_longjmp
;
500 p
->calls_alloca
= current_function_calls_alloca
;
501 p
->has_nonlocal_label
= current_function_has_nonlocal_label
;
502 p
->has_nonlocal_goto
= current_function_has_nonlocal_goto
;
503 p
->contains_functions
= current_function_contains_functions
;
504 p
->is_thunk
= current_function_is_thunk
;
505 p
->args_size
= current_function_args_size
;
506 p
->pretend_args_size
= current_function_pretend_args_size
;
507 p
->arg_offset_rtx
= current_function_arg_offset_rtx
;
508 p
->varargs
= current_function_varargs
;
509 p
->stdarg
= current_function_stdarg
;
510 p
->uses_const_pool
= current_function_uses_const_pool
;
511 p
->uses_pic_offset_table
= current_function_uses_pic_offset_table
;
512 p
->internal_arg_pointer
= current_function_internal_arg_pointer
;
513 p
->cannot_inline
= current_function_cannot_inline
;
514 p
->max_parm_reg
= max_parm_reg
;
515 p
->parm_reg_stack_loc
= parm_reg_stack_loc
;
516 p
->outgoing_args_size
= current_function_outgoing_args_size
;
517 p
->return_rtx
= current_function_return_rtx
;
518 p
->nonlocal_goto_handler_slot
= nonlocal_goto_handler_slot
;
519 p
->nonlocal_goto_stack_level
= nonlocal_goto_stack_level
;
520 p
->nonlocal_labels
= nonlocal_labels
;
521 p
->cleanup_label
= cleanup_label
;
522 p
->return_label
= return_label
;
523 p
->save_expr_regs
= save_expr_regs
;
524 p
->stack_slot_list
= stack_slot_list
;
525 p
->parm_birth_insn
= parm_birth_insn
;
526 p
->frame_offset
= frame_offset
;
527 p
->tail_recursion_label
= tail_recursion_label
;
528 p
->tail_recursion_reentry
= tail_recursion_reentry
;
529 p
->arg_pointer_save_area
= arg_pointer_save_area
;
530 p
->rtl_expr_chain
= rtl_expr_chain
;
531 p
->last_parm_insn
= last_parm_insn
;
532 p
->context_display
= context_display
;
533 p
->trampoline_list
= trampoline_list
;
534 p
->function_call_count
= function_call_count
;
535 p
->temp_slots
= temp_slots
;
536 p
->temp_slot_level
= temp_slot_level
;
537 p
->target_temp_slot_level
= target_temp_slot_level
;
538 p
->var_temp_slot_level
= var_temp_slot_level
;
539 p
->fixup_var_refs_queue
= 0;
540 p
->epilogue_delay_list
= current_function_epilogue_delay_list
;
541 p
->args_info
= current_function_args_info
;
543 save_tree_status (p
, context
);
544 save_storage_status (p
);
545 save_emit_status (p
);
546 save_expr_status (p
);
547 save_stmt_status (p
);
548 save_varasm_status (p
, context
);
549 if (save_machine_status
)
550 (*save_machine_status
) (p
);
554 push_function_context ()
556 push_function_context_to (current_function_decl
);
559 /* Restore the last saved context, at the end of a nested function.
560 This function is called from language-specific code. */
563 pop_function_context_from (context
)
566 struct function
*p
= outer_function_chain
;
567 struct var_refs_queue
*queue
;
569 outer_function_chain
= p
->next
;
571 current_function_contains_functions
572 = p
->contains_functions
|| p
->inline_obstacks
573 || context
== current_function_decl
;
574 current_function_name
= p
->name
;
575 current_function_decl
= p
->decl
;
576 current_function_pops_args
= p
->pops_args
;
577 current_function_returns_struct
= p
->returns_struct
;
578 current_function_returns_pcc_struct
= p
->returns_pcc_struct
;
579 current_function_returns_pointer
= p
->returns_pointer
;
580 current_function_needs_context
= p
->needs_context
;
581 current_function_calls_setjmp
= p
->calls_setjmp
;
582 current_function_calls_longjmp
= p
->calls_longjmp
;
583 current_function_calls_alloca
= p
->calls_alloca
;
584 current_function_has_nonlocal_label
= p
->has_nonlocal_label
;
585 current_function_has_nonlocal_goto
= p
->has_nonlocal_goto
;
586 current_function_is_thunk
= p
->is_thunk
;
587 current_function_args_size
= p
->args_size
;
588 current_function_pretend_args_size
= p
->pretend_args_size
;
589 current_function_arg_offset_rtx
= p
->arg_offset_rtx
;
590 current_function_varargs
= p
->varargs
;
591 current_function_stdarg
= p
->stdarg
;
592 current_function_uses_const_pool
= p
->uses_const_pool
;
593 current_function_uses_pic_offset_table
= p
->uses_pic_offset_table
;
594 current_function_internal_arg_pointer
= p
->internal_arg_pointer
;
595 current_function_cannot_inline
= p
->cannot_inline
;
596 max_parm_reg
= p
->max_parm_reg
;
597 parm_reg_stack_loc
= p
->parm_reg_stack_loc
;
598 current_function_outgoing_args_size
= p
->outgoing_args_size
;
599 current_function_return_rtx
= p
->return_rtx
;
600 nonlocal_goto_handler_slot
= p
->nonlocal_goto_handler_slot
;
601 nonlocal_goto_stack_level
= p
->nonlocal_goto_stack_level
;
602 nonlocal_labels
= p
->nonlocal_labels
;
603 cleanup_label
= p
->cleanup_label
;
604 return_label
= p
->return_label
;
605 save_expr_regs
= p
->save_expr_regs
;
606 stack_slot_list
= p
->stack_slot_list
;
607 parm_birth_insn
= p
->parm_birth_insn
;
608 frame_offset
= p
->frame_offset
;
609 tail_recursion_label
= p
->tail_recursion_label
;
610 tail_recursion_reentry
= p
->tail_recursion_reentry
;
611 arg_pointer_save_area
= p
->arg_pointer_save_area
;
612 rtl_expr_chain
= p
->rtl_expr_chain
;
613 last_parm_insn
= p
->last_parm_insn
;
614 context_display
= p
->context_display
;
615 trampoline_list
= p
->trampoline_list
;
616 function_call_count
= p
->function_call_count
;
617 temp_slots
= p
->temp_slots
;
618 temp_slot_level
= p
->temp_slot_level
;
619 target_temp_slot_level
= p
->target_temp_slot_level
;
620 var_temp_slot_level
= p
->var_temp_slot_level
;
621 current_function_epilogue_delay_list
= p
->epilogue_delay_list
;
623 current_function_args_info
= p
->args_info
;
625 restore_tree_status (p
, context
);
626 restore_storage_status (p
);
627 restore_expr_status (p
);
628 restore_emit_status (p
);
629 restore_stmt_status (p
);
630 restore_varasm_status (p
);
632 if (restore_machine_status
)
633 (*restore_machine_status
) (p
);
635 /* Finish doing put_var_into_stack for any of our variables
636 which became addressable during the nested function. */
637 for (queue
= p
->fixup_var_refs_queue
; queue
; queue
= queue
->next
)
638 fixup_var_refs (queue
->modified
, queue
->promoted_mode
, queue
->unsignedp
);
642 /* Reset variables that have known state during rtx generation. */
643 rtx_equal_function_value_matters
= 1;
644 virtuals_instantiated
= 0;
647 void pop_function_context ()
649 pop_function_context_from (current_function_decl
);
652 /* Allocate fixed slots in the stack frame of the current function. */
654 /* Return size needed for stack frame based on slots so far allocated.
655 This size counts from zero. It is not rounded to STACK_BOUNDARY;
656 the caller may have to do that. */
661 #ifdef FRAME_GROWS_DOWNWARD
662 return -frame_offset
;
668 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
669 with machine mode MODE.
671 ALIGN controls the amount of alignment for the address of the slot:
672 0 means according to MODE,
673 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
674 positive specifies alignment boundary in bits.
676 We do not round to stack_boundary here. */
679 assign_stack_local (mode
, size
, align
)
680 enum machine_mode mode
;
684 register rtx x
, addr
;
685 int bigend_correction
= 0;
690 alignment
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
692 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
694 else if (align
== -1)
696 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
697 size
= CEIL_ROUND (size
, alignment
);
700 alignment
= align
/ BITS_PER_UNIT
;
702 /* Round frame offset to that alignment.
703 We must be careful here, since FRAME_OFFSET might be negative and
704 division with a negative dividend isn't as well defined as we might
705 like. So we instead assume that ALIGNMENT is a power of two and
706 use logical operations which are unambiguous. */
707 #ifdef FRAME_GROWS_DOWNWARD
708 frame_offset
= FLOOR_ROUND (frame_offset
, alignment
);
710 frame_offset
= CEIL_ROUND (frame_offset
, alignment
);
713 /* On a big-endian machine, if we are allocating more space than we will use,
714 use the least significant bytes of those that are allocated. */
715 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
716 bigend_correction
= size
- GET_MODE_SIZE (mode
);
718 #ifdef FRAME_GROWS_DOWNWARD
719 frame_offset
-= size
;
722 /* If we have already instantiated virtual registers, return the actual
723 address relative to the frame pointer. */
724 if (virtuals_instantiated
)
725 addr
= plus_constant (frame_pointer_rtx
,
726 (frame_offset
+ bigend_correction
727 + STARTING_FRAME_OFFSET
));
729 addr
= plus_constant (virtual_stack_vars_rtx
,
730 frame_offset
+ bigend_correction
);
732 #ifndef FRAME_GROWS_DOWNWARD
733 frame_offset
+= size
;
736 x
= gen_rtx_MEM (mode
, addr
);
738 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, x
, stack_slot_list
);
743 /* Assign a stack slot in a containing function.
744 First three arguments are same as in preceding function.
745 The last argument specifies the function to allocate in. */
748 assign_outer_stack_local (mode
, size
, align
, function
)
749 enum machine_mode mode
;
752 struct function
*function
;
754 register rtx x
, addr
;
755 int bigend_correction
= 0;
758 /* Allocate in the memory associated with the function in whose frame
760 push_obstacks (function
->function_obstack
,
761 function
->function_maybepermanent_obstack
);
765 alignment
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
767 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
769 else if (align
== -1)
771 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
772 size
= CEIL_ROUND (size
, alignment
);
775 alignment
= align
/ BITS_PER_UNIT
;
777 /* Round frame offset to that alignment. */
778 #ifdef FRAME_GROWS_DOWNWARD
779 function
->frame_offset
= FLOOR_ROUND (function
->frame_offset
, alignment
);
781 function
->frame_offset
= CEIL_ROUND (function
->frame_offset
, alignment
);
784 /* On a big-endian machine, if we are allocating more space than we will use,
785 use the least significant bytes of those that are allocated. */
786 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
787 bigend_correction
= size
- GET_MODE_SIZE (mode
);
789 #ifdef FRAME_GROWS_DOWNWARD
790 function
->frame_offset
-= size
;
792 addr
= plus_constant (virtual_stack_vars_rtx
,
793 function
->frame_offset
+ bigend_correction
);
794 #ifndef FRAME_GROWS_DOWNWARD
795 function
->frame_offset
+= size
;
798 x
= gen_rtx_MEM (mode
, addr
);
800 function
->stack_slot_list
801 = gen_rtx_EXPR_LIST (VOIDmode
, x
, function
->stack_slot_list
);
808 /* Allocate a temporary stack slot and record it for possible later
811 MODE is the machine mode to be given to the returned rtx.
813 SIZE is the size in units of the space required. We do no rounding here
814 since assign_stack_local will do any required rounding.
816 KEEP is 1 if this slot is to be retained after a call to
817 free_temp_slots. Automatic variables for a block are allocated
818 with this flag. KEEP is 2 if we allocate a longer term temporary,
819 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
820 if we are to allocate something at an inner level to be treated as
821 a variable in the block (e.g., a SAVE_EXPR). */
824 assign_stack_temp (mode
, size
, keep
)
825 enum machine_mode mode
;
829 struct temp_slot
*p
, *best_p
= 0;
831 /* If SIZE is -1 it means that somebody tried to allocate a temporary
832 of a variable size. */
836 /* First try to find an available, already-allocated temporary that is the
837 exact size we require. */
838 for (p
= temp_slots
; p
; p
= p
->next
)
839 if (p
->size
== size
&& GET_MODE (p
->slot
) == mode
&& ! p
->in_use
)
842 /* If we didn't find, one, try one that is larger than what we want. We
843 find the smallest such. */
845 for (p
= temp_slots
; p
; p
= p
->next
)
846 if (p
->size
> size
&& GET_MODE (p
->slot
) == mode
&& ! p
->in_use
847 && (best_p
== 0 || best_p
->size
> p
->size
))
850 /* Make our best, if any, the one to use. */
853 /* If there are enough aligned bytes left over, make them into a new
854 temp_slot so that the extra bytes don't get wasted. Do this only
855 for BLKmode slots, so that we can be sure of the alignment. */
856 if (GET_MODE (best_p
->slot
) == BLKmode
)
858 int alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
859 HOST_WIDE_INT rounded_size
= CEIL_ROUND (size
, alignment
);
861 if (best_p
->size
- rounded_size
>= alignment
)
863 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
864 p
->in_use
= p
->addr_taken
= 0;
865 p
->size
= best_p
->size
- rounded_size
;
866 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
867 p
->full_size
= best_p
->full_size
- rounded_size
;
868 p
->slot
= gen_rtx_MEM (BLKmode
,
869 plus_constant (XEXP (best_p
->slot
, 0),
873 p
->next
= temp_slots
;
876 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, p
->slot
,
879 best_p
->size
= rounded_size
;
880 best_p
->full_size
= rounded_size
;
887 /* If we still didn't find one, make a new temporary. */
890 HOST_WIDE_INT frame_offset_old
= frame_offset
;
892 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
894 /* If the temp slot mode doesn't indicate the alignment,
895 use the largest possible, so no one will be disappointed. */
896 p
->slot
= assign_stack_local (mode
, size
, mode
== BLKmode
? -1 : 0);
898 /* The following slot size computation is necessary because we don't
899 know the actual size of the temporary slot until assign_stack_local
900 has performed all the frame alignment and size rounding for the
901 requested temporary. Note that extra space added for alignment
902 can be either above or below this stack slot depending on which
903 way the frame grows. We include the extra space if and only if it
904 is above this slot. */
905 #ifdef FRAME_GROWS_DOWNWARD
906 p
->size
= frame_offset_old
- frame_offset
;
911 /* Now define the fields used by combine_temp_slots. */
912 #ifdef FRAME_GROWS_DOWNWARD
913 p
->base_offset
= frame_offset
;
914 p
->full_size
= frame_offset_old
- frame_offset
;
916 p
->base_offset
= frame_offset_old
;
917 p
->full_size
= frame_offset
- frame_offset_old
;
920 p
->next
= temp_slots
;
926 p
->rtl_expr
= sequence_rtl_expr
;
930 p
->level
= target_temp_slot_level
;
935 p
->level
= var_temp_slot_level
;
940 p
->level
= temp_slot_level
;
944 /* We may be reusing an old slot, so clear any MEM flags that may have been
946 RTX_UNCHANGING_P (p
->slot
) = 0;
947 MEM_IN_STRUCT_P (p
->slot
) = 0;
951 /* Assign a temporary of given TYPE.
952 KEEP is as for assign_stack_temp.
953 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
954 it is 0 if a register is OK.
955 DONT_PROMOTE is 1 if we should not promote values in register
/* NOTE(review): lossy line-exploded extraction -- braces, declarations and
   some statements of the original are missing; code text below is kept
   byte-identical and only comments were added.  Visible logic: BLKmode or
   memory-required types get a stack temp (with a Chill max-size fallback
   for variable-sized arrays); otherwise a pseudo register, possibly in a
   promoted mode.  */
959 assign_temp (type
, keep
, memory_required
, dont_promote
)
965 enum machine_mode mode
= TYPE_MODE (type
);
966 int unsignedp
= TREE_UNSIGNED (type
);
968 if (mode
== BLKmode
|| memory_required
)
970 HOST_WIDE_INT size
= int_size_in_bytes (type
);
973 /* Unfortunately, we don't yet know how to allocate variable-sized
974 temporaries. However, sometimes we have a fixed upper limit on
975 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
976 instead. This is the case for Chill variable-sized strings. */
977 if (size
== -1 && TREE_CODE (type
) == ARRAY_TYPE
978 && TYPE_ARRAY_MAX_SIZE (type
) != NULL_TREE
979 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type
)) == INTEGER_CST
)
980 size
= TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type
));
982 tmp
= assign_stack_temp (mode
, size
, keep
);
983 MEM_IN_STRUCT_P (tmp
) = AGGREGATE_TYPE_P (type
);
/* Register case: promote the mode unless promotion is for calls only.
   NOTE(review): the dont_promote test presumably guards this -- the
   surrounding lines are missing from the extraction; verify in the
   original source.  */
987 #ifndef PROMOTE_FOR_CALL_ONLY
989 mode
= promote_mode (type
, mode
, &unsignedp
, 0);
992 return gen_reg_rtx (mode
);
995 /* Combine temporary stack slots which are adjacent on the stack.
997 This allows for better use of already allocated stack space. This is only
998 done for BLKmode slots because we can be sure that we won't have alignment
999 problems in this case. */
/* NOTE(review): lossy line-exploded extraction -- braces and some lines
   (early returns, frees) are missing; text kept byte-identical, only
   comments were added/fixed.  */
1002 combine_temp_slots ()
1004 struct temp_slot
*p
, *q
;
1005 struct temp_slot
*prev_p
, *prev_q
;
1008 /* If there are a lot of temp slots, don't do anything unless
1009 high levels of optimization. */
1010 if (! flag_expensive_optimizations
)
1011 for (p
= temp_slots
, num_slots
= 0; p
; p
= p
->next
, num_slots
++)
1012 if (num_slots
> 100 || (num_slots
> 10 && optimize
== 0))
/* O(n^2) pairwise scan: for each free BLKmode slot P, look for a free
   BLKmode slot Q adjacent to it (by base_offset/full_size) and merge.  */
1015 for (p
= temp_slots
, prev_p
= 0; p
; p
= prev_p
? prev_p
->next
: temp_slots
)
1019 if (! p
->in_use
&& GET_MODE (p
->slot
) == BLKmode
)
1020 for (q
= p
->next
, prev_q
= p
; q
; q
= prev_q
->next
)
1023 if (! q
->in_use
&& GET_MODE (q
->slot
) == BLKmode
)
1025 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
1027 /* Q comes after P; combine Q into P. */
1029 p
->full_size
+= q
->full_size
;
1032 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
1034 /* P comes after Q; combine P into Q. */
1036 q
->full_size
+= p
->full_size
;
1041 /* Either delete Q or advance past it. */
1043 prev_q
->next
= q
->next
;
1047 /* Either delete P or advance past it. */
1051 prev_p
->next
= p
->next
;
1053 temp_slots
= p
->next
;
1060 /* Find the temp slot corresponding to the object at address X.
   Matches either the slot's own address, a constant offset from the
   virtual stack vars register that falls inside the slot, or one of the
   aliases recorded on the slot's address EXPR_LIST.
   NOTE(review): lossy extraction -- the in_use test, returns and braces
   are missing from this view; text kept byte-identical.  */
1062 static struct temp_slot
*
1063 find_temp_slot_from_address (x
)
1066 struct temp_slot
*p
;
1069 for (p
= temp_slots
; p
; p
= p
->next
)
1074 else if (XEXP (p
->slot
, 0) == x
1076 || (GET_CODE (x
) == PLUS
1077 && XEXP (x
, 0) == virtual_stack_vars_rtx
1078 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1079 && INTVAL (XEXP (x
, 1)) >= p
->base_offset
1080 && INTVAL (XEXP (x
, 1)) < p
->base_offset
+ p
->full_size
))
/* Also scan the chain of recorded alias addresses, if any.  */
1083 else if (p
->address
!= 0 && GET_CODE (p
->address
) == EXPR_LIST
)
1084 for (next
= p
->address
; next
; next
= XEXP (next
, 1))
1085 if (XEXP (next
, 0) == x
)
1092 /* Indicate that NEW is an alternate way of referring to the temp slot
1093 that previously was known by OLD.
   NOTE(review): lossy extraction -- text kept byte-identical; the single
   address is converted to an EXPR_LIST on demand, then NEW is prepended.  */
1096 update_temp_slot_address (old
, new)
1099 struct temp_slot
*p
= find_temp_slot_from_address (old
);
1101 /* If none, return. Else add NEW as an alias. */
1104 else if (p
->address
== 0)
/* A single recorded address becomes a one-element list first.  */
1108 if (GET_CODE (p
->address
) != EXPR_LIST
)
1109 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, p
->address
, NULL_RTX
);
1111 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, new, p
->address
);
1115 /* If X could be a reference to a temporary slot, mark the fact that its
1116 address was taken.
   NOTE(review): lossy extraction -- the null check on X, the setting of
   addr_taken and braces are missing from this view; text kept
   byte-identical.  */
1119 mark_temp_addr_taken (x
)
1122 struct temp_slot
*p
;
1127 /* If X is not in memory or is at a constant address, it cannot be in
1128 a temporary slot. */
1129 if (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1132 p
= find_temp_slot_from_address (XEXP (x
, 0));
1137 /* If X could be a reference to a temporary slot, mark that slot as
1138 belonging to one level higher than the current level. If X
1139 matched one of our slots, just mark that one. Otherwise, we can't
1140 easily predict which it is, so upgrade all of them. Kept slots
1141 need not be touched.
1143 This is called when an ({...}) construct occurs and a statement
1144 returns a value in memory.
   NOTE(review): lossy line-exploded extraction -- the level decrements,
   keep/addr_taken assignments, returns and braces are missing from this
   view; text kept byte-identical, only comments added/fixed.  */
1147 preserve_temp_slots (x
)
1150 struct temp_slot
*p
= 0;
1152 /* If there is no result, we still might have some objects whose address
1153 were taken, so we need to make sure they stay around. */
1156 for (p
= temp_slots
; p
; p
= p
->next
)
1157 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1163 /* If X is a register that is being used as a pointer, see if we have
1164 a temporary slot we know it points to. To be consistent with
1165 the code below, we really should preserve all non-kept slots
1166 if we can't find a match, but that seems to be much too costly. */
1167 if (GET_CODE (x
) == REG
&& REGNO_POINTER_FLAG (REGNO (x
)))
1168 p
= find_temp_slot_from_address (x
);
1170 /* If X is not in memory or is at a constant address, it cannot be in
1171 a temporary slot, but it can contain something whose address was
1173 if (p
== 0 && (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0))))
1175 for (p
= temp_slots
; p
; p
= p
->next
)
1176 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1182 /* First see if we can find a match. */
1184 p
= find_temp_slot_from_address (XEXP (x
, 0));
1188 /* Move everything at our level whose address was taken to our new
1189 level in case we used its address. */
1190 struct temp_slot
*q
;
1192 if (p
->level
== temp_slot_level
)
1194 for (q
= temp_slots
; q
; q
= q
->next
)
1195 if (q
!= p
&& q
->addr_taken
&& q
->level
== p
->level
)
1204 /* Otherwise, preserve all non-kept slots at this level. */
1205 for (p
= temp_slots
; p
; p
= p
->next
)
1206 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
)
1210 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1211 with that RTL_EXPR, promote it into a temporary slot at the present
1212 level so it will not be freed when we free slots made in the
   NOTE(review): lossy extraction -- trailing comment text, the rtl_expr
   clearing and return are missing from this view; text kept
   byte-identical.  */
1216 preserve_rtl_expr_result (x
)
1219 struct temp_slot
*p
;
1221 /* If X is not in memory or is at a constant address, it cannot be in
1222 a temporary slot. */
1223 if (x
== 0 || GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1226 /* If we can find a match, move it to our level unless it is already at
1228 p
= find_temp_slot_from_address (XEXP (x
, 0));
/* Never move the slot to a deeper (higher-numbered) level.  */
1231 p
->level
= MIN (p
->level
, temp_slot_level
);
1238 /* Free all temporaries used so far. This is normally called at the end
1239 of generating code for a statement. Don't free any temporaries
1240 currently in use for an RTL_EXPR that hasn't yet been emitted.
1241 We could eventually do better than this since it can be reused while
1242 generating the same RTL_EXPR, but this is complex and probably not
   NOTE(review): the function declaration line itself was dropped by the
   lossy extraction (presumably `free_temp_slots ()` -- verify against
   the original); text kept byte-identical, only comments added.  */
1248 struct temp_slot
*p
;
/* Release every unkept, non-RTL_EXPR slot at the current level,
   then merge adjacent free slots.  */
1250 for (p
= temp_slots
; p
; p
= p
->next
)
1251 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
1252 && p
->rtl_expr
== 0)
1255 combine_temp_slots ();
1258 /* Free all temporary slots used in T, an RTL_EXPR node.
   NOTE(review): lossy extraction -- the in_use clearing inside the loop
   is missing from this view; text kept byte-identical.  */
1261 free_temps_for_rtl_expr (t
)
1264 struct temp_slot
*p
;
1266 for (p
= temp_slots
; p
; p
= p
->next
)
1267 if (p
->rtl_expr
== t
)
/* Merge newly freed adjacent slots.  */
1270 combine_temp_slots ();
1273 /* Mark all temporaries ever allocated in this function as not suitable
1274 for reuse until the current level is exited.
   NOTE(review): lossy extraction -- braces missing; text kept
   byte-identical.  Marks every slot in_use+keep and pulls its level up
   to at most the current one.  */
1277 mark_all_temps_used ()
1279 struct temp_slot
*p
;
1281 for (p
= temp_slots
; p
; p
= p
->next
)
1283 p
->in_use
= p
->keep
= 1;
1284 p
->level
= MIN (p
->level
, temp_slot_level
);
1288 /* Push deeper into the nesting level for stack temporaries. */
/* NOTE(review): this region holds several tiny level-management helpers;
   the lossy extraction dropped the push_temp_slots definition, most
   return types, braces, and the push_temp_slots () calls that presumably
   start the two push_*_for_* helpers -- verify against the original.
   Text kept byte-identical, only comments added.  */
1296 /* Likewise, but save the new level as the place to allocate variables
1300 push_temp_slots_for_block ()
1304 var_temp_slot_level
= temp_slot_level
;
1307 /* Likewise, but save the new level as the place to allocate temporaries
1308 for TARGET_EXPRs. */
1311 push_temp_slots_for_target ()
1315 target_temp_slot_level
= temp_slot_level
;
1318 /* Set and get the value of target_temp_slot_level. The only
1319 permitted use of these functions is to save and restore this value. */
1322 get_target_temp_slot_level ()
1324 return target_temp_slot_level
;
1328 set_target_temp_slot_level (level
)
1331 target_temp_slot_level
= level
;
1334 /* Pop a temporary nesting level. All slots in use in the current level
/* pop_temp_slots body (declaration line lost by extraction): frees
   non-RTL_EXPR slots at the departing level, merges, then presumably
   decrements temp_slot_level (decrement not visible here).  */
1340 struct temp_slot
*p
;
1342 for (p
= temp_slots
; p
; p
= p
->next
)
1343 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->rtl_expr
== 0)
1346 combine_temp_slots ();
1351 /* Initialize temporary slots. */
1356 /* We have not allocated any temporaries yet. */
1358 temp_slot_level
= 0;
1359 var_temp_slot_level
= 0;
1360 target_temp_slot_level
= 0;
1363 /* Retroactively move an auto variable from a register to a stack slot.
1364 This is done when an address-reference to the variable is seen.
   NOTE(review): lossy line-exploded extraction -- the function's return
   type, braces, the computation of can_use_addressof, and several
   statements are missing from this view; text kept byte-identical,
   only comments added/fixed.  */
1367 put_var_into_stack (decl
)
1371 enum machine_mode promoted_mode
, decl_mode
;
1372 struct function
*function
= 0;
1374 int can_use_addressof
;
1376 context
= decl_function_context (decl
);
1378 /* Get the current rtl used for this object and its original mode. */
1379 reg
= TREE_CODE (decl
) == SAVE_EXPR
? SAVE_EXPR_RTL (decl
) : DECL_RTL (decl
);
1381 /* No need to do anything if decl has no rtx yet
1382 since in that case caller is setting TREE_ADDRESSABLE
1383 and a stack slot will be assigned when the rtl is made. */
1387 /* Get the declared mode for this object. */
1388 decl_mode
= (TREE_CODE (decl
) == SAVE_EXPR
? TYPE_MODE (TREE_TYPE (decl
))
1389 : DECL_MODE (decl
));
1390 /* Get the mode it's actually stored in. */
1391 promoted_mode
= GET_MODE (reg
);
1393 /* If this variable comes from an outer function,
1394 find that function's saved context. */
1395 if (context
!= current_function_decl
&& context
!= inline_function_decl
)
1396 for (function
= outer_function_chain
; function
; function
= function
->next
)
1397 if (function
->decl
== context
)
1400 /* If this is a variable-size object with a pseudo to address it,
1401 put that pseudo into the stack, if the var is nonlocal. */
1402 if (DECL_NONLOCAL (decl
)
1403 && GET_CODE (reg
) == MEM
1404 && GET_CODE (XEXP (reg
, 0)) == REG
1405 && REGNO (XEXP (reg
, 0)) > LAST_VIRTUAL_REGISTER
)
1407 reg
= XEXP (reg
, 0);
1408 decl_mode
= promoted_mode
= GET_MODE (reg
);
/* NOTE(review): the start of the can_use_addressof condition is missing
   from this extraction; only the trailing clauses survive.  */
1414 /* FIXME make it work for promoted modes too */
1415 && decl_mode
== promoted_mode
1416 #ifdef NON_SAVING_SETJMP
1417 && ! (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
1421 /* If we can't use ADDRESSOF, make sure we see through one we already
1423 if (! can_use_addressof
&& GET_CODE (reg
) == MEM
1424 && GET_CODE (XEXP (reg
, 0)) == ADDRESSOF
)
1425 reg
= XEXP (XEXP (reg
, 0), 0);
1427 /* Now we should have a value that resides in one or more pseudo regs. */
1429 if (GET_CODE (reg
) == REG
)
1431 /* If this variable lives in the current function and we don't need
1432 to put things in the stack for the sake of setjmp, try to keep it
1433 in a register until we know we actually need the address. */
1434 if (can_use_addressof
)
1435 gen_mem_addressof (reg
, decl
);
/* Otherwise demote the single pseudo to a stack slot now.  */
1437 put_reg_into_stack (function
, reg
, TREE_TYPE (decl
),
1438 promoted_mode
, decl_mode
,
1439 TREE_SIDE_EFFECTS (decl
), 0,
1441 || DECL_INITIAL (decl
) != 0);
1443 else if (GET_CODE (reg
) == CONCAT
)
1445 /* A CONCAT contains two pseudos; put them both in the stack.
1446 We do it so they end up consecutive. */
1447 enum machine_mode part_mode
= GET_MODE (XEXP (reg
, 0));
1448 tree part_type
= TREE_TYPE (TREE_TYPE (decl
));
1449 #ifdef FRAME_GROWS_DOWNWARD
1450 /* Since part 0 should have a lower address, do it second. */
1451 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1452 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1453 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0);
1454 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1455 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1456 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0);
/* Non-FRAME_GROWS_DOWNWARD branch: allocate part 0 first.  */
1458 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1459 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1460 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0);
1461 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1462 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1463 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0);
1466 /* Change the CONCAT into a combined MEM for both parts. */
1467 PUT_CODE (reg
, MEM
);
1468 MEM_VOLATILE_P (reg
) = MEM_VOLATILE_P (XEXP (reg
, 0));
1470 /* The two parts are in memory order already.
1471 Use the lower part's address as ours. */
1472 XEXP (reg
, 0) = XEXP (XEXP (reg
, 0), 0);
1473 /* Prevent sharing of rtl that might lose. */
1474 if (GET_CODE (XEXP (reg
, 0)) == PLUS
)
1475 XEXP (reg
, 0) = copy_rtx (XEXP (reg
, 0));
/* Checker support: tell -fcheck-memory-usage the slot is readable and
   writable.  */
1480 if (flag_check_memory_usage
)
1481 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
1482 XEXP (reg
, 0), ptr_mode
,
1483 GEN_INT (GET_MODE_SIZE (GET_MODE (reg
))),
1484 TYPE_MODE (sizetype
),
1485 GEN_INT (MEMORY_USE_RW
),
1486 TYPE_MODE (integer_type_node
));
1489 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1490 into the stack frame of FUNCTION (0 means the current function).
1491 DECL_MODE is the machine mode of the user-level data type.
1492 PROMOTED_MODE is the machine mode of the register.
1493 VOLATILE_P is nonzero if this is for a "volatile" decl.
1494 USED_P is nonzero if this reg might have already been used in an insn.
   NOTE(review): lossy line-exploded extraction -- return type, several
   parameter declarations, braces, and the if/else structure separating
   the FUNCTION != 0 and local cases are partly missing; text kept
   byte-identical, only comments added.  */
1497 put_reg_into_stack (function
, reg
, type
, promoted_mode
, decl_mode
, volatile_p
,
1498 original_regno
, used_p
)
1499 struct function
*function
;
1502 enum machine_mode promoted_mode
, decl_mode
;
1508 int regno
= original_regno
;
1511 regno
= REGNO (reg
);
/* Outer-function case: reuse the saved parm slot or allocate in the
   outer frame.  */
1515 if (regno
< function
->max_parm_reg
)
1516 new = function
->parm_reg_stack_loc
[regno
];
1518 new = assign_outer_stack_local (decl_mode
, GET_MODE_SIZE (decl_mode
),
/* Current-function case: same policy against the local frame.  */
1523 if (regno
< max_parm_reg
)
1524 new = parm_reg_stack_loc
[regno
];
1526 new = assign_stack_local (decl_mode
, GET_MODE_SIZE (decl_mode
), 0);
/* Rewrite REG in place into a MEM at the new slot's address, so every
   existing rtx sharing REG now refers to the stack slot.  */
1529 PUT_MODE (reg
, decl_mode
);
1530 XEXP (reg
, 0) = XEXP (new, 0);
1531 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1532 MEM_VOLATILE_P (reg
) = volatile_p
;
1533 PUT_CODE (reg
, MEM
);
1535 /* If this is a memory ref that contains aggregate components,
1536 mark it as such for cse and loop optimize. If we are reusing a
1537 previously generated stack slot, then we need to copy the bit in
1538 case it was set for other reasons. For instance, it is set for
1539 __builtin_va_alist. */
1540 MEM_IN_STRUCT_P (reg
) = AGGREGATE_TYPE_P (type
) | MEM_IN_STRUCT_P (new);
1542 /* Now make sure that all refs to the variable, previously made
1543 when it was a register, are fixed up to be valid again. */
1545 if (used_p
&& function
!= 0)
1547 struct var_refs_queue
*temp
;
1549 /* Variable is inherited; fix it up when we get back to its function. */
1550 push_obstacks (function
->function_obstack
,
1551 function
->function_maybepermanent_obstack
);
1553 /* See comment in restore_tree_status in tree.c for why this needs to be
1554 on saveable obstack. */
1556 = (struct var_refs_queue
*) savealloc (sizeof (struct var_refs_queue
));
1557 temp
->modified
= reg
;
1558 temp
->promoted_mode
= promoted_mode
;
1559 temp
->unsignedp
= TREE_UNSIGNED (type
);
1560 temp
->next
= function
->fixup_var_refs_queue
;
1561 function
->fixup_var_refs_queue
= temp
;
1565 /* Variable is local; fix it up now. */
1566 fixup_var_refs (reg
, promoted_mode
, TREE_UNSIGNED (type
));
/* Fix up every reference to VAR (a former pseudo, now a MEM) in the main
   insn chain, all pending sequences, and all waiting RTL_EXPR sequences.
   NOTE(review): lossy line-exploded extraction -- return type, braces,
   end_sequence calls and some declarations are missing; text kept
   byte-identical, only comments added.  */
1570 fixup_var_refs (var
, promoted_mode
, unsignedp
)
1572 enum machine_mode promoted_mode
;
1576 rtx first_insn
= get_insns ();
1577 struct sequence_stack
*stack
= sequence_stack
;
1578 tree rtl_exps
= rtl_expr_chain
;
1580 /* Must scan all insns for stack-refs that exceed the limit. */
1581 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, first_insn
, stack
== 0);
1583 /* Scan all pending sequences too. */
1584 for (; stack
; stack
= stack
->next
)
1586 push_to_sequence (stack
->first
);
1587 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
,
1588 stack
->first
, stack
->next
!= 0);
1589 /* Update remembered end of sequence
1590 in case we added an insn at the end. */
1591 stack
->last
= get_last_insn ();
1595 /* Scan all waiting RTL_EXPRs too. */
1596 for (pending
= rtl_exps
; pending
; pending
= TREE_CHAIN (pending
))
1598 rtx seq
= RTL_EXPR_SEQUENCE (TREE_VALUE (pending
));
1599 if (seq
!= const0_rtx
&& seq
!= 0)
1601 push_to_sequence (seq
);
1602 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, seq
, 0);
1608 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1609 some part of an insn. Return a struct fixup_replacement whose OLD
1610 value is equal to X. Allocate a new structure if no such entry exists.
   NOTE(review): lossy extraction -- braces, the old/new field
   initialization of a fresh entry and the returns are missing from this
   view; text kept byte-identical.  */
1612 static struct fixup_replacement
*
1613 find_fixup_replacement (replacements
, x
)
1614 struct fixup_replacement
**replacements
;
1617 struct fixup_replacement
*p
;
1619 /* See if we have already replaced this. */
1620 for (p
= *replacements
; p
&& p
->old
!= x
; p
= p
->next
)
/* Not found: allocate a new entry and push it on the list head.  */
1625 p
= (struct fixup_replacement
*) oballoc (sizeof (struct fixup_replacement
));
1628 p
->next
= *replacements
;
1635 /* Scan the insn-chain starting with INSN for refs to VAR
1636 and fix them up. TOPLEVEL is nonzero if this chain is the
1637 main chain of insns for the current function.
   NOTE(review): lossy line-exploded extraction -- the driving while-loop
   header, braces, several condition fragments and statements are missing
   from this view; text kept byte-identical, only comments added.  */
1640 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, insn
, toplevel
)
1642 enum machine_mode promoted_mode
;
1651 rtx next
= NEXT_INSN (insn
);
1652 rtx set
, prev
, prev_set
;
1655 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1657 /* If this is a CLOBBER of VAR, delete it.
1659 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1660 and REG_RETVAL notes too. */
1661 if (GET_CODE (PATTERN (insn
)) == CLOBBER
1662 && XEXP (PATTERN (insn
), 0) == var
)
1664 if ((note
= find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
)) != 0)
1665 /* The REG_LIBCALL note will go away since we are going to
1666 turn INSN into a NOTE, so just delete the
1667 corresponding REG_RETVAL note. */
1668 remove_note (XEXP (note
, 0),
1669 find_reg_note (XEXP (note
, 0), REG_RETVAL
,
1672 /* In unoptimized compilation, we shouldn't call delete_insn
1673 except in jump.c doing warnings. */
1674 PUT_CODE (insn
, NOTE
);
1675 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1676 NOTE_SOURCE_FILE (insn
) = 0;
1679 /* The insn to load VAR from a home in the arglist
1680 is now a no-op. When we see it, just delete it.
1681 Similarly if this is storing VAR from a register from which
1682 it was loaded in the previous insn. This will occur
1683 when an ADDRESSOF was made for an arglist slot. */
1685 && (set
= single_set (insn
)) != 0
1686 && SET_DEST (set
) == var
1687 /* If this represents the result of an insn group,
1688 don't delete the insn. */
1689 && find_reg_note (insn
, REG_RETVAL
, NULL_RTX
) == 0
1690 && (rtx_equal_p (SET_SRC (set
), var
)
1691 || (GET_CODE (SET_SRC (set
)) == REG
1692 && (prev
= prev_nonnote_insn (insn
)) != 0
1693 && (prev_set
= single_set (prev
)) != 0
1694 && SET_DEST (prev_set
) == SET_SRC (set
)
1695 && rtx_equal_p (SET_SRC (prev_set
), var
))))
1697 /* In unoptimized compilation, we shouldn't call delete_insn
1698 except in jump.c doing warnings. */
1699 PUT_CODE (insn
, NOTE
);
1700 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1701 NOTE_SOURCE_FILE (insn
) = 0;
1702 if (insn
== last_parm_insn
)
1703 last_parm_insn
= PREV_INSN (next
);
1707 struct fixup_replacement
*replacements
= 0;
1708 rtx next_insn
= NEXT_INSN (insn
);
1710 if (SMALL_REGISTER_CLASSES
)
1712 /* If the insn that copies the results of a CALL_INSN
1713 into a pseudo now references VAR, we have to use an
1714 intermediate pseudo since we want the life of the
1715 return value register to be only a single insn.
1717 If we don't use an intermediate pseudo, such things as
1718 address computations to make the address of VAR valid
1719 if it is not can be placed between the CALL_INSN and INSN.
1721 To make sure this doesn't happen, we record the destination
1722 of the CALL_INSN and see if the next insn uses both that
1725 if (call_dest
!= 0 && GET_CODE (insn
) == INSN
1726 && reg_mentioned_p (var
, PATTERN (insn
))
1727 && reg_mentioned_p (call_dest
, PATTERN (insn
)))
1729 rtx temp
= gen_reg_rtx (GET_MODE (call_dest
));
1731 emit_insn_before (gen_move_insn (temp
, call_dest
), insn
);
1733 PATTERN (insn
) = replace_rtx (PATTERN (insn
),
/* Remember the destination of a CALL_INSN's value for the test above.  */
1737 if (GET_CODE (insn
) == CALL_INSN
1738 && GET_CODE (PATTERN (insn
)) == SET
)
1739 call_dest
= SET_DEST (PATTERN (insn
));
1740 else if (GET_CODE (insn
) == CALL_INSN
1741 && GET_CODE (PATTERN (insn
)) == PARALLEL
1742 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1743 call_dest
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 0));
1748 /* See if we have to do anything to INSN now that VAR is in
1749 memory. If it needs to be loaded into a pseudo, use a single
1750 pseudo for the entire insn in case there is a MATCH_DUP
1751 between two operands. We pass a pointer to the head of
1752 a list of struct fixup_replacements. If fixup_var_refs_1
1753 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1754 it will record them in this list.
1756 If it allocated a pseudo for any replacement, we copy into
1759 fixup_var_refs_1 (var
, promoted_mode
, &PATTERN (insn
), insn
,
1762 /* If this is last_parm_insn, and any instructions were output
1763 after it to fix it up, then we must set last_parm_insn to
1764 the last such instruction emitted. */
1765 if (insn
== last_parm_insn
)
1766 last_parm_insn
= PREV_INSN (next_insn
);
/* Emit the copies recorded by fixup_var_refs_1, converting modes
   where the replacement pseudo's mode differs.  */
1768 while (replacements
)
1770 if (GET_CODE (replacements
->new) == REG
)
1775 /* OLD might be a (subreg (mem)). */
1776 if (GET_CODE (replacements
->old
) == SUBREG
)
1778 = fixup_memory_subreg (replacements
->old
, insn
, 0);
1781 = fixup_stack_1 (replacements
->old
, insn
);
1783 insert_before
= insn
;
1785 /* If we are changing the mode, do a conversion.
1786 This might be wasteful, but combine.c will
1787 eliminate much of the waste. */
1789 if (GET_MODE (replacements
->new)
1790 != GET_MODE (replacements
->old
))
1793 convert_move (replacements
->new,
1794 replacements
->old
, unsignedp
);
1795 seq
= gen_sequence ();
1799 seq
= gen_move_insn (replacements
->new,
1802 emit_insn_before (seq
, insert_before
);
1805 replacements
= replacements
->next
;
1809 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1810 But don't touch other insns referred to by reg-notes;
1811 we will get them elsewhere. */
1812 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1813 if (GET_CODE (note
) != INSN_LIST
)
1815 = walk_fixup_memory_subreg (XEXP (note
, 0), insn
, 1);
1821 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1822 See if the rtx expression at *LOC in INSN needs to be changed.
1824 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1825 contain a list of original rtx's and replacements. If we find that we need
1826 to modify this insn by replacing a memory reference with a pseudo or by
1827 making a new MEM to implement a SUBREG, we consult that list to see if
1828 we have already chosen a replacement. If none has already been allocated,
1829 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1830 or the SUBREG, as appropriate, to the pseudo. */
1833 fixup_var_refs_1 (var
, promoted_mode
, loc
, insn
, replacements
)
1835 enum machine_mode promoted_mode
;
1838 struct fixup_replacement
**replacements
;
1841 register rtx x
= *loc
;
1842 RTX_CODE code
= GET_CODE (x
);
1844 register rtx tem
, tem1
;
1845 struct fixup_replacement
*replacement
;
1850 if (XEXP (x
, 0) == var
)
1852 /* Prevent sharing of rtl that might lose. */
1853 rtx sub
= copy_rtx (XEXP (var
, 0));
1857 if (! validate_change (insn
, loc
, sub
, 0))
1859 rtx y
= force_operand (sub
, NULL_RTX
);
1861 if (! validate_change (insn
, loc
, y
, 0))
1862 *loc
= copy_to_reg (y
);
1865 emit_insn_before (gen_sequence (), insn
);
1873 /* If we already have a replacement, use it. Otherwise,
1874 try to fix up this address in case it is invalid. */
1876 replacement
= find_fixup_replacement (replacements
, var
);
1877 if (replacement
->new)
1879 *loc
= replacement
->new;
1883 *loc
= replacement
->new = x
= fixup_stack_1 (x
, insn
);
1885 /* Unless we are forcing memory to register or we changed the mode,
1886 we can leave things the way they are if the insn is valid. */
1888 INSN_CODE (insn
) = -1;
1889 if (! flag_force_mem
&& GET_MODE (x
) == promoted_mode
1890 && recog_memoized (insn
) >= 0)
1893 *loc
= replacement
->new = gen_reg_rtx (promoted_mode
);
1897 /* If X contains VAR, we need to unshare it here so that we update
1898 each occurrence separately. But all identical MEMs in one insn
1899 must be replaced with the same rtx because of the possibility of
1902 if (reg_mentioned_p (var
, x
))
1904 replacement
= find_fixup_replacement (replacements
, x
);
1905 if (replacement
->new == 0)
1906 replacement
->new = copy_most_rtx (x
, var
);
1908 *loc
= x
= replacement
->new;
1924 /* Note that in some cases those types of expressions are altered
1925 by optimize_bit_field, and do not survive to get here. */
1926 if (XEXP (x
, 0) == var
1927 || (GET_CODE (XEXP (x
, 0)) == SUBREG
1928 && SUBREG_REG (XEXP (x
, 0)) == var
))
1930 /* Get TEM as a valid MEM in the mode presently in the insn.
1932 We don't worry about the possibility of MATCH_DUP here; it
1933 is highly unlikely and would be tricky to handle. */
1936 if (GET_CODE (tem
) == SUBREG
)
1938 if (GET_MODE_BITSIZE (GET_MODE (tem
))
1939 > GET_MODE_BITSIZE (GET_MODE (var
)))
1941 replacement
= find_fixup_replacement (replacements
, var
);
1942 if (replacement
->new == 0)
1943 replacement
->new = gen_reg_rtx (GET_MODE (var
));
1944 SUBREG_REG (tem
) = replacement
->new;
1947 tem
= fixup_memory_subreg (tem
, insn
, 0);
1950 tem
= fixup_stack_1 (tem
, insn
);
1952 /* Unless we want to load from memory, get TEM into the proper mode
1953 for an extract from memory. This can only be done if the
1954 extract is at a constant position and length. */
1956 if (! flag_force_mem
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
1957 && GET_CODE (XEXP (x
, 2)) == CONST_INT
1958 && ! mode_dependent_address_p (XEXP (tem
, 0))
1959 && ! MEM_VOLATILE_P (tem
))
1961 enum machine_mode wanted_mode
= VOIDmode
;
1962 enum machine_mode is_mode
= GET_MODE (tem
);
1963 HOST_WIDE_INT pos
= INTVAL (XEXP (x
, 2));
1966 if (GET_CODE (x
) == ZERO_EXTRACT
)
1967 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][1];
1970 if (GET_CODE (x
) == SIGN_EXTRACT
)
1971 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][1];
1973 /* If we have a narrower mode, we can do something. */
1974 if (wanted_mode
!= VOIDmode
1975 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1977 HOST_WIDE_INT offset
= pos
/ BITS_PER_UNIT
;
1978 rtx old_pos
= XEXP (x
, 2);
1981 /* If the bytes and bits are counted differently, we
1982 must adjust the offset. */
1983 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
1984 offset
= (GET_MODE_SIZE (is_mode
)
1985 - GET_MODE_SIZE (wanted_mode
) - offset
);
1987 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1989 newmem
= gen_rtx_MEM (wanted_mode
,
1990 plus_constant (XEXP (tem
, 0), offset
));
1991 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
1992 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (tem
);
1993 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (tem
);
1995 /* Make the change and see if the insn remains valid. */
1996 INSN_CODE (insn
) = -1;
1997 XEXP (x
, 0) = newmem
;
1998 XEXP (x
, 2) = GEN_INT (pos
);
2000 if (recog_memoized (insn
) >= 0)
2003 /* Otherwise, restore old position. XEXP (x, 0) will be
2005 XEXP (x
, 2) = old_pos
;
2009 /* If we get here, the bitfield extract insn can't accept a memory
2010 reference. Copy the input into a register. */
2012 tem1
= gen_reg_rtx (GET_MODE (tem
));
2013 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
2020 if (SUBREG_REG (x
) == var
)
2022 /* If this is a special SUBREG made because VAR was promoted
2023 from a wider mode, replace it with VAR and call ourself
2024 recursively, this time saying that the object previously
2025 had its current mode (by virtue of the SUBREG). */
2027 if (SUBREG_PROMOTED_VAR_P (x
))
2030 fixup_var_refs_1 (var
, GET_MODE (var
), loc
, insn
, replacements
);
2034 /* If this SUBREG makes VAR wider, it has become a paradoxical
2035 SUBREG with VAR in memory, but these aren't allowed at this
2036 stage of the compilation. So load VAR into a pseudo and take
2037 a SUBREG of that pseudo. */
2038 if (GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (var
)))
2040 replacement
= find_fixup_replacement (replacements
, var
);
2041 if (replacement
->new == 0)
2042 replacement
->new = gen_reg_rtx (GET_MODE (var
));
2043 SUBREG_REG (x
) = replacement
->new;
2047 /* See if we have already found a replacement for this SUBREG.
2048 If so, use it. Otherwise, make a MEM and see if the insn
2049 is recognized. If not, or if we should force MEM into a register,
2050 make a pseudo for this SUBREG. */
2051 replacement
= find_fixup_replacement (replacements
, x
);
2052 if (replacement
->new)
2054 *loc
= replacement
->new;
2058 replacement
->new = *loc
= fixup_memory_subreg (x
, insn
, 0);
2060 INSN_CODE (insn
) = -1;
2061 if (! flag_force_mem
&& recog_memoized (insn
) >= 0)
2064 *loc
= replacement
->new = gen_reg_rtx (GET_MODE (x
));
2070 /* First do special simplification of bit-field references. */
2071 if (GET_CODE (SET_DEST (x
)) == SIGN_EXTRACT
2072 || GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
2073 optimize_bit_field (x
, insn
, 0);
2074 if (GET_CODE (SET_SRC (x
)) == SIGN_EXTRACT
2075 || GET_CODE (SET_SRC (x
)) == ZERO_EXTRACT
)
2076 optimize_bit_field (x
, insn
, NULL_PTR
);
2078 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2079 into a register and then store it back out. */
2080 if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
2081 && GET_CODE (XEXP (SET_DEST (x
), 0)) == SUBREG
2082 && SUBREG_REG (XEXP (SET_DEST (x
), 0)) == var
2083 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x
), 0)))
2084 > GET_MODE_SIZE (GET_MODE (var
))))
2086 replacement
= find_fixup_replacement (replacements
, var
);
2087 if (replacement
->new == 0)
2088 replacement
->new = gen_reg_rtx (GET_MODE (var
));
2090 SUBREG_REG (XEXP (SET_DEST (x
), 0)) = replacement
->new;
2091 emit_insn_after (gen_move_insn (var
, replacement
->new), insn
);
2094 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2095 insn into a pseudo and store the low part of the pseudo into VAR. */
2096 if (GET_CODE (SET_DEST (x
)) == SUBREG
2097 && SUBREG_REG (SET_DEST (x
)) == var
2098 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
2099 > GET_MODE_SIZE (GET_MODE (var
))))
2101 SET_DEST (x
) = tem
= gen_reg_rtx (GET_MODE (SET_DEST (x
)));
2102 emit_insn_after (gen_move_insn (var
, gen_lowpart (GET_MODE (var
),
2109 rtx dest
= SET_DEST (x
);
2110 rtx src
= SET_SRC (x
);
2112 rtx outerdest
= dest
;
2115 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
2116 || GET_CODE (dest
) == SIGN_EXTRACT
2117 || GET_CODE (dest
) == ZERO_EXTRACT
)
2118 dest
= XEXP (dest
, 0);
2120 if (GET_CODE (src
) == SUBREG
)
2121 src
= XEXP (src
, 0);
2123 /* If VAR does not appear at the top level of the SET
2124 just scan the lower levels of the tree. */
2126 if (src
!= var
&& dest
!= var
)
2129 /* We will need to rerecognize this insn. */
2130 INSN_CODE (insn
) = -1;
2133 if (GET_CODE (outerdest
) == ZERO_EXTRACT
&& dest
== var
)
2135 /* Since this case will return, ensure we fixup all the
2137 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 1),
2138 insn
, replacements
);
2139 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 2),
2140 insn
, replacements
);
2141 fixup_var_refs_1 (var
, promoted_mode
, &SET_SRC (x
),
2142 insn
, replacements
);
2144 tem
= XEXP (outerdest
, 0);
2146 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2147 that may appear inside a ZERO_EXTRACT.
2148 This was legitimate when the MEM was a REG. */
2149 if (GET_CODE (tem
) == SUBREG
2150 && SUBREG_REG (tem
) == var
)
2151 tem
= fixup_memory_subreg (tem
, insn
, 0);
2153 tem
= fixup_stack_1 (tem
, insn
);
2155 if (GET_CODE (XEXP (outerdest
, 1)) == CONST_INT
2156 && GET_CODE (XEXP (outerdest
, 2)) == CONST_INT
2157 && ! mode_dependent_address_p (XEXP (tem
, 0))
2158 && ! MEM_VOLATILE_P (tem
))
2160 enum machine_mode wanted_mode
2161 = insn_operand_mode
[(int) CODE_FOR_insv
][0];
2162 enum machine_mode is_mode
= GET_MODE (tem
);
2163 HOST_WIDE_INT pos
= INTVAL (XEXP (outerdest
, 2));
2165 /* If we have a narrower mode, we can do something. */
2166 if (GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
2168 HOST_WIDE_INT offset
= pos
/ BITS_PER_UNIT
;
2169 rtx old_pos
= XEXP (outerdest
, 2);
2172 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
2173 offset
= (GET_MODE_SIZE (is_mode
)
2174 - GET_MODE_SIZE (wanted_mode
) - offset
);
2176 pos
%= GET_MODE_BITSIZE (wanted_mode
);
2178 newmem
= gen_rtx_MEM (wanted_mode
,
2179 plus_constant (XEXP (tem
, 0), offset
));
2180 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
2181 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (tem
);
2182 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (tem
);
2184 /* Make the change and see if the insn remains valid. */
2185 INSN_CODE (insn
) = -1;
2186 XEXP (outerdest
, 0) = newmem
;
2187 XEXP (outerdest
, 2) = GEN_INT (pos
);
2189 if (recog_memoized (insn
) >= 0)
2192 /* Otherwise, restore old position. XEXP (x, 0) will be
2194 XEXP (outerdest
, 2) = old_pos
;
2198 /* If we get here, the bit-field store doesn't allow memory
2199 or isn't located at a constant position. Load the value into
2200 a register, do the store, and put it back into memory. */
2202 tem1
= gen_reg_rtx (GET_MODE (tem
));
2203 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
2204 emit_insn_after (gen_move_insn (tem
, tem1
), insn
);
2205 XEXP (outerdest
, 0) = tem1
;
2210 /* STRICT_LOW_PART is a no-op on memory references
2211 and it can cause combinations to be unrecognizable,
2214 if (dest
== var
&& GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2215 SET_DEST (x
) = XEXP (SET_DEST (x
), 0);
2217 /* A valid insn to copy VAR into or out of a register
2218 must be left alone, to avoid an infinite loop here.
2219 If the reference to VAR is by a subreg, fix that up,
2220 since SUBREG is not valid for a memref.
2221 Also fix up the address of the stack slot.
2223 Note that we must not try to recognize the insn until
2224 after we know that we have valid addresses and no
2225 (subreg (mem ...) ...) constructs, since these interfere
2226 with determining the validity of the insn. */
2228 if ((SET_SRC (x
) == var
2229 || (GET_CODE (SET_SRC (x
)) == SUBREG
2230 && SUBREG_REG (SET_SRC (x
)) == var
))
2231 && (GET_CODE (SET_DEST (x
)) == REG
2232 || (GET_CODE (SET_DEST (x
)) == SUBREG
2233 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
))
2234 && GET_MODE (var
) == promoted_mode
2235 && x
== single_set (insn
))
2239 replacement
= find_fixup_replacement (replacements
, SET_SRC (x
));
2240 if (replacement
->new)
2241 SET_SRC (x
) = replacement
->new;
2242 else if (GET_CODE (SET_SRC (x
)) == SUBREG
)
2243 SET_SRC (x
) = replacement
->new
2244 = fixup_memory_subreg (SET_SRC (x
), insn
, 0);
2246 SET_SRC (x
) = replacement
->new
2247 = fixup_stack_1 (SET_SRC (x
), insn
);
2249 if (recog_memoized (insn
) >= 0)
2252 /* INSN is not valid, but we know that we want to
2253 copy SET_SRC (x) to SET_DEST (x) in some way. So
2254 we generate the move and see whether it requires more
2255 than one insn. If it does, we emit those insns and
2256 delete INSN. Otherwise, we can just replace the pattern
2257 of INSN; we have already verified above that INSN has
2258 no other function than to do X. */
2260 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2261 if (GET_CODE (pat
) == SEQUENCE
)
2263 emit_insn_after (pat
, insn
);
2264 PUT_CODE (insn
, NOTE
);
2265 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2266 NOTE_SOURCE_FILE (insn
) = 0;
2269 PATTERN (insn
) = pat
;
2274 if ((SET_DEST (x
) == var
2275 || (GET_CODE (SET_DEST (x
)) == SUBREG
2276 && SUBREG_REG (SET_DEST (x
)) == var
))
2277 && (GET_CODE (SET_SRC (x
)) == REG
2278 || (GET_CODE (SET_SRC (x
)) == SUBREG
2279 && GET_CODE (SUBREG_REG (SET_SRC (x
))) == REG
))
2280 && GET_MODE (var
) == promoted_mode
2281 && x
== single_set (insn
))
2285 if (GET_CODE (SET_DEST (x
)) == SUBREG
)
2286 SET_DEST (x
) = fixup_memory_subreg (SET_DEST (x
), insn
, 0);
2288 SET_DEST (x
) = fixup_stack_1 (SET_DEST (x
), insn
);
2290 if (recog_memoized (insn
) >= 0)
2293 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2294 if (GET_CODE (pat
) == SEQUENCE
)
2296 emit_insn_after (pat
, insn
);
2297 PUT_CODE (insn
, NOTE
);
2298 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2299 NOTE_SOURCE_FILE (insn
) = 0;
2302 PATTERN (insn
) = pat
;
2307 /* Otherwise, storing into VAR must be handled specially
2308 by storing into a temporary and copying that into VAR
2309 with a new insn after this one. Note that this case
2310 will be used when storing into a promoted scalar since
2311 the insn will now have different modes on the input
2312 and output and hence will be invalid (except for the case
2313 of setting it to a constant, which does not need any
2314 change if it is valid). We generate extra code in that case,
2315 but combine.c will eliminate it. */
2320 rtx fixeddest
= SET_DEST (x
);
2322 /* STRICT_LOW_PART can be discarded, around a MEM. */
2323 if (GET_CODE (fixeddest
) == STRICT_LOW_PART
)
2324 fixeddest
= XEXP (fixeddest
, 0);
2325 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2326 if (GET_CODE (fixeddest
) == SUBREG
)
2328 fixeddest
= fixup_memory_subreg (fixeddest
, insn
, 0);
2329 promoted_mode
= GET_MODE (fixeddest
);
2332 fixeddest
= fixup_stack_1 (fixeddest
, insn
);
2334 temp
= gen_reg_rtx (promoted_mode
);
2336 emit_insn_after (gen_move_insn (fixeddest
,
2337 gen_lowpart (GET_MODE (fixeddest
),
2341 SET_DEST (x
) = temp
;
2349 /* Nothing special about this RTX; fix its operands. */
2351 fmt
= GET_RTX_FORMAT (code
);
2352 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2355 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (x
, i
), insn
, replacements
);
2359 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2360 fixup_var_refs_1 (var
, promoted_mode
, &XVECEXP (x
, i
, j
),
2361 insn
, replacements
);
2366 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2367 return an rtx (MEM:m1 newaddr) which is equivalent.
2368 If any insns must be emitted to compute NEWADDR, put them before INSN.
2370 UNCRITICAL nonzero means accept paradoxical subregs.
2371 This is used for subregs found inside REG_NOTES. */
2374 fixup_memory_subreg (x
, insn
, uncritical
)
2379 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
2380 rtx addr
= XEXP (SUBREG_REG (x
), 0);
2381 enum machine_mode mode
= GET_MODE (x
);
2384 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2385 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))
2389 if (BYTES_BIG_ENDIAN
)
2390 offset
+= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2391 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
2392 addr
= plus_constant (addr
, offset
);
2393 if (!flag_force_addr
&& memory_address_p (mode
, addr
))
2394 /* Shortcut if no insns need be emitted. */
2395 return change_address (SUBREG_REG (x
), mode
, addr
);
2397 result
= change_address (SUBREG_REG (x
), mode
, addr
);
2398 emit_insn_before (gen_sequence (), insn
);
2403 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2404 Replace subexpressions of X in place.
2405 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2406 Otherwise return X, with its contents possibly altered.
2408 If any insns must be emitted to compute NEWADDR, put them before INSN.
2410 UNCRITICAL is as in fixup_memory_subreg. */
2413 walk_fixup_memory_subreg (x
, insn
, uncritical
)
2418 register enum rtx_code code
;
2425 code
= GET_CODE (x
);
2427 if (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == MEM
)
2428 return fixup_memory_subreg (x
, insn
, uncritical
);
2430 /* Nothing special about this RTX; fix its operands. */
2432 fmt
= GET_RTX_FORMAT (code
);
2433 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2436 XEXP (x
, i
) = walk_fixup_memory_subreg (XEXP (x
, i
), insn
, uncritical
);
2440 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2442 = walk_fixup_memory_subreg (XVECEXP (x
, i
, j
), insn
, uncritical
);
2448 /* For each memory ref within X, if it refers to a stack slot
2449 with an out of range displacement, put the address in a temp register
2450 (emitting new insns before INSN to load these registers)
2451 and alter the memory ref to use that register.
2452 Replace each such MEM rtx with a copy, to avoid clobberage. */
2455 fixup_stack_1 (x
, insn
)
2460 register RTX_CODE code
= GET_CODE (x
);
2465 register rtx ad
= XEXP (x
, 0);
2466 /* If we have address of a stack slot but it's not valid
2467 (displacement is too large), compute the sum in a register. */
2468 if (GET_CODE (ad
) == PLUS
2469 && GET_CODE (XEXP (ad
, 0)) == REG
2470 && ((REGNO (XEXP (ad
, 0)) >= FIRST_VIRTUAL_REGISTER
2471 && REGNO (XEXP (ad
, 0)) <= LAST_VIRTUAL_REGISTER
)
2472 || REGNO (XEXP (ad
, 0)) == FRAME_POINTER_REGNUM
2473 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2474 || REGNO (XEXP (ad
, 0)) == HARD_FRAME_POINTER_REGNUM
2476 || REGNO (XEXP (ad
, 0)) == STACK_POINTER_REGNUM
2477 || REGNO (XEXP (ad
, 0)) == ARG_POINTER_REGNUM
2478 || XEXP (ad
, 0) == current_function_internal_arg_pointer
)
2479 && GET_CODE (XEXP (ad
, 1)) == CONST_INT
)
2482 if (memory_address_p (GET_MODE (x
), ad
))
2486 temp
= copy_to_reg (ad
);
2487 seq
= gen_sequence ();
2489 emit_insn_before (seq
, insn
);
2490 return change_address (x
, VOIDmode
, temp
);
2495 fmt
= GET_RTX_FORMAT (code
);
2496 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2499 XEXP (x
, i
) = fixup_stack_1 (XEXP (x
, i
), insn
);
2503 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2504 XVECEXP (x
, i
, j
) = fixup_stack_1 (XVECEXP (x
, i
, j
), insn
);
2510 /* Optimization: a bit-field instruction whose field
2511 happens to be a byte or halfword in memory
2512 can be changed to a move instruction.
2514 We call here when INSN is an insn to examine or store into a bit-field.
2515 BODY is the SET-rtx to be altered.
2517 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2518 (Currently this is called only from function.c, and EQUIV_MEM
2522 optimize_bit_field (body
, insn
, equiv_mem
)
2527 register rtx bitfield
;
2530 enum machine_mode mode
;
2532 if (GET_CODE (SET_DEST (body
)) == SIGN_EXTRACT
2533 || GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
)
2534 bitfield
= SET_DEST (body
), destflag
= 1;
2536 bitfield
= SET_SRC (body
), destflag
= 0;
2538 /* First check that the field being stored has constant size and position
2539 and is in fact a byte or halfword suitably aligned. */
2541 if (GET_CODE (XEXP (bitfield
, 1)) == CONST_INT
2542 && GET_CODE (XEXP (bitfield
, 2)) == CONST_INT
2543 && ((mode
= mode_for_size (INTVAL (XEXP (bitfield
, 1)), MODE_INT
, 1))
2545 && INTVAL (XEXP (bitfield
, 2)) % INTVAL (XEXP (bitfield
, 1)) == 0)
2547 register rtx memref
= 0;
2549 /* Now check that the containing word is memory, not a register,
2550 and that it is safe to change the machine mode. */
2552 if (GET_CODE (XEXP (bitfield
, 0)) == MEM
)
2553 memref
= XEXP (bitfield
, 0);
2554 else if (GET_CODE (XEXP (bitfield
, 0)) == REG
2556 memref
= equiv_mem
[REGNO (XEXP (bitfield
, 0))];
2557 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2558 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == MEM
)
2559 memref
= SUBREG_REG (XEXP (bitfield
, 0));
2560 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2562 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == REG
)
2563 memref
= equiv_mem
[REGNO (SUBREG_REG (XEXP (bitfield
, 0)))];
2566 && ! mode_dependent_address_p (XEXP (memref
, 0))
2567 && ! MEM_VOLATILE_P (memref
))
2569 /* Now adjust the address, first for any subreg'ing
2570 that we are now getting rid of,
2571 and then for which byte of the word is wanted. */
2573 HOST_WIDE_INT offset
= INTVAL (XEXP (bitfield
, 2));
2576 /* Adjust OFFSET to count bits from low-address byte. */
2577 if (BITS_BIG_ENDIAN
!= BYTES_BIG_ENDIAN
)
2578 offset
= (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield
, 0)))
2579 - offset
- INTVAL (XEXP (bitfield
, 1)));
2581 /* Adjust OFFSET to count bytes from low-address byte. */
2582 offset
/= BITS_PER_UNIT
;
2583 if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
)
2585 offset
+= SUBREG_WORD (XEXP (bitfield
, 0)) * UNITS_PER_WORD
;
2586 if (BYTES_BIG_ENDIAN
)
2587 offset
-= (MIN (UNITS_PER_WORD
,
2588 GET_MODE_SIZE (GET_MODE (XEXP (bitfield
, 0))))
2589 - MIN (UNITS_PER_WORD
,
2590 GET_MODE_SIZE (GET_MODE (memref
))));
2594 memref
= change_address (memref
, mode
,
2595 plus_constant (XEXP (memref
, 0), offset
));
2596 insns
= get_insns ();
2598 emit_insns_before (insns
, insn
);
2600 /* Store this memory reference where
2601 we found the bit field reference. */
2605 validate_change (insn
, &SET_DEST (body
), memref
, 1);
2606 if (! CONSTANT_ADDRESS_P (SET_SRC (body
)))
2608 rtx src
= SET_SRC (body
);
2609 while (GET_CODE (src
) == SUBREG
2610 && SUBREG_WORD (src
) == 0)
2611 src
= SUBREG_REG (src
);
2612 if (GET_MODE (src
) != GET_MODE (memref
))
2613 src
= gen_lowpart (GET_MODE (memref
), SET_SRC (body
));
2614 validate_change (insn
, &SET_SRC (body
), src
, 1);
2616 else if (GET_MODE (SET_SRC (body
)) != VOIDmode
2617 && GET_MODE (SET_SRC (body
)) != GET_MODE (memref
))
2618 /* This shouldn't happen because anything that didn't have
2619 one of these modes should have got converted explicitly
2620 and then referenced through a subreg.
2621 This is so because the original bit-field was
2622 handled by agg_mode and so its tree structure had
2623 the same mode that memref now has. */
2628 rtx dest
= SET_DEST (body
);
2630 while (GET_CODE (dest
) == SUBREG
2631 && SUBREG_WORD (dest
) == 0
2632 && (GET_MODE_CLASS (GET_MODE (dest
))
2633 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest
)))))
2634 dest
= SUBREG_REG (dest
);
2636 validate_change (insn
, &SET_DEST (body
), dest
, 1);
2638 if (GET_MODE (dest
) == GET_MODE (memref
))
2639 validate_change (insn
, &SET_SRC (body
), memref
, 1);
2642 /* Convert the mem ref to the destination mode. */
2643 rtx newreg
= gen_reg_rtx (GET_MODE (dest
));
2646 convert_move (newreg
, memref
,
2647 GET_CODE (SET_SRC (body
)) == ZERO_EXTRACT
);
2651 validate_change (insn
, &SET_SRC (body
), newreg
, 1);
2655 /* See if we can convert this extraction or insertion into
2656 a simple move insn. We might not be able to do so if this
2657 was, for example, part of a PARALLEL.
2659 If we succeed, write out any needed conversions. If we fail,
2660 it is hard to guess why we failed, so don't do anything
2661 special; just let the optimization be suppressed. */
2663 if (apply_change_group () && seq
)
2664 emit_insns_before (seq
, insn
);
2669 /* These routines are responsible for converting virtual register references
2670 to the actual hard register references once RTL generation is complete.
2672 The following four variables are used for communication between the
2673 routines. They contain the offsets of the virtual registers from their
2674 respective hard registers. */
2676 static int in_arg_offset
;
2677 static int var_offset
;
2678 static int dynamic_offset
;
2679 static int out_arg_offset
;
2681 /* In most machines, the stack pointer register is equivalent to the bottom
2684 #ifndef STACK_POINTER_OFFSET
2685 #define STACK_POINTER_OFFSET 0
2688 /* If not defined, pick an appropriate default for the offset of dynamically
2689 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2690 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2692 #ifndef STACK_DYNAMIC_OFFSET
2694 #ifdef ACCUMULATE_OUTGOING_ARGS
2695 /* The bottom of the stack points to the actual arguments. If
2696 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2697 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
2698 stack space for register parameters is not pushed by the caller, but
2699 rather part of the fixed stack areas and hence not included in
2700 `current_function_outgoing_args_size'. Nevertheless, we must allow
2701 for it when allocating stack dynamic objects. */
2703 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2704 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2705 (current_function_outgoing_args_size \
2706 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2709 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2710 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2714 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2718 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2719 its address taken. DECL is the decl for the object stored in the
2720 register, for later use if we do need to force REG into the stack.
2721 REG is overwritten by the MEM like in put_reg_into_stack. */
2724 gen_mem_addressof (reg
, decl
)
2728 tree type
= TREE_TYPE (decl
);
2730 rtx r
= gen_rtx_ADDRESSOF (Pmode
, gen_reg_rtx (GET_MODE (reg
)), REGNO (reg
));
2731 SET_ADDRESSOF_DECL (r
, decl
);
2734 PUT_CODE (reg
, MEM
);
2735 PUT_MODE (reg
, DECL_MODE (decl
));
2736 MEM_VOLATILE_P (reg
) = TREE_SIDE_EFFECTS (decl
);
2737 MEM_IN_STRUCT_P (reg
) = AGGREGATE_TYPE_P (type
);
2739 if (TREE_USED (decl
) || DECL_INITIAL (decl
) != 0)
2740 fixup_var_refs (reg
, GET_MODE (reg
), TREE_UNSIGNED (type
));
2745 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2748 flush_addressof (decl
)
2751 if ((TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == VAR_DECL
)
2752 && DECL_RTL (decl
) != 0
2753 && GET_CODE (DECL_RTL (decl
)) == MEM
2754 && GET_CODE (XEXP (DECL_RTL (decl
), 0)) == ADDRESSOF
2755 && GET_CODE (XEXP (XEXP (DECL_RTL (decl
), 0), 0)) == REG
)
2756 put_addressof_into_stack (XEXP (DECL_RTL (decl
), 0));
2759 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2762 put_addressof_into_stack (r
)
2765 tree decl
= ADDRESSOF_DECL (r
);
2766 rtx reg
= XEXP (r
, 0);
2768 if (GET_CODE (reg
) != REG
)
2771 put_reg_into_stack (0, reg
, TREE_TYPE (decl
), GET_MODE (reg
),
2772 DECL_MODE (decl
), TREE_SIDE_EFFECTS (decl
),
2773 ADDRESSOF_REGNO (r
),
2774 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0);
2777 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2778 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2782 purge_addressof_1 (loc
, insn
, force
)
2792 /* Re-start here to avoid recursion in common cases. */
2799 code
= GET_CODE (x
);
2801 if (code
== ADDRESSOF
&& GET_CODE (XEXP (x
, 0)) == MEM
)
2804 /* We must create a copy of the rtx because it was created by
2805 overwriting a REG rtx which is always shared. */
2806 rtx sub
= copy_rtx (XEXP (XEXP (x
, 0), 0));
2808 if (validate_change (insn
, loc
, sub
, 0))
2812 if (! validate_change (insn
, loc
,
2813 force_operand (sub
, NULL_RTX
),
2817 insns
= get_insns ();
2819 emit_insns_before (insns
, insn
);
2822 else if (code
== MEM
&& GET_CODE (XEXP (x
, 0)) == ADDRESSOF
&& ! force
)
2824 rtx sub
= XEXP (XEXP (x
, 0), 0);
2826 if (GET_CODE (sub
) == MEM
)
2827 sub
= gen_rtx_MEM (GET_MODE (x
), copy_rtx (XEXP (sub
, 0)));
2829 if (GET_CODE (sub
) == REG
2830 && (MEM_VOLATILE_P (x
) || GET_MODE (x
) == BLKmode
))
2832 put_addressof_into_stack (XEXP (x
, 0));
2835 else if (GET_CODE (sub
) == REG
&& GET_MODE (x
) != GET_MODE (sub
))
2837 if (! BYTES_BIG_ENDIAN
&& ! WORDS_BIG_ENDIAN
)
2839 rtx sub2
= gen_rtx_SUBREG (GET_MODE (x
), sub
, 0);
2840 if (validate_change (insn
, loc
, sub2
, 0))
2844 else if (validate_change (insn
, loc
, sub
, 0))
2846 /* else give up and put it into the stack */
2848 else if (code
== ADDRESSOF
)
2850 put_addressof_into_stack (x
);
2854 /* Scan all subexpressions. */
2855 fmt
= GET_RTX_FORMAT (code
);
2856 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
2859 purge_addressof_1 (&XEXP (x
, i
), insn
, force
);
2860 else if (*fmt
== 'E')
2861 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2862 purge_addressof_1 (&XVECEXP (x
, i
, j
), insn
, force
);
2866 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2867 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2871 purge_addressof (insns
)
2875 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
2876 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
2877 || GET_CODE (insn
) == CALL_INSN
)
2879 purge_addressof_1 (&PATTERN (insn
), insn
,
2880 asm_noperands (PATTERN (insn
)) > 0);
2881 purge_addressof_1 (®_NOTES (insn
), NULL_RTX
, 0);
2885 /* Pass through the INSNS of function FNDECL and convert virtual register
2886 references to hard register references. */
2889 instantiate_virtual_regs (fndecl
, insns
)
2896 /* Compute the offsets to use for this function. */
2897 in_arg_offset
= FIRST_PARM_OFFSET (fndecl
);
2898 var_offset
= STARTING_FRAME_OFFSET
;
2899 dynamic_offset
= STACK_DYNAMIC_OFFSET (fndecl
);
2900 out_arg_offset
= STACK_POINTER_OFFSET
;
2902 /* Scan all variables and parameters of this function. For each that is
2903 in memory, instantiate all virtual registers if the result is a valid
2904 address. If not, we do it later. That will handle most uses of virtual
2905 regs on many machines. */
2906 instantiate_decls (fndecl
, 1);
2908 /* Initialize recognition, indicating that volatile is OK. */
2911 /* Scan through all the insns, instantiating every virtual register still
2913 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
2914 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
2915 || GET_CODE (insn
) == CALL_INSN
)
2917 instantiate_virtual_regs_1 (&PATTERN (insn
), insn
, 1);
2918 instantiate_virtual_regs_1 (®_NOTES (insn
), NULL_RTX
, 0);
2921 /* Instantiate the stack slots for the parm registers, for later use in
2922 addressof elimination. */
2923 for (i
= 0; i
< max_parm_reg
; ++i
)
2924 if (parm_reg_stack_loc
[i
])
2925 instantiate_virtual_regs_1 (&parm_reg_stack_loc
[i
], NULL_RTX
, 0);
2927 /* Now instantiate the remaining register equivalences for debugging info.
2928 These will not be valid addresses. */
2929 instantiate_decls (fndecl
, 0);
2931 /* Indicate that, from now on, assign_stack_local should use
2932 frame_pointer_rtx. */
2933 virtuals_instantiated
= 1;
2936 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2937 all virtual registers in their DECL_RTL's.
2939 If VALID_ONLY, do this only if the resulting address is still valid.
2940 Otherwise, always do it. */
2943 instantiate_decls (fndecl
, valid_only
)
2949 if (DECL_SAVED_INSNS (fndecl
))
2950 /* When compiling an inline function, the obstack used for
2951 rtl allocation is the maybepermanent_obstack. Calling
2952 `resume_temporary_allocation' switches us back to that
2953 obstack while we process this function's parameters. */
2954 resume_temporary_allocation ();
2956 /* Process all parameters of the function. */
2957 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= TREE_CHAIN (decl
))
2959 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
2961 instantiate_decl (DECL_RTL (decl
), size
, valid_only
);
2963 /* If the parameter was promoted, then the incoming RTL mode may be
2964 larger than the declared type size. We must use the larger of
2966 size
= MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl
))), size
);
2967 instantiate_decl (DECL_INCOMING_RTL (decl
), size
, valid_only
);
2970 /* Now process all variables defined in the function or its subblocks. */
2971 instantiate_decls_1 (DECL_INITIAL (fndecl
), valid_only
);
2973 if (DECL_INLINE (fndecl
) || DECL_DEFER_OUTPUT (fndecl
))
2975 /* Save all rtl allocated for this function by raising the
2976 high-water mark on the maybepermanent_obstack. */
2978 /* All further rtl allocation is now done in the current_obstack. */
2979 rtl_in_current_obstack ();
2983 /* Subroutine of instantiate_decls: Process all decls in the given
2984 BLOCK node and all its subblocks. */
2987 instantiate_decls_1 (let
, valid_only
)
2993 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
2994 instantiate_decl (DECL_RTL (t
), int_size_in_bytes (TREE_TYPE (t
)),
2997 /* Process all subblocks. */
2998 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= TREE_CHAIN (t
))
2999 instantiate_decls_1 (t
, valid_only
);
3002 /* Subroutine of the preceding procedures: Given RTL representing a
3003 decl and the size of the object, do any instantiation required.
3005 If VALID_ONLY is non-zero, it means that the RTL should only be
3006 changed if the new address is valid. */
3009 instantiate_decl (x
, size
, valid_only
)
3014 enum machine_mode mode
;
3017 /* If this is not a MEM, no need to do anything. Similarly if the
3018 address is a constant or a register that is not a virtual register. */
3020 if (x
== 0 || GET_CODE (x
) != MEM
)
3024 if (CONSTANT_P (addr
)
3025 || (GET_CODE (addr
) == ADDRESSOF
&& GET_CODE (XEXP (addr
, 0)) == REG
)
3026 || (GET_CODE (addr
) == REG
3027 && (REGNO (addr
) < FIRST_VIRTUAL_REGISTER
3028 || REGNO (addr
) > LAST_VIRTUAL_REGISTER
)))
3031 /* If we should only do this if the address is valid, copy the address.
3032 We need to do this so we can undo any changes that might make the
3033 address invalid. This copy is unfortunate, but probably can't be
3037 addr
= copy_rtx (addr
);
3039 instantiate_virtual_regs_1 (&addr
, NULL_RTX
, 0);
3043 /* Now verify that the resulting address is valid for every integer or
3044 floating-point mode up to and including SIZE bytes long. We do this
3045 since the object might be accessed in any mode and frame addresses
3048 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3049 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
3050 mode
= GET_MODE_WIDER_MODE (mode
))
3051 if (! memory_address_p (mode
, addr
))
3054 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
3055 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
3056 mode
= GET_MODE_WIDER_MODE (mode
))
3057 if (! memory_address_p (mode
, addr
))
3061 /* Put back the address now that we have updated it and we either know
3062 it is valid or we don't care whether it is valid. */
3067 /* Given a pointer to a piece of rtx and an optional pointer to the
3068 containing object, instantiate any virtual registers present in it.
3070 If EXTRA_INSNS, we always do the replacement and generate
3071 any extra insns before OBJECT. If it zero, we do nothing if replacement
3074 Return 1 if we either had nothing to do or if we were able to do the
3075 needed replacement. Return 0 otherwise; we only return zero if
3076 EXTRA_INSNS is zero.
3078 We first try some simple transformations to avoid the creation of extra
3082 instantiate_virtual_regs_1 (loc
, object
, extra_insns
)
3090 HOST_WIDE_INT offset
;
3096 /* Re-start here to avoid recursion in common cases. */
3103 code
= GET_CODE (x
);
3105 /* Check for some special cases. */
3122 /* We are allowed to set the virtual registers. This means that
3123 the actual register should receive the source minus the
3124 appropriate offset. This is used, for example, in the handling
3125 of non-local gotos. */
3126 if (SET_DEST (x
) == virtual_incoming_args_rtx
)
3127 new = arg_pointer_rtx
, offset
= - in_arg_offset
;
3128 else if (SET_DEST (x
) == virtual_stack_vars_rtx
)
3129 new = frame_pointer_rtx
, offset
= - var_offset
;
3130 else if (SET_DEST (x
) == virtual_stack_dynamic_rtx
)
3131 new = stack_pointer_rtx
, offset
= - dynamic_offset
;
3132 else if (SET_DEST (x
) == virtual_outgoing_args_rtx
)
3133 new = stack_pointer_rtx
, offset
= - out_arg_offset
;
3137 /* The only valid sources here are PLUS or REG. Just do
3138 the simplest possible thing to handle them. */
3139 if (GET_CODE (SET_SRC (x
)) != REG
3140 && GET_CODE (SET_SRC (x
)) != PLUS
)
3144 if (GET_CODE (SET_SRC (x
)) != REG
)
3145 temp
= force_operand (SET_SRC (x
), NULL_RTX
);
3148 temp
= force_operand (plus_constant (temp
, offset
), NULL_RTX
);
3152 emit_insns_before (seq
, object
);
3155 if (! validate_change (object
, &SET_SRC (x
), temp
, 0)
3162 instantiate_virtual_regs_1 (&SET_DEST (x
), object
, extra_insns
);
3167 /* Handle special case of virtual register plus constant. */
3168 if (CONSTANT_P (XEXP (x
, 1)))
3170 rtx old
, new_offset
;
3172 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3173 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
3175 rtx inner
= XEXP (XEXP (x
, 0), 0);
3177 if (inner
== virtual_incoming_args_rtx
)
3178 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3179 else if (inner
== virtual_stack_vars_rtx
)
3180 new = frame_pointer_rtx
, offset
= var_offset
;
3181 else if (inner
== virtual_stack_dynamic_rtx
)
3182 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3183 else if (inner
== virtual_outgoing_args_rtx
)
3184 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3191 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 1), object
,
3193 new = gen_rtx_PLUS (Pmode
, new, XEXP (XEXP (x
, 0), 1));
3196 else if (XEXP (x
, 0) == virtual_incoming_args_rtx
)
3197 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3198 else if (XEXP (x
, 0) == virtual_stack_vars_rtx
)
3199 new = frame_pointer_rtx
, offset
= var_offset
;
3200 else if (XEXP (x
, 0) == virtual_stack_dynamic_rtx
)
3201 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3202 else if (XEXP (x
, 0) == virtual_outgoing_args_rtx
)
3203 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3206 /* We know the second operand is a constant. Unless the
3207 first operand is a REG (which has been already checked),
3208 it needs to be checked. */
3209 if (GET_CODE (XEXP (x
, 0)) != REG
)
3217 new_offset
= plus_constant (XEXP (x
, 1), offset
);
3219 /* If the new constant is zero, try to replace the sum with just
3221 if (new_offset
== const0_rtx
3222 && validate_change (object
, loc
, new, 0))
3225 /* Next try to replace the register and new offset.
3226 There are two changes to validate here and we can't assume that
3227 in the case of old offset equals new just changing the register
3228 will yield a valid insn. In the interests of a little efficiency,
3229 however, we only call validate change once (we don't queue up the
3230 changes and then call apply_change_group). */
3234 ? ! validate_change (object
, &XEXP (x
, 0), new, 0)
3235 : (XEXP (x
, 0) = new,
3236 ! validate_change (object
, &XEXP (x
, 1), new_offset
, 0)))
3244 /* Otherwise copy the new constant into a register and replace
3245 constant with that register. */
3246 temp
= gen_reg_rtx (Pmode
);
3248 if (validate_change (object
, &XEXP (x
, 1), temp
, 0))
3249 emit_insn_before (gen_move_insn (temp
, new_offset
), object
);
3252 /* If that didn't work, replace this expression with a
3253 register containing the sum. */
3256 new = gen_rtx_PLUS (Pmode
, new, new_offset
);
3259 temp
= force_operand (new, NULL_RTX
);
3263 emit_insns_before (seq
, object
);
3264 if (! validate_change (object
, loc
, temp
, 0)
3265 && ! validate_replace_rtx (x
, temp
, object
))
3273 /* Fall through to generic two-operand expression case. */
3279 case DIV
: case UDIV
:
3280 case MOD
: case UMOD
:
3281 case AND
: case IOR
: case XOR
:
3282 case ROTATERT
: case ROTATE
:
3283 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
3285 case GE
: case GT
: case GEU
: case GTU
:
3286 case LE
: case LT
: case LEU
: case LTU
:
3287 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
3288 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
3293 /* Most cases of MEM that convert to valid addresses have already been
3294 handled by our scan of decls. The only special handling we
3295 need here is to make a copy of the rtx to ensure it isn't being
3296 shared if we have to change it to a pseudo.
3298 If the rtx is a simple reference to an address via a virtual register,
3299 it can potentially be shared. In such cases, first try to make it
3300 a valid address, which can also be shared. Otherwise, copy it and
3303 First check for common cases that need no processing. These are
3304 usually due to instantiation already being done on a previous instance
3308 if (CONSTANT_ADDRESS_P (temp
)
3309 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3310 || temp
== arg_pointer_rtx
3312 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3313 || temp
== hard_frame_pointer_rtx
3315 || temp
== frame_pointer_rtx
)
3318 if (GET_CODE (temp
) == PLUS
3319 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
3320 && (XEXP (temp
, 0) == frame_pointer_rtx
3321 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3322 || XEXP (temp
, 0) == hard_frame_pointer_rtx
3324 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3325 || XEXP (temp
, 0) == arg_pointer_rtx
3330 if (temp
== virtual_stack_vars_rtx
3331 || temp
== virtual_incoming_args_rtx
3332 || (GET_CODE (temp
) == PLUS
3333 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
3334 && (XEXP (temp
, 0) == virtual_stack_vars_rtx
3335 || XEXP (temp
, 0) == virtual_incoming_args_rtx
)))
3337 /* This MEM may be shared. If the substitution can be done without
3338 the need to generate new pseudos, we want to do it in place
3339 so all copies of the shared rtx benefit. The call below will
3340 only make substitutions if the resulting address is still
3343 Note that we cannot pass X as the object in the recursive call
3344 since the insn being processed may not allow all valid
3345 addresses. However, if we were not passed on object, we can
3346 only modify X without copying it if X will have a valid
3349 ??? Also note that this can still lose if OBJECT is an insn that
3350 has less restrictions on an address that some other insn.
3351 In that case, we will modify the shared address. This case
3352 doesn't seem very likely, though. One case where this could
3353 happen is in the case of a USE or CLOBBER reference, but we
3354 take care of that below. */
3356 if (instantiate_virtual_regs_1 (&XEXP (x
, 0),
3357 object
? object
: x
, 0))
3360 /* Otherwise make a copy and process that copy. We copy the entire
3361 RTL expression since it might be a PLUS which could also be
3363 *loc
= x
= copy_rtx (x
);
3366 /* Fall through to generic unary operation case. */
3368 case STRICT_LOW_PART
:
3370 case PRE_DEC
: case PRE_INC
: case POST_DEC
: case POST_INC
:
3371 case SIGN_EXTEND
: case ZERO_EXTEND
:
3372 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
3373 case FLOAT
: case FIX
:
3374 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
3378 /* These case either have just one operand or we know that we need not
3379 check the rest of the operands. */
3385 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3386 go ahead and make the invalid one, but do it to a copy. For a REG,
3387 just make the recursive call, since there's no chance of a problem. */
3389 if ((GET_CODE (XEXP (x
, 0)) == MEM
3390 && instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), XEXP (x
, 0),
3392 || (GET_CODE (XEXP (x
, 0)) == REG
3393 && instantiate_virtual_regs_1 (&XEXP (x
, 0), object
, 0)))
3396 XEXP (x
, 0) = copy_rtx (XEXP (x
, 0));
3401 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3402 in front of this insn and substitute the temporary. */
3403 if (x
== virtual_incoming_args_rtx
)
3404 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3405 else if (x
== virtual_stack_vars_rtx
)
3406 new = frame_pointer_rtx
, offset
= var_offset
;
3407 else if (x
== virtual_stack_dynamic_rtx
)
3408 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3409 else if (x
== virtual_outgoing_args_rtx
)
3410 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3414 temp
= plus_constant (new, offset
);
3415 if (!validate_change (object
, loc
, temp
, 0))
3421 temp
= force_operand (temp
, NULL_RTX
);
3425 emit_insns_before (seq
, object
);
3426 if (! validate_change (object
, loc
, temp
, 0)
3427 && ! validate_replace_rtx (x
, temp
, object
))
3435 if (GET_CODE (XEXP (x
, 0)) == REG
)
3438 else if (GET_CODE (XEXP (x
, 0)) == MEM
)
3440 /* If we have a (addressof (mem ..)), do any instantiation inside
3441 since we know we'll be making the inside valid when we finally
3442 remove the ADDRESSOF. */
3443 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), NULL_RTX
, 0);
3452 /* Scan all subexpressions. */
3453 fmt
= GET_RTX_FORMAT (code
);
3454 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3457 if (!instantiate_virtual_regs_1 (&XEXP (x
, i
), object
, extra_insns
))
3460 else if (*fmt
== 'E')
3461 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3462 if (! instantiate_virtual_regs_1 (&XVECEXP (x
, i
, j
), object
,
3469 /* Optimization: assuming this function does not receive nonlocal gotos,
3470 delete the handlers for such, as well as the insns to establish
3471 and disestablish them. */
3477 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3479 /* Delete the handler by turning off the flag that would
3480 prevent jump_optimize from deleting it.
3481 Also permit deletion of the nonlocal labels themselves
3482 if nothing local refers to them. */
3483 if (GET_CODE (insn
) == CODE_LABEL
)
3487 LABEL_PRESERVE_P (insn
) = 0;
3489 /* Remove it from the nonlocal_label list, to avoid confusing
3491 for (t
= nonlocal_labels
, last_t
= 0; t
;
3492 last_t
= t
, t
= TREE_CHAIN (t
))
3493 if (DECL_RTL (TREE_VALUE (t
)) == insn
)
3498 nonlocal_labels
= TREE_CHAIN (nonlocal_labels
);
3500 TREE_CHAIN (last_t
) = TREE_CHAIN (t
);
3503 if (GET_CODE (insn
) == INSN
3504 && ((nonlocal_goto_handler_slot
!= 0
3505 && reg_mentioned_p (nonlocal_goto_handler_slot
, PATTERN (insn
)))
3506 || (nonlocal_goto_stack_level
!= 0
3507 && reg_mentioned_p (nonlocal_goto_stack_level
,
3513 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3514 of the current function. */
3517 nonlocal_label_rtx_list ()
3522 for (t
= nonlocal_labels
; t
; t
= TREE_CHAIN (t
))
3523 x
= gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (TREE_VALUE (t
)), x
);
3528 /* Output a USE for any register use in RTL.
3529 This is used with -noreg to mark the extent of lifespan
3530 of any registers used in a user-visible variable's DECL_RTL. */
3536 if (GET_CODE (rtl
) == REG
)
3537 /* This is a register variable. */
3538 emit_insn (gen_rtx_USE (VOIDmode
, rtl
));
3539 else if (GET_CODE (rtl
) == MEM
3540 && GET_CODE (XEXP (rtl
, 0)) == REG
3541 && (REGNO (XEXP (rtl
, 0)) < FIRST_VIRTUAL_REGISTER
3542 || REGNO (XEXP (rtl
, 0)) > LAST_VIRTUAL_REGISTER
)
3543 && XEXP (rtl
, 0) != current_function_internal_arg_pointer
)
3544 /* This is a variable-sized structure. */
3545 emit_insn (gen_rtx_USE (VOIDmode
, XEXP (rtl
, 0)));
3548 /* Like use_variable except that it outputs the USEs after INSN
3549 instead of at the end of the insn-chain. */
3552 use_variable_after (rtl
, insn
)
3555 if (GET_CODE (rtl
) == REG
)
3556 /* This is a register variable. */
3557 emit_insn_after (gen_rtx_USE (VOIDmode
, rtl
), insn
);
3558 else if (GET_CODE (rtl
) == MEM
3559 && GET_CODE (XEXP (rtl
, 0)) == REG
3560 && (REGNO (XEXP (rtl
, 0)) < FIRST_VIRTUAL_REGISTER
3561 || REGNO (XEXP (rtl
, 0)) > LAST_VIRTUAL_REGISTER
)
3562 && XEXP (rtl
, 0) != current_function_internal_arg_pointer
)
3563 /* This is a variable-sized structure. */
3564 emit_insn_after (gen_rtx_USE (VOIDmode
, XEXP (rtl
, 0)), insn
);
3570 return max_parm_reg
;
3573 /* Return the first insn following those generated by `assign_parms'. */
3576 get_first_nonparm_insn ()
3579 return NEXT_INSN (last_parm_insn
);
3580 return get_insns ();
3583 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3584 Crash if there is none. */
3587 get_first_block_beg ()
3589 register rtx searcher
;
3590 register rtx insn
= get_first_nonparm_insn ();
3592 for (searcher
= insn
; searcher
; searcher
= NEXT_INSN (searcher
))
3593 if (GET_CODE (searcher
) == NOTE
3594 && NOTE_LINE_NUMBER (searcher
) == NOTE_INSN_BLOCK_BEG
)
3597 abort (); /* Invalid call to this function. (See comments above.) */
3601 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3602 This means a type for which function calls must pass an address to the
3603 function or get an address back from the function.
3604 EXP may be a type node or an expression (whose type is tested). */
3607 aggregate_value_p (exp
)
3610 int i
, regno
, nregs
;
3613 if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 't')
3616 type
= TREE_TYPE (exp
);
3618 if (RETURN_IN_MEMORY (type
))
3620 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3621 and thus can't be returned in registers. */
3622 if (TREE_ADDRESSABLE (type
))
3624 if (flag_pcc_struct_return
&& AGGREGATE_TYPE_P (type
))
3626 /* Make sure we have suitable call-clobbered regs to return
3627 the value in; if not, we must return it in memory. */
3628 reg
= hard_function_value (type
, 0);
3630 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3632 if (GET_CODE (reg
) != REG
)
3635 regno
= REGNO (reg
);
3636 nregs
= HARD_REGNO_NREGS (regno
, TYPE_MODE (type
));
3637 for (i
= 0; i
< nregs
; i
++)
3638 if (! call_used_regs
[regno
+ i
])
3643 /* Assign RTL expressions to the function's parameters.
3644 This may involve copying them into registers and using
3645 those registers as the RTL for them.
3647 If SECOND_TIME is non-zero it means that this function is being
3648 called a second time. This is done by integrate.c when a function's
3649 compilation is deferred. We need to come back here in case the
3650 FUNCTION_ARG macro computes items needed for the rest of the compilation
3651 (such as changing which registers are fixed or caller-saved). But suppress
3652 writing any insns or setting DECL_RTL of anything in this case. */
3655 assign_parms (fndecl
, second_time
)
3660 register rtx entry_parm
= 0;
3661 register rtx stack_parm
= 0;
3662 CUMULATIVE_ARGS args_so_far
;
3663 enum machine_mode promoted_mode
, passed_mode
;
3664 enum machine_mode nominal_mode
, promoted_nominal_mode
;
3666 /* Total space needed so far for args on the stack,
3667 given as a constant and a tree-expression. */
3668 struct args_size stack_args_size
;
3669 tree fntype
= TREE_TYPE (fndecl
);
3670 tree fnargs
= DECL_ARGUMENTS (fndecl
);
3671 /* This is used for the arg pointer when referring to stack args. */
3672 rtx internal_arg_pointer
;
3673 /* This is a dummy PARM_DECL that we used for the function result if
3674 the function returns a structure. */
3675 tree function_result_decl
= 0;
3676 int varargs_setup
= 0;
3677 rtx conversion_insns
= 0;
3679 /* Nonzero if the last arg is named `__builtin_va_alist',
3680 which is used on some machines for old-fashioned non-ANSI varargs.h;
3681 this should be stuck onto the stack as if it had arrived there. */
3683 = (current_function_varargs
3685 && (parm
= tree_last (fnargs
)) != 0
3687 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm
)),
3688 "__builtin_va_alist")));
3690 /* Nonzero if function takes extra anonymous args.
3691 This means the last named arg must be on the stack
3692 right before the anonymous ones. */
3694 = (TYPE_ARG_TYPES (fntype
) != 0
3695 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3696 != void_type_node
));
3698 current_function_stdarg
= stdarg
;
3700 /* If the reg that the virtual arg pointer will be translated into is
3701 not a fixed reg or is the stack pointer, make a copy of the virtual
3702 arg pointer, and address parms via the copy. The frame pointer is
3703 considered fixed even though it is not marked as such.
3705 The second time through, simply use ap to avoid generating rtx. */
3707 if ((ARG_POINTER_REGNUM
== STACK_POINTER_REGNUM
3708 || ! (fixed_regs
[ARG_POINTER_REGNUM
]
3709 || ARG_POINTER_REGNUM
== FRAME_POINTER_REGNUM
))
3711 internal_arg_pointer
= copy_to_reg (virtual_incoming_args_rtx
);
3713 internal_arg_pointer
= virtual_incoming_args_rtx
;
3714 current_function_internal_arg_pointer
= internal_arg_pointer
;
3716 stack_args_size
.constant
= 0;
3717 stack_args_size
.var
= 0;
3719 /* If struct value address is treated as the first argument, make it so. */
3720 if (aggregate_value_p (DECL_RESULT (fndecl
))
3721 && ! current_function_returns_pcc_struct
3722 && struct_value_incoming_rtx
== 0)
3724 tree type
= build_pointer_type (TREE_TYPE (fntype
));
3726 function_result_decl
= build_decl (PARM_DECL
, NULL_TREE
, type
);
3728 DECL_ARG_TYPE (function_result_decl
) = type
;
3729 TREE_CHAIN (function_result_decl
) = fnargs
;
3730 fnargs
= function_result_decl
;
3733 max_parm_reg
= LAST_VIRTUAL_REGISTER
+ 1;
3734 parm_reg_stack_loc
= (rtx
*) savealloc (max_parm_reg
* sizeof (rtx
));
3735 bzero ((char *) parm_reg_stack_loc
, max_parm_reg
* sizeof (rtx
));
3737 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3738 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far
, fntype
, NULL_RTX
);
3740 INIT_CUMULATIVE_ARGS (args_so_far
, fntype
, NULL_RTX
, 0);
3743 /* We haven't yet found an argument that we must push and pretend the
3745 current_function_pretend_args_size
= 0;
3747 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
3749 int aggregate
= AGGREGATE_TYPE_P (TREE_TYPE (parm
));
3750 struct args_size stack_offset
;
3751 struct args_size arg_size
;
3752 int passed_pointer
= 0;
3753 int did_conversion
= 0;
3754 tree passed_type
= DECL_ARG_TYPE (parm
);
3755 tree nominal_type
= TREE_TYPE (parm
);
3757 /* Set LAST_NAMED if this is last named arg before some
3759 int last_named
= ((TREE_CHAIN (parm
) == 0
3760 || DECL_NAME (TREE_CHAIN (parm
)) == 0)
3761 && (stdarg
|| current_function_varargs
));
3762 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3763 most machines, if this is a varargs/stdarg function, then we treat
3764 the last named arg as if it were anonymous too. */
3765 int named_arg
= STRICT_ARGUMENT_NAMING
? 1 : ! last_named
;
3767 if (TREE_TYPE (parm
) == error_mark_node
3768 /* This can happen after weird syntax errors
3769 or if an enum type is defined among the parms. */
3770 || TREE_CODE (parm
) != PARM_DECL
3771 || passed_type
== NULL
)
3773 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
)
3774 = gen_rtx_MEM (BLKmode
, const0_rtx
);
3775 TREE_USED (parm
) = 1;
3779 /* For varargs.h function, save info about regs and stack space
3780 used by the individual args, not including the va_alist arg. */
3781 if (hide_last_arg
&& last_named
)
3782 current_function_args_info
= args_so_far
;
3784 /* Find mode of arg as it is passed, and mode of arg
3785 as it should be during execution of this function. */
3786 passed_mode
= TYPE_MODE (passed_type
);
3787 nominal_mode
= TYPE_MODE (nominal_type
);
3789 /* If the parm's mode is VOID, its value doesn't matter,
3790 and avoid the usual things like emit_move_insn that could crash. */
3791 if (nominal_mode
== VOIDmode
)
3793 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
) = const0_rtx
;
3797 /* If the parm is to be passed as a transparent union, use the
3798 type of the first field for the tests below. We have already
3799 verified that the modes are the same. */
3800 if (DECL_TRANSPARENT_UNION (parm
)
3801 || TYPE_TRANSPARENT_UNION (passed_type
))
3802 passed_type
= TREE_TYPE (TYPE_FIELDS (passed_type
));
3804 /* See if this arg was passed by invisible reference. It is if
3805 it is an object whose size depends on the contents of the
3806 object itself or if the machine requires these objects be passed
3809 if ((TREE_CODE (TYPE_SIZE (passed_type
)) != INTEGER_CST
3810 && contains_placeholder_p (TYPE_SIZE (passed_type
)))
3811 || TREE_ADDRESSABLE (passed_type
)
3812 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3813 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, passed_mode
,
3814 passed_type
, named_arg
)
3818 passed_type
= nominal_type
= build_pointer_type (passed_type
);
3820 passed_mode
= nominal_mode
= Pmode
;
3823 promoted_mode
= passed_mode
;
3825 #ifdef PROMOTE_FUNCTION_ARGS
3826 /* Compute the mode in which the arg is actually extended to. */
3827 unsignedp
= TREE_UNSIGNED (passed_type
);
3828 promoted_mode
= promote_mode (passed_type
, promoted_mode
, &unsignedp
, 1);
3831 /* Let machine desc say which reg (if any) the parm arrives in.
3832 0 means it arrives on the stack. */
3833 #ifdef FUNCTION_INCOMING_ARG
3834 entry_parm
= FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
3835 passed_type
, named_arg
);
3837 entry_parm
= FUNCTION_ARG (args_so_far
, promoted_mode
,
3838 passed_type
, named_arg
);
3841 if (entry_parm
== 0)
3842 promoted_mode
= passed_mode
;
3844 #ifdef SETUP_INCOMING_VARARGS
3845 /* If this is the last named parameter, do any required setup for
3846 varargs or stdargs. We need to know about the case of this being an
3847 addressable type, in which case we skip the registers it
3848 would have arrived in.
3850 For stdargs, LAST_NAMED will be set for two parameters, the one that
3851 is actually the last named, and the dummy parameter. We only
3852 want to do this action once.
3854 Also, indicate when RTL generation is to be suppressed. */
3855 if (last_named
&& !varargs_setup
)
3857 SETUP_INCOMING_VARARGS (args_so_far
, promoted_mode
, passed_type
,
3858 current_function_pretend_args_size
,
3864 /* Determine parm's home in the stack,
3865 in case it arrives in the stack or we should pretend it did.
3867 Compute the stack position and rtx where the argument arrives
3870 There is one complexity here: If this was a parameter that would
3871 have been passed in registers, but wasn't only because it is
3872 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3873 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3874 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3875 0 as it was the previous time. */
3877 locate_and_pad_parm (promoted_mode
, passed_type
,
3878 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3881 #ifdef FUNCTION_INCOMING_ARG
3882 FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
3885 || varargs_setup
)) != 0,
3887 FUNCTION_ARG (args_so_far
, promoted_mode
,
3889 named_arg
|| varargs_setup
) != 0,
3892 fndecl
, &stack_args_size
, &stack_offset
, &arg_size
);
3896 rtx offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
3898 if (offset_rtx
== const0_rtx
)
3899 stack_parm
= gen_rtx_MEM (promoted_mode
, internal_arg_pointer
);
3901 stack_parm
= gen_rtx_MEM (promoted_mode
,
3902 gen_rtx_PLUS (Pmode
,
3903 internal_arg_pointer
,
3906 /* If this is a memory ref that contains aggregate components,
3907 mark it as such for cse and loop optimize. Likewise if it
3909 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
3910 RTX_UNCHANGING_P (stack_parm
) = TREE_READONLY (parm
);
3913 /* If this parameter was passed both in registers and in the stack,
3914 use the copy on the stack. */
3915 if (MUST_PASS_IN_STACK (promoted_mode
, passed_type
))
3918 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3919 /* If this parm was passed part in regs and part in memory,
3920 pretend it arrived entirely in memory
3921 by pushing the register-part onto the stack.
3923 In the special case of a DImode or DFmode that is split,
3924 we could put it together in a pseudoreg directly,
3925 but for now that's not worth bothering with. */
3929 int nregs
= FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, promoted_mode
,
3930 passed_type
, named_arg
);
3934 current_function_pretend_args_size
3935 = (((nregs
* UNITS_PER_WORD
) + (PARM_BOUNDARY
/ BITS_PER_UNIT
) - 1)
3936 / (PARM_BOUNDARY
/ BITS_PER_UNIT
)
3937 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
3941 /* Handle calls that pass values in multiple non-contiguous
3942 locations. The Irix 6 ABI has examples of this. */
3943 if (GET_CODE (entry_parm
) == PARALLEL
)
3944 emit_group_store (validize_mem (stack_parm
),
3947 move_block_from_reg (REGNO (entry_parm
),
3948 validize_mem (stack_parm
), nregs
,
3949 int_size_in_bytes (TREE_TYPE (parm
)));
3951 entry_parm
= stack_parm
;
3956 /* If we didn't decide this parm came in a register,
3957 by default it came on the stack. */
3958 if (entry_parm
== 0)
3959 entry_parm
= stack_parm
;
3961 /* Record permanently how this parm was passed. */
3963 DECL_INCOMING_RTL (parm
) = entry_parm
;
3965 /* If there is actually space on the stack for this parm,
3966 count it in stack_args_size; otherwise set stack_parm to 0
3967 to indicate there is no preallocated stack slot for the parm. */
3969 if (entry_parm
== stack_parm
3970 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3971 /* On some machines, even if a parm value arrives in a register
3972 there is still an (uninitialized) stack slot allocated for it.
3974 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3975 whether this parameter already has a stack slot allocated,
3976 because an arg block exists only if current_function_args_size
3977 is larger than some threshold, and we haven't calculated that
3978 yet. So, for now, we just assume that stack slots never exist
3980 || REG_PARM_STACK_SPACE (fndecl
) > 0
3984 stack_args_size
.constant
+= arg_size
.constant
;
3986 ADD_PARM_SIZE (stack_args_size
, arg_size
.var
);
3989 /* No stack slot was pushed for this parm. */
3992 /* Update info on where next arg arrives in registers. */
3994 FUNCTION_ARG_ADVANCE (args_so_far
, promoted_mode
,
3995 passed_type
, named_arg
);
3997 /* If this is our second time through, we are done with this parm. */
4001 /* If we can't trust the parm stack slot to be aligned enough
4002 for its ultimate type, don't use that slot after entry.
4003 We'll make another stack slot, if we need one. */
4005 int thisparm_boundary
4006 = FUNCTION_ARG_BOUNDARY (promoted_mode
, passed_type
);
4008 if (GET_MODE_ALIGNMENT (nominal_mode
) > thisparm_boundary
)
4012 /* If parm was passed in memory, and we need to convert it on entry,
4013 don't store it back in that same slot. */
4015 && nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
)
4019 /* Now adjust STACK_PARM to the mode and precise location
4020 where this parameter should live during execution,
4021 if we discover that it must live in the stack during execution.
4022 To make debuggers happier on big-endian machines, we store
4023 the value in the last bytes of the space available. */
4025 if (nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
4030 if (BYTES_BIG_ENDIAN
4031 && GET_MODE_SIZE (nominal_mode
) < UNITS_PER_WORD
)
4032 stack_offset
.constant
+= (GET_MODE_SIZE (passed_mode
)
4033 - GET_MODE_SIZE (nominal_mode
));
4035 offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
4036 if (offset_rtx
== const0_rtx
)
4037 stack_parm
= gen_rtx_MEM (nominal_mode
, internal_arg_pointer
);
4039 stack_parm
= gen_rtx_MEM (nominal_mode
,
4040 gen_rtx_PLUS (Pmode
,
4041 internal_arg_pointer
,
4044 /* If this is a memory ref that contains aggregate components,
4045 mark it as such for cse and loop optimize. */
4046 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
4051 /* We need this "use" info, because the gcc-register->stack-register
4052 converter in reg-stack.c needs to know which registers are active
4053 at the start of the function call. The actual parameter loading
4054 instructions are not always available then anymore, since they might
4055 have been optimised away. */
4057 if (GET_CODE (entry_parm
) == REG
&& !(hide_last_arg
&& last_named
))
4058 emit_insn (gen_rtx_USE (GET_MODE (entry_parm
), entry_parm
));
4061 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4062 in the mode in which it arrives.
4063 STACK_PARM is an RTX for a stack slot where the parameter can live
4064 during the function (in case we want to put it there).
4065 STACK_PARM is 0 if no stack slot was pushed for it.
4067 Now output code if necessary to convert ENTRY_PARM to
4068 the type in which this function declares it,
4069 and store that result in an appropriate place,
4070 which may be a pseudo reg, may be STACK_PARM,
4071 or may be a local stack slot if STACK_PARM is 0.
4073 Set DECL_RTL to that place. */
4075 if (nominal_mode
== BLKmode
|| GET_CODE (entry_parm
) == PARALLEL
)
4077 /* If a BLKmode arrives in registers, copy it to a stack slot.
4078 Handle calls that pass values in multiple non-contiguous
4079 locations. The Irix 6 ABI has examples of this. */
4080 if (GET_CODE (entry_parm
) == REG
4081 || GET_CODE (entry_parm
) == PARALLEL
)
4084 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm
)),
4087 /* Note that we will be storing an integral number of words.
4088 So we have to be careful to ensure that we allocate an
4089 integral number of words. We do this below in the
4090 assign_stack_local if space was not allocated in the argument
4091 list. If it was, this will not work if PARM_BOUNDARY is not
4092 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4093 if it becomes a problem. */
4095 if (stack_parm
== 0)
4098 = assign_stack_local (GET_MODE (entry_parm
),
4101 /* If this is a memory ref that contains aggregate
4102 components, mark it as such for cse and loop optimize. */
4103 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
4106 else if (PARM_BOUNDARY
% BITS_PER_WORD
!= 0)
4109 if (TREE_READONLY (parm
))
4110 RTX_UNCHANGING_P (stack_parm
) = 1;
4112 /* Handle calls that pass values in multiple non-contiguous
4113 locations. The Irix 6 ABI has examples of this. */
4114 if (GET_CODE (entry_parm
) == PARALLEL
)
4115 emit_group_store (validize_mem (stack_parm
), entry_parm
);
4117 move_block_from_reg (REGNO (entry_parm
),
4118 validize_mem (stack_parm
),
4119 size_stored
/ UNITS_PER_WORD
,
4120 int_size_in_bytes (TREE_TYPE (parm
)));
4122 DECL_RTL (parm
) = stack_parm
;
4124 else if (! ((obey_regdecls
&& ! DECL_REGISTER (parm
)
4125 && ! DECL_INLINE (fndecl
))
4126 /* layout_decl may set this. */
4127 || TREE_ADDRESSABLE (parm
)
4128 || TREE_SIDE_EFFECTS (parm
)
4129 /* If -ffloat-store specified, don't put explicit
4130 float variables into registers. */
4131 || (flag_float_store
4132 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
))
4133 /* Always assign pseudo to structure return or item passed
4134 by invisible reference. */
4135 || passed_pointer
|| parm
== function_result_decl
)
4137 /* Store the parm in a pseudoregister during the function, but we
4138 may need to do it in a wider mode. */
4140 register rtx parmreg
;
4141 int regno
, regnoi
= 0, regnor
= 0;
4143 unsignedp
= TREE_UNSIGNED (TREE_TYPE (parm
));
4145 promoted_nominal_mode
4146 = promote_mode (TREE_TYPE (parm
), nominal_mode
, &unsignedp
, 0);
4148 parmreg
= gen_reg_rtx (promoted_nominal_mode
);
4149 mark_user_reg (parmreg
);
4151 /* If this was an item that we received a pointer to, set DECL_RTL
4156 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type
)), parmreg
);
4157 MEM_IN_STRUCT_P (DECL_RTL (parm
)) = aggregate
;
4160 DECL_RTL (parm
) = parmreg
;
4162 /* Copy the value into the register. */
4163 if (nominal_mode
!= passed_mode
4164 || promoted_nominal_mode
!= promoted_mode
)
4166 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4167 mode, by the caller. We now have to convert it to
4168 NOMINAL_MODE, if different. However, PARMREG may be in
4169 a different mode than NOMINAL_MODE if it is being stored
4172 If ENTRY_PARM is a hard register, it might be in a register
4173 not valid for operating in its mode (e.g., an odd-numbered
4174 register for a DFmode). In that case, moves are the only
4175 thing valid, so we can't do a convert from there. This
4176 occurs when the calling sequence allow such misaligned
4179 In addition, the conversion may involve a call, which could
4180 clobber parameters which haven't been copied to pseudo
4181 registers yet. Therefore, we must first copy the parm to
4182 a pseudo reg here, and save the conversion until after all
4183 parameters have been moved. */
4185 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
4187 emit_move_insn (tempreg
, validize_mem (entry_parm
));
4189 push_to_sequence (conversion_insns
);
4190 tempreg
= convert_to_mode (nominal_mode
, tempreg
, unsignedp
);
4192 expand_assignment (parm
,
4193 make_tree (nominal_type
, tempreg
), 0, 0);
4194 conversion_insns
= get_insns ();
4199 emit_move_insn (parmreg
, validize_mem (entry_parm
));
4201 /* If we were passed a pointer but the actual value
4202 can safely live in a register, put it in one. */
4203 if (passed_pointer
&& TYPE_MODE (TREE_TYPE (parm
)) != BLKmode
4204 && ! ((obey_regdecls
&& ! DECL_REGISTER (parm
)
4205 && ! DECL_INLINE (fndecl
))
4206 /* layout_decl may set this. */
4207 || TREE_ADDRESSABLE (parm
)
4208 || TREE_SIDE_EFFECTS (parm
)
4209 /* If -ffloat-store specified, don't put explicit
4210 float variables into registers. */
4211 || (flag_float_store
4212 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
)))
4214 /* We can't use nominal_mode, because it will have been set to
4215 Pmode above. We must use the actual mode of the parm. */
4216 parmreg
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm
)));
4217 mark_user_reg (parmreg
);
4218 emit_move_insn (parmreg
, DECL_RTL (parm
));
4219 DECL_RTL (parm
) = parmreg
;
4220 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4224 #ifdef FUNCTION_ARG_CALLEE_COPIES
4225 /* If we are passed an arg by reference and it is our responsibility
4226 to make a copy, do it now.
4227 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4228 original argument, so we must recreate them in the call to
4229 FUNCTION_ARG_CALLEE_COPIES. */
4230 /* ??? Later add code to handle the case that if the argument isn't
4231 modified, don't do the copy. */
4233 else if (passed_pointer
4234 && FUNCTION_ARG_CALLEE_COPIES (args_so_far
,
4235 TYPE_MODE (DECL_ARG_TYPE (parm
)),
4236 DECL_ARG_TYPE (parm
),
4238 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm
)))
4241 tree type
= DECL_ARG_TYPE (parm
);
4243 /* This sequence may involve a library call perhaps clobbering
4244 registers that haven't been copied to pseudos yet. */
4246 push_to_sequence (conversion_insns
);
4248 if (TYPE_SIZE (type
) == 0
4249 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
4250 /* This is a variable sized object. */
4251 copy
= gen_rtx_MEM (BLKmode
,
4252 allocate_dynamic_stack_space
4253 (expr_size (parm
), NULL_RTX
,
4254 TYPE_ALIGN (type
)));
4256 copy
= assign_stack_temp (TYPE_MODE (type
),
4257 int_size_in_bytes (type
), 1);
4258 MEM_IN_STRUCT_P (copy
) = AGGREGATE_TYPE_P (type
);
4259 RTX_UNCHANGING_P (copy
) = TREE_READONLY (parm
);
4261 store_expr (parm
, copy
, 0);
4262 emit_move_insn (parmreg
, XEXP (copy
, 0));
4263 if (flag_check_memory_usage
)
4264 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
4265 XEXP (copy
, 0), ptr_mode
,
4266 GEN_INT (int_size_in_bytes (type
)),
4267 TYPE_MODE (sizetype
),
4268 GEN_INT (MEMORY_USE_RW
),
4269 TYPE_MODE (integer_type_node
));
4270 conversion_insns
= get_insns ();
4274 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4276 /* In any case, record the parm's desired stack location
4277 in case we later discover it must live in the stack.
4279 If it is a COMPLEX value, store the stack location for both
4282 if (GET_CODE (parmreg
) == CONCAT
)
4283 regno
= MAX (REGNO (XEXP (parmreg
, 0)), REGNO (XEXP (parmreg
, 1)));
4285 regno
= REGNO (parmreg
);
4287 if (regno
>= max_parm_reg
)
4290 int old_max_parm_reg
= max_parm_reg
;
4292 /* It's slow to expand this one register at a time,
4293 but it's also rare and we need max_parm_reg to be
4294 precisely correct. */
4295 max_parm_reg
= regno
+ 1;
4296 new = (rtx
*) savealloc (max_parm_reg
* sizeof (rtx
));
4297 bcopy ((char *) parm_reg_stack_loc
, (char *) new,
4298 old_max_parm_reg
* sizeof (rtx
));
4299 bzero ((char *) (new + old_max_parm_reg
),
4300 (max_parm_reg
- old_max_parm_reg
) * sizeof (rtx
));
4301 parm_reg_stack_loc
= new;
4304 if (GET_CODE (parmreg
) == CONCAT
)
4306 enum machine_mode submode
= GET_MODE (XEXP (parmreg
, 0));
4308 regnor
= REGNO (gen_realpart (submode
, parmreg
));
4309 regnoi
= REGNO (gen_imagpart (submode
, parmreg
));
4311 if (stack_parm
!= 0)
4313 parm_reg_stack_loc
[regnor
]
4314 = gen_realpart (submode
, stack_parm
);
4315 parm_reg_stack_loc
[regnoi
]
4316 = gen_imagpart (submode
, stack_parm
);
4320 parm_reg_stack_loc
[regnor
] = 0;
4321 parm_reg_stack_loc
[regnoi
] = 0;
4325 parm_reg_stack_loc
[REGNO (parmreg
)] = stack_parm
;
4327 /* Mark the register as eliminable if we did no conversion
4328 and it was copied from memory at a fixed offset,
4329 and the arg pointer was not copied to a pseudo-reg.
4330 If the arg pointer is a pseudo reg or the offset formed
4331 an invalid address, such memory-equivalences
4332 as we make here would screw up life analysis for it. */
4333 if (nominal_mode
== passed_mode
4336 && GET_CODE (stack_parm
) == MEM
4337 && stack_offset
.var
== 0
4338 && reg_mentioned_p (virtual_incoming_args_rtx
,
4339 XEXP (stack_parm
, 0)))
4341 rtx linsn
= get_last_insn ();
4344 /* Mark complex types separately. */
4345 if (GET_CODE (parmreg
) == CONCAT
)
4346 /* Scan backwards for the set of the real and
4348 for (sinsn
= linsn
; sinsn
!= 0;
4349 sinsn
= prev_nonnote_insn (sinsn
))
4351 set
= single_set (sinsn
);
4353 && SET_DEST (set
) == regno_reg_rtx
[regnoi
])
4355 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4356 parm_reg_stack_loc
[regnoi
],
4359 && SET_DEST (set
) == regno_reg_rtx
[regnor
])
4361 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4362 parm_reg_stack_loc
[regnor
],
4365 else if ((set
= single_set (linsn
)) != 0
4366 && SET_DEST (set
) == parmreg
)
4368 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4369 stack_parm
, REG_NOTES (linsn
));
4372 /* For pointer data type, suggest pointer register. */
4373 if (POINTER_TYPE_P (TREE_TYPE (parm
)))
4374 mark_reg_pointer (parmreg
,
4375 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
)))
4380 /* Value must be stored in the stack slot STACK_PARM
4381 during function execution. */
4383 if (promoted_mode
!= nominal_mode
)
4385 /* Conversion is required. */
4386 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
4388 emit_move_insn (tempreg
, validize_mem (entry_parm
));
4390 push_to_sequence (conversion_insns
);
4391 entry_parm
= convert_to_mode (nominal_mode
, tempreg
,
4392 TREE_UNSIGNED (TREE_TYPE (parm
)));
4395 /* ??? This may need a big-endian conversion on sparc64. */
4396 stack_parm
= change_address (stack_parm
, nominal_mode
,
4399 conversion_insns
= get_insns ();
4404 if (entry_parm
!= stack_parm
)
4406 if (stack_parm
== 0)
4409 = assign_stack_local (GET_MODE (entry_parm
),
4410 GET_MODE_SIZE (GET_MODE (entry_parm
)), 0);
4411 /* If this is a memory ref that contains aggregate components,
4412 mark it as such for cse and loop optimize. */
4413 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
4416 if (promoted_mode
!= nominal_mode
)
4418 push_to_sequence (conversion_insns
);
4419 emit_move_insn (validize_mem (stack_parm
),
4420 validize_mem (entry_parm
));
4421 conversion_insns
= get_insns ();
4425 emit_move_insn (validize_mem (stack_parm
),
4426 validize_mem (entry_parm
));
4428 if (flag_check_memory_usage
)
4430 push_to_sequence (conversion_insns
);
4431 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
4432 XEXP (stack_parm
, 0), ptr_mode
,
4433 GEN_INT (GET_MODE_SIZE (GET_MODE
4435 TYPE_MODE (sizetype
),
4436 GEN_INT (MEMORY_USE_RW
),
4437 TYPE_MODE (integer_type_node
));
4439 conversion_insns
= get_insns ();
4442 DECL_RTL (parm
) = stack_parm
;
4445 /* If this "parameter" was the place where we are receiving the
4446 function's incoming structure pointer, set up the result. */
4447 if (parm
== function_result_decl
)
4449 tree result
= DECL_RESULT (fndecl
);
4450 tree restype
= TREE_TYPE (result
);
4453 = gen_rtx_MEM (DECL_MODE (result
), DECL_RTL (parm
));
4455 MEM_IN_STRUCT_P (DECL_RTL (result
)) = AGGREGATE_TYPE_P (restype
);
4458 if (TREE_THIS_VOLATILE (parm
))
4459 MEM_VOLATILE_P (DECL_RTL (parm
)) = 1;
4460 if (TREE_READONLY (parm
))
4461 RTX_UNCHANGING_P (DECL_RTL (parm
)) = 1;
4464 /* Output all parameter conversion instructions (possibly including calls)
4465 now that all parameters have been copied out of hard registers. */
4466 emit_insns (conversion_insns
);
4468 last_parm_insn
= get_last_insn ();
4470 current_function_args_size
= stack_args_size
.constant
;
4472 /* Adjust function incoming argument size for alignment and
4475 #ifdef REG_PARM_STACK_SPACE
4476 #ifndef MAYBE_REG_PARM_STACK_SPACE
4477 current_function_args_size
= MAX (current_function_args_size
,
4478 REG_PARM_STACK_SPACE (fndecl
));
4482 #ifdef STACK_BOUNDARY
4483 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4485 current_function_args_size
4486 = ((current_function_args_size
+ STACK_BYTES
- 1)
4487 / STACK_BYTES
) * STACK_BYTES
;
4490 #ifdef ARGS_GROW_DOWNWARD
4491 current_function_arg_offset_rtx
4492 = (stack_args_size
.var
== 0 ? GEN_INT (-stack_args_size
.constant
)
4493 : expand_expr (size_binop (MINUS_EXPR
, stack_args_size
.var
,
4494 size_int (-stack_args_size
.constant
)),
4495 NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_BAD
));
4497 current_function_arg_offset_rtx
= ARGS_SIZE_RTX (stack_args_size
);
4500 /* See how many bytes, if any, of its args a function should try to pop
4503 current_function_pops_args
= RETURN_POPS_ARGS (fndecl
, TREE_TYPE (fndecl
),
4504 current_function_args_size
);
4506 /* For stdarg.h function, save info about
4507 regs and stack space used by the named args. */
4510 current_function_args_info
= args_so_far
;
4512 /* Set the rtx used for the function return value. Put this in its
4513 own variable so any optimizers that need this information don't have
4514 to include tree.h. Do this here so it gets done when an inlined
4515 function gets output. */
4517 current_function_return_rtx
= DECL_RTL (DECL_RESULT (fndecl
));
4520 /* Indicate whether REGNO is an incoming argument to the current function
4521 that was promoted to a wider mode. If so, return the RTX for the
4522 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4523 that REGNO is promoted from and whether the promotion was signed or
4526 #ifdef PROMOTE_FUNCTION_ARGS
4529 promoted_input_arg (regno
, pmode
, punsignedp
)
4531 enum machine_mode
*pmode
;
4536 for (arg
= DECL_ARGUMENTS (current_function_decl
); arg
;
4537 arg
= TREE_CHAIN (arg
))
4538 if (GET_CODE (DECL_INCOMING_RTL (arg
)) == REG
4539 && REGNO (DECL_INCOMING_RTL (arg
)) == regno
4540 && TYPE_MODE (DECL_ARG_TYPE (arg
)) == TYPE_MODE (TREE_TYPE (arg
)))
4542 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg
));
4543 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (arg
));
4545 mode
= promote_mode (TREE_TYPE (arg
), mode
, &unsignedp
, 1);
4546 if (mode
== GET_MODE (DECL_INCOMING_RTL (arg
))
4547 && mode
!= DECL_MODE (arg
))
4549 *pmode
= DECL_MODE (arg
);
4550 *punsignedp
= unsignedp
;
4551 return DECL_INCOMING_RTL (arg
);
4560 /* Compute the size and offset from the start of the stacked arguments for a
4561 parm passed in mode PASSED_MODE and with type TYPE.
4563 INITIAL_OFFSET_PTR points to the current offset into the stacked
4566 The starting offset and size for this parm are returned in *OFFSET_PTR
4567 and *ARG_SIZE_PTR, respectively.
4569 IN_REGS is non-zero if the argument will be passed in registers. It will
4570 never be set if REG_PARM_STACK_SPACE is not defined.
4572 FNDECL is the function in which the argument was defined.
4574 There are two types of rounding that are done. The first, controlled by
4575 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4576 list to be aligned to the specific boundary (in bits). This rounding
4577 affects the initial and starting offsets, but not the argument size.
4579 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4580 optionally rounds the size of the parm to PARM_BOUNDARY. The
4581 initial offset is not affected by this rounding, while the size always
4582 is and the starting offset may be. */
4584 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4585 initial_offset_ptr is positive because locate_and_pad_parm's
4586 callers pass in the total size of args so far as
4587 initial_offset_ptr. arg_size_ptr is always positive.*/
4590 locate_and_pad_parm (passed_mode
, type
, in_regs
, fndecl
,
4591 initial_offset_ptr
, offset_ptr
, arg_size_ptr
)
4592 enum machine_mode passed_mode
;
4596 struct args_size
*initial_offset_ptr
;
4597 struct args_size
*offset_ptr
;
4598 struct args_size
*arg_size_ptr
;
4601 = type
? size_in_bytes (type
) : size_int (GET_MODE_SIZE (passed_mode
));
4602 enum direction where_pad
= FUNCTION_ARG_PADDING (passed_mode
, type
);
4603 int boundary
= FUNCTION_ARG_BOUNDARY (passed_mode
, type
);
4605 #ifdef REG_PARM_STACK_SPACE
4606 /* If we have found a stack parm before we reach the end of the
4607 area reserved for registers, skip that area. */
4610 int reg_parm_stack_space
= 0;
4612 #ifdef MAYBE_REG_PARM_STACK_SPACE
4613 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
4615 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
4617 if (reg_parm_stack_space
> 0)
4619 if (initial_offset_ptr
->var
)
4621 initial_offset_ptr
->var
4622 = size_binop (MAX_EXPR
, ARGS_SIZE_TREE (*initial_offset_ptr
),
4623 size_int (reg_parm_stack_space
));
4624 initial_offset_ptr
->constant
= 0;
4626 else if (initial_offset_ptr
->constant
< reg_parm_stack_space
)
4627 initial_offset_ptr
->constant
= reg_parm_stack_space
;
4630 #endif /* REG_PARM_STACK_SPACE */
4632 arg_size_ptr
->var
= 0;
4633 arg_size_ptr
->constant
= 0;
4635 #ifdef ARGS_GROW_DOWNWARD
4636 if (initial_offset_ptr
->var
)
4638 offset_ptr
->constant
= 0;
4639 offset_ptr
->var
= size_binop (MINUS_EXPR
, integer_zero_node
,
4640 initial_offset_ptr
->var
);
4644 offset_ptr
->constant
= - initial_offset_ptr
->constant
;
4645 offset_ptr
->var
= 0;
4647 if (where_pad
!= none
4648 && (TREE_CODE (sizetree
) != INTEGER_CST
4649 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
4650 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4651 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
4652 if (where_pad
!= downward
)
4653 pad_to_arg_alignment (offset_ptr
, boundary
);
4654 if (initial_offset_ptr
->var
)
4656 arg_size_ptr
->var
= size_binop (MINUS_EXPR
,
4657 size_binop (MINUS_EXPR
,
4659 initial_offset_ptr
->var
),
4664 arg_size_ptr
->constant
= (- initial_offset_ptr
->constant
4665 - offset_ptr
->constant
);
4667 #else /* !ARGS_GROW_DOWNWARD */
4668 pad_to_arg_alignment (initial_offset_ptr
, boundary
);
4669 *offset_ptr
= *initial_offset_ptr
;
4671 #ifdef PUSH_ROUNDING
4672 if (passed_mode
!= BLKmode
)
4673 sizetree
= size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree
)));
4676 /* Pad_below needs the pre-rounded size to know how much to pad below
4677 so this must be done before rounding up. */
4678 if (where_pad
== downward
4679 /* However, BLKmode args passed in regs have their padding done elsewhere.
4680 The stack slot must be able to hold the entire register. */
4681 && !(in_regs
&& passed_mode
== BLKmode
))
4682 pad_below (offset_ptr
, passed_mode
, sizetree
);
4684 if (where_pad
!= none
4685 && (TREE_CODE (sizetree
) != INTEGER_CST
4686 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
4687 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4689 ADD_PARM_SIZE (*arg_size_ptr
, sizetree
);
4690 #endif /* ARGS_GROW_DOWNWARD */
4693 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4694 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4697 pad_to_arg_alignment (offset_ptr
, boundary
)
4698 struct args_size
*offset_ptr
;
4701 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
4703 if (boundary
> BITS_PER_UNIT
)
4705 if (offset_ptr
->var
)
4708 #ifdef ARGS_GROW_DOWNWARD
4713 (ARGS_SIZE_TREE (*offset_ptr
),
4714 boundary
/ BITS_PER_UNIT
);
4715 offset_ptr
->constant
= 0; /*?*/
4718 offset_ptr
->constant
=
4719 #ifdef ARGS_GROW_DOWNWARD
4720 FLOOR_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
4722 CEIL_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
4727 #ifndef ARGS_GROW_DOWNWARD
4729 pad_below (offset_ptr
, passed_mode
, sizetree
)
4730 struct args_size
*offset_ptr
;
4731 enum machine_mode passed_mode
;
4734 if (passed_mode
!= BLKmode
)
4736 if (GET_MODE_BITSIZE (passed_mode
) % PARM_BOUNDARY
)
4737 offset_ptr
->constant
4738 += (((GET_MODE_BITSIZE (passed_mode
) + PARM_BOUNDARY
- 1)
4739 / PARM_BOUNDARY
* PARM_BOUNDARY
/ BITS_PER_UNIT
)
4740 - GET_MODE_SIZE (passed_mode
));
4744 if (TREE_CODE (sizetree
) != INTEGER_CST
4745 || (TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)
4747 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4748 tree s2
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4750 ADD_PARM_SIZE (*offset_ptr
, s2
);
4751 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
4757 #ifdef ARGS_GROW_DOWNWARD
4759 round_down (value
, divisor
)
4763 return size_binop (MULT_EXPR
,
4764 size_binop (FLOOR_DIV_EXPR
, value
, size_int (divisor
)),
4765 size_int (divisor
));
4769 /* Walk the tree of blocks describing the binding levels within a function
4770 and warn about uninitialized variables.
4771 This is done after calling flow_analysis and before global_alloc
4772 clobbers the pseudo-regs to hard regs. */
4775 uninitialized_vars_warning (block
)
4778 register tree decl
, sub
;
4779 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
4781 if (TREE_CODE (decl
) == VAR_DECL
4782 /* These warnings are unreliable for and aggregates
4783 because assigning the fields one by one can fail to convince
4784 flow.c that the entire aggregate was initialized.
4785 Unions are troublesome because members may be shorter. */
4786 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl
))
4787 && DECL_RTL (decl
) != 0
4788 && GET_CODE (DECL_RTL (decl
)) == REG
4789 && regno_uninitialized (REGNO (DECL_RTL (decl
))))
4790 warning_with_decl (decl
,
4791 "`%s' might be used uninitialized in this function");
4792 if (TREE_CODE (decl
) == VAR_DECL
4793 && DECL_RTL (decl
) != 0
4794 && GET_CODE (DECL_RTL (decl
)) == REG
4795 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
4796 warning_with_decl (decl
,
4797 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4799 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
4800 uninitialized_vars_warning (sub
);
4803 /* Do the appropriate part of uninitialized_vars_warning
4804 but for arguments instead of local variables. */
4807 setjmp_args_warning ()
4810 for (decl
= DECL_ARGUMENTS (current_function_decl
);
4811 decl
; decl
= TREE_CHAIN (decl
))
4812 if (DECL_RTL (decl
) != 0
4813 && GET_CODE (DECL_RTL (decl
)) == REG
4814 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
4815 warning_with_decl (decl
, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4818 /* If this function call setjmp, put all vars into the stack
4819 unless they were declared `register'. */
4822 setjmp_protect (block
)
4825 register tree decl
, sub
;
4826 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
4827 if ((TREE_CODE (decl
) == VAR_DECL
4828 || TREE_CODE (decl
) == PARM_DECL
)
4829 && DECL_RTL (decl
) != 0
4830 && (GET_CODE (DECL_RTL (decl
)) == REG
4831 || (GET_CODE (DECL_RTL (decl
)) == MEM
4832 && GET_CODE (XEXP (DECL_RTL (decl
), 0)) == ADDRESSOF
))
4833 /* If this variable came from an inline function, it must be
4834 that it's life doesn't overlap the setjmp. If there was a
4835 setjmp in the function, it would already be in memory. We
4836 must exclude such variable because their DECL_RTL might be
4837 set to strange things such as virtual_stack_vars_rtx. */
4838 && ! DECL_FROM_INLINE (decl
)
4840 #ifdef NON_SAVING_SETJMP
4841 /* If longjmp doesn't restore the registers,
4842 don't put anything in them. */
4846 ! DECL_REGISTER (decl
)))
4847 put_var_into_stack (decl
);
4848 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
4849 setjmp_protect (sub
);
4852 /* Like the previous function, but for args instead of local variables. */
4855 setjmp_protect_args ()
4858 for (decl
= DECL_ARGUMENTS (current_function_decl
);
4859 decl
; decl
= TREE_CHAIN (decl
))
4860 if ((TREE_CODE (decl
) == VAR_DECL
4861 || TREE_CODE (decl
) == PARM_DECL
)
4862 && DECL_RTL (decl
) != 0
4863 && (GET_CODE (DECL_RTL (decl
)) == REG
4864 || (GET_CODE (DECL_RTL (decl
)) == MEM
4865 && GET_CODE (XEXP (DECL_RTL (decl
), 0)) == ADDRESSOF
))
4867 /* If longjmp doesn't restore the registers,
4868 don't put anything in them. */
4869 #ifdef NON_SAVING_SETJMP
4873 ! DECL_REGISTER (decl
)))
4874 put_var_into_stack (decl
);
4877 /* Return the context-pointer register corresponding to DECL,
4878 or 0 if it does not need one. */
4881 lookup_static_chain (decl
)
4884 tree context
= decl_function_context (decl
);
4888 || (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_NO_STATIC_CHAIN (decl
)))
4891 /* We treat inline_function_decl as an alias for the current function
4892 because that is the inline function whose vars, types, etc.
4893 are being merged into the current function.
4894 See expand_inline_function. */
4895 if (context
== current_function_decl
|| context
== inline_function_decl
)
4896 return virtual_stack_vars_rtx
;
4898 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
4899 if (TREE_PURPOSE (link
) == context
)
4900 return RTL_EXPR_RTL (TREE_VALUE (link
));
4905 /* Convert a stack slot address ADDR for variable VAR
4906 (from a containing function)
4907 into an address valid in this function (using a static chain). */
4910 fix_lexical_addr (addr
, var
)
4915 HOST_WIDE_INT displacement
;
4916 tree context
= decl_function_context (var
);
4917 struct function
*fp
;
4920 /* If this is the present function, we need not do anything. */
4921 if (context
== current_function_decl
|| context
== inline_function_decl
)
4924 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
4925 if (fp
->decl
== context
)
4931 if (GET_CODE (addr
) == ADDRESSOF
&& GET_CODE (XEXP (addr
, 0)) == MEM
)
4932 addr
= XEXP (XEXP (addr
, 0), 0);
4934 /* Decode given address as base reg plus displacement. */
4935 if (GET_CODE (addr
) == REG
)
4936 basereg
= addr
, displacement
= 0;
4937 else if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
4938 basereg
= XEXP (addr
, 0), displacement
= INTVAL (XEXP (addr
, 1));
4942 /* We accept vars reached via the containing function's
4943 incoming arg pointer and via its stack variables pointer. */
4944 if (basereg
== fp
->internal_arg_pointer
)
4946 /* If reached via arg pointer, get the arg pointer value
4947 out of that function's stack frame.
4949 There are two cases: If a separate ap is needed, allocate a
4950 slot in the outer function for it and dereference it that way.
4951 This is correct even if the real ap is actually a pseudo.
4952 Otherwise, just adjust the offset from the frame pointer to
4955 #ifdef NEED_SEPARATE_AP
4958 if (fp
->arg_pointer_save_area
== 0)
4959 fp
->arg_pointer_save_area
4960 = assign_outer_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0, fp
);
4962 addr
= fix_lexical_addr (XEXP (fp
->arg_pointer_save_area
, 0), var
);
4963 addr
= memory_address (Pmode
, addr
);
4965 base
= copy_to_reg (gen_rtx_MEM (Pmode
, addr
));
4967 displacement
+= (FIRST_PARM_OFFSET (context
) - STARTING_FRAME_OFFSET
);
4968 base
= lookup_static_chain (var
);
4972 else if (basereg
== virtual_stack_vars_rtx
)
4974 /* This is the same code as lookup_static_chain, duplicated here to
4975 avoid an extra call to decl_function_context. */
4978 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
4979 if (TREE_PURPOSE (link
) == context
)
4981 base
= RTL_EXPR_RTL (TREE_VALUE (link
));
4989 /* Use same offset, relative to appropriate static chain or argument
4991 return plus_constant (base
, displacement
);
4994 /* Return the address of the trampoline for entering nested fn FUNCTION.
4995 If necessary, allocate a trampoline (in the stack frame)
4996 and emit rtl to initialize its contents (at entry to this function). */
4999 trampoline_address (function
)
5005 struct function
*fp
;
5008 /* Find an existing trampoline and return it. */
5009 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
5010 if (TREE_PURPOSE (link
) == function
)
5012 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0));
5014 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
5015 for (link
= fp
->trampoline_list
; link
; link
= TREE_CHAIN (link
))
5016 if (TREE_PURPOSE (link
) == function
)
5018 tramp
= fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0),
5020 return round_trampoline_addr (tramp
);
5023 /* None exists; we must make one. */
5025 /* Find the `struct function' for the function containing FUNCTION. */
5027 fn_context
= decl_function_context (function
);
5028 if (fn_context
!= current_function_decl
5029 && fn_context
!= inline_function_decl
)
5030 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
5031 if (fp
->decl
== fn_context
)
5034 /* Allocate run-time space for this trampoline
5035 (usually in the defining function's stack frame). */
5036 #ifdef ALLOCATE_TRAMPOLINE
5037 tramp
= ALLOCATE_TRAMPOLINE (fp
);
5039 /* If rounding needed, allocate extra space
5040 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5041 #ifdef TRAMPOLINE_ALIGNMENT
5042 #define TRAMPOLINE_REAL_SIZE \
5043 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5045 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5048 tramp
= assign_outer_stack_local (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0, fp
);
5050 tramp
= assign_stack_local (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0);
5053 /* Record the trampoline for reuse and note it for later initialization
5054 by expand_function_end. */
5057 push_obstacks (fp
->function_maybepermanent_obstack
,
5058 fp
->function_maybepermanent_obstack
);
5059 rtlexp
= make_node (RTL_EXPR
);
5060 RTL_EXPR_RTL (rtlexp
) = tramp
;
5061 fp
->trampoline_list
= tree_cons (function
, rtlexp
, fp
->trampoline_list
);
5066 /* Make the RTL_EXPR node temporary, not momentary, so that the
5067 trampoline_list doesn't become garbage. */
5068 int momentary
= suspend_momentary ();
5069 rtlexp
= make_node (RTL_EXPR
);
5070 resume_momentary (momentary
);
5072 RTL_EXPR_RTL (rtlexp
) = tramp
;
5073 trampoline_list
= tree_cons (function
, rtlexp
, trampoline_list
);
5076 tramp
= fix_lexical_addr (XEXP (tramp
, 0), function
);
5077 return round_trampoline_addr (tramp
);
5080 /* Given a trampoline address,
5081 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5084 round_trampoline_addr (tramp
)
5087 #ifdef TRAMPOLINE_ALIGNMENT
5088 /* Round address up to desired boundary. */
5089 rtx temp
= gen_reg_rtx (Pmode
);
5090 temp
= expand_binop (Pmode
, add_optab
, tramp
,
5091 GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1),
5092 temp
, 0, OPTAB_LIB_WIDEN
);
5093 tramp
= expand_binop (Pmode
, and_optab
, temp
,
5094 GEN_INT (- TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
),
5095 temp
, 0, OPTAB_LIB_WIDEN
);
5100 /* The functions identify_blocks and reorder_blocks provide a way to
5101 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5102 duplicate portions of the RTL code. Call identify_blocks before
5103 changing the RTL, and call reorder_blocks after. */
5105 /* Put all this function's BLOCK nodes including those that are chained
5106 onto the first block into a vector, and return it.
5107 Also store in each NOTE for the beginning or end of a block
5108 the index of that block in the vector.
5109 The arguments are BLOCK, the chain of top-level blocks of the function,
5110 and INSNS, the insn chain of the function. */
5113 identify_blocks (block
, insns
)
5121 int next_block_number
= 1;
5122 int current_block_number
= 1;
5128 n_blocks
= all_blocks (block
, 0);
5129 block_vector
= (tree
*) xmalloc (n_blocks
* sizeof (tree
));
5130 block_stack
= (int *) alloca (n_blocks
* sizeof (int));
5132 all_blocks (block
, block_vector
);
5134 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5135 if (GET_CODE (insn
) == NOTE
)
5137 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
5139 block_stack
[depth
++] = current_block_number
;
5140 current_block_number
= next_block_number
;
5141 NOTE_BLOCK_NUMBER (insn
) = next_block_number
++;
5143 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
5145 NOTE_BLOCK_NUMBER (insn
) = current_block_number
;
5146 current_block_number
= block_stack
[--depth
];
5150 if (n_blocks
!= next_block_number
)
5153 return block_vector
;
5156 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5157 and a revised instruction chain, rebuild the tree structure
5158 of BLOCK nodes to correspond to the new order of RTL.
5159 The new block tree is inserted below TOP_BLOCK.
5160 Returns the current top-level block. */
5163 reorder_blocks (block_vector
, block
, insns
)
5168 tree current_block
= block
;
5171 if (block_vector
== 0)
5174 /* Prune the old trees away, so that it doesn't get in the way. */
5175 BLOCK_SUBBLOCKS (current_block
) = 0;
5176 BLOCK_CHAIN (current_block
) = 0;
5178 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5179 if (GET_CODE (insn
) == NOTE
)
5181 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
5183 tree block
= block_vector
[NOTE_BLOCK_NUMBER (insn
)];
5184 /* If we have seen this block before, copy it. */
5185 if (TREE_ASM_WRITTEN (block
))
5186 block
= copy_node (block
);
5187 BLOCK_SUBBLOCKS (block
) = 0;
5188 TREE_ASM_WRITTEN (block
) = 1;
5189 BLOCK_SUPERCONTEXT (block
) = current_block
;
5190 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (current_block
);
5191 BLOCK_SUBBLOCKS (current_block
) = block
;
5192 current_block
= block
;
5193 NOTE_SOURCE_FILE (insn
) = 0;
5195 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
5197 BLOCK_SUBBLOCKS (current_block
)
5198 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
5199 current_block
= BLOCK_SUPERCONTEXT (current_block
);
5200 NOTE_SOURCE_FILE (insn
) = 0;
5204 BLOCK_SUBBLOCKS (current_block
)
5205 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
5206 return current_block
;
5209 /* Reverse the order of elements in the chain T of blocks,
5210 and return the new head of the chain (old last element). */
5216 register tree prev
= 0, decl
, next
;
5217 for (decl
= t
; decl
; decl
= next
)
5219 next
= BLOCK_CHAIN (decl
);
5220 BLOCK_CHAIN (decl
) = prev
;
5226 /* Count the subblocks of the list starting with BLOCK, and list them
5227 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5231 all_blocks (block
, vector
)
5239 TREE_ASM_WRITTEN (block
) = 0;
5241 /* Record this block. */
5243 vector
[n_blocks
] = block
;
5247 /* Record the subblocks, and their subblocks... */
5248 n_blocks
+= all_blocks (BLOCK_SUBBLOCKS (block
),
5249 vector
? vector
+ n_blocks
: 0);
5250 block
= BLOCK_CHAIN (block
);
5256 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5257 and initialize static variables for generating RTL for the statements
5261 init_function_start (subr
, filename
, line
)
5266 init_stmt_for_function ();
5268 cse_not_expected
= ! optimize
;
5270 /* Caller save not needed yet. */
5271 caller_save_needed
= 0;
5273 /* No stack slots have been made yet. */
5274 stack_slot_list
= 0;
5276 /* There is no stack slot for handling nonlocal gotos. */
5277 nonlocal_goto_handler_slot
= 0;
5278 nonlocal_goto_stack_level
= 0;
5280 /* No labels have been declared for nonlocal use. */
5281 nonlocal_labels
= 0;
5283 /* No function calls so far in this function. */
5284 function_call_count
= 0;
5286 /* No parm regs have been allocated.
5287 (This is important for output_inline_function.) */
5288 max_parm_reg
= LAST_VIRTUAL_REGISTER
+ 1;
5290 /* Initialize the RTL mechanism. */
5293 /* Initialize the queue of pending postincrement and postdecrements,
5294 and some other info in expr.c. */
5297 /* We haven't done register allocation yet. */
5300 init_const_rtx_hash_table ();
5302 current_function_name
= (*decl_printable_name
) (subr
, 2);
5304 /* Nonzero if this is a nested function that uses a static chain. */
5306 current_function_needs_context
5307 = (decl_function_context (current_function_decl
) != 0
5308 && ! DECL_NO_STATIC_CHAIN (current_function_decl
));
5310 /* Set if a call to setjmp is seen. */
5311 current_function_calls_setjmp
= 0;
5313 /* Set if a call to longjmp is seen. */
5314 current_function_calls_longjmp
= 0;
5316 current_function_calls_alloca
= 0;
5317 current_function_has_nonlocal_label
= 0;
5318 current_function_has_nonlocal_goto
= 0;
5319 current_function_contains_functions
= 0;
5320 current_function_is_thunk
= 0;
5322 current_function_returns_pcc_struct
= 0;
5323 current_function_returns_struct
= 0;
5324 current_function_epilogue_delay_list
= 0;
5325 current_function_uses_const_pool
= 0;
5326 current_function_uses_pic_offset_table
= 0;
5327 current_function_cannot_inline
= 0;
5329 /* We have not yet needed to make a label to jump to for tail-recursion. */
5330 tail_recursion_label
= 0;
5332 /* We haven't had a need to make a save area for ap yet. */
5334 arg_pointer_save_area
= 0;
5336 /* No stack slots allocated yet. */
5339 /* No SAVE_EXPRs in this function yet. */
5342 /* No RTL_EXPRs in this function yet. */
5345 /* Set up to allocate temporaries. */
5348 /* Within function body, compute a type's size as soon it is laid out. */
5349 immediate_size_expand
++;
5351 /* We haven't made any trampolines for this function yet. */
5352 trampoline_list
= 0;
5354 init_pending_stack_adjust ();
5355 inhibit_defer_pop
= 0;
5357 current_function_outgoing_args_size
= 0;
5359 /* Prevent ever trying to delete the first instruction of a function.
5360 Also tell final how to output a linenum before the function prologue.
5361 Note linenums could be missing, e.g. when compiling a Java .class file. */
5363 emit_line_note (filename
, line
);
5365 /* Make sure first insn is a note even if we don't want linenums.
5366 This makes sure the first insn will never be deleted.
5367 Also, final expects a note to appear there. */
5368 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5370 /* Set flags used by final.c. */
5371 if (aggregate_value_p (DECL_RESULT (subr
)))
5373 #ifdef PCC_STATIC_STRUCT_RETURN
5374 current_function_returns_pcc_struct
= 1;
5376 current_function_returns_struct
= 1;
5379 /* Warn if this value is an aggregate type,
5380 regardless of which calling convention we are using for it. */
5381 if (warn_aggregate_return
5382 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
))))
5383 warning ("function returns an aggregate");
5385 current_function_returns_pointer
5386 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr
)));
5388 /* Indicate that we need to distinguish between the return value of the
5389 present function and the return value of a function being called. */
5390 rtx_equal_function_value_matters
= 1;
5392 /* Indicate that we have not instantiated virtual registers yet. */
5393 virtuals_instantiated
= 0;
5395 /* Indicate we have no need of a frame pointer yet. */
5396 frame_pointer_needed
= 0;
5398 /* By default assume not varargs or stdarg. */
5399 current_function_varargs
= 0;
5400 current_function_stdarg
= 0;
5403 /* Indicate that the current function uses extra args
5404 not explicitly mentioned in the argument list in any fashion. */
5409 current_function_varargs
= 1;
5412 /* Expand a call to __main at the beginning of a possible main function. */
5414 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5415 #undef HAS_INIT_SECTION
5416 #define HAS_INIT_SECTION
5420 expand_main_function ()
5422 #if !defined (HAS_INIT_SECTION)
5423 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, NAME__MAIN
), 0,
5425 #endif /* not HAS_INIT_SECTION */
5428 extern struct obstack permanent_obstack
;
5430 /* Start the RTL for a new function, and set variables used for
5432 SUBR is the FUNCTION_DECL node.
5433 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5434 the function's parameters, which must be run at any return statement. */
/* NOTE(review): this extract interleaves the original file's line numbers
   into the code and the gaps in that numbering (e.g. 5466 -> 5470) show
   that source lines -- braces, declarations such as `tree tem; int i;`,
   #else/#endif markers -- were dropped.  Do not compile as-is; recover the
   missing lines from the upstream function.c.  The visible logic: allocate
   a stack slot for the static chain, create cleanup/return labels, set up
   DECL_RTL for the return value (memory for aggregates, pseudo or hard reg
   for scalars), assign parms, build the context display for nested
   functions, and evaluate pending argument-type sizes.  */
5437 expand_function_start (subr
, parms_have_cleanups
)
5439 int parms_have_cleanups
;
5443 rtx last_ptr
= NULL_RTX
;
5445 /* Make sure volatile mem refs aren't considered
5446 valid operands of arithmetic insns. */
5447 init_recog_no_volatile ();
5449 /* If function gets a static chain arg, store it in the stack frame.
5450 Do this first, so it gets the first stack slot offset. */
5451 if (current_function_needs_context
)
5453 last_ptr
= assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
5455 /* Delay copying static chain if it is not a register to avoid
5456 conflicts with regs used for parameters. */
5457 if (! SMALL_REGISTER_CLASSES
5458 || GET_CODE (static_chain_incoming_rtx
) == REG
)
5459 emit_move_insn (last_ptr
, static_chain_incoming_rtx
);
5462 /* If the parameters of this function need cleaning up, get a label
5463 for the beginning of the code which executes those cleanups. This must
5464 be done before doing anything with return_label. */
5465 if (parms_have_cleanups
)
5466 cleanup_label
= gen_label_rtx ();
5470 /* Make the label for return statements to jump to, if this machine
5471 does not have a one-instruction return and uses an epilogue,
5472 or if it returns a structure, or if it has parm cleanups. */
5474 if (cleanup_label
== 0 && HAVE_return
5475 && ! current_function_returns_pcc_struct
5476 && ! (current_function_returns_struct
&& ! optimize
))
/* NOTE(review): when a one-insn return suffices, return_label stays 0
   in the original; the branch structure here is incomplete (missing
   `return_label = 0; else` lines) -- confirm against full source.  */
5479 return_label
= gen_label_rtx ();
5481 return_label
= gen_label_rtx ();
5484 /* Initialize rtx used to return the value. */
5485 /* Do this before assign_parms so that we copy the struct value address
5486 before any library calls that assign parms might generate. */
5488 /* Decide whether to return the value in memory or in a register. */
5489 if (aggregate_value_p (DECL_RESULT (subr
)))
5491 /* Returning something that won't go in a register. */
5492 register rtx value_address
= 0;
5494 #ifdef PCC_STATIC_STRUCT_RETURN
5495 if (current_function_returns_pcc_struct
)
5497 int size
= int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr
)));
5498 value_address
= assemble_static_space (size
);
5503 /* Expect to be passed the address of a place to store the value.
5504 If it is passed as an argument, assign_parms will take care of
5506 if (struct_value_incoming_rtx
)
5508 value_address
= gen_reg_rtx (Pmode
);
5509 emit_move_insn (value_address
, struct_value_incoming_rtx
);
5514 DECL_RTL (DECL_RESULT (subr
))
5515 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr
)), value_address
);
5516 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr
)))
5517 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
)));
5520 else if (DECL_MODE (DECL_RESULT (subr
)) == VOIDmode
)
5521 /* If return mode is void, this decl rtl should not be used. */
5522 DECL_RTL (DECL_RESULT (subr
)) = 0;
5523 else if (parms_have_cleanups
)
5525 /* If function will end with cleanup code for parms,
5526 compute the return values into a pseudo reg,
5527 which we will copy into the true return register
5528 after the cleanups are done. */
5530 enum machine_mode mode
= DECL_MODE (DECL_RESULT (subr
));
5532 #ifdef PROMOTE_FUNCTION_RETURN
5533 tree type
= TREE_TYPE (DECL_RESULT (subr
));
5534 int unsignedp
= TREE_UNSIGNED (type
);
5536 mode
= promote_mode (type
, mode
, &unsignedp
, 1);
5539 DECL_RTL (DECL_RESULT (subr
)) = gen_reg_rtx (mode
);
5542 /* Scalar, returned in a register. */
5544 #ifdef FUNCTION_OUTGOING_VALUE
5545 DECL_RTL (DECL_RESULT (subr
))
5546 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
/* NOTE(review): the #else separating FUNCTION_OUTGOING_VALUE from the
   FUNCTION_VALUE fallback was lost in extraction.  */
5548 DECL_RTL (DECL_RESULT (subr
))
5549 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
5552 /* Mark this reg as the function's return value. */
5553 if (GET_CODE (DECL_RTL (DECL_RESULT (subr
))) == REG
)
5555 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr
))) = 1;
5556 /* Needed because we may need to move this to memory
5557 in case it's a named return value whose address is taken. */
5558 DECL_REGISTER (DECL_RESULT (subr
)) = 1;
5562 /* Initialize rtx for parameters and local variables.
5563 In some cases this requires emitting insns. */
5565 assign_parms (subr
, 0);
5567 /* Copy the static chain now if it wasn't a register. The delay is to
5568 avoid conflicts with the parameter passing registers. */
5570 if (SMALL_REGISTER_CLASSES
&& current_function_needs_context
)
5571 if (GET_CODE (static_chain_incoming_rtx
) != REG
)
5572 emit_move_insn (last_ptr
, static_chain_incoming_rtx
);
5574 /* The following was moved from init_function_start.
5575 The move is supposed to make sdb output more accurate. */
5576 /* Indicate the beginning of the function body,
5577 as opposed to parm setup. */
5578 emit_note (NULL_PTR
, NOTE_INSN_FUNCTION_BEG
);
5580 /* If doing stupid allocation, mark parms as born here. */
5582 if (GET_CODE (get_last_insn ()) != NOTE
)
5583 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5584 parm_birth_insn
= get_last_insn ();
/* Force parameter pseudos to be considered live from function entry
   (stupid-allocation path; the guarding `if (obey_regdecls)` appears to
   have been dropped by extraction).  */
5588 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_parm_reg
; i
++)
5589 use_variable (regno_reg_rtx
[i
]);
5591 if (current_function_internal_arg_pointer
!= virtual_incoming_args_rtx
)
5592 use_variable (current_function_internal_arg_pointer
);
5595 context_display
= 0;
5596 if (current_function_needs_context
)
5598 /* Fetch static chain values for containing functions. */
5599 tem
= decl_function_context (current_function_decl
);
5600 /* If not doing stupid register allocation copy the static chain
5601 pointer into a pseudo. If we have small register classes, copy
5602 the value from memory if static_chain_incoming_rtx is a REG. If
5603 we do stupid register allocation, we use the stack address
5605 if (tem
&& ! obey_regdecls
)
5607 /* If the static chain originally came in a register, put it back
5608 there, then move it out in the next insn. The reason for
5609 this peculiar code is to satisfy function integration. */
5610 if (SMALL_REGISTER_CLASSES
5611 && GET_CODE (static_chain_incoming_rtx
) == REG
)
5612 emit_move_insn (static_chain_incoming_rtx
, last_ptr
);
5613 last_ptr
= copy_to_reg (static_chain_incoming_rtx
);
/* Build the "context display": one RTL_EXPR per enclosing function,
   recording where its frame pointer can be found at run time.  */
5618 tree rtlexp
= make_node (RTL_EXPR
);
5620 RTL_EXPR_RTL (rtlexp
) = last_ptr
;
5621 context_display
= tree_cons (tem
, rtlexp
, context_display
);
5622 tem
= decl_function_context (tem
);
5625 /* Chain thru stack frames, assuming pointer to next lexical frame
5626 is found at the place we always store it. */
5627 #ifdef FRAME_GROWS_DOWNWARD
5628 last_ptr
= plus_constant (last_ptr
, - GET_MODE_SIZE (Pmode
));
5630 last_ptr
= copy_to_reg (gen_rtx_MEM (Pmode
,
5631 memory_address (Pmode
, last_ptr
)));
5633 /* If we are not optimizing, ensure that we know that this
5634 piece of context is live over the entire function. */
5636 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, last_ptr
,
5641 /* After the display initializations is where the tail-recursion label
5642 should go, if we end up needing one. Ensure we have a NOTE here
5643 since some things (like trampolines) get placed before this. */
5644 tail_recursion_reentry
= emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5646 /* Evaluate now the sizes of any types declared among the arguments. */
5647 for (tem
= nreverse (get_pending_sizes ()); tem
; tem
= TREE_CHAIN (tem
))
5649 expand_expr (TREE_VALUE (tem
), const0_rtx
, VOIDmode
,
5650 EXPAND_MEMORY_USE_BAD
);
5651 /* Flush the queue in case this parameter declaration has
5656 /* Make sure there is a line number after the function entry setup code. */
5657 force_next_line_note ();
5660 /* Generate RTL for the end of the current function.
5661 FILENAME and LINE are the current position in the source file.
5663 It is up to language-specific callers to do cleanups for parameters--
5664 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
/* NOTE(review): extraction dropped lines throughout this function (gaps in
   the fused numbering, missing braces and local declarations such as
   `tree link; rtx seq; tree decl; int i; rtx tem; rtx label; rtx outgoing;
   rtx blktramp;`).  Recover the dropped lines from the upstream function.c
   before compiling.  The visible flow: setjmp protection, arg-pointer save,
   trampoline initialization, stack probing, unused-parm warnings, emitting
   the function-end note and return label, leftover cleanups, stack
   save/restore around alloca, copying the pseudo return value to the hard
   return register, structure-return address handling, optional `return`
   insn, and goto fixups.  */
5667 expand_function_end (filename
, line
, end_bindings
)
5675 #ifdef TRAMPOLINE_TEMPLATE
5676 static rtx initial_trampoline
;
5679 #ifdef NON_SAVING_SETJMP
5680 /* Don't put any variables in registers if we call setjmp
5681 on a machine that fails to restore the registers. */
5682 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
5684 if (DECL_INITIAL (current_function_decl
) != error_mark_node
)
5685 setjmp_protect (DECL_INITIAL (current_function_decl
));
5687 setjmp_protect_args ();
5691 /* Save the argument pointer if a save area was made for it. */
5692 if (arg_pointer_save_area
)
5694 rtx x
= gen_move_insn (arg_pointer_save_area
, virtual_incoming_args_rtx
);
5695 emit_insn_before (x
, tail_recursion_reentry
);
5698 /* Initialize any trampolines required by this function. */
5699 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
5701 tree function
= TREE_PURPOSE (link
);
5702 rtx context
= lookup_static_chain (function
);
5703 rtx tramp
= RTL_EXPR_RTL (TREE_VALUE (link
));
5704 #ifdef TRAMPOLINE_TEMPLATE
5709 #ifdef TRAMPOLINE_TEMPLATE
5710 /* First make sure this compilation has a template for
5711 initializing trampolines. */
5712 if (initial_trampoline
== 0)
/* Allocate the template on the permanent obstack so it survives
   across functions (the `initial_trampoline` assignment target line
   was dropped by extraction).  */
5714 end_temporary_allocation ();
5716 = gen_rtx_MEM (BLKmode
, assemble_trampoline_template ());
5717 resume_temporary_allocation ();
5721 /* Generate insns to initialize the trampoline. */
5723 tramp
= round_trampoline_addr (XEXP (tramp
, 0));
5724 #ifdef TRAMPOLINE_TEMPLATE
5725 blktramp
= change_address (initial_trampoline
, BLKmode
, tramp
);
5726 emit_block_move (blktramp
, initial_trampoline
,
5727 GEN_INT (TRAMPOLINE_SIZE
),
5728 TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
5730 INITIALIZE_TRAMPOLINE (tramp
, XEXP (DECL_RTL (function
), 0), context
);
5734 /* Put those insns at entry to the containing function (this one). */
5735 emit_insns_before (seq
, tail_recursion_reentry
);
5738 /* If we are doing stack checking and this function makes calls,
5739 do a stack probe at the start of the function to ensure we have enough
5740 space for another stack frame. */
5741 if (flag_stack_check
&& ! STACK_CHECK_BUILTIN
)
5745 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5746 if (GET_CODE (insn
) == CALL_INSN
)
5749 probe_stack_range (STACK_CHECK_PROTECT
,
5750 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE
));
5753 emit_insns_before (seq
, tail_recursion_reentry
);
5758 /* Warn about unused parms if extra warnings were specified. */
5759 if (warn_unused
&& extra_warnings
)
5763 for (decl
= DECL_ARGUMENTS (current_function_decl
);
5764 decl
; decl
= TREE_CHAIN (decl
))
5765 if (! TREE_USED (decl
) && TREE_CODE (decl
) == PARM_DECL
5766 && DECL_NAME (decl
) && ! DECL_ARTIFICIAL (decl
))
5767 warning_with_decl (decl
, "unused parameter `%s'");
5770 /* Delete handlers for nonlocal gotos if nothing uses them. */
5771 if (nonlocal_goto_handler_slot
!= 0 && !current_function_has_nonlocal_label
)
5774 /* End any sequences that failed to be closed due to syntax errors. */
5775 while (in_sequence_p ())
5778 /* Outside function body, can't compute type's actual size
5779 until next function's body starts. */
5780 immediate_size_expand
--;
5782 /* If doing stupid register allocation,
5783 mark register parms as dying here. */
5788 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_parm_reg
; i
++)
5789 use_variable (regno_reg_rtx
[i
]);
5791 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5793 for (tem
= save_expr_regs
; tem
; tem
= XEXP (tem
, 1))
5795 use_variable (XEXP (tem
, 0));
5796 use_variable_after (XEXP (tem
, 0), parm_birth_insn
);
5799 if (current_function_internal_arg_pointer
!= virtual_incoming_args_rtx
)
5800 use_variable (current_function_internal_arg_pointer
);
5803 clear_pending_stack_adjust ();
5804 do_pending_stack_adjust ();
5806 /* Mark the end of the function body.
5807 If control reaches this insn, the function can drop through
5808 without returning a value. */
5809 emit_note (NULL_PTR
, NOTE_INSN_FUNCTION_END
);
5811 /* Must mark the last line number note in the function, so that the test
5812 coverage code can avoid counting the last line twice. This just tells
5813 the code to ignore the immediately following line note, since there
5814 already exists a copy of this note somewhere above. This line number
5815 note is still needed for debugging though, so we can't delete it. */
5816 if (flag_test_coverage
)
5817 emit_note (NULL_PTR
, NOTE_REPEATED_LINE_NUMBER
);
5819 /* Output a linenumber for the end of the function.
5820 SDB depends on this. */
5821 emit_line_note_force (filename
, line
);
5823 /* Output the label for the actual return from the function,
5824 if one is expected. This happens either because a function epilogue
5825 is used instead of a return instruction, or because a return was done
5826 with a goto in order to run local cleanups, or because of pcc-style
5827 structure returning. */
5830 emit_label (return_label
);
5832 /* C++ uses this. */
5834 expand_end_bindings (0, 0, 0);
5836 /* Now handle any leftover exception regions that may have been
5837 created for the parameters. */
5839 rtx last
= get_last_insn ();
5842 expand_leftover_cleanups ();
5844 /* If the above emitted any code, may sure we jump around it. */
5845 if (last
!= get_last_insn ())
5847 label
= gen_label_rtx ();
5848 last
= emit_jump_insn_after (gen_jump (label
), last
);
5849 last
= emit_barrier_after (last
);
5854 /* If we had calls to alloca, and this machine needs
5855 an accurate stack pointer to exit the function,
5856 insert some code to save and restore the stack pointer. */
5857 #ifdef EXIT_IGNORE_STACK
5858 if (! EXIT_IGNORE_STACK
)
5860 if (current_function_calls_alloca
)
5864 emit_stack_save (SAVE_FUNCTION
, &tem
, parm_birth_insn
);
5865 emit_stack_restore (SAVE_FUNCTION
, tem
, NULL_RTX
);
5868 /* If scalar return value was computed in a pseudo-reg,
5869 copy that to the hard return register. */
5870 if (DECL_RTL (DECL_RESULT (current_function_decl
)) != 0
5871 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl
))) == REG
5872 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl
)))
5873 >= FIRST_PSEUDO_REGISTER
))
5875 rtx real_decl_result
;
5877 #ifdef FUNCTION_OUTGOING_VALUE
/* NOTE(review): the `real_decl_result` assignment target lines and the
   #else/#endif between the two macro branches were dropped here.  */
5879 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
5880 current_function_decl
);
5883 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
5884 current_function_decl
);
5886 REG_FUNCTION_VALUE_P (real_decl_result
) = 1;
5887 /* If this is a BLKmode structure being returned in registers, then use
5888 the mode computed in expand_return. */
5889 if (GET_MODE (real_decl_result
) == BLKmode
)
5890 PUT_MODE (real_decl_result
,
5891 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl
))));
5892 emit_move_insn (real_decl_result
,
5893 DECL_RTL (DECL_RESULT (current_function_decl
)));
5894 emit_insn (gen_rtx_USE (VOIDmode
, real_decl_result
));
5896 /* The delay slot scheduler assumes that current_function_return_rtx
5897 holds the hard register containing the return value, not a temporary
5899 current_function_return_rtx
= real_decl_result
;
5902 /* If returning a structure, arrange to return the address of the value
5903 in a place where debuggers expect to find it.
5905 If returning a structure PCC style,
5906 the caller also depends on this value.
5907 And current_function_returns_pcc_struct is not necessarily set. */
5908 if (current_function_returns_struct
5909 || current_function_returns_pcc_struct
)
5911 rtx value_address
= XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
5912 tree type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
5913 #ifdef FUNCTION_OUTGOING_VALUE
5915 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type
),
5916 current_function_decl
);
5919 = FUNCTION_VALUE (build_pointer_type (type
),
5920 current_function_decl
);
5923 /* Mark this as a function return value so integrate will delete the
5924 assignment and USE below when inlining this function. */
5925 REG_FUNCTION_VALUE_P (outgoing
) = 1;
5927 emit_move_insn (outgoing
, value_address
);
5928 use_variable (outgoing
);
5931 /* Output a return insn if we are using one.
5932 Otherwise, let the rtl chain end here, to drop through
5933 into the epilogue. */
5938 emit_jump_insn (gen_return ());
5943 /* Fix up any gotos that jumped out to the outermost
5944 binding level of the function.
5945 Must follow emitting RETURN_LABEL. */
5947 /* If you have any cleanups to do at this point,
5948 and they need to create temporary variables,
5949 then you will lose. */
5950 expand_fixups (get_insns ());
5953 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5955 static int *prologue
;
5956 static int *epilogue
;
5958 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5959 or a single insn). */
5961 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
/* Returns a 0-terminated-by-convention oballoc'd int array of INSN_UIDs.
   NOTE(review): the return type, `int *vec` declaration, the loop that
   fills the SEQUENCE elements, and the `return vec;` were dropped by
   extraction -- restore before compiling.  */
5963 record_insns (insns
)
5968 if (GET_CODE (insns
) == SEQUENCE
)
5970 int len
= XVECLEN (insns
, 0);
5971 vec
= (int *) oballoc ((len
+ 1) * sizeof (int));
5974 vec
[len
] = INSN_UID (XVECEXP (insns
, 0, len
));
5978 vec
= (int *) oballoc (2 * sizeof (int));
5979 vec
[0] = INSN_UID (insns
);
5985 /* Determine how many INSN_UIDs in VEC are part of INSN. */
/* Counts matches both when INSN wraps a SEQUENCE (delay slots) and when it
   is a single insn.  NOTE(review): the return type, local `int i, j, count`
   declarations, `count++` statements, and `return count;` lines were
   dropped by extraction -- restore from the upstream source.  */
5988 contains (insn
, vec
)
5994 if (GET_CODE (insn
) == INSN
5995 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
5998 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
5999 for (j
= 0; vec
[j
]; j
++)
6000 if (INSN_UID (XVECEXP (PATTERN (insn
), 0, i
)) == vec
[j
])
6006 for (j
= 0; vec
[j
]; j
++)
6007 if (INSN_UID (insn
) == vec
[j
])
6012 #endif /* HAVE_prologue || HAVE_epilogue */
6014 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6015 this into place with notes indicating where the prologue ends and where
6016 the epilogue begins. Update the basic block information when possible. */
/* NOTE(review): extraction dropped lines here too (missing braces, local
   declarations such as `rtx head, seq, tail, tem, first_use, last_use`,
   and several loop/condition lines) -- restore before compiling.  Visible
   flow: emit NOTE_INSN_PROLOGUE_END and the generated prologue after F,
   fold it into the first basic block, record its insns in `prologue`; then,
   unless the function already ends in a BARRIER, detach trailing USE insns,
   emit the epilogue and NOTE_INSN_EPILOGUE_BEG, re-insert the USEs before
   the return insn, fold into the last basic block, and record in
   `epilogue`.  */
6019 thread_prologue_and_epilogue_insns (f
)
6022 #ifdef HAVE_prologue
6027 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6028 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6029 emit_note_after (NOTE_INSN_PROLOGUE_END
, f
);
6030 seq
= gen_prologue ();
6031 head
= emit_insn_after (seq
, f
);
6033 /* Include the new prologue insns in the first block. Ignore them
6034 if they form a basic block unto themselves. */
6035 if (basic_block_head
&& n_basic_blocks
6036 && GET_CODE (basic_block_head
[0]) != CODE_LABEL
)
6037 basic_block_head
[0] = NEXT_INSN (f
);
6039 /* Retain a map of the prologue insns. */
6040 prologue
= record_insns (GET_CODE (seq
) == SEQUENCE
? seq
: head
);
6046 #ifdef HAVE_epilogue
6049 rtx insn
= get_last_insn ();
6050 rtx prev
= prev_nonnote_insn (insn
);
6052 /* If we end with a BARRIER, we don't need an epilogue. */
6053 if (! (prev
&& GET_CODE (prev
) == BARRIER
))
6059 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6060 epilogue insns, the USE insns at the end of a function,
6061 the jump insn that returns, and then a BARRIER. */
6063 /* Move the USE insns at the end of a function onto a list. */
6065 && GET_CODE (prev
) == INSN
6066 && GET_CODE (PATTERN (prev
)) == USE
)
6069 prev
= prev_nonnote_insn (prev
);
/* Unlink TEM from the insn chain and push it on the saved-USE list.  */
6071 NEXT_INSN (PREV_INSN (tem
)) = NEXT_INSN (tem
);
6072 PREV_INSN (NEXT_INSN (tem
)) = PREV_INSN (tem
);
6075 NEXT_INSN (tem
) = first_use
;
6076 PREV_INSN (first_use
) = tem
;
6083 emit_barrier_after (insn
);
6085 seq
= gen_epilogue ();
6086 tail
= emit_jump_insn_after (seq
, insn
);
6088 /* Insert the USE insns immediately before the return insn, which
6089 must be the first instruction before the final barrier. */
6092 tem
= prev_nonnote_insn (get_last_insn ());
6093 NEXT_INSN (PREV_INSN (tem
)) = first_use
;
6094 PREV_INSN (first_use
) = PREV_INSN (tem
);
6095 PREV_INSN (tem
) = last_use
;
6096 NEXT_INSN (last_use
) = tem
;
6099 emit_note_after (NOTE_INSN_EPILOGUE_BEG
, insn
);
6101 /* Include the new epilogue insns in the last block. Ignore
6102 them if they form a basic block unto themselves. */
6103 if (basic_block_end
&& n_basic_blocks
6104 && GET_CODE (basic_block_end
[n_basic_blocks
- 1]) != JUMP_INSN
)
6105 basic_block_end
[n_basic_blocks
- 1] = tail
;
6107 /* Retain a map of the epilogue insns. */
6108 epilogue
= record_insns (GET_CODE (seq
) == SEQUENCE
? seq
: tail
);
6116 /* Reposition the prologue-end and epilogue-begin notes after instruction
6117 scheduling and delayed branch scheduling. */
/* NOTE(review): extraction dropped lines (missing braces, `int len`,
   `rtx next, prev` declarations, the guarding `if (prologue)` /
   `if (epilogue)` conditions, and `break` statements).  Visible flow:
   count the recorded prologue UIDs, scan forward from F decrementing the
   count via contains() until past the last prologue insn, then move the
   NOTE_INSN_PROLOGUE_END note just after it; symmetrically, scan backward
   from the last insn and move NOTE_INSN_EPILOGUE_BEG just before the
   first epilogue insn.  */
6120 reposition_prologue_and_epilogue_notes (f
)
6123 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6124 /* Reposition the prologue and epilogue notes. */
6132 register rtx insn
, note
= 0;
6134 /* Scan from the beginning until we reach the last prologue insn.
6135 We apparently can't depend on basic_block_{head,end} after
6137 for (len
= 0; prologue
[len
]; len
++)
6139 for (insn
= f
; len
&& insn
; insn
= NEXT_INSN (insn
))
6141 if (GET_CODE (insn
) == NOTE
)
6143 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_PROLOGUE_END
)
6146 else if ((len
-= contains (insn
, prologue
)) == 0)
6148 /* Find the prologue-end note if we haven't already, and
6149 move it to just after the last prologue insn. */
6152 for (note
= insn
; (note
= NEXT_INSN (note
));)
6153 if (GET_CODE (note
) == NOTE
6154 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_PROLOGUE_END
)
6157 next
= NEXT_INSN (note
);
6158 prev
= PREV_INSN (note
);
6160 NEXT_INSN (prev
) = next
;
6162 PREV_INSN (next
) = prev
;
6163 add_insn_after (note
, insn
);
6170 register rtx insn
, note
= 0;
6172 /* Scan from the end until we reach the first epilogue insn.
6173 We apparently can't depend on basic_block_{head,end} after
6175 for (len
= 0; epilogue
[len
]; len
++)
6177 for (insn
= get_last_insn (); len
&& insn
; insn
= PREV_INSN (insn
))
6179 if (GET_CODE (insn
) == NOTE
)
6181 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EPILOGUE_BEG
)
6184 else if ((len
-= contains (insn
, epilogue
)) == 0)
6186 /* Find the epilogue-begin note if we haven't already, and
6187 move it to just before the first epilogue insn. */
6190 for (note
= insn
; (note
= PREV_INSN (note
));)
6191 if (GET_CODE (note
) == NOTE
6192 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_EPILOGUE_BEG
)
6195 next
= NEXT_INSN (note
);
6196 prev
= PREV_INSN (note
);
6198 NEXT_INSN (prev
) = next
;
6200 PREV_INSN (next
) = prev
;
6201 add_insn_after (note
, PREV_INSN (insn
));
6206 #endif /* HAVE_prologue or HAVE_epilogue */