1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
48 #include "insn-flags.h"
50 #include "insn-codes.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
56 #include "basic-block.h"
60 #ifndef TRAMPOLINE_ALIGNMENT
61 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
64 /* Some systems use __main in a way incompatible with its use in gcc, in these
65 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
66 give the same symbol without quotes for an alternative entry point. You
67 must define both, or neither. */
69 #define NAME__MAIN "__main"
70 #define SYMBOL__MAIN __main
73 /* Round a value to the lowest integer less than it that is a multiple of
74 the required alignment. Avoid using division in case the value is
75 negative. Assume the alignment is a power of two. */
76 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
78 /* Similar, but round to the next highest integer that meets the
79 required alignment. */
80 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
82 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
83 during rtl generation. If they are different register numbers, this is
84 always true. It may also be true if
85 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
86 generation. See fix_lexical_addr for details. */
88 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
89 #define NEED_SEPARATE_AP
92 /* Number of bytes of args popped by function being compiled on its return.
93 Zero if no bytes are to be popped.
94 May affect compilation of return insn or of function epilogue. */
96 int current_function_pops_args
;
98 /* Nonzero if function being compiled needs to be given an address
99 where the value should be stored. */
101 int current_function_returns_struct
;
103 /* Nonzero if function being compiled needs to
104 return the address of where it has put a structure value. */
106 int current_function_returns_pcc_struct
;
108 /* Nonzero if function being compiled needs to be passed a static chain. */
110 int current_function_needs_context
;
112 /* Nonzero if function being compiled can call setjmp. */
114 int current_function_calls_setjmp
;
116 /* Nonzero if function being compiled can call longjmp. */
118 int current_function_calls_longjmp
;
120 /* Nonzero if function being compiled receives nonlocal gotos
121 from nested functions. */
123 int current_function_has_nonlocal_label
;
125 /* Nonzero if function being compiled has nonlocal gotos to parent
126 function. */
128 int current_function_has_nonlocal_goto
;
130 /* Nonzero if this function has a computed goto.
132 It is computed during find_basic_blocks or during stupid life
133 analysis. */
135 int current_function_has_computed_jump
;
137 /* Nonzero if function being compiled contains nested functions. */
139 int current_function_contains_functions
;
141 /* Nonzero if the current function is a thunk (a lightweight function that
142 just adjusts one of its arguments and forwards to another function), so
143 we should try to cut corners where we can. */
144 int current_function_is_thunk
;
146 /* Nonzero if function being compiled can call alloca,
147 either as a subroutine or builtin. */
149 int current_function_calls_alloca
;
151 /* Nonzero if the current function returns a pointer type */
153 int current_function_returns_pointer
;
155 /* If some insns can be deferred to the delay slots of the epilogue, the
156 delay list for them is recorded here. */
158 rtx current_function_epilogue_delay_list
;
160 /* If function's args have a fixed size, this is that size, in bytes.
162 May affect compilation of return insn or of function epilogue. */
164 int current_function_args_size
;
166 /* # bytes the prologue should push and pretend that the caller pushed them.
167 The prologue must do this, but only if parms can be passed in registers. */
169 int current_function_pretend_args_size
;
171 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
172 defined, the needed space is pushed by the prologue. */
174 int current_function_outgoing_args_size
;
176 /* This is the offset from the arg pointer to the place where the first
177 anonymous arg can be found, if there is one. */
179 rtx current_function_arg_offset_rtx
;
181 /* Nonzero if current function uses varargs.h or equivalent.
182 Zero for functions that use stdarg.h. */
184 int current_function_varargs
;
186 /* Nonzero if current function uses stdarg.h or equivalent.
187 Zero for functions that use varargs.h. */
189 int current_function_stdarg
;
191 /* Quantities of various kinds of registers
192 used for the current function's args. */
194 CUMULATIVE_ARGS current_function_args_info
;
196 /* Name of function now being compiled. */
198 char *current_function_name
;
200 /* If non-zero, an RTL expression for the location at which the current
201 function returns its result. If the current function returns its
202 result in a register, current_function_return_rtx will always be
203 the hard register containing the result. */
205 rtx current_function_return_rtx
;
207 /* Nonzero if the current function uses the constant pool. */
209 int current_function_uses_const_pool
;
211 /* Nonzero if the current function uses pic_offset_table_rtx. */
212 int current_function_uses_pic_offset_table
;
214 /* The arg pointer hard register, or the pseudo into which it was copied. */
215 rtx current_function_internal_arg_pointer
;
217 /* Language-specific reason why the current function cannot be made inline. */
218 char *current_function_cannot_inline
;
220 /* The FUNCTION_DECL for an inline function currently being expanded. */
221 tree inline_function_decl
;
223 /* Number of function calls seen so far in current function. */
225 int function_call_count
;
227 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
228 (labels to which there can be nonlocal gotos from nested functions)
229 in the current function. */
231 tree nonlocal_labels
;
233 /* RTX for stack slot that holds the current handler for nonlocal gotos.
234 Zero when function does not have nonlocal labels. */
236 rtx nonlocal_goto_handler_slot
;
238 /* RTX for stack slot that holds the stack pointer value to restore
239 for a nonlocal goto.
240 Zero when function does not have nonlocal labels. */
242 rtx nonlocal_goto_stack_level
;
244 /* Label that will go on parm cleanup code, if any.
245 Jumping to this label runs cleanup code for parameters, if
246 such code must be run. Following this code is the logical return label. */
250 /* Label that will go on function epilogue.
251 Jumping to this label serves as a "return" instruction
252 on machines which require execution of the epilogue on all returns. */
256 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
257 So we can mark them all live at the end of the function, if nonopt. */
260 /* List (chain of EXPR_LISTs) of all stack slots in this function.
261 Made for the sake of unshare_all_rtl. */
264 /* Chain of all RTL_EXPRs that have insns in them. */
267 /* Label to jump back to for tail recursion, or 0 if we have
268 not yet needed one for this function. */
269 rtx tail_recursion_label
;
271 /* Place after which to insert the tail_recursion_label if we need one. */
272 rtx tail_recursion_reentry
;
274 /* Location at which to save the argument pointer if it will need to be
275 referenced. There are two cases where this is done: if nonlocal gotos
276 exist, or if vars stored at an offset from the argument pointer will be
277 needed by inner routines. */
279 rtx arg_pointer_save_area
;
281 /* Offset to end of allocated area of stack frame.
282 If stack grows down, this is the address of the last stack slot allocated.
283 If stack grows up, this is the address for the next slot. */
284 HOST_WIDE_INT frame_offset
;
286 /* List (chain of TREE_LISTs) of static chains for containing functions.
287 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
288 in an RTL_EXPR in the TREE_VALUE. */
289 static tree context_display
;
291 /* List (chain of TREE_LISTs) of trampolines for nested functions.
292 The trampoline sets up the static chain and jumps to the function.
293 We supply the trampoline's address when the function's address is requested.
295 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
296 in an RTL_EXPR in the TREE_VALUE. */
297 static tree trampoline_list
;
299 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
300 static rtx parm_birth_insn
;
303 /* Nonzero if a stack slot has been generated whose address is not
304 actually valid. It means that the generated rtl must all be scanned
305 to detect and correct the invalid addresses where they occur. */
306 static int invalid_stack_slot
;
309 /* Last insn of those whose job was to put parms into their nominal homes. */
310 static rtx last_parm_insn
;
312 /* 1 + last pseudo register number possibly used for loading a copy
313 of a parameter of this function. */
316 /* Vector indexed by REGNO, containing location on stack in which
317 to put the parm which is nominally in pseudo register REGNO,
318 if we discover that that parm must go in the stack. The highest
319 element in this vector is one less than MAX_PARM_REG, above. */
320 rtx
*parm_reg_stack_loc
;
322 /* Nonzero once virtual register instantiation has been done.
323 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
324 static int virtuals_instantiated
;
326 /* These variables hold pointers to functions to
327 save and restore machine-specific data,
328 in push_function_context and pop_function_context. */
329 void (*save_machine_status
) PROTO((struct function
*));
330 void (*restore_machine_status
) PROTO((struct function
*));
332 /* Nonzero if we need to distinguish between the return value of this function
333 and the return value of a function called by this function. This helps
334 integrate.c. */
336 extern int rtx_equal_function_value_matters
;
337 extern tree sequence_rtl_expr
;
339 /* In order to evaluate some expressions, such as function calls returning
340 structures in memory, we need to temporarily allocate stack locations.
341 We record each allocated temporary in the following structure.
343 Associated with each temporary slot is a nesting level. When we pop up
344 one level, all temporaries associated with the previous level are freed.
345 Normally, all temporaries are freed after the execution of the statement
346 in which they were created. However, if we are inside a ({...}) grouping,
347 the result may be in a temporary and hence must be preserved. If the
348 result could be in a temporary, we preserve it if we can determine which
349 one it is in. If we cannot determine which temporary may contain the
350 result, all temporaries are preserved. A temporary is preserved by
351 pretending it was allocated at the previous nesting level.
353 Automatic variables are also assigned temporary slots, at the nesting
354 level where they are defined. They are marked a "kept" so that
355 free_temp_slots will not free them. */
359 /* Points to next temporary slot. */
360 struct temp_slot
*next
;
361 /* The rtx used to reference the slot. */
363 /* The rtx used to represent the address if not the address of the
364 slot above. May be an EXPR_LIST if multiple addresses exist. */
366 /* The size, in units, of the slot. */
368 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
370 /* Non-zero if this temporary is currently in use. */
372 /* Non-zero if this temporary has its address taken. */
374 /* Nesting level at which this slot is being used. */
376 /* Non-zero if this should survive a call to free_temp_slots. */
378 /* The offset of the slot from the frame_pointer, including extra space
379 for alignment. This info is for combine_temp_slots. */
380 HOST_WIDE_INT base_offset
;
381 /* The size of the slot, including extra space for alignment. This
382 info is for combine_temp_slots. */
383 HOST_WIDE_INT full_size
;
386 /* List of all temporaries allocated, both available and in use. */
388 struct temp_slot
*temp_slots
;
390 /* Current nesting level for temporaries. */
394 /* Current nesting level for variables in a block. */
396 int var_temp_slot_level
;
398 /* When temporaries are created by TARGET_EXPRs, they are created at
399 this level of temp_slot_level, so that they can remain allocated
400 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
401 of TARGET_EXPRs. */
402 int target_temp_slot_level
;
404 /* This structure is used to record MEMs or pseudos used to replace VAR, any
405 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
406 maintain this list in case two operands of an insn were required to match;
407 in that case we must ensure we use the same replacement. */
409 struct fixup_replacement
413 struct fixup_replacement
*next
;
416 /* Forward declarations. */
418 static rtx assign_outer_stack_local
PROTO ((enum machine_mode
, HOST_WIDE_INT
,
419 int, struct function
*));
420 static struct temp_slot
*find_temp_slot_from_address
PROTO((rtx
));
421 static void put_reg_into_stack
PROTO((struct function
*, rtx
, tree
,
422 enum machine_mode
, enum machine_mode
,
424 static void fixup_var_refs
PROTO((rtx
, enum machine_mode
, int));
425 static struct fixup_replacement
426 *find_fixup_replacement
PROTO((struct fixup_replacement
**, rtx
));
427 static void fixup_var_refs_insns
PROTO((rtx
, enum machine_mode
, int,
429 static void fixup_var_refs_1
PROTO((rtx
, enum machine_mode
, rtx
*, rtx
,
430 struct fixup_replacement
**));
431 static rtx fixup_memory_subreg
PROTO((rtx
, rtx
, int));
432 static rtx walk_fixup_memory_subreg
PROTO((rtx
, rtx
, int));
433 static rtx fixup_stack_1
PROTO((rtx
, rtx
));
434 static void optimize_bit_field
PROTO((rtx
, rtx
, rtx
*));
435 static void instantiate_decls
PROTO((tree
, int));
436 static void instantiate_decls_1
PROTO((tree
, int));
437 static void instantiate_decl
PROTO((rtx
, int, int));
438 static int instantiate_virtual_regs_1
PROTO((rtx
*, rtx
, int));
439 static void delete_handlers
PROTO((void));
440 static void pad_to_arg_alignment
PROTO((struct args_size
*, int));
441 #ifndef ARGS_GROW_DOWNWARD
442 static void pad_below
PROTO((struct args_size
*, enum machine_mode
,
445 #ifdef ARGS_GROW_DOWNWARD
446 static tree round_down
PROTO((tree
, int));
448 static rtx round_trampoline_addr
PROTO((rtx
));
449 static tree blocks_nreverse
PROTO((tree
));
450 static int all_blocks
PROTO((tree
, tree
*));
451 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
452 static int *record_insns
PROTO((rtx
));
453 static int contains
PROTO((rtx
, int *));
454 #endif /* HAVE_prologue || HAVE_epilogue */
455 static void put_addressof_into_stack
PROTO((rtx
));
456 static void purge_addressof_1
PROTO((rtx
*, rtx
, int));
458 /* Pointer to chain of `struct function' for containing functions. */
459 struct function
*outer_function_chain
;
461 /* Given a function decl for a containing function,
462 return the `struct function' for it. */
465 find_function_data (decl
)
470 for (p
= outer_function_chain
; p
; p
= p
->next
)
477 /* Save the current context for compilation of a nested function.
478 This is called from language-specific code.
479 The caller is responsible for saving any language-specific status,
480 since this function knows only about language-independent variables. */
483 push_function_context_to (context
)
486 struct function
*p
= (struct function
*) xmalloc (sizeof (struct function
));
488 p
->next
= outer_function_chain
;
489 outer_function_chain
= p
;
491 p
->name
= current_function_name
;
492 p
->decl
= current_function_decl
;
493 p
->pops_args
= current_function_pops_args
;
494 p
->returns_struct
= current_function_returns_struct
;
495 p
->returns_pcc_struct
= current_function_returns_pcc_struct
;
496 p
->returns_pointer
= current_function_returns_pointer
;
497 p
->needs_context
= current_function_needs_context
;
498 p
->calls_setjmp
= current_function_calls_setjmp
;
499 p
->calls_longjmp
= current_function_calls_longjmp
;
500 p
->calls_alloca
= current_function_calls_alloca
;
501 p
->has_nonlocal_label
= current_function_has_nonlocal_label
;
502 p
->has_nonlocal_goto
= current_function_has_nonlocal_goto
;
503 p
->contains_functions
= current_function_contains_functions
;
504 p
->is_thunk
= current_function_is_thunk
;
505 p
->args_size
= current_function_args_size
;
506 p
->pretend_args_size
= current_function_pretend_args_size
;
507 p
->arg_offset_rtx
= current_function_arg_offset_rtx
;
508 p
->varargs
= current_function_varargs
;
509 p
->stdarg
= current_function_stdarg
;
510 p
->uses_const_pool
= current_function_uses_const_pool
;
511 p
->uses_pic_offset_table
= current_function_uses_pic_offset_table
;
512 p
->internal_arg_pointer
= current_function_internal_arg_pointer
;
513 p
->cannot_inline
= current_function_cannot_inline
;
514 p
->max_parm_reg
= max_parm_reg
;
515 p
->parm_reg_stack_loc
= parm_reg_stack_loc
;
516 p
->outgoing_args_size
= current_function_outgoing_args_size
;
517 p
->return_rtx
= current_function_return_rtx
;
518 p
->nonlocal_goto_handler_slot
= nonlocal_goto_handler_slot
;
519 p
->nonlocal_goto_stack_level
= nonlocal_goto_stack_level
;
520 p
->nonlocal_labels
= nonlocal_labels
;
521 p
->cleanup_label
= cleanup_label
;
522 p
->return_label
= return_label
;
523 p
->save_expr_regs
= save_expr_regs
;
524 p
->stack_slot_list
= stack_slot_list
;
525 p
->parm_birth_insn
= parm_birth_insn
;
526 p
->frame_offset
= frame_offset
;
527 p
->tail_recursion_label
= tail_recursion_label
;
528 p
->tail_recursion_reentry
= tail_recursion_reentry
;
529 p
->arg_pointer_save_area
= arg_pointer_save_area
;
530 p
->rtl_expr_chain
= rtl_expr_chain
;
531 p
->last_parm_insn
= last_parm_insn
;
532 p
->context_display
= context_display
;
533 p
->trampoline_list
= trampoline_list
;
534 p
->function_call_count
= function_call_count
;
535 p
->temp_slots
= temp_slots
;
536 p
->temp_slot_level
= temp_slot_level
;
537 p
->target_temp_slot_level
= target_temp_slot_level
;
538 p
->var_temp_slot_level
= var_temp_slot_level
;
539 p
->fixup_var_refs_queue
= 0;
540 p
->epilogue_delay_list
= current_function_epilogue_delay_list
;
541 p
->args_info
= current_function_args_info
;
543 save_tree_status (p
, context
);
544 save_storage_status (p
);
545 save_emit_status (p
);
546 save_expr_status (p
);
547 save_stmt_status (p
);
548 save_varasm_status (p
, context
);
549 if (save_machine_status
)
550 (*save_machine_status
) (p
);
554 push_function_context ()
556 push_function_context_to (current_function_decl
);
559 /* Restore the last saved context, at the end of a nested function.
560 This function is called from language-specific code. */
563 pop_function_context_from (context
)
566 struct function
*p
= outer_function_chain
;
567 struct var_refs_queue
*queue
;
569 outer_function_chain
= p
->next
;
571 current_function_contains_functions
572 = p
->contains_functions
|| p
->inline_obstacks
573 || context
== current_function_decl
;
574 current_function_name
= p
->name
;
575 current_function_decl
= p
->decl
;
576 current_function_pops_args
= p
->pops_args
;
577 current_function_returns_struct
= p
->returns_struct
;
578 current_function_returns_pcc_struct
= p
->returns_pcc_struct
;
579 current_function_returns_pointer
= p
->returns_pointer
;
580 current_function_needs_context
= p
->needs_context
;
581 current_function_calls_setjmp
= p
->calls_setjmp
;
582 current_function_calls_longjmp
= p
->calls_longjmp
;
583 current_function_calls_alloca
= p
->calls_alloca
;
584 current_function_has_nonlocal_label
= p
->has_nonlocal_label
;
585 current_function_has_nonlocal_goto
= p
->has_nonlocal_goto
;
586 current_function_is_thunk
= p
->is_thunk
;
587 current_function_args_size
= p
->args_size
;
588 current_function_pretend_args_size
= p
->pretend_args_size
;
589 current_function_arg_offset_rtx
= p
->arg_offset_rtx
;
590 current_function_varargs
= p
->varargs
;
591 current_function_stdarg
= p
->stdarg
;
592 current_function_uses_const_pool
= p
->uses_const_pool
;
593 current_function_uses_pic_offset_table
= p
->uses_pic_offset_table
;
594 current_function_internal_arg_pointer
= p
->internal_arg_pointer
;
595 current_function_cannot_inline
= p
->cannot_inline
;
596 max_parm_reg
= p
->max_parm_reg
;
597 parm_reg_stack_loc
= p
->parm_reg_stack_loc
;
598 current_function_outgoing_args_size
= p
->outgoing_args_size
;
599 current_function_return_rtx
= p
->return_rtx
;
600 nonlocal_goto_handler_slot
= p
->nonlocal_goto_handler_slot
;
601 nonlocal_goto_stack_level
= p
->nonlocal_goto_stack_level
;
602 nonlocal_labels
= p
->nonlocal_labels
;
603 cleanup_label
= p
->cleanup_label
;
604 return_label
= p
->return_label
;
605 save_expr_regs
= p
->save_expr_regs
;
606 stack_slot_list
= p
->stack_slot_list
;
607 parm_birth_insn
= p
->parm_birth_insn
;
608 frame_offset
= p
->frame_offset
;
609 tail_recursion_label
= p
->tail_recursion_label
;
610 tail_recursion_reentry
= p
->tail_recursion_reentry
;
611 arg_pointer_save_area
= p
->arg_pointer_save_area
;
612 rtl_expr_chain
= p
->rtl_expr_chain
;
613 last_parm_insn
= p
->last_parm_insn
;
614 context_display
= p
->context_display
;
615 trampoline_list
= p
->trampoline_list
;
616 function_call_count
= p
->function_call_count
;
617 temp_slots
= p
->temp_slots
;
618 temp_slot_level
= p
->temp_slot_level
;
619 target_temp_slot_level
= p
->target_temp_slot_level
;
620 var_temp_slot_level
= p
->var_temp_slot_level
;
621 current_function_epilogue_delay_list
= p
->epilogue_delay_list
;
623 current_function_args_info
= p
->args_info
;
625 restore_tree_status (p
, context
);
626 restore_storage_status (p
);
627 restore_expr_status (p
);
628 restore_emit_status (p
);
629 restore_stmt_status (p
);
630 restore_varasm_status (p
);
632 if (restore_machine_status
)
633 (*restore_machine_status
) (p
);
635 /* Finish doing put_var_into_stack for any of our variables
636 which became addressable during the nested function. */
637 for (queue
= p
->fixup_var_refs_queue
; queue
; queue
= queue
->next
)
638 fixup_var_refs (queue
->modified
, queue
->promoted_mode
, queue
->unsignedp
);
642 /* Reset variables that have known state during rtx generation. */
643 rtx_equal_function_value_matters
= 1;
644 virtuals_instantiated
= 0;
647 void pop_function_context ()
649 pop_function_context_from (current_function_decl
);
652 /* Allocate fixed slots in the stack frame of the current function. */
654 /* Return size needed for stack frame based on slots so far allocated.
655 This size counts from zero. It is not rounded to STACK_BOUNDARY;
656 the caller may have to do that. */
661 #ifdef FRAME_GROWS_DOWNWARD
662 return -frame_offset
;
668 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
669 with machine mode MODE.
671 ALIGN controls the amount of alignment for the address of the slot:
672 0 means according to MODE,
673 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
674 positive specifies alignment boundary in bits.
676 We do not round to stack_boundary here. */
679 assign_stack_local (mode
, size
, align
)
680 enum machine_mode mode
;
684 register rtx x
, addr
;
685 int bigend_correction
= 0;
690 alignment
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
692 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
694 else if (align
== -1)
696 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
697 size
= CEIL_ROUND (size
, alignment
);
700 alignment
= align
/ BITS_PER_UNIT
;
702 /* Round frame offset to that alignment.
703 We must be careful here, since FRAME_OFFSET might be negative and
704 division with a negative dividend isn't as well defined as we might
705 like. So we instead assume that ALIGNMENT is a power of two and
706 use logical operations which are unambiguous. */
707 #ifdef FRAME_GROWS_DOWNWARD
708 frame_offset
= FLOOR_ROUND (frame_offset
, alignment
);
710 frame_offset
= CEIL_ROUND (frame_offset
, alignment
);
713 /* On a big-endian machine, if we are allocating more space than we will use,
714 use the least significant bytes of those that are allocated. */
715 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
716 bigend_correction
= size
- GET_MODE_SIZE (mode
);
718 #ifdef FRAME_GROWS_DOWNWARD
719 frame_offset
-= size
;
722 /* If we have already instantiated virtual registers, return the actual
723 address relative to the frame pointer. */
724 if (virtuals_instantiated
)
725 addr
= plus_constant (frame_pointer_rtx
,
726 (frame_offset
+ bigend_correction
727 + STARTING_FRAME_OFFSET
));
729 addr
= plus_constant (virtual_stack_vars_rtx
,
730 frame_offset
+ bigend_correction
);
732 #ifndef FRAME_GROWS_DOWNWARD
733 frame_offset
+= size
;
736 x
= gen_rtx_MEM (mode
, addr
);
738 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, x
, stack_slot_list
);
743 /* Assign a stack slot in a containing function.
744 First three arguments are same as in preceding function.
745 The last argument specifies the function to allocate in. */
748 assign_outer_stack_local (mode
, size
, align
, function
)
749 enum machine_mode mode
;
752 struct function
*function
;
754 register rtx x
, addr
;
755 int bigend_correction
= 0;
758 /* Allocate in the memory associated with the function in whose frame
759 we are assigning. */
760 push_obstacks (function
->function_obstack
,
761 function
->function_maybepermanent_obstack
);
765 alignment
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
767 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
769 else if (align
== -1)
771 alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
772 size
= CEIL_ROUND (size
, alignment
);
775 alignment
= align
/ BITS_PER_UNIT
;
777 /* Round frame offset to that alignment. */
778 #ifdef FRAME_GROWS_DOWNWARD
779 function
->frame_offset
= FLOOR_ROUND (function
->frame_offset
, alignment
);
781 function
->frame_offset
= CEIL_ROUND (function
->frame_offset
, alignment
);
784 /* On a big-endian machine, if we are allocating more space than we will use,
785 use the least significant bytes of those that are allocated. */
786 if (BYTES_BIG_ENDIAN
&& mode
!= BLKmode
)
787 bigend_correction
= size
- GET_MODE_SIZE (mode
);
789 #ifdef FRAME_GROWS_DOWNWARD
790 function
->frame_offset
-= size
;
792 addr
= plus_constant (virtual_stack_vars_rtx
,
793 function
->frame_offset
+ bigend_correction
);
794 #ifndef FRAME_GROWS_DOWNWARD
795 function
->frame_offset
+= size
;
798 x
= gen_rtx_MEM (mode
, addr
);
800 function
->stack_slot_list
801 = gen_rtx_EXPR_LIST (VOIDmode
, x
, function
->stack_slot_list
);
808 /* Allocate a temporary stack slot and record it for possible later
809 reuse.
811 MODE is the machine mode to be given to the returned rtx.
813 SIZE is the size in units of the space required. We do no rounding here
814 since assign_stack_local will do any required rounding.
816 KEEP is 1 if this slot is to be retained after a call to
817 free_temp_slots. Automatic variables for a block are allocated
818 with this flag. KEEP is 2 if we allocate a longer term temporary,
819 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
820 if we are to allocate something at an inner level to be treated as
821 a variable in the block (e.g., a SAVE_EXPR). */
824 assign_stack_temp (mode
, size
, keep
)
825 enum machine_mode mode
;
829 struct temp_slot
*p
, *best_p
= 0;
831 /* If SIZE is -1 it means that somebody tried to allocate a temporary
832 of a variable size. */
836 /* First try to find an available, already-allocated temporary that is the
837 exact size we require. */
838 for (p
= temp_slots
; p
; p
= p
->next
)
839 if (p
->size
== size
&& GET_MODE (p
->slot
) == mode
&& ! p
->in_use
)
842 /* If we didn't find one, try one that is larger than what we want. We
843 find the smallest such. */
845 for (p
= temp_slots
; p
; p
= p
->next
)
846 if (p
->size
> size
&& GET_MODE (p
->slot
) == mode
&& ! p
->in_use
847 && (best_p
== 0 || best_p
->size
> p
->size
))
850 /* Make our best, if any, the one to use. */
853 /* If there are enough aligned bytes left over, make them into a new
854 temp_slot so that the extra bytes don't get wasted. Do this only
855 for BLKmode slots, so that we can be sure of the alignment. */
856 if (GET_MODE (best_p
->slot
) == BLKmode
)
858 int alignment
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
859 HOST_WIDE_INT rounded_size
= CEIL_ROUND (size
, alignment
);
861 if (best_p
->size
- rounded_size
>= alignment
)
863 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
864 p
->in_use
= p
->addr_taken
= 0;
865 p
->size
= best_p
->size
- rounded_size
;
866 p
->base_offset
= best_p
->base_offset
+ rounded_size
;
867 p
->full_size
= best_p
->full_size
- rounded_size
;
868 p
->slot
= gen_rtx_MEM (BLKmode
,
869 plus_constant (XEXP (best_p
->slot
, 0),
873 p
->next
= temp_slots
;
876 stack_slot_list
= gen_rtx_EXPR_LIST (VOIDmode
, p
->slot
,
879 best_p
->size
= rounded_size
;
880 best_p
->full_size
= rounded_size
;
887 /* If we still didn't find one, make a new temporary. */
890 HOST_WIDE_INT frame_offset_old
= frame_offset
;
892 p
= (struct temp_slot
*) oballoc (sizeof (struct temp_slot
));
894 /* If the temp slot mode doesn't indicate the alignment,
895 use the largest possible, so no one will be disappointed. */
896 p
->slot
= assign_stack_local (mode
, size
, mode
== BLKmode
? -1 : 0);
898 /* The following slot size computation is necessary because we don't
899 know the actual size of the temporary slot until assign_stack_local
900 has performed all the frame alignment and size rounding for the
901 requested temporary. Note that extra space added for alignment
902 can be either above or below this stack slot depending on which
903 way the frame grows. We include the extra space if and only if it
904 is above this slot. */
905 #ifdef FRAME_GROWS_DOWNWARD
906 p
->size
= frame_offset_old
- frame_offset
;
911 /* Now define the fields used by combine_temp_slots. */
912 #ifdef FRAME_GROWS_DOWNWARD
913 p
->base_offset
= frame_offset
;
914 p
->full_size
= frame_offset_old
- frame_offset
;
916 p
->base_offset
= frame_offset_old
;
917 p
->full_size
= frame_offset
- frame_offset_old
;
920 p
->next
= temp_slots
;
926 p
->rtl_expr
= sequence_rtl_expr
;
930 p
->level
= target_temp_slot_level
;
935 p
->level
= var_temp_slot_level
;
940 p
->level
= temp_slot_level
;
944 /* We may be reusing an old slot, so clear any MEM flags that may have been
945 set from before. */
946 RTX_UNCHANGING_P (p
->slot
) = 0;
947 MEM_IN_STRUCT_P (p
->slot
) = 0;
951 /* Assign a temporary of given TYPE.
952 KEEP is as for assign_stack_temp.
953 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
954 it is 0 if a register is OK.
955 DONT_PROMOTE is 1 if we should not promote values in register
/* NOTE(review): this text is a lossy extraction -- gaps in the embedded
   original line numbers (955->959, 983->987) mean source lines (braces,
   declarations, the dont_promote guard) are missing here. Do not compile
   as-is; compare against the original function.c. */
959 assign_temp (type
, keep
, memory_required
, dont_promote
)
/* Returns an rtx: stack memory (via assign_stack_temp) when the mode is
   BLKmode or memory_required is set; otherwise a fresh pseudo register. */
965 enum machine_mode mode
= TYPE_MODE (type
);
966 int unsignedp
= TREE_UNSIGNED (type
);
968 if (mode
== BLKmode
|| memory_required
)
970 HOST_WIDE_INT size
= int_size_in_bytes (type
);
973 /* Unfortunately, we don't yet know how to allocate variable-sized
974 temporaries. However, sometimes we have a fixed upper limit on
975 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
976 instead. This is the case for Chill variable-sized strings. */
977 if (size
== -1 && TREE_CODE (type
) == ARRAY_TYPE
978 && TYPE_ARRAY_MAX_SIZE (type
) != NULL_TREE
979 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type
)) == INTEGER_CST
)
980 size
= TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type
));
982 tmp
= assign_stack_temp (mode
, size
, keep
);
983 MEM_IN_STRUCT_P (tmp
) = AGGREGATE_TYPE_P (type
);
/* Register case: possibly widen the mode before allocating a pseudo,
   unless promotion is restricted to call boundaries.  Presumably
   dont_promote also guards this call -- that guard line is absent from
   this extraction; confirm against the original. */
987 #ifndef PROMOTE_FOR_CALL_ONLY
989 mode
= promote_mode (type
, mode
, &unsignedp
, 0);
992 return gen_reg_rtx (mode
);
995 /* Combine temporary stack slots which are adjacent on the stack.
997 This allows for better use of already allocated stack space. This is only
998 done for BLKmode slots because we can be sure that we won't have alignment
999 problems in this case. */
/* NOTE(review): lossy extraction -- gaps in the embedded numbering
   (e.g. 1012->1015, 1036->1041) show missing lines, including the early
   return taken when there are too many slots and the delete/advance
   branches.  The visible logic merges unused adjacent BLKmode slots by
   base_offset/full_size and unlinks the absorbed node from the list. */
1002 combine_temp_slots ()
1004 struct temp_slot
*p
, *q
;
1005 struct temp_slot
*prev_p
, *prev_q
;
1008 /* If there are a lot of temp slots, don't do anything unless
1009 high levels of optimization. */
1010 if (! flag_expensive_optimizations
)
1011 for (p
= temp_slots
, num_slots
= 0; p
; p
= p
->next
, num_slots
++)
1012 if (num_slots
> 100 || (num_slots
> 10 && optimize
== 0))
1015 for (p
= temp_slots
, prev_p
= 0; p
; p
= prev_p
? prev_p
->next
: temp_slots
)
1019 if (! p
->in_use
&& GET_MODE (p
->slot
) == BLKmode
)
1020 for (q
= p
->next
, prev_q
= p
; q
; q
= prev_q
->next
)
1023 if (! q
->in_use
&& GET_MODE (q
->slot
) == BLKmode
)
1025 if (p
->base_offset
+ p
->full_size
== q
->base_offset
)
1027 /* Q comes after P; combine Q into P. */
1029 p
->full_size
+= q
->full_size
;
1032 else if (q
->base_offset
+ q
->full_size
== p
->base_offset
)
1034 /* P comes after Q; combine P into Q. */
1036 q
->full_size
+= p
->full_size
;
1041 /* Either delete Q or advance past it. */
1043 prev_q
->next
= q
->next
;
1047 /* Either delete P or advance past it. */
1051 prev_p
->next
= p
->next
;
/* Deleting the list head: point temp_slots past P. */
1053 temp_slots
= p
->next
;
1060 /* Find the temp slot corresponding to the object at address X. */
/* Returns the matching temp_slot, or (per the missing tail of this
   extraction, presumably) 0 when no slot matches -- confirm against the
   original.  A slot matches when: its slot address equals X exactly;
   X is (plus virtual_stack_vars_rtx const) falling inside the slot's
   [base_offset, base_offset + full_size) extent; or X appears in the
   slot's recorded alias list p->address (see update_temp_slot_address). */
1062 static struct temp_slot
*
1063 find_temp_slot_from_address (x
)
1066 struct temp_slot
*p
;
1069 for (p
= temp_slots
; p
; p
= p
->next
)
1074 else if (XEXP (p
->slot
, 0) == x
1076 || (GET_CODE (x
) == PLUS
1077 && XEXP (x
, 0) == virtual_stack_vars_rtx
1078 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1079 && INTVAL (XEXP (x
, 1)) >= p
->base_offset
1080 && INTVAL (XEXP (x
, 1)) < p
->base_offset
+ p
->full_size
))
/* p->address may be a single rtx or an EXPR_LIST of aliases; scan it. */
1083 else if (p
->address
!= 0 && GET_CODE (p
->address
) == EXPR_LIST
)
1084 for (next
= p
->address
; next
; next
= XEXP (next
, 1))
1085 if (XEXP (next
, 0) == x
)
1092 /* Indicate that NEW is an alternate way of referring to the temp slot
1093 that previously was known by OLD. */
1096 update_temp_slot_address (old
, new)
1099 struct temp_slot
*p
= find_temp_slot_from_address (old
);
1101 /* If none, return. Else add NEW as an alias. */
/* (The p == 0 early-return branch body is missing from this extraction.) */
1104 else if (p
->address
== 0)
/* Slot already has one plain address: first wrap it into an EXPR_LIST
   so additional aliases can be chained onto it. */
1108 if (GET_CODE (p
->address
) != EXPR_LIST
)
1109 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, p
->address
, NULL_RTX
);
/* Prepend NEW to the alias list. */
1111 p
->address
= gen_rtx_EXPR_LIST (VOIDmode
, new, p
->address
);
1115 /* If X could be a reference to a temporary slot, mark the fact that its
1116 address was taken. */
1119 mark_temp_addr_taken (x
)
1122 struct temp_slot
*p
;
1127 /* If X is not in memory or is at a constant address, it cannot be in
1128 a temporary slot. */
1129 if (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1132 p
= find_temp_slot_from_address (XEXP (x
, 0));
/* NOTE(review): the line that actually sets p->addr_taken (and the null
   check on p) is missing from this extraction -- the numbering jumps
   from 1132 to the next function's comment at 1137. */
1137 /* If X could be a reference to a temporary slot, mark that slot as
1138 belonging to one level higher than the current level. If X
1139 matched one of our slots, just mark that one. Otherwise, we can't
1140 easily predict which it is, so upgrade all of them. Kept slots
1141 need not be touched.
1143 This is called when an ({...}) construct occurs and a statement
1144 returns a value in memory. */
/* NOTE(review): lossy extraction -- gaps in the embedded numbering show
   missing lines throughout (the x == 0 guard, the level/keep assignments
   inside each branch, returns).  Only the scan conditions survive. */
1147 preserve_temp_slots (x
)
1150 struct temp_slot
*p
= 0;
1152 /* If there is no result, we still might have some objects whose address
1153 were taken, so we need to make sure they stay around. */
1156 for (p
= temp_slots
; p
; p
= p
->next
)
1157 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1163 /* If X is a register that is being used as a pointer, see if we have
1164 a temporary slot we know it points to. To be consistent with
1165 the code below, we really should preserve all non-kept slots
1166 if we can't find a match, but that seems to be much too costly. */
1167 if (GET_CODE (x
) == REG
&& REGNO_POINTER_FLAG (REGNO (x
)))
1168 p
= find_temp_slot_from_address (x
);
1170 /* If X is not in memory or is at a constant address, it cannot be in
1171 a temporary slot, but it can contain something whose address was
1173 if (p
== 0 && (GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0))))
1175 for (p
= temp_slots
; p
; p
= p
->next
)
1176 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->addr_taken
)
1182 /* First see if we can find a match. */
1184 p
= find_temp_slot_from_address (XEXP (x
, 0));
1188 /* Move everything at our level whose address was taken to our new
1189 level in case we used its address. */
1190 struct temp_slot
*q
;
1192 if (p
->level
== temp_slot_level
)
1194 for (q
= temp_slots
; q
; q
= q
->next
)
1195 if (q
!= p
&& q
->addr_taken
&& q
->level
== p
->level
)
1204 /* Otherwise, preserve all non-kept slots at this level. */
1205 for (p
= temp_slots
; p
; p
= p
->next
)
1206 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
)
1210 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1211 with that RTL_EXPR, promote it into a temporary slot at the present
1212 level so it will not be freed when we free slots made in the
/* (Comment tail and signature lines 1213-1218 are missing from this
   extraction.) */
1216 preserve_rtl_expr_result (x
)
1219 struct temp_slot
*p
;
1221 /* If X is not in memory or is at a constant address, it cannot be in
1222 a temporary slot. */
1223 if (x
== 0 || GET_CODE (x
) != MEM
|| CONSTANT_P (XEXP (x
, 0)))
1226 /* If we can find a match, move it to our level unless it is already at
1228 p
= find_temp_slot_from_address (XEXP (x
, 0));
/* Promote the slot to the shallower (smaller) of its level and the
   current level so free_temp_slots won't reclaim it early. */
1231 p
->level
= MIN (p
->level
, temp_slot_level
);
1238 /* Free all temporaries used so far. This is normally called at the end
1239 of generating code for a statement. Don't free any temporaries
1240 currently in use for an RTL_EXPR that hasn't yet been emitted.
1241 We could eventually do better than this since it can be reused while
1242 generating the same RTL_EXPR, but this is complex and probably not
/* NOTE(review): the function header itself (around original line 1245,
   `free_temp_slots ()`) was dropped by the extraction; the name is known
   only from this comment.  The loop body that clears p->in_use is also
   missing (numbering jumps 1252->1255). */
1248 struct temp_slot
*p
;
1250 for (p
= temp_slots
; p
; p
= p
->next
)
1251 if (p
->in_use
&& p
->level
== temp_slot_level
&& ! p
->keep
1252 && p
->rtl_expr
== 0)
/* Merge any now-free adjacent BLKmode slots. */
1255 combine_temp_slots ();
1258 /* Free all temporary slots used in T, an RTL_EXPR node. */
1261 free_temps_for_rtl_expr (t
)
1264 struct temp_slot
*p
;
1266 for (p
= temp_slots
; p
; p
= p
->next
)
1267 if (p
->rtl_expr
== t
)
/* (The body that releases the matching slot, original lines 1268-1269,
   is missing from this extraction.) */
1270 combine_temp_slots ();
1273 /* Mark all temporaries ever allocated in this function as not suitable
1274 for reuse until the current level is exited. */
1277 mark_all_temps_used ()
1279 struct temp_slot
*p
;
1281 for (p
= temp_slots
; p
; p
= p
->next
)
/* Force every slot live and kept, and pull its level up to (at most)
   the current level so it stays pinned until this level is popped. */
1283 p
->in_use
= p
->keep
= 1;
1284 p
->level
= MIN (p
->level
, temp_slot_level
);
1288 /* Push deeper into the nesting level for stack temporaries. */
/* NOTE(review): the push_temp_slots function this comment documents
   (original lines ~1290-1293) is entirely missing from this extraction.
   Presumably push_temp_slots_for_block below calls it before recording
   the new level -- confirm against the original file. */
1296 /* Likewise, but save the new level as the place to allocate variables
1300 push_temp_slots_for_block ()
1304 var_temp_slot_level
= temp_slot_level
;
1307 /* Likewise, but save the new level as the place to allocate temporaries
1308 for TARGET_EXPRs. */
1311 push_temp_slots_for_target ()
/* Record the (new) current level for TARGET_EXPR temporaries. */
1315 target_temp_slot_level
= temp_slot_level
;
1318 /* Set and get the value of target_temp_slot_level. The only
1319 permitted use of these functions is to save and restore this value. */
/* Accessor: returns the saved TARGET_EXPR temp-slot level. */
1322 get_target_temp_slot_level ()
1324 return target_temp_slot_level
;
/* Restore target_temp_slot_level to a value previously obtained from
   get_target_temp_slot_level (see comment above that function). */
1328 set_target_temp_slot_level (level
)
1331 target_temp_slot_level
= level
;
1334 /* Pop a temporary nesting level. All slots in use in the current level
/* NOTE(review): the function header (`pop_temp_slots ()`) and the loop
   body that frees each matching slot are missing from this extraction;
   the name is known only from the comment above and the pattern of the
   sibling push/pop functions. */
1340 struct temp_slot
*p
;
1342 for (p
= temp_slots
; p
; p
= p
->next
)
1343 if (p
->in_use
&& p
->level
== temp_slot_level
&& p
->rtl_expr
== 0)
/* Coalesce freed adjacent BLKmode slots before leaving the level. */
1346 combine_temp_slots ();
1351 /* Initialize temporary slots. */
/* NOTE(review): the function header (`init_temp_slots ()`) and the
   `temp_slots = 0;` reset (original line ~1357) are missing from this
   extraction; only the level resets survive. */
1356 /* We have not allocated any temporaries yet. */
1358 temp_slot_level
= 0;
1359 var_temp_slot_level
= 0;
1360 target_temp_slot_level
= 0;
1363 /* Retroactively move an auto variable from a register to a stack slot.
1364 This is done when an address-reference to the variable is seen. */
/* NOTE(review): lossy extraction -- the local declarations for reg and
   context, the early return when DECL_RTL is absent, and the
   computation that initializes can_use_addressof (numbering jumps
   1408->1414) are missing.  Structure that survives: resolve the decl's
   rtx and modes, locate an outer function's context if inherited, then
   either defer via gen_mem_addressof, spill a single REG, or spill both
   halves of a CONCAT and fuse it into one MEM. */
1367 put_var_into_stack (decl
)
1371 enum machine_mode promoted_mode
, decl_mode
;
1372 struct function
*function
= 0;
1374 int can_use_addressof
;
1376 context
= decl_function_context (decl
);
1378 /* Get the current rtl used for this object and its original mode. */
1379 reg
= TREE_CODE (decl
) == SAVE_EXPR
? SAVE_EXPR_RTL (decl
) : DECL_RTL (decl
);
1381 /* No need to do anything if decl has no rtx yet
1382 since in that case caller is setting TREE_ADDRESSABLE
1383 and a stack slot will be assigned when the rtl is made. */
1387 /* Get the declared mode for this object. */
1388 decl_mode
= (TREE_CODE (decl
) == SAVE_EXPR
? TYPE_MODE (TREE_TYPE (decl
))
1389 : DECL_MODE (decl
));
1390 /* Get the mode it's actually stored in. */
1391 promoted_mode
= GET_MODE (reg
);
1393 /* If this variable comes from an outer function,
1394 find that function's saved context. */
1395 if (context
!= current_function_decl
&& context
!= inline_function_decl
)
1396 for (function
= outer_function_chain
; function
; function
= function
->next
)
1397 if (function
->decl
== context
)
1400 /* If this is a variable-size object with a pseudo to address it,
1401 put that pseudo into the stack, if the var is nonlocal. */
1402 if (DECL_NONLOCAL (decl
)
1403 && GET_CODE (reg
) == MEM
1404 && GET_CODE (XEXP (reg
, 0)) == REG
1405 && REGNO (XEXP (reg
, 0)) > LAST_VIRTUAL_REGISTER
)
/* Spill the address pseudo instead of the variable itself. */
1407 reg
= XEXP (reg
, 0);
1408 decl_mode
= promoted_mode
= GET_MODE (reg
);
1414 /* FIXME make it work for promoted modes too */
1415 && decl_mode
== promoted_mode
1416 #ifdef NON_SAVING_SETJMP
1417 && ! (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
1421 /* If we can't use ADDRESSOF, make sure we see through one we already
1423 if (! can_use_addressof
&& GET_CODE (reg
) == MEM
1424 && GET_CODE (XEXP (reg
, 0)) == ADDRESSOF
)
1425 reg
= XEXP (XEXP (reg
, 0), 0);
1427 /* Now we should have a value that resides in one or more pseudo regs. */
1429 if (GET_CODE (reg
) == REG
)
1431 /* If this variable lives in the current function and we don't need
1432 to put things in the stack for the sake of setjmp, try to keep it
1433 in a register until we know we actually need the address. */
1434 if (can_use_addressof
)
1435 gen_mem_addressof (reg
, decl
);
1437 put_reg_into_stack (function
, reg
, TREE_TYPE (decl
),
1438 promoted_mode
, decl_mode
,
1439 TREE_SIDE_EFFECTS (decl
), 0,
1441 || DECL_INITIAL (decl
) != 0);
1443 else if (GET_CODE (reg
) == CONCAT
)
1445 /* A CONCAT contains two pseudos; put them both in the stack.
1446 We do it so they end up consecutive. */
1447 enum machine_mode part_mode
= GET_MODE (XEXP (reg
, 0));
1448 tree part_type
= TREE_TYPE (TREE_TYPE (decl
));
1449 #ifdef FRAME_GROWS_DOWNWARD
1450 /* Since part 0 should have a lower address, do it second. */
1451 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1452 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1453 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0);
1454 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1455 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1456 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0);
/* Non-FRAME_GROWS_DOWNWARD ordering (the #else line was dropped by the
   extraction): part 0 first so it still gets the lower address. */
1458 put_reg_into_stack (function
, XEXP (reg
, 0), part_type
, part_mode
,
1459 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1460 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0);
1461 put_reg_into_stack (function
, XEXP (reg
, 1), part_type
, part_mode
,
1462 part_mode
, TREE_SIDE_EFFECTS (decl
), 0,
1463 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0);
1466 /* Change the CONCAT into a combined MEM for both parts. */
1467 PUT_CODE (reg
, MEM
);
1468 MEM_VOLATILE_P (reg
) = MEM_VOLATILE_P (XEXP (reg
, 0));
1469 MEM_ALIAS_SET (reg
) = get_alias_set (decl
);
1471 /* The two parts are in memory order already.
1472 Use the lower parts address as ours. */
1473 XEXP (reg
, 0) = XEXP (XEXP (reg
, 0), 0);
1474 /* Prevent sharing of rtl that might lose. */
1475 if (GET_CODE (XEXP (reg
, 0)) == PLUS
)
1476 XEXP (reg
, 0) = copy_rtx (XEXP (reg
, 0));
/* -fcheck-memory-usage: tell the checker the spilled object is
   readable/writable memory. */
1481 if (flag_check_memory_usage
)
1482 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
1483 XEXP (reg
, 0), ptr_mode
,
1484 GEN_INT (GET_MODE_SIZE (GET_MODE (reg
))),
1485 TYPE_MODE (sizetype
),
1486 GEN_INT (MEMORY_USE_RW
),
1487 TYPE_MODE (integer_type_node
));
1490 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1491 into the stack frame of FUNCTION (0 means the current function).
1492 DECL_MODE is the machine mode of the user-level data type.
1493 PROMOTED_MODE is the machine mode of the register.
1494 VOLATILE_P is nonzero if this is for a "volatile" decl.
1495 USED_P is nonzero if this reg might have already been used in an insn. */
/* NOTE(review): lossy extraction -- the declarations of `new`/`type`,
   several branch/brace lines, and the final pop_obstacks are missing
   (numbering gaps at 1500->1503, 1519->1524, 1563->1567). */
1498 put_reg_into_stack (function
, reg
, type
, promoted_mode
, decl_mode
, volatile_p
,
1499 original_regno
, used_p
)
1500 struct function
*function
;
1503 enum machine_mode promoted_mode
, decl_mode
;
1509 int regno
= original_regno
;
1512 regno
= REGNO (reg
);
/* Choose the stack home: reuse a parm's preassigned slot when REGNO is
   a parameter register, else allocate a fresh local -- in the outer
   function's frame when FUNCTION is non-null, otherwise ours. */
1516 if (regno
< function
->max_parm_reg
)
1517 new = function
->parm_reg_stack_loc
[regno
];
1519 new = assign_outer_stack_local (decl_mode
, GET_MODE_SIZE (decl_mode
),
1524 if (regno
< max_parm_reg
)
1525 new = parm_reg_stack_loc
[regno
];
1527 new = assign_stack_local (decl_mode
, GET_MODE_SIZE (decl_mode
), 0);
/* Rewrite REG in place into a MEM at the chosen address, so every
   existing rtl reference to the pseudo now names the stack slot. */
1530 PUT_MODE (reg
, decl_mode
);
1531 XEXP (reg
, 0) = XEXP (new, 0);
1532 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1533 MEM_VOLATILE_P (reg
) = volatile_p
;
1534 PUT_CODE (reg
, MEM
);
1536 /* If this is a memory ref that contains aggregate components,
1537 mark it as such for cse and loop optimize. If we are reusing a
1538 previously generated stack slot, then we need to copy the bit in
1539 case it was set for other reasons. For instance, it is set for
1540 __builtin_va_alist. */
1541 MEM_IN_STRUCT_P (reg
) = AGGREGATE_TYPE_P (type
) | MEM_IN_STRUCT_P (new);
1542 MEM_ALIAS_SET (reg
) = get_alias_set (type
);
1544 /* Now make sure that all refs to the variable, previously made
1545 when it was a register, are fixed up to be valid again. */
1547 if (used_p
&& function
!= 0)
1549 struct var_refs_queue
*temp
;
1551 /* Variable is inherited; fix it up when we get back to its function. */
1552 push_obstacks (function
->function_obstack
,
1553 function
->function_maybepermanent_obstack
);
1555 /* See comment in restore_tree_status in tree.c for why this needs to be
1556 on saveable obstack. */
1558 = (struct var_refs_queue
*) savealloc (sizeof (struct var_refs_queue
));
1559 temp
->modified
= reg
;
1560 temp
->promoted_mode
= promoted_mode
;
1561 temp
->unsignedp
= TREE_UNSIGNED (type
);
1562 temp
->next
= function
->fixup_var_refs_queue
;
1563 function
->fixup_var_refs_queue
= temp
;
1567 /* Variable is local; fix it up now. */
1568 fixup_var_refs (reg
, promoted_mode
, TREE_UNSIGNED (type
));
/* Scan all rtl generated so far and repair references to VAR, which has
   just been moved from a register to memory: the main insn chain, every
   pending pushed sequence, and every not-yet-emitted RTL_EXPR sequence.
   (The function's leading comment and some declarations -- pending,
   end_sequence calls -- were dropped by the extraction.) */
1572 fixup_var_refs (var
, promoted_mode
, unsignedp
)
1574 enum machine_mode promoted_mode
;
1578 rtx first_insn
= get_insns ();
1579 struct sequence_stack
*stack
= sequence_stack
;
1580 tree rtl_exps
= rtl_expr_chain
;
1582 /* Must scan all insns for stack-refs that exceed the limit. */
1583 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, first_insn
, stack
== 0);
1585 /* Scan all pending sequences too. */
1586 for (; stack
; stack
= stack
->next
)
1588 push_to_sequence (stack
->first
);
1589 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
,
1590 stack
->first
, stack
->next
!= 0);
1591 /* Update remembered end of sequence
1592 in case we added an insn at the end. */
1593 stack
->last
= get_last_insn ();
1597 /* Scan all waiting RTL_EXPRs too. */
1598 for (pending
= rtl_exps
; pending
; pending
= TREE_CHAIN (pending
))
1600 rtx seq
= RTL_EXPR_SEQUENCE (TREE_VALUE (pending
));
1601 if (seq
!= const0_rtx
&& seq
!= 0)
1603 push_to_sequence (seq
);
1604 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, seq
, 0);
1610 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1611 some part of an insn. Return a struct fixup_replacement whose OLD
1612 value is equal to X. Allocate a new structure if no such entry exists. */
1614 static struct fixup_replacement
*
1615 find_fixup_replacement (replacements
, x
)
1616 struct fixup_replacement
**replacements
;
1619 struct fixup_replacement
*p
;
1621 /* See if we have already replaced this. */
1622 for (p
= *replacements
; p
&& p
->old
!= x
; p
= p
->next
)
/* Not found: allocate a fresh entry on the obstack and push it onto the
   head of the list.  (The lines initializing p->old / p->new, original
   1628-1629, are missing from this extraction.) */
1627 p
= (struct fixup_replacement
*) oballoc (sizeof (struct fixup_replacement
));
1630 p
->next
= *replacements
;
1637 /* Scan the insn-chain starting with INSN for refs to VAR
1638 and fix them up. TOPLEVEL is nonzero if this chain is the
1639 main chain of insns for the current function. */
/* NOTE(review): lossy extraction -- the loop header over the insn chain,
   several declarations (note, seq, insert_before), brace lines, and the
   start_sequence/end_sequence bracketing are missing (see numbering
   gaps, e.g. 1654->1657, 1678->1681, 1797->1801).  Surviving structure:
   per insn, (1) delete CLOBBERs of VAR and no-op loads/stores of VAR,
   (2) guard SMALL_REGISTER_CLASSES call-return copies with an
   intermediate pseudo, (3) call fixup_var_refs_1 on the pattern and
   flush its replacement list, (4) repair REG_NOTES. */
1642 fixup_var_refs_insns (var
, promoted_mode
, unsignedp
, insn
, toplevel
)
1644 enum machine_mode promoted_mode
;
1653 rtx next
= NEXT_INSN (insn
);
1654 rtx set
, prev
, prev_set
;
1657 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1659 /* If this is a CLOBBER of VAR, delete it.
1661 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1662 and REG_RETVAL notes too. */
1663 if (GET_CODE (PATTERN (insn
)) == CLOBBER
1664 && XEXP (PATTERN (insn
), 0) == var
)
1666 if ((note
= find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
)) != 0)
1667 /* The REG_LIBCALL note will go away since we are going to
1668 turn INSN into a NOTE, so just delete the
1669 corresponding REG_RETVAL note. */
1670 remove_note (XEXP (note
, 0),
1671 find_reg_note (XEXP (note
, 0), REG_RETVAL
,
1674 /* In unoptimized compilation, we shouldn't call delete_insn
1675 except in jump.c doing warnings. */
1676 PUT_CODE (insn
, NOTE
);
1677 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1678 NOTE_SOURCE_FILE (insn
) = 0;
1681 /* The insn to load VAR from a home in the arglist
1682 is now a no-op. When we see it, just delete it.
1683 Similarly if this is storing VAR from a register from which
1684 it was loaded in the previous insn. This will occur
1685 when an ADDRESSOF was made for an arglist slot. */
1687 && (set
= single_set (insn
)) != 0
1688 && SET_DEST (set
) == var
1689 /* If this represents the result of an insn group,
1690 don't delete the insn. */
1691 && find_reg_note (insn
, REG_RETVAL
, NULL_RTX
) == 0
1692 && (rtx_equal_p (SET_SRC (set
), var
)
1693 || (GET_CODE (SET_SRC (set
)) == REG
1694 && (prev
= prev_nonnote_insn (insn
)) != 0
1695 && (prev_set
= single_set (prev
)) != 0
1696 && SET_DEST (prev_set
) == SET_SRC (set
)
1697 && rtx_equal_p (SET_SRC (prev_set
), var
))))
1699 /* In unoptimized compilation, we shouldn't call delete_insn
1700 except in jump.c doing warnings. */
1701 PUT_CODE (insn
, NOTE
);
1702 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1703 NOTE_SOURCE_FILE (insn
) = 0;
1704 if (insn
== last_parm_insn
)
1705 last_parm_insn
= PREV_INSN (next
);
1709 struct fixup_replacement
*replacements
= 0;
1710 rtx next_insn
= NEXT_INSN (insn
);
1712 if (SMALL_REGISTER_CLASSES
)
1714 /* If the insn that copies the results of a CALL_INSN
1715 into a pseudo now references VAR, we have to use an
1716 intermediate pseudo since we want the life of the
1717 return value register to be only a single insn.
1719 If we don't use an intermediate pseudo, such things as
1720 address computations to make the address of VAR valid
1721 if it is not can be placed between the CALL_INSN and INSN.
1723 To make sure this doesn't happen, we record the destination
1724 of the CALL_INSN and see if the next insn uses both that
1727 if (call_dest
!= 0 && GET_CODE (insn
) == INSN
1728 && reg_mentioned_p (var
, PATTERN (insn
))
1729 && reg_mentioned_p (call_dest
, PATTERN (insn
)))
1731 rtx temp
= gen_reg_rtx (GET_MODE (call_dest
));
1733 emit_insn_before (gen_move_insn (temp
, call_dest
), insn
);
1735 PATTERN (insn
) = replace_rtx (PATTERN (insn
),
/* Remember the return-value destination of each CALL_INSN (plain SET
   or first SET of a PARALLEL) for the check above. */
1739 if (GET_CODE (insn
) == CALL_INSN
1740 && GET_CODE (PATTERN (insn
)) == SET
)
1741 call_dest
= SET_DEST (PATTERN (insn
));
1742 else if (GET_CODE (insn
) == CALL_INSN
1743 && GET_CODE (PATTERN (insn
)) == PARALLEL
1744 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1745 call_dest
= SET_DEST (XVECEXP (PATTERN (insn
), 0, 0));
1750 /* See if we have to do anything to INSN now that VAR is in
1751 memory. If it needs to be loaded into a pseudo, use a single
1752 pseudo for the entire insn in case there is a MATCH_DUP
1753 between two operands. We pass a pointer to the head of
1754 a list of struct fixup_replacements. If fixup_var_refs_1
1755 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1756 it will record them in this list.
1758 If it allocated a pseudo for any replacement, we copy into
1761 fixup_var_refs_1 (var
, promoted_mode
, &PATTERN (insn
), insn
,
1764 /* If this is last_parm_insn, and any instructions were output
1765 after it to fix it up, then we must set last_parm_insn to
1766 the last such instruction emitted. */
1767 if (insn
== last_parm_insn
)
1768 last_parm_insn
= PREV_INSN (next_insn
);
/* Drain the replacement list: for each pseudo replacement, emit a copy
   from the (fixed-up) old rtx into the new pseudo before INSN,
   converting modes when they differ. */
1770 while (replacements
)
1772 if (GET_CODE (replacements
->new) == REG
)
1777 /* OLD might be a (subreg (mem)). */
1778 if (GET_CODE (replacements
->old
) == SUBREG
)
1780 = fixup_memory_subreg (replacements
->old
, insn
, 0);
1783 = fixup_stack_1 (replacements
->old
, insn
);
1785 insert_before
= insn
;
1787 /* If we are changing the mode, do a conversion.
1788 This might be wasteful, but combine.c will
1789 eliminate much of the waste. */
1791 if (GET_MODE (replacements
->new)
1792 != GET_MODE (replacements
->old
))
1795 convert_move (replacements
->new,
1796 replacements
->old
, unsignedp
);
1797 seq
= gen_sequence ();
1801 seq
= gen_move_insn (replacements
->new,
1804 emit_insn_before (seq
, insert_before
);
1807 replacements
= replacements
->next
;
1811 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1812 But don't touch other insns referred to by reg-notes;
1813 we will get them elsewhere. */
1814 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1815 if (GET_CODE (note
) != INSN_LIST
)
1817 = walk_fixup_memory_subreg (XEXP (note
, 0), insn
, 1);
1823 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1824 See if the rtx expression at *LOC in INSN needs to be changed.
1826 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1827 contain a list of original rtx's and replacements. If we find that we need
1828 to modify this insn by replacing a memory reference with a pseudo or by
1829 making a new MEM to implement a SUBREG, we consult that list to see if
1830 we have already chosen a replacement. If none has already been allocated,
1831 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1832 or the SUBREG, as appropriate, to the pseudo. */
1835 fixup_var_refs_1 (var
, promoted_mode
, loc
, insn
, replacements
)
1837 enum machine_mode promoted_mode
;
1840 struct fixup_replacement
**replacements
;
1843 register rtx x
= *loc
;
1844 RTX_CODE code
= GET_CODE (x
);
1846 register rtx tem
, tem1
;
1847 struct fixup_replacement
*replacement
;
1852 if (XEXP (x
, 0) == var
)
1854 /* Prevent sharing of rtl that might lose. */
1855 rtx sub
= copy_rtx (XEXP (var
, 0));
1859 if (! validate_change (insn
, loc
, sub
, 0))
1861 rtx y
= force_operand (sub
, NULL_RTX
);
1863 if (! validate_change (insn
, loc
, y
, 0))
1864 *loc
= copy_to_reg (y
);
1867 emit_insn_before (gen_sequence (), insn
);
1875 /* If we already have a replacement, use it. Otherwise,
1876 try to fix up this address in case it is invalid. */
1878 replacement
= find_fixup_replacement (replacements
, var
);
1879 if (replacement
->new)
1881 *loc
= replacement
->new;
1885 *loc
= replacement
->new = x
= fixup_stack_1 (x
, insn
);
1887 /* Unless we are forcing memory to register or we changed the mode,
1888 we can leave things the way they are if the insn is valid. */
1890 INSN_CODE (insn
) = -1;
1891 if (! flag_force_mem
&& GET_MODE (x
) == promoted_mode
1892 && recog_memoized (insn
) >= 0)
1895 *loc
= replacement
->new = gen_reg_rtx (promoted_mode
);
1899 /* If X contains VAR, we need to unshare it here so that we update
1900 each occurrence separately. But all identical MEMs in one insn
1901 must be replaced with the same rtx because of the possibility of
1904 if (reg_mentioned_p (var
, x
))
1906 replacement
= find_fixup_replacement (replacements
, x
);
1907 if (replacement
->new == 0)
1908 replacement
->new = copy_most_rtx (x
, var
);
1910 *loc
= x
= replacement
->new;
1926 /* Note that in some cases those types of expressions are altered
1927 by optimize_bit_field, and do not survive to get here. */
1928 if (XEXP (x
, 0) == var
1929 || (GET_CODE (XEXP (x
, 0)) == SUBREG
1930 && SUBREG_REG (XEXP (x
, 0)) == var
))
1932 /* Get TEM as a valid MEM in the mode presently in the insn.
1934 We don't worry about the possibility of MATCH_DUP here; it
1935 is highly unlikely and would be tricky to handle. */
1938 if (GET_CODE (tem
) == SUBREG
)
1940 if (GET_MODE_BITSIZE (GET_MODE (tem
))
1941 > GET_MODE_BITSIZE (GET_MODE (var
)))
1943 replacement
= find_fixup_replacement (replacements
, var
);
1944 if (replacement
->new == 0)
1945 replacement
->new = gen_reg_rtx (GET_MODE (var
));
1946 SUBREG_REG (tem
) = replacement
->new;
1949 tem
= fixup_memory_subreg (tem
, insn
, 0);
1952 tem
= fixup_stack_1 (tem
, insn
);
1954 /* Unless we want to load from memory, get TEM into the proper mode
1955 for an extract from memory. This can only be done if the
1956 extract is at a constant position and length. */
1958 if (! flag_force_mem
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
1959 && GET_CODE (XEXP (x
, 2)) == CONST_INT
1960 && ! mode_dependent_address_p (XEXP (tem
, 0))
1961 && ! MEM_VOLATILE_P (tem
))
1963 enum machine_mode wanted_mode
= VOIDmode
;
1964 enum machine_mode is_mode
= GET_MODE (tem
);
1965 HOST_WIDE_INT pos
= INTVAL (XEXP (x
, 2));
1968 if (GET_CODE (x
) == ZERO_EXTRACT
)
1969 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][1];
1972 if (GET_CODE (x
) == SIGN_EXTRACT
)
1973 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][1];
1975 /* If we have a narrower mode, we can do something. */
1976 if (wanted_mode
!= VOIDmode
1977 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
1979 HOST_WIDE_INT offset
= pos
/ BITS_PER_UNIT
;
1980 rtx old_pos
= XEXP (x
, 2);
1983 /* If the bytes and bits are counted differently, we
1984 must adjust the offset. */
1985 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
1986 offset
= (GET_MODE_SIZE (is_mode
)
1987 - GET_MODE_SIZE (wanted_mode
) - offset
);
1989 pos
%= GET_MODE_BITSIZE (wanted_mode
);
1991 newmem
= gen_rtx_MEM (wanted_mode
,
1992 plus_constant (XEXP (tem
, 0), offset
));
1993 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
1994 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (tem
);
1995 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (tem
);
1997 /* Make the change and see if the insn remains valid. */
1998 INSN_CODE (insn
) = -1;
1999 XEXP (x
, 0) = newmem
;
2000 XEXP (x
, 2) = GEN_INT (pos
);
2002 if (recog_memoized (insn
) >= 0)
2005 /* Otherwise, restore old position. XEXP (x, 0) will be
2007 XEXP (x
, 2) = old_pos
;
2011 /* If we get here, the bitfield extract insn can't accept a memory
2012 reference. Copy the input into a register. */
2014 tem1
= gen_reg_rtx (GET_MODE (tem
));
2015 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
2022 if (SUBREG_REG (x
) == var
)
2024 /* If this is a special SUBREG made because VAR was promoted
2025 from a wider mode, replace it with VAR and call ourself
2026 recursively, this time saying that the object previously
2027 had its current mode (by virtue of the SUBREG). */
2029 if (SUBREG_PROMOTED_VAR_P (x
))
2032 fixup_var_refs_1 (var
, GET_MODE (var
), loc
, insn
, replacements
);
2036 /* If this SUBREG makes VAR wider, it has become a paradoxical
2037 SUBREG with VAR in memory, but these aren't allowed at this
2038 stage of the compilation. So load VAR into a pseudo and take
2039 a SUBREG of that pseudo. */
2040 if (GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (var
)))
2042 replacement
= find_fixup_replacement (replacements
, var
);
2043 if (replacement
->new == 0)
2044 replacement
->new = gen_reg_rtx (GET_MODE (var
));
2045 SUBREG_REG (x
) = replacement
->new;
2049 /* See if we have already found a replacement for this SUBREG.
2050 If so, use it. Otherwise, make a MEM and see if the insn
2051 is recognized. If not, or if we should force MEM into a register,
2052 make a pseudo for this SUBREG. */
2053 replacement
= find_fixup_replacement (replacements
, x
);
2054 if (replacement
->new)
2056 *loc
= replacement
->new;
2060 replacement
->new = *loc
= fixup_memory_subreg (x
, insn
, 0);
2062 INSN_CODE (insn
) = -1;
2063 if (! flag_force_mem
&& recog_memoized (insn
) >= 0)
2066 *loc
= replacement
->new = gen_reg_rtx (GET_MODE (x
));
2072 /* First do special simplification of bit-field references. */
2073 if (GET_CODE (SET_DEST (x
)) == SIGN_EXTRACT
2074 || GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
2075 optimize_bit_field (x
, insn
, 0);
2076 if (GET_CODE (SET_SRC (x
)) == SIGN_EXTRACT
2077 || GET_CODE (SET_SRC (x
)) == ZERO_EXTRACT
)
2078 optimize_bit_field (x
, insn
, NULL_PTR
);
2080 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2081 into a register and then store it back out. */
2082 if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
2083 && GET_CODE (XEXP (SET_DEST (x
), 0)) == SUBREG
2084 && SUBREG_REG (XEXP (SET_DEST (x
), 0)) == var
2085 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x
), 0)))
2086 > GET_MODE_SIZE (GET_MODE (var
))))
2088 replacement
= find_fixup_replacement (replacements
, var
);
2089 if (replacement
->new == 0)
2090 replacement
->new = gen_reg_rtx (GET_MODE (var
));
2092 SUBREG_REG (XEXP (SET_DEST (x
), 0)) = replacement
->new;
2093 emit_insn_after (gen_move_insn (var
, replacement
->new), insn
);
2096 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2097 insn into a pseudo and store the low part of the pseudo into VAR. */
2098 if (GET_CODE (SET_DEST (x
)) == SUBREG
2099 && SUBREG_REG (SET_DEST (x
)) == var
2100 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
2101 > GET_MODE_SIZE (GET_MODE (var
))))
2103 SET_DEST (x
) = tem
= gen_reg_rtx (GET_MODE (SET_DEST (x
)));
2104 emit_insn_after (gen_move_insn (var
, gen_lowpart (GET_MODE (var
),
2111 rtx dest
= SET_DEST (x
);
2112 rtx src
= SET_SRC (x
);
2114 rtx outerdest
= dest
;
2117 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
2118 || GET_CODE (dest
) == SIGN_EXTRACT
2119 || GET_CODE (dest
) == ZERO_EXTRACT
)
2120 dest
= XEXP (dest
, 0);
2122 if (GET_CODE (src
) == SUBREG
)
2123 src
= XEXP (src
, 0);
2125 /* If VAR does not appear at the top level of the SET
2126 just scan the lower levels of the tree. */
2128 if (src
!= var
&& dest
!= var
)
2131 /* We will need to rerecognize this insn. */
2132 INSN_CODE (insn
) = -1;
2135 if (GET_CODE (outerdest
) == ZERO_EXTRACT
&& dest
== var
)
2137 /* Since this case will return, ensure we fixup all the
2139 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 1),
2140 insn
, replacements
);
2141 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (outerdest
, 2),
2142 insn
, replacements
);
2143 fixup_var_refs_1 (var
, promoted_mode
, &SET_SRC (x
),
2144 insn
, replacements
);
2146 tem
= XEXP (outerdest
, 0);
2148 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2149 that may appear inside a ZERO_EXTRACT.
2150 This was legitimate when the MEM was a REG. */
2151 if (GET_CODE (tem
) == SUBREG
2152 && SUBREG_REG (tem
) == var
)
2153 tem
= fixup_memory_subreg (tem
, insn
, 0);
2155 tem
= fixup_stack_1 (tem
, insn
);
2157 if (GET_CODE (XEXP (outerdest
, 1)) == CONST_INT
2158 && GET_CODE (XEXP (outerdest
, 2)) == CONST_INT
2159 && ! mode_dependent_address_p (XEXP (tem
, 0))
2160 && ! MEM_VOLATILE_P (tem
))
2162 enum machine_mode wanted_mode
2163 = insn_operand_mode
[(int) CODE_FOR_insv
][0];
2164 enum machine_mode is_mode
= GET_MODE (tem
);
2165 HOST_WIDE_INT pos
= INTVAL (XEXP (outerdest
, 2));
2167 /* If we have a narrower mode, we can do something. */
2168 if (GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
2170 HOST_WIDE_INT offset
= pos
/ BITS_PER_UNIT
;
2171 rtx old_pos
= XEXP (outerdest
, 2);
2174 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
2175 offset
= (GET_MODE_SIZE (is_mode
)
2176 - GET_MODE_SIZE (wanted_mode
) - offset
);
2178 pos
%= GET_MODE_BITSIZE (wanted_mode
);
2180 newmem
= gen_rtx_MEM (wanted_mode
,
2181 plus_constant (XEXP (tem
, 0), offset
));
2182 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (tem
);
2183 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (tem
);
2184 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (tem
);
2186 /* Make the change and see if the insn remains valid. */
2187 INSN_CODE (insn
) = -1;
2188 XEXP (outerdest
, 0) = newmem
;
2189 XEXP (outerdest
, 2) = GEN_INT (pos
);
2191 if (recog_memoized (insn
) >= 0)
2194 /* Otherwise, restore old position. XEXP (x, 0) will be
2196 XEXP (outerdest
, 2) = old_pos
;
2200 /* If we get here, the bit-field store doesn't allow memory
2201 or isn't located at a constant position. Load the value into
2202 a register, do the store, and put it back into memory. */
2204 tem1
= gen_reg_rtx (GET_MODE (tem
));
2205 emit_insn_before (gen_move_insn (tem1
, tem
), insn
);
2206 emit_insn_after (gen_move_insn (tem
, tem1
), insn
);
2207 XEXP (outerdest
, 0) = tem1
;
2212 /* STRICT_LOW_PART is a no-op on memory references
2213 and it can cause combinations to be unrecognizable,
2216 if (dest
== var
&& GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2217 SET_DEST (x
) = XEXP (SET_DEST (x
), 0);
2219 /* A valid insn to copy VAR into or out of a register
2220 must be left alone, to avoid an infinite loop here.
2221 If the reference to VAR is by a subreg, fix that up,
2222 since SUBREG is not valid for a memref.
2223 Also fix up the address of the stack slot.
2225 Note that we must not try to recognize the insn until
2226 after we know that we have valid addresses and no
2227 (subreg (mem ...) ...) constructs, since these interfere
2228 with determining the validity of the insn. */
2230 if ((SET_SRC (x
) == var
2231 || (GET_CODE (SET_SRC (x
)) == SUBREG
2232 && SUBREG_REG (SET_SRC (x
)) == var
))
2233 && (GET_CODE (SET_DEST (x
)) == REG
2234 || (GET_CODE (SET_DEST (x
)) == SUBREG
2235 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
))
2236 && GET_MODE (var
) == promoted_mode
2237 && x
== single_set (insn
))
2241 replacement
= find_fixup_replacement (replacements
, SET_SRC (x
));
2242 if (replacement
->new)
2243 SET_SRC (x
) = replacement
->new;
2244 else if (GET_CODE (SET_SRC (x
)) == SUBREG
)
2245 SET_SRC (x
) = replacement
->new
2246 = fixup_memory_subreg (SET_SRC (x
), insn
, 0);
2248 SET_SRC (x
) = replacement
->new
2249 = fixup_stack_1 (SET_SRC (x
), insn
);
2251 if (recog_memoized (insn
) >= 0)
2254 /* INSN is not valid, but we know that we want to
2255 copy SET_SRC (x) to SET_DEST (x) in some way. So
2256 we generate the move and see whether it requires more
2257 than one insn. If it does, we emit those insns and
2258 delete INSN. Otherwise, we an just replace the pattern
2259 of INSN; we have already verified above that INSN has
2260 no other function that to do X. */
2262 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2263 if (GET_CODE (pat
) == SEQUENCE
)
2265 emit_insn_after (pat
, insn
);
2266 PUT_CODE (insn
, NOTE
);
2267 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2268 NOTE_SOURCE_FILE (insn
) = 0;
2271 PATTERN (insn
) = pat
;
2276 if ((SET_DEST (x
) == var
2277 || (GET_CODE (SET_DEST (x
)) == SUBREG
2278 && SUBREG_REG (SET_DEST (x
)) == var
))
2279 && (GET_CODE (SET_SRC (x
)) == REG
2280 || (GET_CODE (SET_SRC (x
)) == SUBREG
2281 && GET_CODE (SUBREG_REG (SET_SRC (x
))) == REG
))
2282 && GET_MODE (var
) == promoted_mode
2283 && x
== single_set (insn
))
2287 if (GET_CODE (SET_DEST (x
)) == SUBREG
)
2288 SET_DEST (x
) = fixup_memory_subreg (SET_DEST (x
), insn
, 0);
2290 SET_DEST (x
) = fixup_stack_1 (SET_DEST (x
), insn
);
2292 if (recog_memoized (insn
) >= 0)
2295 pat
= gen_move_insn (SET_DEST (x
), SET_SRC (x
));
2296 if (GET_CODE (pat
) == SEQUENCE
)
2298 emit_insn_after (pat
, insn
);
2299 PUT_CODE (insn
, NOTE
);
2300 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2301 NOTE_SOURCE_FILE (insn
) = 0;
2304 PATTERN (insn
) = pat
;
2309 /* Otherwise, storing into VAR must be handled specially
2310 by storing into a temporary and copying that into VAR
2311 with a new insn after this one. Note that this case
2312 will be used when storing into a promoted scalar since
2313 the insn will now have different modes on the input
2314 and output and hence will be invalid (except for the case
2315 of setting it to a constant, which does not need any
2316 change if it is valid). We generate extra code in that case,
2317 but combine.c will eliminate it. */
2322 rtx fixeddest
= SET_DEST (x
);
2324 /* STRICT_LOW_PART can be discarded, around a MEM. */
2325 if (GET_CODE (fixeddest
) == STRICT_LOW_PART
)
2326 fixeddest
= XEXP (fixeddest
, 0);
2327 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2328 if (GET_CODE (fixeddest
) == SUBREG
)
2330 fixeddest
= fixup_memory_subreg (fixeddest
, insn
, 0);
2331 promoted_mode
= GET_MODE (fixeddest
);
2334 fixeddest
= fixup_stack_1 (fixeddest
, insn
);
2336 temp
= gen_reg_rtx (promoted_mode
);
2338 emit_insn_after (gen_move_insn (fixeddest
,
2339 gen_lowpart (GET_MODE (fixeddest
),
2343 SET_DEST (x
) = temp
;
2351 /* Nothing special about this RTX; fix its operands. */
2353 fmt
= GET_RTX_FORMAT (code
);
2354 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2357 fixup_var_refs_1 (var
, promoted_mode
, &XEXP (x
, i
), insn
, replacements
);
2361 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2362 fixup_var_refs_1 (var
, promoted_mode
, &XVECEXP (x
, i
, j
),
2363 insn
, replacements
);
2368 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2369 return an rtx (MEM:m1 newaddr) which is equivalent.
2370 If any insns must be emitted to compute NEWADDR, put them before INSN.
2372 UNCRITICAL nonzero means accept paradoxical subregs.
2373 This is used for subregs found inside REG_NOTES. */
2376 fixup_memory_subreg (x
, insn
, uncritical
)
2381 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
2382 rtx addr
= XEXP (SUBREG_REG (x
), 0);
2383 enum machine_mode mode
= GET_MODE (x
);
2386 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2387 if (GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))
2391 if (BYTES_BIG_ENDIAN
)
2392 offset
+= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2393 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
2394 addr
= plus_constant (addr
, offset
);
2395 if (!flag_force_addr
&& memory_address_p (mode
, addr
))
2396 /* Shortcut if no insns need be emitted. */
2397 return change_address (SUBREG_REG (x
), mode
, addr
);
2399 result
= change_address (SUBREG_REG (x
), mode
, addr
);
2400 emit_insn_before (gen_sequence (), insn
);
2405 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2406 Replace subexpressions of X in place.
2407 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2408 Otherwise return X, with its contents possibly altered.
2410 If any insns must be emitted to compute NEWADDR, put them before INSN.
2412 UNCRITICAL is as in fixup_memory_subreg. */
2415 walk_fixup_memory_subreg (x
, insn
, uncritical
)
2420 register enum rtx_code code
;
2427 code
= GET_CODE (x
);
2429 if (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == MEM
)
2430 return fixup_memory_subreg (x
, insn
, uncritical
);
2432 /* Nothing special about this RTX; fix its operands. */
2434 fmt
= GET_RTX_FORMAT (code
);
2435 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2438 XEXP (x
, i
) = walk_fixup_memory_subreg (XEXP (x
, i
), insn
, uncritical
);
2442 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2444 = walk_fixup_memory_subreg (XVECEXP (x
, i
, j
), insn
, uncritical
);
2450 /* For each memory ref within X, if it refers to a stack slot
2451 with an out of range displacement, put the address in a temp register
2452 (emitting new insns before INSN to load these registers)
2453 and alter the memory ref to use that register.
2454 Replace each such MEM rtx with a copy, to avoid clobberage. */
2457 fixup_stack_1 (x
, insn
)
2462 register RTX_CODE code
= GET_CODE (x
);
2467 register rtx ad
= XEXP (x
, 0);
2468 /* If we have address of a stack slot but it's not valid
2469 (displacement is too large), compute the sum in a register. */
2470 if (GET_CODE (ad
) == PLUS
2471 && GET_CODE (XEXP (ad
, 0)) == REG
2472 && ((REGNO (XEXP (ad
, 0)) >= FIRST_VIRTUAL_REGISTER
2473 && REGNO (XEXP (ad
, 0)) <= LAST_VIRTUAL_REGISTER
)
2474 || REGNO (XEXP (ad
, 0)) == FRAME_POINTER_REGNUM
2475 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2476 || REGNO (XEXP (ad
, 0)) == HARD_FRAME_POINTER_REGNUM
2478 || REGNO (XEXP (ad
, 0)) == STACK_POINTER_REGNUM
2479 || REGNO (XEXP (ad
, 0)) == ARG_POINTER_REGNUM
2480 || XEXP (ad
, 0) == current_function_internal_arg_pointer
)
2481 && GET_CODE (XEXP (ad
, 1)) == CONST_INT
)
2484 if (memory_address_p (GET_MODE (x
), ad
))
2488 temp
= copy_to_reg (ad
);
2489 seq
= gen_sequence ();
2491 emit_insn_before (seq
, insn
);
2492 return change_address (x
, VOIDmode
, temp
);
2497 fmt
= GET_RTX_FORMAT (code
);
2498 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2501 XEXP (x
, i
) = fixup_stack_1 (XEXP (x
, i
), insn
);
2505 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2506 XVECEXP (x
, i
, j
) = fixup_stack_1 (XVECEXP (x
, i
, j
), insn
);
2512 /* Optimization: a bit-field instruction whose field
2513 happens to be a byte or halfword in memory
2514 can be changed to a move instruction.
2516 We call here when INSN is an insn to examine or store into a bit-field.
2517 BODY is the SET-rtx to be altered.
2519 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2520 (Currently this is called only from function.c, and EQUIV_MEM
2524 optimize_bit_field (body
, insn
, equiv_mem
)
2529 register rtx bitfield
;
2532 enum machine_mode mode
;
2534 if (GET_CODE (SET_DEST (body
)) == SIGN_EXTRACT
2535 || GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
)
2536 bitfield
= SET_DEST (body
), destflag
= 1;
2538 bitfield
= SET_SRC (body
), destflag
= 0;
2540 /* First check that the field being stored has constant size and position
2541 and is in fact a byte or halfword suitably aligned. */
2543 if (GET_CODE (XEXP (bitfield
, 1)) == CONST_INT
2544 && GET_CODE (XEXP (bitfield
, 2)) == CONST_INT
2545 && ((mode
= mode_for_size (INTVAL (XEXP (bitfield
, 1)), MODE_INT
, 1))
2547 && INTVAL (XEXP (bitfield
, 2)) % INTVAL (XEXP (bitfield
, 1)) == 0)
2549 register rtx memref
= 0;
2551 /* Now check that the containing word is memory, not a register,
2552 and that it is safe to change the machine mode. */
2554 if (GET_CODE (XEXP (bitfield
, 0)) == MEM
)
2555 memref
= XEXP (bitfield
, 0);
2556 else if (GET_CODE (XEXP (bitfield
, 0)) == REG
2558 memref
= equiv_mem
[REGNO (XEXP (bitfield
, 0))];
2559 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2560 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == MEM
)
2561 memref
= SUBREG_REG (XEXP (bitfield
, 0));
2562 else if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
2564 && GET_CODE (SUBREG_REG (XEXP (bitfield
, 0))) == REG
)
2565 memref
= equiv_mem
[REGNO (SUBREG_REG (XEXP (bitfield
, 0)))];
2568 && ! mode_dependent_address_p (XEXP (memref
, 0))
2569 && ! MEM_VOLATILE_P (memref
))
2571 /* Now adjust the address, first for any subreg'ing
2572 that we are now getting rid of,
2573 and then for which byte of the word is wanted. */
2575 HOST_WIDE_INT offset
= INTVAL (XEXP (bitfield
, 2));
2578 /* Adjust OFFSET to count bits from low-address byte. */
2579 if (BITS_BIG_ENDIAN
!= BYTES_BIG_ENDIAN
)
2580 offset
= (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield
, 0)))
2581 - offset
- INTVAL (XEXP (bitfield
, 1)));
2583 /* Adjust OFFSET to count bytes from low-address byte. */
2584 offset
/= BITS_PER_UNIT
;
2585 if (GET_CODE (XEXP (bitfield
, 0)) == SUBREG
)
2587 offset
+= SUBREG_WORD (XEXP (bitfield
, 0)) * UNITS_PER_WORD
;
2588 if (BYTES_BIG_ENDIAN
)
2589 offset
-= (MIN (UNITS_PER_WORD
,
2590 GET_MODE_SIZE (GET_MODE (XEXP (bitfield
, 0))))
2591 - MIN (UNITS_PER_WORD
,
2592 GET_MODE_SIZE (GET_MODE (memref
))));
2596 memref
= change_address (memref
, mode
,
2597 plus_constant (XEXP (memref
, 0), offset
));
2598 insns
= get_insns ();
2600 emit_insns_before (insns
, insn
);
2602 /* Store this memory reference where
2603 we found the bit field reference. */
2607 validate_change (insn
, &SET_DEST (body
), memref
, 1);
2608 if (! CONSTANT_ADDRESS_P (SET_SRC (body
)))
2610 rtx src
= SET_SRC (body
);
2611 while (GET_CODE (src
) == SUBREG
2612 && SUBREG_WORD (src
) == 0)
2613 src
= SUBREG_REG (src
);
2614 if (GET_MODE (src
) != GET_MODE (memref
))
2615 src
= gen_lowpart (GET_MODE (memref
), SET_SRC (body
));
2616 validate_change (insn
, &SET_SRC (body
), src
, 1);
2618 else if (GET_MODE (SET_SRC (body
)) != VOIDmode
2619 && GET_MODE (SET_SRC (body
)) != GET_MODE (memref
))
2620 /* This shouldn't happen because anything that didn't have
2621 one of these modes should have got converted explicitly
2622 and then referenced through a subreg.
2623 This is so because the original bit-field was
2624 handled by agg_mode and so its tree structure had
2625 the same mode that memref now has. */
2630 rtx dest
= SET_DEST (body
);
2632 while (GET_CODE (dest
) == SUBREG
2633 && SUBREG_WORD (dest
) == 0
2634 && (GET_MODE_CLASS (GET_MODE (dest
))
2635 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest
)))))
2636 dest
= SUBREG_REG (dest
);
2638 validate_change (insn
, &SET_DEST (body
), dest
, 1);
2640 if (GET_MODE (dest
) == GET_MODE (memref
))
2641 validate_change (insn
, &SET_SRC (body
), memref
, 1);
2644 /* Convert the mem ref to the destination mode. */
2645 rtx newreg
= gen_reg_rtx (GET_MODE (dest
));
2648 convert_move (newreg
, memref
,
2649 GET_CODE (SET_SRC (body
)) == ZERO_EXTRACT
);
2653 validate_change (insn
, &SET_SRC (body
), newreg
, 1);
2657 /* See if we can convert this extraction or insertion into
2658 a simple move insn. We might not be able to do so if this
2659 was, for example, part of a PARALLEL.
2661 If we succeed, write out any needed conversions. If we fail,
2662 it is hard to guess why we failed, so don't do anything
2663 special; just let the optimization be suppressed. */
2665 if (apply_change_group () && seq
)
2666 emit_insns_before (seq
, insn
);
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

static int in_arg_offset;
static int var_offset;
static int dynamic_offset;
static int out_arg_offset;
/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

#ifdef ACCUMULATE_OUTGOING_ARGS
/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `current_function_outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size	\
 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))

#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
(current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
#endif

#else
#define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
#endif
#endif
2720 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2721 its address taken. DECL is the decl for the object stored in the
2722 register, for later use if we do need to force REG into the stack.
2723 REG is overwritten by the MEM like in put_reg_into_stack. */
2726 gen_mem_addressof (reg
, decl
)
2730 tree type
= TREE_TYPE (decl
);
2732 rtx r
= gen_rtx_ADDRESSOF (Pmode
, gen_reg_rtx (GET_MODE (reg
)), REGNO (reg
));
2733 SET_ADDRESSOF_DECL (r
, decl
);
2736 PUT_CODE (reg
, MEM
);
2737 PUT_MODE (reg
, DECL_MODE (decl
));
2738 MEM_VOLATILE_P (reg
) = TREE_SIDE_EFFECTS (decl
);
2739 MEM_IN_STRUCT_P (reg
) = AGGREGATE_TYPE_P (type
);
2740 MEM_ALIAS_SET (reg
) = get_alias_set (decl
);
2742 if (TREE_USED (decl
) || DECL_INITIAL (decl
) != 0)
2743 fixup_var_refs (reg
, GET_MODE (reg
), TREE_UNSIGNED (type
));
2748 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2751 flush_addressof (decl
)
2754 if ((TREE_CODE (decl
) == PARM_DECL
|| TREE_CODE (decl
) == VAR_DECL
)
2755 && DECL_RTL (decl
) != 0
2756 && GET_CODE (DECL_RTL (decl
)) == MEM
2757 && GET_CODE (XEXP (DECL_RTL (decl
), 0)) == ADDRESSOF
2758 && GET_CODE (XEXP (XEXP (DECL_RTL (decl
), 0), 0)) == REG
)
2759 put_addressof_into_stack (XEXP (DECL_RTL (decl
), 0));
2762 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2765 put_addressof_into_stack (r
)
2768 tree decl
= ADDRESSOF_DECL (r
);
2769 rtx reg
= XEXP (r
, 0);
2771 if (GET_CODE (reg
) != REG
)
2774 put_reg_into_stack (0, reg
, TREE_TYPE (decl
), GET_MODE (reg
),
2775 DECL_MODE (decl
), TREE_SIDE_EFFECTS (decl
),
2776 ADDRESSOF_REGNO (r
),
2777 TREE_USED (decl
) || DECL_INITIAL (decl
) != 0);
2780 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2781 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2785 purge_addressof_1 (loc
, insn
, force
)
2795 /* Re-start here to avoid recursion in common cases. */
2802 code
= GET_CODE (x
);
2804 if (code
== ADDRESSOF
&& GET_CODE (XEXP (x
, 0)) == MEM
)
2807 /* We must create a copy of the rtx because it was created by
2808 overwriting a REG rtx which is always shared. */
2809 rtx sub
= copy_rtx (XEXP (XEXP (x
, 0), 0));
2811 if (validate_change (insn
, loc
, sub
, 0))
2815 if (! validate_change (insn
, loc
,
2816 force_operand (sub
, NULL_RTX
),
2820 insns
= get_insns ();
2822 emit_insns_before (insns
, insn
);
2825 else if (code
== MEM
&& GET_CODE (XEXP (x
, 0)) == ADDRESSOF
&& ! force
)
2827 rtx sub
= XEXP (XEXP (x
, 0), 0);
2829 if (GET_CODE (sub
) == MEM
)
2830 sub
= gen_rtx_MEM (GET_MODE (x
), copy_rtx (XEXP (sub
, 0)));
2832 if (GET_CODE (sub
) == REG
2833 && (MEM_VOLATILE_P (x
) || GET_MODE (x
) == BLKmode
))
2835 put_addressof_into_stack (XEXP (x
, 0));
2838 else if (GET_CODE (sub
) == REG
&& GET_MODE (x
) != GET_MODE (sub
))
2840 if (! BYTES_BIG_ENDIAN
&& ! WORDS_BIG_ENDIAN
)
2842 rtx sub2
= gen_rtx_SUBREG (GET_MODE (x
), sub
, 0);
2843 if (validate_change (insn
, loc
, sub2
, 0))
2847 else if (validate_change (insn
, loc
, sub
, 0))
2849 /* else give up and put it into the stack */
2851 else if (code
== ADDRESSOF
)
2853 put_addressof_into_stack (x
);
2857 /* Scan all subexpressions. */
2858 fmt
= GET_RTX_FORMAT (code
);
2859 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
2862 purge_addressof_1 (&XEXP (x
, i
), insn
, force
);
2863 else if (*fmt
== 'E')
2864 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2865 purge_addressof_1 (&XVECEXP (x
, i
, j
), insn
, force
);
2869 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2870 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2874 purge_addressof (insns
)
2878 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
2879 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
2880 || GET_CODE (insn
) == CALL_INSN
)
2882 purge_addressof_1 (&PATTERN (insn
), insn
,
2883 asm_noperands (PATTERN (insn
)) > 0);
2884 purge_addressof_1 (®_NOTES (insn
), NULL_RTX
, 0);
2888 /* Pass through the INSNS of function FNDECL and convert virtual register
2889 references to hard register references. */
2892 instantiate_virtual_regs (fndecl
, insns
)
2899 /* Compute the offsets to use for this function. */
2900 in_arg_offset
= FIRST_PARM_OFFSET (fndecl
);
2901 var_offset
= STARTING_FRAME_OFFSET
;
2902 dynamic_offset
= STACK_DYNAMIC_OFFSET (fndecl
);
2903 out_arg_offset
= STACK_POINTER_OFFSET
;
2905 /* Scan all variables and parameters of this function. For each that is
2906 in memory, instantiate all virtual registers if the result is a valid
2907 address. If not, we do it later. That will handle most uses of virtual
2908 regs on many machines. */
2909 instantiate_decls (fndecl
, 1);
2911 /* Initialize recognition, indicating that volatile is OK. */
2914 /* Scan through all the insns, instantiating every virtual register still
2916 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
2917 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
2918 || GET_CODE (insn
) == CALL_INSN
)
2920 instantiate_virtual_regs_1 (&PATTERN (insn
), insn
, 1);
2921 instantiate_virtual_regs_1 (®_NOTES (insn
), NULL_RTX
, 0);
2924 /* Instantiate the stack slots for the parm registers, for later use in
2925 addressof elimination. */
2926 for (i
= 0; i
< max_parm_reg
; ++i
)
2927 if (parm_reg_stack_loc
[i
])
2928 instantiate_virtual_regs_1 (&parm_reg_stack_loc
[i
], NULL_RTX
, 0);
2930 /* Now instantiate the remaining register equivalences for debugging info.
2931 These will not be valid addresses. */
2932 instantiate_decls (fndecl
, 0);
2934 /* Indicate that, from now on, assign_stack_local should use
2935 frame_pointer_rtx. */
2936 virtuals_instantiated
= 1;
2939 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2940 all virtual registers in their DECL_RTL's.
2942 If VALID_ONLY, do this only if the resulting address is still valid.
2943 Otherwise, always do it. */
2946 instantiate_decls (fndecl
, valid_only
)
2952 if (DECL_SAVED_INSNS (fndecl
))
2953 /* When compiling an inline function, the obstack used for
2954 rtl allocation is the maybepermanent_obstack. Calling
2955 `resume_temporary_allocation' switches us back to that
2956 obstack while we process this function's parameters. */
2957 resume_temporary_allocation ();
2959 /* Process all parameters of the function. */
2960 for (decl
= DECL_ARGUMENTS (fndecl
); decl
; decl
= TREE_CHAIN (decl
))
2962 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
2964 instantiate_decl (DECL_RTL (decl
), size
, valid_only
);
2966 /* If the parameter was promoted, then the incoming RTL mode may be
2967 larger than the declared type size. We must use the larger of
2969 size
= MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl
))), size
);
2970 instantiate_decl (DECL_INCOMING_RTL (decl
), size
, valid_only
);
2973 /* Now process all variables defined in the function or its subblocks. */
2974 instantiate_decls_1 (DECL_INITIAL (fndecl
), valid_only
);
2976 if (DECL_INLINE (fndecl
) || DECL_DEFER_OUTPUT (fndecl
))
2978 /* Save all rtl allocated for this function by raising the
2979 high-water mark on the maybepermanent_obstack. */
2981 /* All further rtl allocation is now done in the current_obstack. */
2982 rtl_in_current_obstack ();
2986 /* Subroutine of instantiate_decls: Process all decls in the given
2987 BLOCK node and all its subblocks. */
2990 instantiate_decls_1 (let
, valid_only
)
2996 for (t
= BLOCK_VARS (let
); t
; t
= TREE_CHAIN (t
))
2997 instantiate_decl (DECL_RTL (t
), int_size_in_bytes (TREE_TYPE (t
)),
3000 /* Process all subblocks. */
3001 for (t
= BLOCK_SUBBLOCKS (let
); t
; t
= TREE_CHAIN (t
))
3002 instantiate_decls_1 (t
, valid_only
);
3005 /* Subroutine of the preceding procedures: Given RTL representing a
3006 decl and the size of the object, do any instantiation required.
3008 If VALID_ONLY is non-zero, it means that the RTL should only be
3009 changed if the new address is valid. */
3012 instantiate_decl (x
, size
, valid_only
)
3017 enum machine_mode mode
;
3020 /* If this is not a MEM, no need to do anything. Similarly if the
3021 address is a constant or a register that is not a virtual register. */
3023 if (x
== 0 || GET_CODE (x
) != MEM
)
3027 if (CONSTANT_P (addr
)
3028 || (GET_CODE (addr
) == ADDRESSOF
&& GET_CODE (XEXP (addr
, 0)) == REG
)
3029 || (GET_CODE (addr
) == REG
3030 && (REGNO (addr
) < FIRST_VIRTUAL_REGISTER
3031 || REGNO (addr
) > LAST_VIRTUAL_REGISTER
)))
3034 /* If we should only do this if the address is valid, copy the address.
3035 We need to do this so we can undo any changes that might make the
3036 address invalid. This copy is unfortunate, but probably can't be
3040 addr
= copy_rtx (addr
);
3042 instantiate_virtual_regs_1 (&addr
, NULL_RTX
, 0);
3046 /* Now verify that the resulting address is valid for every integer or
3047 floating-point mode up to and including SIZE bytes long. We do this
3048 since the object might be accessed in any mode and frame addresses
3051 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
3052 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
3053 mode
= GET_MODE_WIDER_MODE (mode
))
3054 if (! memory_address_p (mode
, addr
))
3057 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
);
3058 mode
!= VOIDmode
&& GET_MODE_SIZE (mode
) <= size
;
3059 mode
= GET_MODE_WIDER_MODE (mode
))
3060 if (! memory_address_p (mode
, addr
))
3064 /* Put back the address now that we have updated it and we either know
3065 it is valid or we don't care whether it is valid. */
3070 /* Given a pointer to a piece of rtx and an optional pointer to the
3071 containing object, instantiate any virtual registers present in it.
3073 If EXTRA_INSNS, we always do the replacement and generate
3074 any extra insns before OBJECT. If it zero, we do nothing if replacement
3077 Return 1 if we either had nothing to do or if we were able to do the
3078 needed replacement. Return 0 otherwise; we only return zero if
3079 EXTRA_INSNS is zero.
3081 We first try some simple transformations to avoid the creation of extra
3085 instantiate_virtual_regs_1 (loc
, object
, extra_insns
)
3093 HOST_WIDE_INT offset
;
3099 /* Re-start here to avoid recursion in common cases. */
3106 code
= GET_CODE (x
);
3108 /* Check for some special cases. */
3125 /* We are allowed to set the virtual registers. This means that
3126 the actual register should receive the source minus the
3127 appropriate offset. This is used, for example, in the handling
3128 of non-local gotos. */
3129 if (SET_DEST (x
) == virtual_incoming_args_rtx
)
3130 new = arg_pointer_rtx
, offset
= - in_arg_offset
;
3131 else if (SET_DEST (x
) == virtual_stack_vars_rtx
)
3132 new = frame_pointer_rtx
, offset
= - var_offset
;
3133 else if (SET_DEST (x
) == virtual_stack_dynamic_rtx
)
3134 new = stack_pointer_rtx
, offset
= - dynamic_offset
;
3135 else if (SET_DEST (x
) == virtual_outgoing_args_rtx
)
3136 new = stack_pointer_rtx
, offset
= - out_arg_offset
;
3140 /* The only valid sources here are PLUS or REG. Just do
3141 the simplest possible thing to handle them. */
3142 if (GET_CODE (SET_SRC (x
)) != REG
3143 && GET_CODE (SET_SRC (x
)) != PLUS
)
3147 if (GET_CODE (SET_SRC (x
)) != REG
)
3148 temp
= force_operand (SET_SRC (x
), NULL_RTX
);
3151 temp
= force_operand (plus_constant (temp
, offset
), NULL_RTX
);
3155 emit_insns_before (seq
, object
);
3158 if (! validate_change (object
, &SET_SRC (x
), temp
, 0)
3165 instantiate_virtual_regs_1 (&SET_DEST (x
), object
, extra_insns
);
3170 /* Handle special case of virtual register plus constant. */
3171 if (CONSTANT_P (XEXP (x
, 1)))
3173 rtx old
, new_offset
;
3175 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3176 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
3178 rtx inner
= XEXP (XEXP (x
, 0), 0);
3180 if (inner
== virtual_incoming_args_rtx
)
3181 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3182 else if (inner
== virtual_stack_vars_rtx
)
3183 new = frame_pointer_rtx
, offset
= var_offset
;
3184 else if (inner
== virtual_stack_dynamic_rtx
)
3185 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3186 else if (inner
== virtual_outgoing_args_rtx
)
3187 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3194 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 1), object
,
3196 new = gen_rtx_PLUS (Pmode
, new, XEXP (XEXP (x
, 0), 1));
3199 else if (XEXP (x
, 0) == virtual_incoming_args_rtx
)
3200 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3201 else if (XEXP (x
, 0) == virtual_stack_vars_rtx
)
3202 new = frame_pointer_rtx
, offset
= var_offset
;
3203 else if (XEXP (x
, 0) == virtual_stack_dynamic_rtx
)
3204 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3205 else if (XEXP (x
, 0) == virtual_outgoing_args_rtx
)
3206 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3209 /* We know the second operand is a constant. Unless the
3210 first operand is a REG (which has been already checked),
3211 it needs to be checked. */
3212 if (GET_CODE (XEXP (x
, 0)) != REG
)
3220 new_offset
= plus_constant (XEXP (x
, 1), offset
);
3222 /* If the new constant is zero, try to replace the sum with just
3224 if (new_offset
== const0_rtx
3225 && validate_change (object
, loc
, new, 0))
3228 /* Next try to replace the register and new offset.
3229 There are two changes to validate here and we can't assume that
3230 in the case of old offset equals new just changing the register
3231 will yield a valid insn. In the interests of a little efficiency,
3232 however, we only call validate change once (we don't queue up the
3233 changes and then call apply_change_group). */
3237 ? ! validate_change (object
, &XEXP (x
, 0), new, 0)
3238 : (XEXP (x
, 0) = new,
3239 ! validate_change (object
, &XEXP (x
, 1), new_offset
, 0)))
3247 /* Otherwise copy the new constant into a register and replace
3248 constant with that register. */
3249 temp
= gen_reg_rtx (Pmode
);
3251 if (validate_change (object
, &XEXP (x
, 1), temp
, 0))
3252 emit_insn_before (gen_move_insn (temp
, new_offset
), object
);
3255 /* If that didn't work, replace this expression with a
3256 register containing the sum. */
3259 new = gen_rtx_PLUS (Pmode
, new, new_offset
);
3262 temp
= force_operand (new, NULL_RTX
);
3266 emit_insns_before (seq
, object
);
3267 if (! validate_change (object
, loc
, temp
, 0)
3268 && ! validate_replace_rtx (x
, temp
, object
))
3276 /* Fall through to generic two-operand expression case. */
3282 case DIV
: case UDIV
:
3283 case MOD
: case UMOD
:
3284 case AND
: case IOR
: case XOR
:
3285 case ROTATERT
: case ROTATE
:
3286 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
3288 case GE
: case GT
: case GEU
: case GTU
:
3289 case LE
: case LT
: case LEU
: case LTU
:
3290 if (XEXP (x
, 1) && ! CONSTANT_P (XEXP (x
, 1)))
3291 instantiate_virtual_regs_1 (&XEXP (x
, 1), object
, extra_insns
);
3296 /* Most cases of MEM that convert to valid addresses have already been
3297 handled by our scan of decls. The only special handling we
3298 need here is to make a copy of the rtx to ensure it isn't being
3299 shared if we have to change it to a pseudo.
3301 If the rtx is a simple reference to an address via a virtual register,
3302 it can potentially be shared. In such cases, first try to make it
3303 a valid address, which can also be shared. Otherwise, copy it and
3306 First check for common cases that need no processing. These are
3307 usually due to instantiation already being done on a previous instance
3311 if (CONSTANT_ADDRESS_P (temp
)
3312 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3313 || temp
== arg_pointer_rtx
3315 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3316 || temp
== hard_frame_pointer_rtx
3318 || temp
== frame_pointer_rtx
)
3321 if (GET_CODE (temp
) == PLUS
3322 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
3323 && (XEXP (temp
, 0) == frame_pointer_rtx
3324 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3325 || XEXP (temp
, 0) == hard_frame_pointer_rtx
3327 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3328 || XEXP (temp
, 0) == arg_pointer_rtx
3333 if (temp
== virtual_stack_vars_rtx
3334 || temp
== virtual_incoming_args_rtx
3335 || (GET_CODE (temp
) == PLUS
3336 && CONSTANT_ADDRESS_P (XEXP (temp
, 1))
3337 && (XEXP (temp
, 0) == virtual_stack_vars_rtx
3338 || XEXP (temp
, 0) == virtual_incoming_args_rtx
)))
3340 /* This MEM may be shared. If the substitution can be done without
3341 the need to generate new pseudos, we want to do it in place
3342 so all copies of the shared rtx benefit. The call below will
3343 only make substitutions if the resulting address is still
3346 Note that we cannot pass X as the object in the recursive call
3347 since the insn being processed may not allow all valid
3348 addresses. However, if we were not passed on object, we can
3349 only modify X without copying it if X will have a valid
3352 ??? Also note that this can still lose if OBJECT is an insn that
3353 has less restrictions on an address that some other insn.
3354 In that case, we will modify the shared address. This case
3355 doesn't seem very likely, though. One case where this could
3356 happen is in the case of a USE or CLOBBER reference, but we
3357 take care of that below. */
3359 if (instantiate_virtual_regs_1 (&XEXP (x
, 0),
3360 object
? object
: x
, 0))
3363 /* Otherwise make a copy and process that copy. We copy the entire
3364 RTL expression since it might be a PLUS which could also be
3366 *loc
= x
= copy_rtx (x
);
3369 /* Fall through to generic unary operation case. */
3371 case STRICT_LOW_PART
:
3373 case PRE_DEC
: case PRE_INC
: case POST_DEC
: case POST_INC
:
3374 case SIGN_EXTEND
: case ZERO_EXTEND
:
3375 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
3376 case FLOAT
: case FIX
:
3377 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
3381 /* These case either have just one operand or we know that we need not
3382 check the rest of the operands. */
3388 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3389 go ahead and make the invalid one, but do it to a copy. For a REG,
3390 just make the recursive call, since there's no chance of a problem. */
3392 if ((GET_CODE (XEXP (x
, 0)) == MEM
3393 && instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), XEXP (x
, 0),
3395 || (GET_CODE (XEXP (x
, 0)) == REG
3396 && instantiate_virtual_regs_1 (&XEXP (x
, 0), object
, 0)))
3399 XEXP (x
, 0) = copy_rtx (XEXP (x
, 0));
3404 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3405 in front of this insn and substitute the temporary. */
3406 if (x
== virtual_incoming_args_rtx
)
3407 new = arg_pointer_rtx
, offset
= in_arg_offset
;
3408 else if (x
== virtual_stack_vars_rtx
)
3409 new = frame_pointer_rtx
, offset
= var_offset
;
3410 else if (x
== virtual_stack_dynamic_rtx
)
3411 new = stack_pointer_rtx
, offset
= dynamic_offset
;
3412 else if (x
== virtual_outgoing_args_rtx
)
3413 new = stack_pointer_rtx
, offset
= out_arg_offset
;
3417 temp
= plus_constant (new, offset
);
3418 if (!validate_change (object
, loc
, temp
, 0))
3424 temp
= force_operand (temp
, NULL_RTX
);
3428 emit_insns_before (seq
, object
);
3429 if (! validate_change (object
, loc
, temp
, 0)
3430 && ! validate_replace_rtx (x
, temp
, object
))
3438 if (GET_CODE (XEXP (x
, 0)) == REG
)
3441 else if (GET_CODE (XEXP (x
, 0)) == MEM
)
3443 /* If we have a (addressof (mem ..)), do any instantiation inside
3444 since we know we'll be making the inside valid when we finally
3445 remove the ADDRESSOF. */
3446 instantiate_virtual_regs_1 (&XEXP (XEXP (x
, 0), 0), NULL_RTX
, 0);
3455 /* Scan all subexpressions. */
3456 fmt
= GET_RTX_FORMAT (code
);
3457 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3460 if (!instantiate_virtual_regs_1 (&XEXP (x
, i
), object
, extra_insns
))
3463 else if (*fmt
== 'E')
3464 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3465 if (! instantiate_virtual_regs_1 (&XVECEXP (x
, i
, j
), object
,
3472 /* Optimization: assuming this function does not receive nonlocal gotos,
3473 delete the handlers for such, as well as the insns to establish
3474 and disestablish them. */
3480 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3482 /* Delete the handler by turning off the flag that would
3483 prevent jump_optimize from deleting it.
3484 Also permit deletion of the nonlocal labels themselves
3485 if nothing local refers to them. */
3486 if (GET_CODE (insn
) == CODE_LABEL
)
3490 LABEL_PRESERVE_P (insn
) = 0;
3492 /* Remove it from the nonlocal_label list, to avoid confusing
3494 for (t
= nonlocal_labels
, last_t
= 0; t
;
3495 last_t
= t
, t
= TREE_CHAIN (t
))
3496 if (DECL_RTL (TREE_VALUE (t
)) == insn
)
3501 nonlocal_labels
= TREE_CHAIN (nonlocal_labels
);
3503 TREE_CHAIN (last_t
) = TREE_CHAIN (t
);
3506 if (GET_CODE (insn
) == INSN
3507 && ((nonlocal_goto_handler_slot
!= 0
3508 && reg_mentioned_p (nonlocal_goto_handler_slot
, PATTERN (insn
)))
3509 || (nonlocal_goto_stack_level
!= 0
3510 && reg_mentioned_p (nonlocal_goto_stack_level
,
3516 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3517 of the current function. */
3520 nonlocal_label_rtx_list ()
3525 for (t
= nonlocal_labels
; t
; t
= TREE_CHAIN (t
))
3526 x
= gen_rtx_EXPR_LIST (VOIDmode
, label_rtx (TREE_VALUE (t
)), x
);
3531 /* Output a USE for any register use in RTL.
3532 This is used with -noreg to mark the extent of lifespan
3533 of any registers used in a user-visible variable's DECL_RTL. */
3539 if (GET_CODE (rtl
) == REG
)
3540 /* This is a register variable. */
3541 emit_insn (gen_rtx_USE (VOIDmode
, rtl
));
3542 else if (GET_CODE (rtl
) == MEM
3543 && GET_CODE (XEXP (rtl
, 0)) == REG
3544 && (REGNO (XEXP (rtl
, 0)) < FIRST_VIRTUAL_REGISTER
3545 || REGNO (XEXP (rtl
, 0)) > LAST_VIRTUAL_REGISTER
)
3546 && XEXP (rtl
, 0) != current_function_internal_arg_pointer
)
3547 /* This is a variable-sized structure. */
3548 emit_insn (gen_rtx_USE (VOIDmode
, XEXP (rtl
, 0)));
3551 /* Like use_variable except that it outputs the USEs after INSN
3552 instead of at the end of the insn-chain. */
3555 use_variable_after (rtl
, insn
)
3558 if (GET_CODE (rtl
) == REG
)
3559 /* This is a register variable. */
3560 emit_insn_after (gen_rtx_USE (VOIDmode
, rtl
), insn
);
3561 else if (GET_CODE (rtl
) == MEM
3562 && GET_CODE (XEXP (rtl
, 0)) == REG
3563 && (REGNO (XEXP (rtl
, 0)) < FIRST_VIRTUAL_REGISTER
3564 || REGNO (XEXP (rtl
, 0)) > LAST_VIRTUAL_REGISTER
)
3565 && XEXP (rtl
, 0) != current_function_internal_arg_pointer
)
3566 /* This is a variable-sized structure. */
3567 emit_insn_after (gen_rtx_USE (VOIDmode
, XEXP (rtl
, 0)), insn
);
3573 return max_parm_reg
;
3576 /* Return the first insn following those generated by `assign_parms'. */
3579 get_first_nonparm_insn ()
3582 return NEXT_INSN (last_parm_insn
);
3583 return get_insns ();
3586 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3587 Crash if there is none. */
3590 get_first_block_beg ()
3592 register rtx searcher
;
3593 register rtx insn
= get_first_nonparm_insn ();
3595 for (searcher
= insn
; searcher
; searcher
= NEXT_INSN (searcher
))
3596 if (GET_CODE (searcher
) == NOTE
3597 && NOTE_LINE_NUMBER (searcher
) == NOTE_INSN_BLOCK_BEG
)
3600 abort (); /* Invalid call to this function. (See comments above.) */
3604 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3605 This means a type for which function calls must pass an address to the
3606 function or get an address back from the function.
3607 EXP may be a type node or an expression (whose type is tested). */
3610 aggregate_value_p (exp
)
3613 int i
, regno
, nregs
;
3616 if (TREE_CODE_CLASS (TREE_CODE (exp
)) == 't')
3619 type
= TREE_TYPE (exp
);
3621 if (RETURN_IN_MEMORY (type
))
3623 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3624 and thus can't be returned in registers. */
3625 if (TREE_ADDRESSABLE (type
))
3627 if (flag_pcc_struct_return
&& AGGREGATE_TYPE_P (type
))
3629 /* Make sure we have suitable call-clobbered regs to return
3630 the value in; if not, we must return it in memory. */
3631 reg
= hard_function_value (type
, 0);
3633 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3635 if (GET_CODE (reg
) != REG
)
3638 regno
= REGNO (reg
);
3639 nregs
= HARD_REGNO_NREGS (regno
, TYPE_MODE (type
));
3640 for (i
= 0; i
< nregs
; i
++)
3641 if (! call_used_regs
[regno
+ i
])
3646 /* Assign RTL expressions to the function's parameters.
3647 This may involve copying them into registers and using
3648 those registers as the RTL for them.
3650 If SECOND_TIME is non-zero it means that this function is being
3651 called a second time. This is done by integrate.c when a function's
3652 compilation is deferred. We need to come back here in case the
3653 FUNCTION_ARG macro computes items needed for the rest of the compilation
3654 (such as changing which registers are fixed or caller-saved). But suppress
3655 writing any insns or setting DECL_RTL of anything in this case. */
3658 assign_parms (fndecl
, second_time
)
3663 register rtx entry_parm
= 0;
3664 register rtx stack_parm
= 0;
3665 CUMULATIVE_ARGS args_so_far
;
3666 enum machine_mode promoted_mode
, passed_mode
;
3667 enum machine_mode nominal_mode
, promoted_nominal_mode
;
3669 /* Total space needed so far for args on the stack,
3670 given as a constant and a tree-expression. */
3671 struct args_size stack_args_size
;
3672 tree fntype
= TREE_TYPE (fndecl
);
3673 tree fnargs
= DECL_ARGUMENTS (fndecl
);
3674 /* This is used for the arg pointer when referring to stack args. */
3675 rtx internal_arg_pointer
;
3676 /* This is a dummy PARM_DECL that we used for the function result if
3677 the function returns a structure. */
3678 tree function_result_decl
= 0;
3679 int varargs_setup
= 0;
3680 rtx conversion_insns
= 0;
3682 /* Nonzero if the last arg is named `__builtin_va_alist',
3683 which is used on some machines for old-fashioned non-ANSI varargs.h;
3684 this should be stuck onto the stack as if it had arrived there. */
3686 = (current_function_varargs
3688 && (parm
= tree_last (fnargs
)) != 0
3690 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm
)),
3691 "__builtin_va_alist")));
3693 /* Nonzero if function takes extra anonymous args.
3694 This means the last named arg must be on the stack
3695 right before the anonymous ones. */
3697 = (TYPE_ARG_TYPES (fntype
) != 0
3698 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3699 != void_type_node
));
3701 current_function_stdarg
= stdarg
;
3703 /* If the reg that the virtual arg pointer will be translated into is
3704 not a fixed reg or is the stack pointer, make a copy of the virtual
3705 arg pointer, and address parms via the copy. The frame pointer is
3706 considered fixed even though it is not marked as such.
3708 The second time through, simply use ap to avoid generating rtx. */
3710 if ((ARG_POINTER_REGNUM
== STACK_POINTER_REGNUM
3711 || ! (fixed_regs
[ARG_POINTER_REGNUM
]
3712 || ARG_POINTER_REGNUM
== FRAME_POINTER_REGNUM
))
3714 internal_arg_pointer
= copy_to_reg (virtual_incoming_args_rtx
);
3716 internal_arg_pointer
= virtual_incoming_args_rtx
;
3717 current_function_internal_arg_pointer
= internal_arg_pointer
;
3719 stack_args_size
.constant
= 0;
3720 stack_args_size
.var
= 0;
3722 /* If struct value address is treated as the first argument, make it so. */
3723 if (aggregate_value_p (DECL_RESULT (fndecl
))
3724 && ! current_function_returns_pcc_struct
3725 && struct_value_incoming_rtx
== 0)
3727 tree type
= build_pointer_type (TREE_TYPE (fntype
));
3729 function_result_decl
= build_decl (PARM_DECL
, NULL_TREE
, type
);
3731 DECL_ARG_TYPE (function_result_decl
) = type
;
3732 TREE_CHAIN (function_result_decl
) = fnargs
;
3733 fnargs
= function_result_decl
;
3736 max_parm_reg
= LAST_VIRTUAL_REGISTER
+ 1;
3737 parm_reg_stack_loc
= (rtx
*) savealloc (max_parm_reg
* sizeof (rtx
));
3738 bzero ((char *) parm_reg_stack_loc
, max_parm_reg
* sizeof (rtx
));
3740 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3741 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far
, fntype
, NULL_RTX
);
3743 INIT_CUMULATIVE_ARGS (args_so_far
, fntype
, NULL_RTX
, 0);
3746 /* We haven't yet found an argument that we must push and pretend the
3748 current_function_pretend_args_size
= 0;
3750 for (parm
= fnargs
; parm
; parm
= TREE_CHAIN (parm
))
3752 int aggregate
= AGGREGATE_TYPE_P (TREE_TYPE (parm
));
3753 struct args_size stack_offset
;
3754 struct args_size arg_size
;
3755 int passed_pointer
= 0;
3756 int did_conversion
= 0;
3757 tree passed_type
= DECL_ARG_TYPE (parm
);
3758 tree nominal_type
= TREE_TYPE (parm
);
3760 /* Set LAST_NAMED if this is last named arg before some
3762 int last_named
= ((TREE_CHAIN (parm
) == 0
3763 || DECL_NAME (TREE_CHAIN (parm
)) == 0)
3764 && (stdarg
|| current_function_varargs
));
3765 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3766 most machines, if this is a varargs/stdarg function, then we treat
3767 the last named arg as if it were anonymous too. */
3768 int named_arg
= STRICT_ARGUMENT_NAMING
? 1 : ! last_named
;
3770 if (TREE_TYPE (parm
) == error_mark_node
3771 /* This can happen after weird syntax errors
3772 or if an enum type is defined among the parms. */
3773 || TREE_CODE (parm
) != PARM_DECL
3774 || passed_type
== NULL
)
3776 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
)
3777 = gen_rtx_MEM (BLKmode
, const0_rtx
);
3778 TREE_USED (parm
) = 1;
3782 /* For varargs.h function, save info about regs and stack space
3783 used by the individual args, not including the va_alist arg. */
3784 if (hide_last_arg
&& last_named
)
3785 current_function_args_info
= args_so_far
;
3787 /* Find mode of arg as it is passed, and mode of arg
3788 as it should be during execution of this function. */
3789 passed_mode
= TYPE_MODE (passed_type
);
3790 nominal_mode
= TYPE_MODE (nominal_type
);
3792 /* If the parm's mode is VOID, its value doesn't matter,
3793 and avoid the usual things like emit_move_insn that could crash. */
3794 if (nominal_mode
== VOIDmode
)
3796 DECL_INCOMING_RTL (parm
) = DECL_RTL (parm
) = const0_rtx
;
3800 /* If the parm is to be passed as a transparent union, use the
3801 type of the first field for the tests below. We have already
3802 verified that the modes are the same. */
3803 if (DECL_TRANSPARENT_UNION (parm
)
3804 || TYPE_TRANSPARENT_UNION (passed_type
))
3805 passed_type
= TREE_TYPE (TYPE_FIELDS (passed_type
));
3807 /* See if this arg was passed by invisible reference. It is if
3808 it is an object whose size depends on the contents of the
3809 object itself or if the machine requires these objects be passed
3812 if ((TREE_CODE (TYPE_SIZE (passed_type
)) != INTEGER_CST
3813 && contains_placeholder_p (TYPE_SIZE (passed_type
)))
3814 || TREE_ADDRESSABLE (passed_type
)
3815 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3816 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, passed_mode
,
3817 passed_type
, named_arg
)
3821 passed_type
= nominal_type
= build_pointer_type (passed_type
);
3823 passed_mode
= nominal_mode
= Pmode
;
3826 promoted_mode
= passed_mode
;
3828 #ifdef PROMOTE_FUNCTION_ARGS
3829 /* Compute the mode in which the arg is actually extended to. */
3830 unsignedp
= TREE_UNSIGNED (passed_type
);
3831 promoted_mode
= promote_mode (passed_type
, promoted_mode
, &unsignedp
, 1);
3834 /* Let machine desc say which reg (if any) the parm arrives in.
3835 0 means it arrives on the stack. */
3836 #ifdef FUNCTION_INCOMING_ARG
3837 entry_parm
= FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
3838 passed_type
, named_arg
);
3840 entry_parm
= FUNCTION_ARG (args_so_far
, promoted_mode
,
3841 passed_type
, named_arg
);
3844 if (entry_parm
== 0)
3845 promoted_mode
= passed_mode
;
3847 #ifdef SETUP_INCOMING_VARARGS
3848 /* If this is the last named parameter, do any required setup for
3849 varargs or stdargs. We need to know about the case of this being an
3850 addressable type, in which case we skip the registers it
3851 would have arrived in.
3853 For stdargs, LAST_NAMED will be set for two parameters, the one that
3854 is actually the last named, and the dummy parameter. We only
3855 want to do this action once.
3857 Also, indicate when RTL generation is to be suppressed. */
3858 if (last_named
&& !varargs_setup
)
3860 SETUP_INCOMING_VARARGS (args_so_far
, promoted_mode
, passed_type
,
3861 current_function_pretend_args_size
,
3867 /* Determine parm's home in the stack,
3868 in case it arrives in the stack or we should pretend it did.
3870 Compute the stack position and rtx where the argument arrives
3873 There is one complexity here: If this was a parameter that would
3874 have been passed in registers, but wasn't only because it is
3875 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3876 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3877 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3878 0 as it was the previous time. */
3880 locate_and_pad_parm (promoted_mode
, passed_type
,
3881 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3884 #ifdef FUNCTION_INCOMING_ARG
3885 FUNCTION_INCOMING_ARG (args_so_far
, promoted_mode
,
3888 || varargs_setup
)) != 0,
3890 FUNCTION_ARG (args_so_far
, promoted_mode
,
3892 named_arg
|| varargs_setup
) != 0,
3895 fndecl
, &stack_args_size
, &stack_offset
, &arg_size
);
3899 rtx offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
3901 if (offset_rtx
== const0_rtx
)
3902 stack_parm
= gen_rtx_MEM (promoted_mode
, internal_arg_pointer
);
3904 stack_parm
= gen_rtx_MEM (promoted_mode
,
3905 gen_rtx_PLUS (Pmode
,
3906 internal_arg_pointer
,
3909 /* If this is a memory ref that contains aggregate components,
3910 mark it as such for cse and loop optimize. Likewise if it
3912 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
3913 RTX_UNCHANGING_P (stack_parm
) = TREE_READONLY (parm
);
3914 MEM_ALIAS_SET (stack_parm
) = get_alias_set (parm
);
3917 /* If this parameter was passed both in registers and in the stack,
3918 use the copy on the stack. */
3919 if (MUST_PASS_IN_STACK (promoted_mode
, passed_type
))
3922 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3923 /* If this parm was passed part in regs and part in memory,
3924 pretend it arrived entirely in memory
3925 by pushing the register-part onto the stack.
3927 In the special case of a DImode or DFmode that is split,
3928 we could put it together in a pseudoreg directly,
3929 but for now that's not worth bothering with. */
3933 int nregs
= FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, promoted_mode
,
3934 passed_type
, named_arg
);
3938 current_function_pretend_args_size
3939 = (((nregs
* UNITS_PER_WORD
) + (PARM_BOUNDARY
/ BITS_PER_UNIT
) - 1)
3940 / (PARM_BOUNDARY
/ BITS_PER_UNIT
)
3941 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
3945 /* Handle calls that pass values in multiple non-contiguous
3946 locations. The Irix 6 ABI has examples of this. */
3947 if (GET_CODE (entry_parm
) == PARALLEL
)
3948 emit_group_store (validize_mem (stack_parm
),
3951 move_block_from_reg (REGNO (entry_parm
),
3952 validize_mem (stack_parm
), nregs
,
3953 int_size_in_bytes (TREE_TYPE (parm
)));
3955 entry_parm
= stack_parm
;
3960 /* If we didn't decide this parm came in a register,
3961 by default it came on the stack. */
3962 if (entry_parm
== 0)
3963 entry_parm
= stack_parm
;
3965 /* Record permanently how this parm was passed. */
3967 DECL_INCOMING_RTL (parm
) = entry_parm
;
3969 /* If there is actually space on the stack for this parm,
3970 count it in stack_args_size; otherwise set stack_parm to 0
3971 to indicate there is no preallocated stack slot for the parm. */
3973 if (entry_parm
== stack_parm
3974 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3975 /* On some machines, even if a parm value arrives in a register
3976 there is still an (uninitialized) stack slot allocated for it.
3978 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3979 whether this parameter already has a stack slot allocated,
3980 because an arg block exists only if current_function_args_size
3981 is larger than some threshold, and we haven't calculated that
3982 yet. So, for now, we just assume that stack slots never exist
3984 || REG_PARM_STACK_SPACE (fndecl
) > 0
3988 stack_args_size
.constant
+= arg_size
.constant
;
3990 ADD_PARM_SIZE (stack_args_size
, arg_size
.var
);
3993 /* No stack slot was pushed for this parm. */
3996 /* Update info on where next arg arrives in registers. */
3998 FUNCTION_ARG_ADVANCE (args_so_far
, promoted_mode
,
3999 passed_type
, named_arg
);
4001 /* If this is our second time through, we are done with this parm. */
4005 /* If we can't trust the parm stack slot to be aligned enough
4006 for its ultimate type, don't use that slot after entry.
4007 We'll make another stack slot, if we need one. */
4009 int thisparm_boundary
4010 = FUNCTION_ARG_BOUNDARY (promoted_mode
, passed_type
);
4012 if (GET_MODE_ALIGNMENT (nominal_mode
) > thisparm_boundary
)
4016 /* If parm was passed in memory, and we need to convert it on entry,
4017 don't store it back in that same slot. */
4019 && nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
)
4023 /* Now adjust STACK_PARM to the mode and precise location
4024 where this parameter should live during execution,
4025 if we discover that it must live in the stack during execution.
4026 To make debuggers happier on big-endian machines, we store
4027 the value in the last bytes of the space available. */
4029 if (nominal_mode
!= BLKmode
&& nominal_mode
!= passed_mode
4034 if (BYTES_BIG_ENDIAN
4035 && GET_MODE_SIZE (nominal_mode
) < UNITS_PER_WORD
)
4036 stack_offset
.constant
+= (GET_MODE_SIZE (passed_mode
)
4037 - GET_MODE_SIZE (nominal_mode
));
4039 offset_rtx
= ARGS_SIZE_RTX (stack_offset
);
4040 if (offset_rtx
== const0_rtx
)
4041 stack_parm
= gen_rtx_MEM (nominal_mode
, internal_arg_pointer
);
4043 stack_parm
= gen_rtx_MEM (nominal_mode
,
4044 gen_rtx_PLUS (Pmode
,
4045 internal_arg_pointer
,
4048 /* If this is a memory ref that contains aggregate components,
4049 mark it as such for cse and loop optimize. */
4050 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
4055 /* We need this "use" info, because the gcc-register->stack-register
4056 converter in reg-stack.c needs to know which registers are active
4057 at the start of the function call. The actual parameter loading
4058 instructions are not always available then anymore, since they might
4059 have been optimised away. */
4061 if (GET_CODE (entry_parm
) == REG
&& !(hide_last_arg
&& last_named
))
4062 emit_insn (gen_rtx_USE (GET_MODE (entry_parm
), entry_parm
));
4065 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4066 in the mode in which it arrives.
4067 STACK_PARM is an RTX for a stack slot where the parameter can live
4068 during the function (in case we want to put it there).
4069 STACK_PARM is 0 if no stack slot was pushed for it.
4071 Now output code if necessary to convert ENTRY_PARM to
4072 the type in which this function declares it,
4073 and store that result in an appropriate place,
4074 which may be a pseudo reg, may be STACK_PARM,
4075 or may be a local stack slot if STACK_PARM is 0.
4077 Set DECL_RTL to that place. */
4079 if (nominal_mode
== BLKmode
|| GET_CODE (entry_parm
) == PARALLEL
)
4081 /* If a BLKmode arrives in registers, copy it to a stack slot.
4082 Handle calls that pass values in multiple non-contiguous
4083 locations. The Irix 6 ABI has examples of this. */
4084 if (GET_CODE (entry_parm
) == REG
4085 || GET_CODE (entry_parm
) == PARALLEL
)
4088 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm
)),
4091 /* Note that we will be storing an integral number of words.
4092 So we have to be careful to ensure that we allocate an
4093 integral number of words. We do this below in the
4094 assign_stack_local if space was not allocated in the argument
4095 list. If it was, this will not work if PARM_BOUNDARY is not
4096 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4097 if it becomes a problem. */
4099 if (stack_parm
== 0)
4102 = assign_stack_local (GET_MODE (entry_parm
),
4105 /* If this is a memory ref that contains aggregate
4106 components, mark it as such for cse and loop optimize. */
4107 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
4110 else if (PARM_BOUNDARY
% BITS_PER_WORD
!= 0)
4113 if (TREE_READONLY (parm
))
4114 RTX_UNCHANGING_P (stack_parm
) = 1;
4116 /* Handle calls that pass values in multiple non-contiguous
4117 locations. The Irix 6 ABI has examples of this. */
4118 if (GET_CODE (entry_parm
) == PARALLEL
)
4119 emit_group_store (validize_mem (stack_parm
), entry_parm
);
4121 move_block_from_reg (REGNO (entry_parm
),
4122 validize_mem (stack_parm
),
4123 size_stored
/ UNITS_PER_WORD
,
4124 int_size_in_bytes (TREE_TYPE (parm
)));
4126 DECL_RTL (parm
) = stack_parm
;
4128 else if (! ((obey_regdecls
&& ! DECL_REGISTER (parm
)
4129 && ! DECL_INLINE (fndecl
))
4130 /* layout_decl may set this. */
4131 || TREE_ADDRESSABLE (parm
)
4132 || TREE_SIDE_EFFECTS (parm
)
4133 /* If -ffloat-store specified, don't put explicit
4134 float variables into registers. */
4135 || (flag_float_store
4136 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
))
4137 /* Always assign pseudo to structure return or item passed
4138 by invisible reference. */
4139 || passed_pointer
|| parm
== function_result_decl
)
4141 /* Store the parm in a pseudoregister during the function, but we
4142 may need to do it in a wider mode. */
4144 register rtx parmreg
;
4145 int regno
, regnoi
= 0, regnor
= 0;
4147 unsignedp
= TREE_UNSIGNED (TREE_TYPE (parm
));
4149 promoted_nominal_mode
4150 = promote_mode (TREE_TYPE (parm
), nominal_mode
, &unsignedp
, 0);
4152 parmreg
= gen_reg_rtx (promoted_nominal_mode
);
4153 mark_user_reg (parmreg
);
4155 /* If this was an item that we received a pointer to, set DECL_RTL
4160 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type
)), parmreg
);
4161 MEM_IN_STRUCT_P (DECL_RTL (parm
)) = aggregate
;
4164 DECL_RTL (parm
) = parmreg
;
4166 /* Copy the value into the register. */
4167 if (nominal_mode
!= passed_mode
4168 || promoted_nominal_mode
!= promoted_mode
)
4170 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4171 mode, by the caller. We now have to convert it to
4172 NOMINAL_MODE, if different. However, PARMREG may be in
4173 a different mode than NOMINAL_MODE if it is being stored
4176 If ENTRY_PARM is a hard register, it might be in a register
4177 not valid for operating in its mode (e.g., an odd-numbered
4178 register for a DFmode). In that case, moves are the only
4179 thing valid, so we can't do a convert from there. This
4180 occurs when the calling sequence allow such misaligned
4183 In addition, the conversion may involve a call, which could
4184 clobber parameters which haven't been copied to pseudo
4185 registers yet. Therefore, we must first copy the parm to
4186 a pseudo reg here, and save the conversion until after all
4187 parameters have been moved. */
4189 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
4191 emit_move_insn (tempreg
, validize_mem (entry_parm
));
4193 push_to_sequence (conversion_insns
);
4194 tempreg
= convert_to_mode (nominal_mode
, tempreg
, unsignedp
);
4196 expand_assignment (parm
,
4197 make_tree (nominal_type
, tempreg
), 0, 0);
4198 conversion_insns
= get_insns ();
4203 emit_move_insn (parmreg
, validize_mem (entry_parm
));
4205 /* If we were passed a pointer but the actual value
4206 can safely live in a register, put it in one. */
4207 if (passed_pointer
&& TYPE_MODE (TREE_TYPE (parm
)) != BLKmode
4208 && ! ((obey_regdecls
&& ! DECL_REGISTER (parm
)
4209 && ! DECL_INLINE (fndecl
))
4210 /* layout_decl may set this. */
4211 || TREE_ADDRESSABLE (parm
)
4212 || TREE_SIDE_EFFECTS (parm
)
4213 /* If -ffloat-store specified, don't put explicit
4214 float variables into registers. */
4215 || (flag_float_store
4216 && TREE_CODE (TREE_TYPE (parm
)) == REAL_TYPE
)))
4218 /* We can't use nominal_mode, because it will have been set to
4219 Pmode above. We must use the actual mode of the parm. */
4220 parmreg
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm
)));
4221 mark_user_reg (parmreg
);
4222 emit_move_insn (parmreg
, DECL_RTL (parm
));
4223 DECL_RTL (parm
) = parmreg
;
4224 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4228 #ifdef FUNCTION_ARG_CALLEE_COPIES
4229 /* If we are passed an arg by reference and it is our responsibility
4230 to make a copy, do it now.
4231 PASSED_TYPE and PASSED mode now refer to the pointer, not the
4232 original argument, so we must recreate them in the call to
4233 FUNCTION_ARG_CALLEE_COPIES. */
4234 /* ??? Later add code to handle the case that if the argument isn't
4235 modified, don't do the copy. */
4237 else if (passed_pointer
4238 && FUNCTION_ARG_CALLEE_COPIES (args_so_far
,
4239 TYPE_MODE (DECL_ARG_TYPE (parm
)),
4240 DECL_ARG_TYPE (parm
),
4242 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm
)))
4245 tree type
= DECL_ARG_TYPE (parm
);
4247 /* This sequence may involve a library call perhaps clobbering
4248 registers that haven't been copied to pseudos yet. */
4250 push_to_sequence (conversion_insns
);
4252 if (TYPE_SIZE (type
) == 0
4253 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
4254 /* This is a variable sized object. */
4255 copy
= gen_rtx_MEM (BLKmode
,
4256 allocate_dynamic_stack_space
4257 (expr_size (parm
), NULL_RTX
,
4258 TYPE_ALIGN (type
)));
4260 copy
= assign_stack_temp (TYPE_MODE (type
),
4261 int_size_in_bytes (type
), 1);
4262 MEM_IN_STRUCT_P (copy
) = AGGREGATE_TYPE_P (type
);
4263 RTX_UNCHANGING_P (copy
) = TREE_READONLY (parm
);
4265 store_expr (parm
, copy
, 0);
4266 emit_move_insn (parmreg
, XEXP (copy
, 0));
4267 if (flag_check_memory_usage
)
4268 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
4269 XEXP (copy
, 0), ptr_mode
,
4270 GEN_INT (int_size_in_bytes (type
)),
4271 TYPE_MODE (sizetype
),
4272 GEN_INT (MEMORY_USE_RW
),
4273 TYPE_MODE (integer_type_node
));
4274 conversion_insns
= get_insns ();
4278 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4280 /* In any case, record the parm's desired stack location
4281 in case we later discover it must live in the stack.
4283 If it is a COMPLEX value, store the stack location for both
4286 if (GET_CODE (parmreg
) == CONCAT
)
4287 regno
= MAX (REGNO (XEXP (parmreg
, 0)), REGNO (XEXP (parmreg
, 1)));
4289 regno
= REGNO (parmreg
);
4291 if (regno
>= max_parm_reg
)
4294 int old_max_parm_reg
= max_parm_reg
;
4296 /* It's slow to expand this one register at a time,
4297 but it's also rare and we need max_parm_reg to be
4298 precisely correct. */
4299 max_parm_reg
= regno
+ 1;
4300 new = (rtx
*) savealloc (max_parm_reg
* sizeof (rtx
));
4301 bcopy ((char *) parm_reg_stack_loc
, (char *) new,
4302 old_max_parm_reg
* sizeof (rtx
));
4303 bzero ((char *) (new + old_max_parm_reg
),
4304 (max_parm_reg
- old_max_parm_reg
) * sizeof (rtx
));
4305 parm_reg_stack_loc
= new;
4308 if (GET_CODE (parmreg
) == CONCAT
)
4310 enum machine_mode submode
= GET_MODE (XEXP (parmreg
, 0));
4312 regnor
= REGNO (gen_realpart (submode
, parmreg
));
4313 regnoi
= REGNO (gen_imagpart (submode
, parmreg
));
4315 if (stack_parm
!= 0)
4317 parm_reg_stack_loc
[regnor
]
4318 = gen_realpart (submode
, stack_parm
);
4319 parm_reg_stack_loc
[regnoi
]
4320 = gen_imagpart (submode
, stack_parm
);
4324 parm_reg_stack_loc
[regnor
] = 0;
4325 parm_reg_stack_loc
[regnoi
] = 0;
4329 parm_reg_stack_loc
[REGNO (parmreg
)] = stack_parm
;
4331 /* Mark the register as eliminable if we did no conversion
4332 and it was copied from memory at a fixed offset,
4333 and the arg pointer was not copied to a pseudo-reg.
4334 If the arg pointer is a pseudo reg or the offset formed
4335 an invalid address, such memory-equivalences
4336 as we make here would screw up life analysis for it. */
4337 if (nominal_mode
== passed_mode
4340 && GET_CODE (stack_parm
) == MEM
4341 && stack_offset
.var
== 0
4342 && reg_mentioned_p (virtual_incoming_args_rtx
,
4343 XEXP (stack_parm
, 0)))
4345 rtx linsn
= get_last_insn ();
4348 /* Mark complex types separately. */
4349 if (GET_CODE (parmreg
) == CONCAT
)
4350 /* Scan backwards for the set of the real and
4352 for (sinsn
= linsn
; sinsn
!= 0;
4353 sinsn
= prev_nonnote_insn (sinsn
))
4355 set
= single_set (sinsn
);
4357 && SET_DEST (set
) == regno_reg_rtx
[regnoi
])
4359 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4360 parm_reg_stack_loc
[regnoi
],
4363 && SET_DEST (set
) == regno_reg_rtx
[regnor
])
4365 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4366 parm_reg_stack_loc
[regnor
],
4369 else if ((set
= single_set (linsn
)) != 0
4370 && SET_DEST (set
) == parmreg
)
4372 = gen_rtx_EXPR_LIST (REG_EQUIV
,
4373 stack_parm
, REG_NOTES (linsn
));
4376 /* For pointer data type, suggest pointer register. */
4377 if (POINTER_TYPE_P (TREE_TYPE (parm
)))
4378 mark_reg_pointer (parmreg
,
4379 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm
)))
4384 /* Value must be stored in the stack slot STACK_PARM
4385 during function execution. */
4387 if (promoted_mode
!= nominal_mode
)
4389 /* Conversion is required. */
4390 rtx tempreg
= gen_reg_rtx (GET_MODE (entry_parm
));
4392 emit_move_insn (tempreg
, validize_mem (entry_parm
));
4394 push_to_sequence (conversion_insns
);
4395 entry_parm
= convert_to_mode (nominal_mode
, tempreg
,
4396 TREE_UNSIGNED (TREE_TYPE (parm
)));
4399 /* ??? This may need a big-endian conversion on sparc64. */
4400 stack_parm
= change_address (stack_parm
, nominal_mode
,
4403 conversion_insns
= get_insns ();
4408 if (entry_parm
!= stack_parm
)
4410 if (stack_parm
== 0)
4413 = assign_stack_local (GET_MODE (entry_parm
),
4414 GET_MODE_SIZE (GET_MODE (entry_parm
)), 0);
4415 /* If this is a memory ref that contains aggregate components,
4416 mark it as such for cse and loop optimize. */
4417 MEM_IN_STRUCT_P (stack_parm
) = aggregate
;
4420 if (promoted_mode
!= nominal_mode
)
4422 push_to_sequence (conversion_insns
);
4423 emit_move_insn (validize_mem (stack_parm
),
4424 validize_mem (entry_parm
));
4425 conversion_insns
= get_insns ();
4429 emit_move_insn (validize_mem (stack_parm
),
4430 validize_mem (entry_parm
));
4432 if (flag_check_memory_usage
)
4434 push_to_sequence (conversion_insns
);
4435 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
4436 XEXP (stack_parm
, 0), ptr_mode
,
4437 GEN_INT (GET_MODE_SIZE (GET_MODE
4439 TYPE_MODE (sizetype
),
4440 GEN_INT (MEMORY_USE_RW
),
4441 TYPE_MODE (integer_type_node
));
4443 conversion_insns
= get_insns ();
4446 DECL_RTL (parm
) = stack_parm
;
4449 /* If this "parameter" was the place where we are receiving the
4450 function's incoming structure pointer, set up the result. */
4451 if (parm
== function_result_decl
)
4453 tree result
= DECL_RESULT (fndecl
);
4454 tree restype
= TREE_TYPE (result
);
4457 = gen_rtx_MEM (DECL_MODE (result
), DECL_RTL (parm
));
4459 MEM_IN_STRUCT_P (DECL_RTL (result
)) = AGGREGATE_TYPE_P (restype
);
4462 if (TREE_THIS_VOLATILE (parm
))
4463 MEM_VOLATILE_P (DECL_RTL (parm
)) = 1;
4464 if (TREE_READONLY (parm
))
4465 RTX_UNCHANGING_P (DECL_RTL (parm
)) = 1;
4468 /* Output all parameter conversion instructions (possibly including calls)
4469 now that all parameters have been copied out of hard registers. */
4470 emit_insns (conversion_insns
);
4472 last_parm_insn
= get_last_insn ();
4474 current_function_args_size
= stack_args_size
.constant
;
4476 /* Adjust function incoming argument size for alignment and
4479 #ifdef REG_PARM_STACK_SPACE
4480 #ifndef MAYBE_REG_PARM_STACK_SPACE
4481 current_function_args_size
= MAX (current_function_args_size
,
4482 REG_PARM_STACK_SPACE (fndecl
));
4486 #ifdef STACK_BOUNDARY
4487 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4489 current_function_args_size
4490 = ((current_function_args_size
+ STACK_BYTES
- 1)
4491 / STACK_BYTES
) * STACK_BYTES
;
4494 #ifdef ARGS_GROW_DOWNWARD
4495 current_function_arg_offset_rtx
4496 = (stack_args_size
.var
== 0 ? GEN_INT (-stack_args_size
.constant
)
4497 : expand_expr (size_binop (MINUS_EXPR
, stack_args_size
.var
,
4498 size_int (-stack_args_size
.constant
)),
4499 NULL_RTX
, VOIDmode
, EXPAND_MEMORY_USE_BAD
));
4501 current_function_arg_offset_rtx
= ARGS_SIZE_RTX (stack_args_size
);
4504 /* See how many bytes, if any, of its args a function should try to pop
4507 current_function_pops_args
= RETURN_POPS_ARGS (fndecl
, TREE_TYPE (fndecl
),
4508 current_function_args_size
);
4510 /* For stdarg.h function, save info about
4511 regs and stack space used by the named args. */
4514 current_function_args_info
= args_so_far
;
4516 /* Set the rtx used for the function return value. Put this in its
4517 own variable so any optimizers that need this information don't have
4518 to include tree.h. Do this here so it gets done when an inlined
4519 function gets output. */
4521 current_function_return_rtx
= DECL_RTL (DECL_RESULT (fndecl
));
4524 /* Indicate whether REGNO is an incoming argument to the current function
4525 that was promoted to a wider mode. If so, return the RTX for the
4526 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4527 that REGNO is promoted from and whether the promotion was signed or
4530 #ifdef PROMOTE_FUNCTION_ARGS
/* Return DECL_INCOMING_RTL for the incoming argument that arrives in hard
   register REGNO, if that argument was promoted to a wider mode; store the
   unpromoted mode in *PMODE and its signedness in *PUNSIGNEDP.
   NOTE(review): this extraction is line-mangled and the embedded original
   line numbers show gaps — the return type, braces, and (presumably) a
   trailing `return 0;` for the not-found case are missing.  Recover the
   pristine file before editing code here.  */
4533 promoted_input_arg (regno
, pmode
, punsignedp
)
4535 enum machine_mode
*pmode
;
/* Scan the PARM_DECLs of the current function for one whose incoming RTL
   is exactly hard register REGNO and whose declared and passed types
   share a mode.  */
4540 for (arg
= DECL_ARGUMENTS (current_function_decl
); arg
;
4541 arg
= TREE_CHAIN (arg
))
4542 if (GET_CODE (DECL_INCOMING_RTL (arg
)) == REG
4543 && REGNO (DECL_INCOMING_RTL (arg
)) == regno
4544 && TYPE_MODE (DECL_ARG_TYPE (arg
)) == TYPE_MODE (TREE_TYPE (arg
)))
4546 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (arg
));
4547 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (arg
));
/* Recompute the promotion; only report the arg if the promoted mode
   matches what actually arrived and differs from the declared mode.  */
4549 mode
= promote_mode (TREE_TYPE (arg
), mode
, &unsignedp
, 1);
4550 if (mode
== GET_MODE (DECL_INCOMING_RTL (arg
))
4551 && mode
!= DECL_MODE (arg
))
4553 *pmode
= DECL_MODE (arg
);
4554 *punsignedp
= unsignedp
;
4555 return DECL_INCOMING_RTL (arg
);
4564 /* Compute the size and offset from the start of the stacked arguments for a
4565 parm passed in mode PASSED_MODE and with type TYPE.
4567 INITIAL_OFFSET_PTR points to the current offset into the stacked
4570 The starting offset and size for this parm are returned in *OFFSET_PTR
4571 and *ARG_SIZE_PTR, respectively.
4573 IN_REGS is non-zero if the argument will be passed in registers. It will
4574 never be set if REG_PARM_STACK_SPACE is not defined.
4576 FNDECL is the function in which the argument was defined.
4578 There are two types of rounding that are done. The first, controlled by
4579 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4580 list to be aligned to the specific boundary (in bits). This rounding
4581 affects the initial and starting offsets, but not the argument size.
4583 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4584 optionally rounds the size of the parm to PARM_BOUNDARY. The
4585 initial offset is not affected by this rounding, while the size always
4586 is and the starting offset may be. */
4588 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4589 initial_offset_ptr is positive because locate_and_pad_parm's
4590 callers pass in the total size of args so far as
4591 initial_offset_ptr. arg_size_ptr is always positive. */
/* Compute the stack offset and size of a parameter of mode PASSED_MODE and
   type TYPE (see the block comment above for the full contract: results go
   in *OFFSET_PTR and *ARG_SIZE_PTR; *INITIAL_OFFSET_PTR is the args size
   so far and may be bumped past the register-parm area).
   NOTE(review): line-mangled extraction with dropped lines (e.g. the
   `sizetree` declaration before original line 4605, several braces and
   #else/#endif lines); treat the text as a damaged copy, not compilable.  */
4594 locate_and_pad_parm (passed_mode
, type
, in_regs
, fndecl
,
4595 initial_offset_ptr
, offset_ptr
, arg_size_ptr
)
4596 enum machine_mode passed_mode
;
4600 struct args_size
*initial_offset_ptr
;
4601 struct args_size
*offset_ptr
;
4602 struct args_size
*arg_size_ptr
;
4605 = type
? size_in_bytes (type
) : size_int (GET_MODE_SIZE (passed_mode
));
4606 enum direction where_pad
= FUNCTION_ARG_PADDING (passed_mode
, type
);
4607 int boundary
= FUNCTION_ARG_BOUNDARY (passed_mode
, type
);
4609 #ifdef REG_PARM_STACK_SPACE
4610 /* If we have found a stack parm before we reach the end of the
4611 area reserved for registers, skip that area. */
4614 int reg_parm_stack_space
= 0;
4616 #ifdef MAYBE_REG_PARM_STACK_SPACE
4617 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
4619 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
/* Force the starting offset past the reserved register-parameter area,
   handling both the variable and constant parts of the offset.  */
4621 if (reg_parm_stack_space
> 0)
4623 if (initial_offset_ptr
->var
)
4625 initial_offset_ptr
->var
4626 = size_binop (MAX_EXPR
, ARGS_SIZE_TREE (*initial_offset_ptr
),
4627 size_int (reg_parm_stack_space
));
4628 initial_offset_ptr
->constant
= 0;
4630 else if (initial_offset_ptr
->constant
< reg_parm_stack_space
)
4631 initial_offset_ptr
->constant
= reg_parm_stack_space
;
4634 #endif /* REG_PARM_STACK_SPACE */
4636 arg_size_ptr
->var
= 0;
4637 arg_size_ptr
->constant
= 0;
/* Downward-growing argument case: offsets are negated relative to the
   incoming args size (see the sign-convention comment above this
   function).  */
4639 #ifdef ARGS_GROW_DOWNWARD
4640 if (initial_offset_ptr
->var
)
4642 offset_ptr
->constant
= 0;
4643 offset_ptr
->var
= size_binop (MINUS_EXPR
, integer_zero_node
,
4644 initial_offset_ptr
->var
);
4648 offset_ptr
->constant
= - initial_offset_ptr
->constant
;
4649 offset_ptr
->var
= 0;
4651 if (where_pad
!= none
4652 && (TREE_CODE (sizetree
) != INTEGER_CST
4653 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
4654 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4655 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
4656 if (where_pad
!= downward
)
4657 pad_to_arg_alignment (offset_ptr
, boundary
);
4658 if (initial_offset_ptr
->var
)
4660 arg_size_ptr
->var
= size_binop (MINUS_EXPR
,
4661 size_binop (MINUS_EXPR
,
4663 initial_offset_ptr
->var
),
4668 arg_size_ptr
->constant
= (- initial_offset_ptr
->constant
4669 - offset_ptr
->constant
);
4671 #else /* !ARGS_GROW_DOWNWARD */
4672 pad_to_arg_alignment (initial_offset_ptr
, boundary
);
4673 *offset_ptr
= *initial_offset_ptr
;
4675 #ifdef PUSH_ROUNDING
4676 if (passed_mode
!= BLKmode
)
4677 sizetree
= size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree
)));
4680 /* Pad_below needs the pre-rounded size to know how much to pad below
4681 so this must be done before rounding up. */
4682 if (where_pad
== downward
4683 /* However, BLKmode args passed in regs have their padding done elsewhere.
4684 The stack slot must be able to hold the entire register. */
4685 && !(in_regs
&& passed_mode
== BLKmode
))
4686 pad_below (offset_ptr
, passed_mode
, sizetree
);
4688 if (where_pad
!= none
4689 && (TREE_CODE (sizetree
) != INTEGER_CST
4690 || ((TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)))
4691 sizetree
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4693 ADD_PARM_SIZE (*arg_size_ptr
, sizetree
);
4694 #endif /* ARGS_GROW_DOWNWARD */
4697 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4698 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
/* Round the stack offset in *OFFSET_PTR to a multiple of BOUNDARY (bits):
   down when args grow downward, up otherwise.
   NOTE(review): line-mangled extraction; the variable-offset branch is
   missing tokens (original lines 4713-4716, presumably the round_down /
   round_up call whose arguments survive at 4717-4718).  */
4701 pad_to_arg_alignment (offset_ptr
, boundary
)
4702 struct args_size
*offset_ptr
;
4705 int boundary_in_bytes
= boundary
/ BITS_PER_UNIT
;
/* Nothing to do for single-byte alignment.  */
4707 if (boundary
> BITS_PER_UNIT
)
4709 if (offset_ptr
->var
)
4712 #ifdef ARGS_GROW_DOWNWARD
4717 (ARGS_SIZE_TREE (*offset_ptr
),
4718 boundary
/ BITS_PER_UNIT
);
4719 offset_ptr
->constant
= 0; /*?*/
/* Constant offset: FLOOR_ROUND toward more-negative offsets when args
   grow downward, CEIL_ROUND upward otherwise.  */
4722 offset_ptr
->constant
=
4723 #ifdef ARGS_GROW_DOWNWARD
4724 FLOOR_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
4726 CEIL_ROUND (offset_ptr
->constant
, boundary_in_bytes
);
4731 #ifndef ARGS_GROW_DOWNWARD
/* Add to *OFFSET_PTR the padding that goes below a downward-padded
   parameter of mode PASSED_MODE / size SIZETREE, so the value itself ends
   up at the high end of its slot.
   NOTE(review): line-mangled extraction; braces and the `tree sizetree;`
   parameter declaration appear to be missing (gaps in embedded numbering). */
4733 pad_below (offset_ptr
, passed_mode
, sizetree
)
4734 struct args_size
*offset_ptr
;
4735 enum machine_mode passed_mode
;
/* Non-BLK modes: pad by the difference between the mode size rounded up
   to PARM_BOUNDARY and the raw mode size.  */
4738 if (passed_mode
!= BLKmode
)
4740 if (GET_MODE_BITSIZE (passed_mode
) % PARM_BOUNDARY
)
4741 offset_ptr
->constant
4742 += (((GET_MODE_BITSIZE (passed_mode
) + PARM_BOUNDARY
- 1)
4743 / PARM_BOUNDARY
* PARM_BOUNDARY
/ BITS_PER_UNIT
)
4744 - GET_MODE_SIZE (passed_mode
));
/* BLKmode: pad by (rounded size - actual size), computed as tree
   arithmetic since the size may be variable.  */
4748 if (TREE_CODE (sizetree
) != INTEGER_CST
4749 || (TREE_INT_CST_LOW (sizetree
) * BITS_PER_UNIT
) % PARM_BOUNDARY
)
4751 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4752 tree s2
= round_up (sizetree
, PARM_BOUNDARY
/ BITS_PER_UNIT
);
4754 ADD_PARM_SIZE (*offset_ptr
, s2
);
4755 SUB_PARM_SIZE (*offset_ptr
, sizetree
);
4761 #ifdef ARGS_GROW_DOWNWARD
/* Return VALUE (a size tree) rounded down to a multiple of DIVISOR,
   i.e. (VALUE / DIVISOR) * DIVISOR with flooring division.
   Only compiled when ARGS_GROW_DOWNWARD (see enclosing #ifdef).
   NOTE(review): the return type and parameter declarations are missing
   from this mangled extraction.  */
4763 round_down (value
, divisor
)
4767 return size_binop (MULT_EXPR
,
4768 size_binop (FLOOR_DIV_EXPR
, value
, size_int (divisor
)),
4769 size_int (divisor
));
4773 /* Walk the tree of blocks describing the binding levels within a function
4774 and warn about uninitialized variables.
4775 This is done after calling flow_analysis and before global_alloc
4776 clobbers the pseudo-regs to hard regs. */
/* Recursively walk the BLOCK tree rooted at BLOCK and warn about register
   variables that flow analysis found possibly-uninitialized, and about
   register variables that may be clobbered across setjmp/vfork.  */
4779 uninitialized_vars_warning (block
)
4782 register tree decl
, sub
;
4783 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
4785 if (TREE_CODE (decl
) == VAR_DECL
4786 /* These warnings are unreliable for aggregates
4787 because assigning the fields one by one can fail to convince
4788 flow.c that the entire aggregate was initialized.
4789 Unions are troublesome because members may be shorter. */
4790 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl
))
4791 && DECL_RTL (decl
) != 0
4792 && GET_CODE (DECL_RTL (decl
)) == REG
4793 && regno_uninitialized (REGNO (DECL_RTL (decl
))))
4794 warning_with_decl (decl
,
4795 "`%s' might be used uninitialized in this function");
/* Second, independent check: the variable's register may be clobbered
   when longjmp/vfork restores registers saved at the setjmp.  */
4796 if (TREE_CODE (decl
) == VAR_DECL
4797 && DECL_RTL (decl
) != 0
4798 && GET_CODE (DECL_RTL (decl
)) == REG
4799 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
4800 warning_with_decl (decl
,
4801 "variable `%s' might be clobbered by `longjmp' or `vfork'");
/* Recurse into nested binding levels.  */
4803 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
4804 uninitialized_vars_warning (sub
);
4807 /* Do the appropriate part of uninitialized_vars_warning
4808 but for arguments instead of local variables. */
/* Like uninitialized_vars_warning's setjmp check, but for the function's
   arguments instead of block-local variables.
   NOTE(review): the `register tree decl;` declaration line is missing
   from this mangled extraction (gap before original line 4814).  */
4811 setjmp_args_warning ()
4814 for (decl
= DECL_ARGUMENTS (current_function_decl
);
4815 decl
; decl
= TREE_CHAIN (decl
))
4816 if (DECL_RTL (decl
) != 0
4817 && GET_CODE (DECL_RTL (decl
)) == REG
4818 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl
))))
4819 warning_with_decl (decl
, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4822 /* If this function call setjmp, put all vars into the stack
4823 unless they were declared `register'. */
/* The current function calls setjmp: force every variable in the BLOCK
   tree that currently lives in a register (or behind an ADDRESSOF MEM)
   into a stack slot, unless the user declared it `register'.  */
4826 setjmp_protect (block
)
4829 register tree decl
, sub
;
4830 for (decl
= BLOCK_VARS (block
); decl
; decl
= TREE_CHAIN (decl
))
4831 if ((TREE_CODE (decl
) == VAR_DECL
4832 || TREE_CODE (decl
) == PARM_DECL
)
4833 && DECL_RTL (decl
) != 0
4834 && (GET_CODE (DECL_RTL (decl
)) == REG
4835 || (GET_CODE (DECL_RTL (decl
)) == MEM
4836 && GET_CODE (XEXP (DECL_RTL (decl
), 0)) == ADDRESSOF
))
4837 /* If this variable came from an inline function, it must be
4838 that its life doesn't overlap the setjmp. If there was a
4839 setjmp in the function, it would already be in memory. We
4840 must exclude such variables because their DECL_RTL might be
4841 set to strange things such as virtual_stack_vars_rtx. */
4842 && ! DECL_FROM_INLINE (decl
)
4844 #ifdef NON_SAVING_SETJMP
4845 /* If longjmp doesn't restore the registers,
4846 don't put anything in them. */
4850 ! DECL_REGISTER (decl
)))
4851 put_var_into_stack (decl
);
/* Recurse into nested binding levels.  */
4852 for (sub
= BLOCK_SUBBLOCKS (block
); sub
; sub
= TREE_CHAIN (sub
))
4853 setjmp_protect (sub
);
4856 /* Like the previous function, but for args instead of local variables. */
/* Like setjmp_protect, but for the current function's arguments instead of
   local variables (see the comment line above this function).
   NOTE(review): the `register tree decl;` declaration and parts of the
   NON_SAVING_SETJMP conditional are missing from this mangled extraction. */
4859 setjmp_protect_args ()
4862 for (decl
= DECL_ARGUMENTS (current_function_decl
);
4863 decl
; decl
= TREE_CHAIN (decl
))
4864 if ((TREE_CODE (decl
) == VAR_DECL
4865 || TREE_CODE (decl
) == PARM_DECL
)
4866 && DECL_RTL (decl
) != 0
4867 && (GET_CODE (DECL_RTL (decl
)) == REG
4868 || (GET_CODE (DECL_RTL (decl
)) == MEM
4869 && GET_CODE (XEXP (DECL_RTL (decl
), 0)) == ADDRESSOF
))
4871 /* If longjmp doesn't restore the registers,
4872 don't put anything in them. */
4873 #ifdef NON_SAVING_SETJMP
4877 ! DECL_REGISTER (decl
)))
4878 put_var_into_stack (decl
);
4881 /* Return the context-pointer register corresponding to DECL,
4882 or 0 if it does not need one. */
/* Return the context-pointer RTX for DECL — the frame pointer of the
   function lexically containing DECL — or 0 if DECL needs none (per the
   comment above).  NOTE(review): mangled extraction; the `rtx link;`
   declaration, an early `return 0;`, and closing brace are missing.  */
4885 lookup_static_chain (decl
)
4888 tree context
= decl_function_context (decl
);
4892 || (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_NO_STATIC_CHAIN (decl
)))
4895 /* We treat inline_function_decl as an alias for the current function
4896 because that is the inline function whose vars, types, etc.
4897 are being merged into the current function.
4898 See expand_inline_function. */
4899 if (context
== current_function_decl
|| context
== inline_function_decl
)
4900 return virtual_stack_vars_rtx
;
/* Otherwise look the containing function up in the display of
   enclosing contexts.  */
4902 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
4903 if (TREE_PURPOSE (link
) == context
)
4904 return RTL_EXPR_RTL (TREE_VALUE (link
));
4909 /* Convert a stack slot address ADDR for variable VAR
4910 (from a containing function)
4911 into an address valid in this function (using a static chain). */
/* Convert stack-slot address ADDR of variable VAR, which lives in a
   containing function, into an address valid in the current function by
   going through the static chain (see comment above).
   NOTE(review): mangled extraction — declarations of `basereg'/`base',
   several braces, an `abort ()' fallback for undecodable addresses, and
   other fragments are missing (gaps in the embedded numbering).  */
4914 fix_lexical_addr (addr
, var
)
4919 HOST_WIDE_INT displacement
;
4920 tree context
= decl_function_context (var
);
4921 struct function
*fp
;
4924 /* If this is the present function, we need not do anything. */
4925 if (context
== current_function_decl
|| context
== inline_function_decl
)
/* Find the outer-function record for the containing context.  */
4928 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
4929 if (fp
->decl
== context
)
/* Strip an ADDRESSOF wrapper down to the underlying MEM's address.  */
4935 if (GET_CODE (addr
) == ADDRESSOF
&& GET_CODE (XEXP (addr
, 0)) == MEM
)
4936 addr
= XEXP (XEXP (addr
, 0), 0);
4938 /* Decode given address as base reg plus displacement. */
4939 if (GET_CODE (addr
) == REG
)
4940 basereg
= addr
, displacement
= 0;
4941 else if (GET_CODE (addr
) == PLUS
&& GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
4942 basereg
= XEXP (addr
, 0), displacement
= INTVAL (XEXP (addr
, 1));
4946 /* We accept vars reached via the containing function's
4947 incoming arg pointer and via its stack variables pointer. */
4948 if (basereg
== fp
->internal_arg_pointer
)
4950 /* If reached via arg pointer, get the arg pointer value
4951 out of that function's stack frame.
4953 There are two cases: If a separate ap is needed, allocate a
4954 slot in the outer function for it and dereference it that way.
4955 This is correct even if the real ap is actually a pseudo.
4956 Otherwise, just adjust the offset from the frame pointer to
4959 #ifdef NEED_SEPARATE_AP
4962 if (fp
->arg_pointer_save_area
== 0)
4963 fp
->arg_pointer_save_area
4964 = assign_outer_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0, fp
);
/* Recursively fix the save-area address itself, then load the saved
   arg pointer through it.  */
4966 addr
= fix_lexical_addr (XEXP (fp
->arg_pointer_save_area
, 0), var
);
4967 addr
= memory_address (Pmode
, addr
);
4969 base
= copy_to_reg (gen_rtx_MEM (Pmode
, addr
));
4971 displacement
+= (FIRST_PARM_OFFSET (context
) - STARTING_FRAME_OFFSET
);
4972 base
= lookup_static_chain (var
);
4976 else if (basereg
== virtual_stack_vars_rtx
)
4978 /* This is the same code as lookup_static_chain, duplicated here to
4979 avoid an extra call to decl_function_context. */
4982 for (link
= context_display
; link
; link
= TREE_CHAIN (link
))
4983 if (TREE_PURPOSE (link
) == context
)
4985 base
= RTL_EXPR_RTL (TREE_VALUE (link
));
4993 /* Use same offset, relative to appropriate static chain or argument
4995 return plus_constant (base
, displacement
);
4998 /* Return the address of the trampoline for entering nested fn FUNCTION.
4999 If necessary, allocate a trampoline (in the stack frame)
5000 and emit rtl to initialize its contents (at entry to this function). */
/* Return the (rounded) address of the trampoline for nested function
   FUNCTION, reusing an existing one from this or an outer function's
   trampoline_list or allocating and recording a new one (see comment
   above).  NOTE(review): mangled extraction — declarations (`link',
   `tramp', `rtlexp', `fn_context'), a `return' keyword at original line
   5015/5016, braces and #else/#endif lines are missing.  */
5003 trampoline_address (function
)
5009 struct function
*fp
;
5012 /* Find an existing trampoline and return it. */
5013 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
5014 if (TREE_PURPOSE (link
) == function
)
5016 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0));
/* Also search the trampoline lists of all containing functions; such a
   trampoline's address must be fixed up for the current frame.  */
5018 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
5019 for (link
= fp
->trampoline_list
; link
; link
= TREE_CHAIN (link
))
5020 if (TREE_PURPOSE (link
) == function
)
5022 tramp
= fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link
)), 0),
5024 return round_trampoline_addr (tramp
);
5027 /* None exists; we must make one. */
5029 /* Find the `struct function' for the function containing FUNCTION. */
5031 fn_context
= decl_function_context (function
);
5032 if (fn_context
!= current_function_decl
5033 && fn_context
!= inline_function_decl
)
5034 for (fp
= outer_function_chain
; fp
; fp
= fp
->next
)
5035 if (fp
->decl
== fn_context
)
5038 /* Allocate run-time space for this trampoline
5039 (usually in the defining function's stack frame). */
5040 #ifdef ALLOCATE_TRAMPOLINE
5041 tramp
= ALLOCATE_TRAMPOLINE (fp
);
5043 /* If rounding needed, allocate extra space
5044 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5045 #ifdef TRAMPOLINE_ALIGNMENT
5046 #define TRAMPOLINE_REAL_SIZE \
5047 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5049 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
/* Allocate in the defining (outer) function's frame when FUNCTION is
   nested in an outer function, else in the current frame.  */
5052 tramp
= assign_outer_stack_local (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0, fp
);
5054 tramp
= assign_stack_local (BLKmode
, TRAMPOLINE_REAL_SIZE
, 0);
5057 /* Record the trampoline for reuse and note it for later initialization
5058 by expand_function_end. */
5061 push_obstacks (fp
->function_maybepermanent_obstack
,
5062 fp
->function_maybepermanent_obstack
);
5063 rtlexp
= make_node (RTL_EXPR
);
5064 RTL_EXPR_RTL (rtlexp
) = tramp
;
5065 fp
->trampoline_list
= tree_cons (function
, rtlexp
, fp
->trampoline_list
);
5070 /* Make the RTL_EXPR node temporary, not momentary, so that the
5071 trampoline_list doesn't become garbage. */
5072 int momentary
= suspend_momentary ();
5073 rtlexp
= make_node (RTL_EXPR
);
5074 resume_momentary (momentary
);
5076 RTL_EXPR_RTL (rtlexp
) = tramp
;
5077 trampoline_list
= tree_cons (function
, rtlexp
, trampoline_list
);
/* Translate the slot's address into the current frame and align it.  */
5080 tramp
= fix_lexical_addr (XEXP (tramp
, 0), function
);
5081 return round_trampoline_addr (tramp
);
5084 /* Given a trampoline address,
5085 round it to multiple of TRAMPOLINE_ALIGNMENT. */
/* Round TRAMP up to a multiple of TRAMPOLINE_ALIGNMENT by emitting RTL:
   add (alignment-1) then mask with -alignment.  A no-op when
   TRAMPOLINE_ALIGNMENT is not defined.
   NOTE(review): mangled extraction; return type, parameter declaration,
   `#endif', and trailing `return tramp;` are missing.  */
5088 round_trampoline_addr (tramp
)
5091 #ifdef TRAMPOLINE_ALIGNMENT
5092 /* Round address up to desired boundary. */
5093 rtx temp
= gen_reg_rtx (Pmode
);
5094 temp
= expand_binop (Pmode
, add_optab
, tramp
,
5095 GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1),
5096 temp
, 0, OPTAB_LIB_WIDEN
);
5097 tramp
= expand_binop (Pmode
, and_optab
, temp
,
5098 GEN_INT (- TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
),
5099 temp
, 0, OPTAB_LIB_WIDEN
);
5104 /* The functions identify_blocks and reorder_blocks provide a way to
5105 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5106 duplicate portions of the RTL code. Call identify_blocks before
5107 changing the RTL, and call reorder_blocks after. */
5109 /* Put all this function's BLOCK nodes including those that are chained
5110 onto the first block into a vector, and return it.
5111 Also store in each NOTE for the beginning or end of a block
5112 the index of that block in the vector.
5113 The arguments are BLOCK, the chain of top-level blocks of the function,
5114 and INSNS, the insn chain of the function. */
/* Collect all BLOCK nodes reachable from BLOCK into a malloc'd vector and
   number the BLOCK_BEG/BLOCK_END notes in INSNS with their block's index
   (see the comment above; reorder_blocks consumes the result).
   NOTE(review): mangled extraction — declarations (`insn', `n_blocks',
   `block_vector', `block_stack', `depth'), several braces, and an
   `abort ()' on the count mismatch at 5154 are missing.  */
5117 identify_blocks (block
, insns
)
5125 int next_block_number
= 1;
5126 int current_block_number
= 1;
/* First pass counts the blocks; second pass fills the vector.  */
5132 n_blocks
= all_blocks (block
, 0);
5133 block_vector
= (tree
*) xmalloc (n_blocks
* sizeof (tree
));
5134 block_stack
= (int *) alloca (n_blocks
* sizeof (int));
5136 all_blocks (block
, block_vector
);
/* Walk the insns, maintaining a stack of enclosing block numbers, and
   stamp each block-begin/end note with its block's index.  */
5138 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5139 if (GET_CODE (insn
) == NOTE
)
5141 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
5143 block_stack
[depth
++] = current_block_number
;
5144 current_block_number
= next_block_number
;
5145 NOTE_BLOCK_NUMBER (insn
) = next_block_number
++;
5147 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
5149 NOTE_BLOCK_NUMBER (insn
) = current_block_number
;
5150 current_block_number
= block_stack
[--depth
];
/* Sanity check: every counted block must have been visited.  */
5154 if (n_blocks
!= next_block_number
)
5157 return block_vector
;
5160 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5161 and a revised instruction chain, rebuild the tree structure
5162 of BLOCK nodes to correspond to the new order of RTL.
5163 The new block tree is inserted below TOP_BLOCK.
5164 Returns the current top-level block. */
/* Rebuild the BLOCK tree under BLOCK to match the (possibly reordered)
   note structure of INSNS, using BLOCK_VECTOR from identify_blocks; a
   block seen twice is duplicated with copy_node.  Returns the top block.
   NOTE(review): mangled extraction — the `rtx insn;' declaration, an early
   `return block;' when BLOCK_VECTOR is 0, and braces are missing.  */
5167 reorder_blocks (block_vector
, block
, insns
)
5172 tree current_block
= block
;
5175 if (block_vector
== 0)
5178 /* Prune the old trees away, so that it doesn't get in the way. */
5179 BLOCK_SUBBLOCKS (current_block
) = 0;
5180 BLOCK_CHAIN (current_block
) = 0;
5182 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
5183 if (GET_CODE (insn
) == NOTE
)
5185 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
)
5187 tree block
= block_vector
[NOTE_BLOCK_NUMBER (insn
)];
5188 /* If we have seen this block before, copy it. */
5189 if (TREE_ASM_WRITTEN (block
))
5190 block
= copy_node (block
);
5191 BLOCK_SUBBLOCKS (block
) = 0;
5192 TREE_ASM_WRITTEN (block
) = 1;
/* Link the block in as the newest subblock of the current block,
   then descend into it.  Clearing NOTE_SOURCE_FILE marks the note
   as processed.  */
5193 BLOCK_SUPERCONTEXT (block
) = current_block
;
5194 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (current_block
);
5195 BLOCK_SUBBLOCKS (current_block
) = block
;
5196 current_block
= block
;
5197 NOTE_SOURCE_FILE (insn
) = 0;
5199 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
)
5201 BLOCK_SUBBLOCKS (current_block
)
5202 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
5203 current_block
= BLOCK_SUPERCONTEXT (current_block
);
5204 NOTE_SOURCE_FILE (insn
) = 0;
/* Subblocks were pushed front-most-recent; reverse to source order.  */
5208 BLOCK_SUBBLOCKS (current_block
)
5209 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block
));
5210 return current_block
;
5213 /* Reverse the order of elements in the chain T of blocks,
5214 and return the new head of the chain (old last element). */
/* Body of blocks_nreverse: in-place reversal of the BLOCK_CHAIN list T
   (per the comment above: returns the old last element as the new head).
   NOTE(review): the function header (name, return type, parameter) and
   the loop tail — presumably `prev = decl;` and a final `return prev;` —
   are missing from this mangled extraction.  */
5220 register tree prev
= 0, decl
, next
;
5221 for (decl
= t
; decl
; decl
= next
)
5223 next
= BLOCK_CHAIN (decl
);
5224 BLOCK_CHAIN (decl
) = prev
;
5230 /* Count the subblocks of the list starting with BLOCK, and list them
5231 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
/* Count the BLOCK nodes in the chain starting at BLOCK (including all
   subblocks, recursively); when VECTOR is non-zero also store each block
   into it.  Clears TREE_ASM_WRITTEN on every block visited.
   NOTE(review): mangled extraction — return type, parameter declarations,
   the `n_blocks' counter declaration/increment, the enclosing loop header,
   and the final `return n_blocks;` are missing (numbering gaps).  */
5235 all_blocks (block
, vector
)
5243 TREE_ASM_WRITTEN (block
) = 0;
5245 /* Record this block. */
5247 vector
[n_blocks
] = block
;
5251 /* Record the subblocks, and their subblocks... */
5252 n_blocks
+= all_blocks (BLOCK_SUBBLOCKS (block
),
5253 vector
? vector
+ n_blocks
: 0);
5254 block
= BLOCK_CHAIN (block
);
5260 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5261 and initialize static variables for generating RTL for the statements
/* Initialize all per-function static state for generating RTL for function
   SUBR (a FUNCTION_DECL), compiled from FILENAME at LINE: resets the stmt,
   expr, emit and const-rtx subsystems, zeroes the current_function_* flags,
   emits the initial line-number and DELETED notes, and sets the
   struct-return / pointer-return flags from SUBR's result type.
   NOTE(review): mangled extraction — parameter declarations, braces, and
   several calls whose comments survive (e.g. after "Initialize the RTL
   mechanism", likely init_emit and friends) are missing.  */
5265 init_function_start (subr
, filename
, line
)
5270 init_stmt_for_function ();
5272 cse_not_expected
= ! optimize
;
5274 /* Caller save not needed yet. */
5275 caller_save_needed
= 0;
5277 /* No stack slots have been made yet. */
5278 stack_slot_list
= 0;
5280 /* There is no stack slot for handling nonlocal gotos. */
5281 nonlocal_goto_handler_slot
= 0;
5282 nonlocal_goto_stack_level
= 0;
5284 /* No labels have been declared for nonlocal use. */
5285 nonlocal_labels
= 0;
5287 /* No function calls so far in this function. */
5288 function_call_count
= 0;
5290 /* No parm regs have been allocated.
5291 (This is important for output_inline_function.) */
5292 max_parm_reg
= LAST_VIRTUAL_REGISTER
+ 1;
5294 /* Initialize the RTL mechanism. */
5297 /* Initialize the queue of pending postincrement and postdecrements,
5298 and some other info in expr.c. */
5301 /* We haven't done register allocation yet. */
5304 init_const_rtx_hash_table ();
5306 current_function_name
= (*decl_printable_name
) (subr
, 2);
5308 /* Nonzero if this is a nested function that uses a static chain. */
5310 current_function_needs_context
5311 = (decl_function_context (current_function_decl
) != 0
5312 && ! DECL_NO_STATIC_CHAIN (current_function_decl
));
5314 /* Set if a call to setjmp is seen. */
5315 current_function_calls_setjmp
= 0;
5317 /* Set if a call to longjmp is seen. */
5318 current_function_calls_longjmp
= 0;
5320 current_function_calls_alloca
= 0;
5321 current_function_has_nonlocal_label
= 0;
5322 current_function_has_nonlocal_goto
= 0;
5323 current_function_contains_functions
= 0;
5324 current_function_is_thunk
= 0;
5326 current_function_returns_pcc_struct
= 0;
5327 current_function_returns_struct
= 0;
5328 current_function_epilogue_delay_list
= 0;
5329 current_function_uses_const_pool
= 0;
5330 current_function_uses_pic_offset_table
= 0;
5331 current_function_cannot_inline
= 0;
5333 /* We have not yet needed to make a label to jump to for tail-recursion. */
5334 tail_recursion_label
= 0;
5336 /* We haven't had a need to make a save area for ap yet. */
5338 arg_pointer_save_area
= 0;
5340 /* No stack slots allocated yet. */
5343 /* No SAVE_EXPRs in this function yet. */
5346 /* No RTL_EXPRs in this function yet. */
5349 /* Set up to allocate temporaries. */
5352 /* Within function body, compute a type's size as soon as it is laid out. */
5353 immediate_size_expand
++;
5355 /* We haven't made any trampolines for this function yet. */
5356 trampoline_list
= 0;
5358 init_pending_stack_adjust ();
5359 inhibit_defer_pop
= 0;
5361 current_function_outgoing_args_size
= 0;
5363 /* Prevent ever trying to delete the first instruction of a function.
5364 Also tell final how to output a linenum before the function prologue.
5365 Note linenums could be missing, e.g. when compiling a Java .class file. */
5367 emit_line_note (filename
, line
);
5369 /* Make sure first insn is a note even if we don't want linenums.
5370 This makes sure the first insn will never be deleted.
5371 Also, final expects a note to appear there. */
5372 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5374 /* Set flags used by final.c. */
5375 if (aggregate_value_p (DECL_RESULT (subr
)))
5377 #ifdef PCC_STATIC_STRUCT_RETURN
5378 current_function_returns_pcc_struct
= 1;
5380 current_function_returns_struct
= 1;
5383 /* Warn if this value is an aggregate type,
5384 regardless of which calling convention we are using for it. */
5385 if (warn_aggregate_return
5386 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
))))
5387 warning ("function returns an aggregate");
5389 current_function_returns_pointer
5390 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr
)));
5392 /* Indicate that we need to distinguish between the return value of the
5393 present function and the return value of a function being called. */
5394 rtx_equal_function_value_matters
= 1;
5396 /* Indicate that we have not instantiated virtual registers yet. */
5397 virtuals_instantiated
= 0;
5399 /* Indicate we have no need of a frame pointer yet. */
5400 frame_pointer_needed
= 0;
5402 /* By default assume not varargs or stdarg. */
5403 current_function_varargs
= 0;
5404 current_function_stdarg
= 0;
5407 /* Indicate that the current function uses extra args
5408 not explicitly mentioned in the argument list in any fashion. */
5413 current_function_varargs
= 1;
5416 /* Expand a call to __main at the beginning of a possible main function. */
5418 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5419 #undef HAS_INIT_SECTION
5420 #define HAS_INIT_SECTION
/* Emit a call to `__main' at the start of `main', unless an init section
   handles constructors instead (HAS_INIT_SECTION, defined just above when
   INIT_SECTION_ASM_OP is available and INVOKE__main is not).
   NOTE(review): mangled extraction; the remaining emit_library_call
   arguments (original line 5428) and braces are missing.  */
5424 expand_main_function ()
5426 #if !defined (HAS_INIT_SECTION)
5427 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, NAME__MAIN
), 0,
5429 #endif /* not HAS_INIT_SECTION */
5432 extern struct obstack permanent_obstack
;
5434 /* Start the RTL for a new function, and set variables used for
5436 SUBR is the FUNCTION_DECL node.
5437 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5438 the function's parameters, which must be run at any return statement. */
/* NOTE(review): this listing is lossy -- braces, #else/#endif lines,
   blank lines and some declarations were dropped (the embedded original
   line numbers skip).  The code below is kept byte-identical; only
   comments have been added.  */
5441 expand_function_start (subr
, parms_have_cleanups
)
5443 int parms_have_cleanups
;
/* LAST_PTR tracks the stack slot / pseudo holding the static chain.  */
5447 rtx last_ptr
= NULL_RTX
;
5449 /* Make sure volatile mem refs aren't considered
5450 valid operands of arithmetic insns. */
5451 init_recog_no_volatile ();
5453 /* If function gets a static chain arg, store it in the stack frame.
5454 Do this first, so it gets the first stack slot offset. */
5455 if (current_function_needs_context
)
5457 last_ptr
= assign_stack_local (Pmode
, GET_MODE_SIZE (Pmode
), 0);
5459 /* Delay copying static chain if it is not a register to avoid
5460 conflicts with regs used for parameters. */
5461 if (! SMALL_REGISTER_CLASSES
5462 || GET_CODE (static_chain_incoming_rtx
) == REG
)
5463 emit_move_insn (last_ptr
, static_chain_incoming_rtx
);
5466 /* If the parameters of this function need cleaning up, get a label
5467 for the beginning of the code which executes those cleanups. This must
5468 be done before doing anything with return_label. */
5469 if (parms_have_cleanups
)
5470 cleanup_label
= gen_label_rtx ();
5474 /* Make the label for return statements to jump to, if this machine
5475 does not have a one-instruction return and uses an epilogue,
5476 or if it returns a structure, or if it has parm cleanups. */
5478 if (cleanup_label
== 0 && HAVE_return
5479 && ! current_function_returns_pcc_struct
5480 && ! (current_function_returns_struct
&& ! optimize
))
5483 return_label
= gen_label_rtx ();
/* NOTE(review): the two gen_label_rtx assignments to return_label look
   like the two arms of an #if HAVE_return / #else whose preprocessor
   lines were dropped from this listing -- verify against the original.  */
5485 return_label
= gen_label_rtx ();
5488 /* Initialize rtx used to return the value. */
5489 /* Do this before assign_parms so that we copy the struct value address
5490 before any library calls that assign parms might generate. */
5492 /* Decide whether to return the value in memory or in a register. */
5493 if (aggregate_value_p (DECL_RESULT (subr
)))
5495 /* Returning something that won't go in a register. */
5496 register rtx value_address
= 0;
5498 #ifdef PCC_STATIC_STRUCT_RETURN
5499 if (current_function_returns_pcc_struct
)
5501 int size
= int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr
)));
5502 value_address
= assemble_static_space (size
);
5507 /* Expect to be passed the address of a place to store the value.
5508 If it is passed as an argument, assign_parms will take care of
5510 if (struct_value_incoming_rtx
)
5512 value_address
= gen_reg_rtx (Pmode
);
5513 emit_move_insn (value_address
, struct_value_incoming_rtx
);
/* Make DECL_RESULT's rtl a MEM at the aggregate's return address.  */
5518 DECL_RTL (DECL_RESULT (subr
))
5519 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr
)), value_address
);
5520 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr
)))
5521 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr
)));
5524 else if (DECL_MODE (DECL_RESULT (subr
)) == VOIDmode
)
5525 /* If return mode is void, this decl rtl should not be used. */
5526 DECL_RTL (DECL_RESULT (subr
)) = 0;
5527 else if (parms_have_cleanups
)
5529 /* If function will end with cleanup code for parms,
5530 compute the return values into a pseudo reg,
5531 which we will copy into the true return register
5532 after the cleanups are done. */
5534 enum machine_mode mode
= DECL_MODE (DECL_RESULT (subr
));
5536 #ifdef PROMOTE_FUNCTION_RETURN
5537 tree type
= TREE_TYPE (DECL_RESULT (subr
));
5538 int unsignedp
= TREE_UNSIGNED (type
);
5540 mode
= promote_mode (type
, mode
, &unsignedp
, 1);
5543 DECL_RTL (DECL_RESULT (subr
)) = gen_reg_rtx (mode
);
5546 /* Scalar, returned in a register. */
5548 #ifdef FUNCTION_OUTGOING_VALUE
5549 DECL_RTL (DECL_RESULT (subr
))
5550 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
/* NOTE(review): the #else separating the FUNCTION_OUTGOING_VALUE and
   FUNCTION_VALUE assignments (and the matching #endif) appears to have
   been dropped from this listing.  */
5552 DECL_RTL (DECL_RESULT (subr
))
5553 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr
)), subr
);
5556 /* Mark this reg as the function's return value. */
5557 if (GET_CODE (DECL_RTL (DECL_RESULT (subr
))) == REG
)
5559 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr
))) = 1;
5560 /* Needed because we may need to move this to memory
5561 in case it's a named return value whose address is taken. */
5562 DECL_REGISTER (DECL_RESULT (subr
)) = 1;
5566 /* Initialize rtx for parameters and local variables.
5567 In some cases this requires emitting insns. */
5569 assign_parms (subr
, 0);
5571 /* Copy the static chain now if it wasn't a register. The delay is to
5572 avoid conflicts with the parameter passing registers. */
5574 if (SMALL_REGISTER_CLASSES
&& current_function_needs_context
)
5575 if (GET_CODE (static_chain_incoming_rtx
) != REG
)
5576 emit_move_insn (last_ptr
, static_chain_incoming_rtx
);
5578 /* The following was moved from init_function_start.
5579 The move is supposed to make sdb output more accurate. */
5580 /* Indicate the beginning of the function body,
5581 as opposed to parm setup. */
5582 emit_note (NULL_PTR
, NOTE_INSN_FUNCTION_BEG
)
5584 /* If doing stupid allocation, mark parms as born here. */
5586 if (GET_CODE (get_last_insn ()) != NOTE
)
5587 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5588 parm_birth_insn
= get_last_insn ();
/* Keep parameter pseudos live (stupid allocation path).  */
5592 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_parm_reg
; i
++)
5593 use_variable (regno_reg_rtx
[i
]);
5595 if (current_function_internal_arg_pointer
!= virtual_incoming_args_rtx
)
5596 use_variable (current_function_internal_arg_pointer
);
5599 context_display
= 0;
5600 if (current_function_needs_context
)
5602 /* Fetch static chain values for containing functions. */
5603 tem
= decl_function_context (current_function_decl
);
5604 /* If not doing stupid register allocation copy the static chain
5605 pointer into a pseudo. If we have small register classes, copy
5606 the value from memory if static_chain_incoming_rtx is a REG. If
5607 we do stupid register allocation, we use the stack address
5609 if (tem
&& ! obey_regdecls
)
5611 /* If the static chain originally came in a register, put it back
5612 there, then move it out in the next insn. The reason for
5613 this peculiar code is to satisfy function integration. */
5614 if (SMALL_REGISTER_CLASSES
5615 && GET_CODE (static_chain_incoming_rtx
) == REG
)
5616 emit_move_insn (static_chain_incoming_rtx
, last_ptr
);
5617 last_ptr
= copy_to_reg (static_chain_incoming_rtx
);
/* Build the context display: one RTL_EXPR per enclosing function.  */
5622 tree rtlexp
= make_node (RTL_EXPR
);
5624 RTL_EXPR_RTL (rtlexp
) = last_ptr
;
5625 context_display
= tree_cons (tem
, rtlexp
, context_display
);
5626 tem
= decl_function_context (tem
);
5629 /* Chain thru stack frames, assuming pointer to next lexical frame
5630 is found at the place we always store it. */
5631 #ifdef FRAME_GROWS_DOWNWARD
5632 last_ptr
= plus_constant (last_ptr
, - GET_MODE_SIZE (Pmode
));
5634 last_ptr
= copy_to_reg (gen_rtx_MEM (Pmode
,
5635 memory_address (Pmode
, last_ptr
)));
5637 /* If we are not optimizing, ensure that we know that this
5638 piece of context is live over the entire function. */
/* NOTE(review): the third argument of this gen_rtx_EXPR_LIST call
   (presumably the previous save_expr_regs chain) was dropped from
   this listing along with the closing parenthesis.  */
5640 save_expr_regs
= gen_rtx_EXPR_LIST (VOIDmode
, last_ptr
,
5645 /* After the display initializations is where the tail-recursion label
5646 should go, if we end up needing one. Ensure we have a NOTE here
5647 since some things (like trampolines) get placed before this. */
5648 tail_recursion_reentry
= emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
5650 /* Evaluate now the sizes of any types declared among the arguments. */
5651 for (tem
= nreverse (get_pending_sizes ()); tem
; tem
= TREE_CHAIN (tem
))
5653 expand_expr (TREE_VALUE (tem
), const0_rtx
, VOIDmode
,
5654 EXPAND_MEMORY_USE_BAD
);
5655 /* Flush the queue in case this parameter declaration has
5660 /* Make sure there is a line number after the function entry setup code. */
5661 force_next_line_note ();
5664 /* Generate RTL for the end of the current function.
5665 FILENAME and LINE are the current position in the source file.
5667 It is up to language-specific callers to do cleanups for parameters--
5668 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
/* NOTE(review): this listing is lossy -- braces, #else/#endif lines,
   parameter declarations and several statements were dropped (the
   embedded original line numbers skip).  Code kept byte-identical;
   only comments added.  */
5671 expand_function_end (filename
, line
, end_bindings
)
5679 #ifdef TRAMPOLINE_TEMPLATE
/* Cached BLKmode MEM holding the target's trampoline template; built
   once per compilation (see the initialization further below).  */
5680 static rtx initial_trampoline
;
5683 #ifdef NON_SAVING_SETJMP
5684 /* Don't put any variables in registers if we call setjmp
5685 on a machine that fails to restore the registers. */
5686 if (NON_SAVING_SETJMP
&& current_function_calls_setjmp
)
5688 if (DECL_INITIAL (current_function_decl
) != error_mark_node
)
5689 setjmp_protect (DECL_INITIAL (current_function_decl
));
5691 setjmp_protect_args ();
5695 /* Save the argument pointer if a save area was made for it. */
5696 if (arg_pointer_save_area
)
5698 rtx x
= gen_move_insn (arg_pointer_save_area
, virtual_incoming_args_rtx
);
5699 emit_insn_before (x
, tail_recursion_reentry
);
5702 /* Initialize any trampolines required by this function. */
5703 for (link
= trampoline_list
; link
; link
= TREE_CHAIN (link
))
5705 tree function
= TREE_PURPOSE (link
);
5706 rtx context
= lookup_static_chain (function
);
5707 rtx tramp
= RTL_EXPR_RTL (TREE_VALUE (link
));
5708 #ifdef TRAMPOLINE_TEMPLATE
5713 #ifdef TRAMPOLINE_TEMPLATE
5714 /* First make sure this compilation has a template for
5715 initializing trampolines. */
5716 if (initial_trampoline
== 0)
5718 end_temporary_allocation ();
/* NOTE(review): the left-hand side of this assignment (presumably
   initial_trampoline) was dropped from this listing.  */
5720 = gen_rtx_MEM (BLKmode
, assemble_trampoline_template ());
5721 resume_temporary_allocation ();
5725 /* Generate insns to initialize the trampoline. */
5727 tramp
= round_trampoline_addr (XEXP (tramp
, 0));
5728 #ifdef TRAMPOLINE_TEMPLATE
5729 blktramp
= change_address (initial_trampoline
, BLKmode
, tramp
);
5730 emit_block_move (blktramp
, initial_trampoline
,
5731 GEN_INT (TRAMPOLINE_SIZE
),
5732 TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
5734 INITIALIZE_TRAMPOLINE (tramp
, XEXP (DECL_RTL (function
), 0), context
);
5738 /* Put those insns at entry to the containing function (this one). */
5739 emit_insns_before (seq
, tail_recursion_reentry
);
5742 /* If we are doing stack checking and this function makes calls,
5743 do a stack probe at the start of the function to ensure we have enough
5744 space for another stack frame. */
5745 if (flag_stack_check
&& ! STACK_CHECK_BUILTIN
)
5749 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5750 if (GET_CODE (insn
) == CALL_INSN
)
5753 probe_stack_range (STACK_CHECK_PROTECT
,
5754 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE
));
5757 emit_insns_before (seq
, tail_recursion_reentry
);
5762 /* Warn about unused parms if extra warnings were specified. */
5763 if (warn_unused
&& extra_warnings
)
5767 for (decl
= DECL_ARGUMENTS (current_function_decl
);
5768 decl
; decl
= TREE_CHAIN (decl
))
5769 if (! TREE_USED (decl
) && TREE_CODE (decl
) == PARM_DECL
5770 && DECL_NAME (decl
) && ! DECL_ARTIFICIAL (decl
))
5771 warning_with_decl (decl
, "unused parameter `%s'");
5774 /* Delete handlers for nonlocal gotos if nothing uses them. */
5775 if (nonlocal_goto_handler_slot
!= 0 && !current_function_has_nonlocal_label
)
5778 /* End any sequences that failed to be closed due to syntax errors. */
5779 while (in_sequence_p ())
5782 /* Outside function body, can't compute type's actual size
5783 until next function's body starts. */
5784 immediate_size_expand
--;
5786 /* If doing stupid register allocation,
5787 mark register parms as dying here. */
5792 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_parm_reg
; i
++)
5793 use_variable (regno_reg_rtx
[i
]);
5795 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5797 for (tem
= save_expr_regs
; tem
; tem
= XEXP (tem
, 1))
5799 use_variable (XEXP (tem
, 0));
5800 use_variable_after (XEXP (tem
, 0), parm_birth_insn
);
5803 if (current_function_internal_arg_pointer
!= virtual_incoming_args_rtx
)
5804 use_variable (current_function_internal_arg_pointer
);
/* No stack slots should be added past this point.  */
5807 clear_pending_stack_adjust ();
5808 do_pending_stack_adjust ();
5810 /* Mark the end of the function body.
5811 If control reaches this insn, the function can drop through
5812 without returning a value. */
5813 emit_note (NULL_PTR
, NOTE_INSN_FUNCTION_END
);
5815 /* Must mark the last line number note in the function, so that the test
5816 coverage code can avoid counting the last line twice. This just tells
5817 the code to ignore the immediately following line note, since there
5818 already exists a copy of this note somewhere above. This line number
5819 note is still needed for debugging though, so we can't delete it. */
5820 if (flag_test_coverage
)
5821 emit_note (NULL_PTR
, NOTE_REPEATED_LINE_NUMBER
);
5823 /* Output a linenumber for the end of the function.
5824 SDB depends on this. */
5825 emit_line_note_force (filename
, line
);
5827 /* Output the label for the actual return from the function,
5828 if one is expected. This happens either because a function epilogue
5829 is used instead of a return instruction, or because a return was done
5830 with a goto in order to run local cleanups, or because of pcc-style
5831 structure returning. */
5834 emit_label (return_label
);
5836 /* C++ uses this. */
5838 expand_end_bindings (0, 0, 0);
5840 /* Now handle any leftover exception regions that may have been
5841 created for the parameters. */
5843 rtx last
= get_last_insn ();
5846 expand_leftover_cleanups ();
5848 /* If the above emitted any code, may sure we jump around it. */
5849 if (last
!= get_last_insn ())
5851 label
= gen_label_rtx ();
5852 last
= emit_jump_insn_after (gen_jump (label
), last
);
5853 last
= emit_barrier_after (last
);
5858 /* If we had calls to alloca, and this machine needs
5859 an accurate stack pointer to exit the function,
5860 insert some code to save and restore the stack pointer. */
5861 #ifdef EXIT_IGNORE_STACK
5862 if (! EXIT_IGNORE_STACK
)
5864 if (current_function_calls_alloca
)
5868 emit_stack_save (SAVE_FUNCTION
, &tem
, parm_birth_insn
);
5869 emit_stack_restore (SAVE_FUNCTION
, tem
, NULL_RTX
);
5872 /* If scalar return value was computed in a pseudo-reg,
5873 copy that to the hard return register. */
5874 if (DECL_RTL (DECL_RESULT (current_function_decl
)) != 0
5875 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl
))) == REG
5876 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl
)))
5877 >= FIRST_PSEUDO_REGISTER
))
5879 rtx real_decl_result
;
5881 #ifdef FUNCTION_OUTGOING_VALUE
/* NOTE(review): the left-hand sides of the next two assignments
   (presumably real_decl_result) and the #else/#endif between them
   were dropped from this listing.  */
5883 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
5884 current_function_decl
);
5887 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl
)),
5888 current_function_decl
);
5890 REG_FUNCTION_VALUE_P (real_decl_result
) = 1;
5891 /* If this is a BLKmode structure being returned in registers, then use
5892 the mode computed in expand_return. */
5893 if (GET_MODE (real_decl_result
) == BLKmode
)
5894 PUT_MODE (real_decl_result
,
5895 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl
))));
5896 emit_move_insn (real_decl_result
,
5897 DECL_RTL (DECL_RESULT (current_function_decl
)));
5898 emit_insn (gen_rtx_USE (VOIDmode
, real_decl_result
));
5900 /* The delay slot scheduler assumes that current_function_return_rtx
5901 holds the hard register containing the return value, not a temporary
5903 current_function_return_rtx
= real_decl_result
;
5906 /* If returning a structure, arrange to return the address of the value
5907 in a place where debuggers expect to find it.
5909 If returning a structure PCC style,
5910 the caller also depends on this value.
5911 And current_function_returns_pcc_struct is not necessarily set. */
5912 if (current_function_returns_struct
5913 || current_function_returns_pcc_struct
)
5915 rtx value_address
= XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
5916 tree type
= TREE_TYPE (DECL_RESULT (current_function_decl
));
5917 #ifdef FUNCTION_OUTGOING_VALUE
/* NOTE(review): the declaration and left-hand side "outgoing" for the
   next two assignments, and the #else/#endif between them, were
   dropped from this listing.  */
5919 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type
),
5920 current_function_decl
);
5923 = FUNCTION_VALUE (build_pointer_type (type
),
5924 current_function_decl
);
5927 /* Mark this as a function return value so integrate will delete the
5928 assignment and USE below when inlining this function. */
5929 REG_FUNCTION_VALUE_P (outgoing
) = 1;
5931 emit_move_insn (outgoing
, value_address
);
5932 use_variable (outgoing
);
5935 /* Output a return insn if we are using one.
5936 Otherwise, let the rtl chain end here, to drop through
5937 into the epilogue. */
5942 emit_jump_insn (gen_return ());
5947 /* Fix up any gotos that jumped out to the outermost
5948 binding level of the function.
5949 Must follow emitting RETURN_LABEL. */
5951 /* If you have any cleanups to do at this point,
5952 and they need to create temporary variables,
5953 then you will lose. */
5954 expand_fixups (get_insns ());
5957 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5959 static int *prologue
;
5960 static int *epilogue
;
5962 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5963 or a single insn). */
5965 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
/* Allocates (on the obstack) an int array of INSN_UIDs for INSNS; the
   extra slot suggests a terminator entry (contains () below scans until
   vec[j] == 0).
   NOTE(review): the return type, braces, the vec declaration, the copy
   loop for the SEQUENCE case and the return statement were dropped
   from this listing -- confirm against the original source.  */
5967 record_insns (insns
)
5972 if (GET_CODE (insns
) == SEQUENCE
)
5974 int len
= XVECLEN (insns
, 0);
5975 vec
= (int *) oballoc ((len
+ 1) * sizeof (int));
5978 vec
[len
] = INSN_UID (XVECEXP (insns
, 0, len
));
5982 vec
= (int *) oballoc (2 * sizeof (int));
5983 vec
[0] = INSN_UID (insns
);
5989 /* Determine how many INSN_UIDs in VEC are part of INSN. */
/* VEC is a zero-terminated array of INSN_UIDs (built by record_insns).
   For a SEQUENCE insn, every element of the sequence is checked;
   otherwise INSN itself is checked.
   NOTE(review): return type, braces, local declarations (i, j, count)
   and the count/return statements were dropped from this listing.  */
5992 contains (insn
, vec
)
5998 if (GET_CODE (insn
) == INSN
5999 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
6002 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
6003 for (j
= 0; vec
[j
]; j
++)
6004 if (INSN_UID (XVECEXP (PATTERN (insn
), 0, i
)) == vec
[j
])
6010 for (j
= 0; vec
[j
]; j
++)
6011 if (INSN_UID (insn
) == vec
[j
])
6016 #endif /* HAVE_prologue || HAVE_epilogue */
6018 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6019 this into place with notes indicating where the prologue ends and where
6020 the epilogue begins. Update the basic block information when possible. */
/* NOTE(review): this listing is lossy -- the return type, braces,
   local declarations (seq, head, tail, tem, first_use, last_use) and
   several statements were dropped.  Code kept byte-identical; only
   comments added.  */
6023 thread_prologue_and_epilogue_insns (f
)
6026 #ifdef HAVE_prologue
6031 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6032 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6033 emit_note_after (NOTE_INSN_PROLOGUE_END
, f
);
6034 seq
= gen_prologue ();
6035 head
= emit_insn_after (seq
, f
);
6037 /* Include the new prologue insns in the first block. Ignore them
6038 if they form a basic block unto themselves. */
6039 if (basic_block_head
&& n_basic_blocks
6040 && GET_CODE (basic_block_head
[0]) != CODE_LABEL
)
6041 basic_block_head
[0] = NEXT_INSN (f
);
6043 /* Retain a map of the prologue insns. */
6044 prologue
= record_insns (GET_CODE (seq
) == SEQUENCE
? seq
: head
);
6050 #ifdef HAVE_epilogue
6053 rtx insn
= get_last_insn ();
6054 rtx prev
= prev_nonnote_insn (insn
);
6056 /* If we end with a BARRIER, we don't need an epilogue. */
6057 if (! (prev
&& GET_CODE (prev
) == BARRIER
))
6063 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6064 epilogue insns, the USE insns at the end of a function,
6065 the jump insn that returns, and then a BARRIER. */
6067 /* Move the USE insns at the end of a function onto a list. */
/* NOTE(review): the loop header (while/for over prev) preceding these
   conditions appears to have been dropped from this listing.  */
6069 && GET_CODE (prev
) == INSN
6070 && GET_CODE (PATTERN (prev
)) == USE
)
6073 prev
= prev_nonnote_insn (prev
);
/* Unlink TEM from the insn chain ...  */
6075 NEXT_INSN (PREV_INSN (tem
)) = NEXT_INSN (tem
);
6076 PREV_INSN (NEXT_INSN (tem
)) = PREV_INSN (tem
);
/* ... and push it onto the first_use list.  */
6079 NEXT_INSN (tem
) = first_use
;
6080 PREV_INSN (first_use
) = tem
;
6087 emit_barrier_after (insn
);
6089 seq
= gen_epilogue ();
6090 tail
= emit_jump_insn_after (seq
, insn
);
6092 /* Insert the USE insns immediately before the return insn, which
6093 must be the first instruction before the final barrier. */
6096 tem
= prev_nonnote_insn (get_last_insn ());
6097 NEXT_INSN (PREV_INSN (tem
)) = first_use
;
6098 PREV_INSN (first_use
) = PREV_INSN (tem
);
6099 PREV_INSN (tem
) = last_use
;
6100 NEXT_INSN (last_use
) = tem
;
6103 emit_note_after (NOTE_INSN_EPILOGUE_BEG
, insn
);
6105 /* Include the new epilogue insns in the last block. Ignore
6106 them if they form a basic block unto themselves. */
6107 if (basic_block_end
&& n_basic_blocks
6108 && GET_CODE (basic_block_end
[n_basic_blocks
- 1]) != JUMP_INSN
)
6109 basic_block_end
[n_basic_blocks
- 1] = tail
;
6111 /* Retain a map of the epilogue insns. */
6112 epilogue
= record_insns (GET_CODE (seq
) == SEQUENCE
? seq
: tail
);
6120 /* Reposition the prologue-end and epilogue-begin notes after instruction
6121 scheduling and delayed branch scheduling. */
/* NOTE(review): this listing is lossy -- the return type, braces,
   declarations of len/next/prev, the `if (prologue)'/`if (epilogue)'
   guards and some break statements were dropped.  Code kept
   byte-identical; only comments added.  */
6124 reposition_prologue_and_epilogue_notes (f
)
6127 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6128 /* Reposition the prologue and epilogue notes. */
6136 register rtx insn
, note
= 0;
6138 /* Scan from the beginning until we reach the last prologue insn.
6139 We apparently can't depend on basic_block_{head,end} after
/* LEN counts the entries of the zero-terminated prologue UID map;
   it is decremented below as each prologue insn is found.  */
6141 for (len
= 0; prologue
[len
]; len
++)
6143 for (insn
= f
; len
&& insn
; insn
= NEXT_INSN (insn
))
6145 if (GET_CODE (insn
) == NOTE
)
6147 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_PROLOGUE_END
)
6150 else if ((len
-= contains (insn
, prologue
)) == 0)
6152 /* Find the prologue-end note if we haven't already, and
6153 move it to just after the last prologue insn. */
6156 for (note
= insn
; (note
= NEXT_INSN (note
));)
6157 if (GET_CODE (note
) == NOTE
6158 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_PROLOGUE_END
)
/* Unlink the note and re-insert it right after INSN.  */
6161 next
= NEXT_INSN (note
);
6162 prev
= PREV_INSN (note
);
6164 NEXT_INSN (prev
) = next
;
6166 PREV_INSN (next
) = prev
;
6167 add_insn_after (note
, insn
);
6174 register rtx insn
, note
= 0;
6176 /* Scan from the end until we reach the first epilogue insn.
6177 We apparently can't depend on basic_block_{head,end} after
6179 for (len
= 0; epilogue
[len
]; len
++)
6181 for (insn
= get_last_insn (); len
&& insn
; insn
= PREV_INSN (insn
))
6183 if (GET_CODE (insn
) == NOTE
)
6185 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_EPILOGUE_BEG
)
6188 else if ((len
-= contains (insn
, epilogue
)) == 0)
6190 /* Find the epilogue-begin note if we haven't already, and
6191 move it to just before the first epilogue insn. */
6194 for (note
= insn
; (note
= PREV_INSN (note
));)
6195 if (GET_CODE (note
) == NOTE
6196 && NOTE_LINE_NUMBER (note
) == NOTE_INSN_EPILOGUE_BEG
)
/* Unlink the note and re-insert it just before INSN.  */
6199 next
= NEXT_INSN (note
);
6200 prev
= PREV_INSN (note
);
6202 NEXT_INSN (prev
) = next
;
6204 PREV_INSN (next
) = prev
;
6205 add_insn_after (note
, PREV_INSN (insn
));
6210 #endif /* HAVE_prologue or HAVE_epilogue */