/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases use the macro NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
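
/* For illustration, assuming the power-of-two requirement above holds:
   CEIL_ROUND (17, 8) == 24 and FLOOR_ROUND (17, 8) == 16, while for a
   negative value FLOOR_ROUND (-17, 8) == -24.  So both macros remain
   correct for the negative frame offsets that arise when the frame
   grows downward.  */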

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if this function has a computed goto.

   It is computed during find_basic_blocks or during stupid life
   analysis.  */

int current_function_has_computed_jump;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */

int current_function_sp_is_unchanging;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* Nonzero if instrumentation calls for function entry and exit should be
   generated.  */
int current_function_instrument_entry_exit;

/* Nonzero if memory access checking should be enabled in the current
   function.  */
int current_function_check_memory_usage;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* List (chain of EXPR_LIST) of stack slots that hold the current handlers
   for nonlocal gotos.  There is one for every nonlocal label in the function;
   this list matches the one in nonlocal_labels.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slots;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if not
   optimizing.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is
   requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if not
   optimizing.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
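
#if 0
/* A minimal sketch (not part of this file) of how a caller is expected
   to use the nesting-level protocol above for a statement-lifetime
   temporary; the function name is hypothetical.  */
static void
example_statement_temp ()
{
  rtx t;

  push_temp_slots ();
  /* KEEP == 0, so free_temp_slots may reclaim the slot.  */
  t = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
  /* ... emit insns that use T here ... */
  free_temp_slots ();
  pop_temp_slots ();
}
#endif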
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
                                            int, struct function *));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;
  p->instrument_entry_exit = current_function_instrument_entry_exit;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;
  current_function_instrument_entry_exit = p->instrument_entry_exit;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
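
#if 0
/* A minimal sketch (not part of this file) of how a language front end
   pairs the calls above around compilation of a nested function; the
   function name is hypothetical.  */
static void
example_nested_function_compilation ()
{
  push_function_context ();
  /* ... generate and emit RTL for the nested function ... */
  pop_function_context ();
}
#endif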
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
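
#if 0
/* A minimal sketch (not part of this file) of the ALIGN conventions
   documented above; the variable names are hypothetical.  */
static void
example_stack_slots ()
{
  /* ALIGN == 0: align according to MODE.  */
  rtx word_slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
  /* ALIGN == -1: use BIGGEST_ALIGNMENT and round the size to match.  */
  rtx blk_slot = assign_stack_local (BLKmode, 16, -1);
  /* ALIGN > 0: an explicit boundary, in bits.  */
  rtx byte_slot = assign_stack_local (QImode, 1, BITS_PER_UNIT);
}
#endif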

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
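
#if 0
/* A minimal sketch (not part of this file) of the two outcomes above;
   the variable names are hypothetical.  */
static void
example_assign_temp ()
{
  /* A small scalar with MEMORY_REQUIRED == 0 normally yields a pseudo
     REG, possibly in a promoted mode.  */
  rtx r = assign_temp (integer_type_node, 0, 0, 0);
  /* MEMORY_REQUIRED == 1 forces an addressable stack MEM instead.  */
  rtx m = assign_temp (integer_type_node, 0, 1, 0);
}
#endif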
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless
     optimizing at high levels.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
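
/* For illustration: if two free BLKmode slots were allocated back to
   back, say one with base_offset 0 and full_size 8 and another with
   base_offset 8 and full_size 8, the first test above fires and they
   merge into a single free slot with base_offset 0 and full_size 16,
   which a later assign_stack_temp can then reuse whole.  */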
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
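
#if 0
/* A minimal sketch (not part of this file): the GNU statement-expression
   below returns an aggregate whose value may sit in a temporary slot of
   the inner level; preserve_temp_slots keeps that slot alive for the
   enclosing statement instead of letting free_temp_slots reclaim it.
   `struct big' and `make_big' are hypothetical.  */
struct big x = ({ struct big tmp = make_big (); tmp; });
#endif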

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since such a temporary can
   be reused while generating the same RTL_EXPR, but this is complex
   and probably not worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl)
                            || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
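
#if 0
/* A minimal sketch (not part of this file) of source code that triggers
   put_var_into_stack: `i' is first expanded into a pseudo register, and
   only when its address is taken do we learn, belatedly, that it needs a
   stack home (or, when optimizing, an ADDRESSOF that may still be purged
   later).  */
int example_trigger ()
{
  int i = 42;
  int *p = &i;   /* address taken: i cannot stay in a pseudo */
  return *p;
}
#endif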
1512
1513 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1514 into the stack frame of FUNCTION (0 means the current function).
1515 DECL_MODE is the machine mode of the user-level data type.
1516 PROMOTED_MODE is the machine mode of the register.
1517 VOLATILE_P is nonzero if this is for a "volatile" decl.
1518 USED_P is nonzero if this reg might have already been used in an insn. */
1519
1520 static void
1521 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1522 original_regno, used_p)
1523 struct function *function;
1524 rtx reg;
1525 tree type;
1526 enum machine_mode promoted_mode, decl_mode;
1527 int volatile_p;
1528 int original_regno;
1529 int used_p;
1530 {
1531 rtx new = 0;
1532 int regno = original_regno;
1533
1534 if (regno == 0)
1535 regno = REGNO (reg);
1536
1537 if (function)
1538 {
1539 if (regno < function->max_parm_reg)
1540 new = function->parm_reg_stack_loc[regno];
1541 if (new == 0)
1542 new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
1543 0, function);
1544 }
1545 else
1546 {
1547 if (regno < max_parm_reg)
1548 new = parm_reg_stack_loc[regno];
1549 if (new == 0)
1550 new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
1551 }
1552
1553 PUT_MODE (reg, decl_mode);
1554 XEXP (reg, 0) = XEXP (new, 0);
1555 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1556 MEM_VOLATILE_P (reg) = volatile_p;
1557 PUT_CODE (reg, MEM);
1558
1559 /* If this is a memory ref that contains aggregate components,
1560 mark it as such for cse and loop optimize. If we are reusing a
1561 previously generated stack slot, then we need to copy the bit in
1562 case it was set for other reasons. For instance, it is set for
1563 __builtin_va_alist. */
1564 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);
1565 MEM_ALIAS_SET (reg) = get_alias_set (type);
1566
1567 /* Now make sure that all refs to the variable, previously made
1568 when it was a register, are fixed up to be valid again. */
1569
1570 if (used_p && function != 0)
1571 {
1572 struct var_refs_queue *temp;
1573
1574 /* Variable is inherited; fix it up when we get back to its function. */
1575 push_obstacks (function->function_obstack,
1576 function->function_maybepermanent_obstack);
1577
1578 /* See comment in restore_tree_status in tree.c for why this needs to be
1579 on saveable obstack. */
1580 temp
1581 = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
1582 temp->modified = reg;
1583 temp->promoted_mode = promoted_mode;
1584 temp->unsignedp = TREE_UNSIGNED (type);
1585 temp->next = function->fixup_var_refs_queue;
1586 function->fixup_var_refs_queue = temp;
1587 pop_obstacks ();
1588 }
1589 else if (used_p)
1590 /* Variable is local; fix it up now. */
1591 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
1592 }
1593 \f
1594 static void
1595 fixup_var_refs (var, promoted_mode, unsignedp)
1596 rtx var;
1597 enum machine_mode promoted_mode;
1598 int unsignedp;
1599 {
1600 tree pending;
1601 rtx first_insn = get_insns ();
1602 struct sequence_stack *stack = sequence_stack;
1603 tree rtl_exps = rtl_expr_chain;
1604
1605 /* Must scan all insns for stack-refs that exceed the limit. */
1606 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
1607
1608 /* Scan all pending sequences too. */
1609 for (; stack; stack = stack->next)
1610 {
1611 push_to_sequence (stack->first);
1612 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1613 stack->first, stack->next != 0);
1614 /* Update remembered end of sequence
1615 in case we added an insn at the end. */
1616 stack->last = get_last_insn ();
1617 end_sequence ();
1618 }
1619
1620 /* Scan all waiting RTL_EXPRs too. */
1621 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1622 {
1623 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1624 if (seq != const0_rtx && seq != 0)
1625 {
1626 push_to_sequence (seq);
1627 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
1628 end_sequence ();
1629 }
1630 }
1631 }
1632 \f
1633 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1634 some part of an insn. Return a struct fixup_replacement whose OLD
1635 value is equal to X. Allocate a new structure if no such entry exists. */
1636
1637 static struct fixup_replacement *
1638 find_fixup_replacement (replacements, x)
1639 struct fixup_replacement **replacements;
1640 rtx x;
1641 {
1642 struct fixup_replacement *p;
1643
1644 /* See if we have already replaced this. */
1645 for (p = *replacements; p && p->old != x; p = p->next)
1646 ;
1647
1648 if (p == 0)
1649 {
1650 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1651 p->old = x;
1652 p->new = 0;
1653 p->next = *replacements;
1654 *replacements = p;
1655 }
1656
1657 return p;
1658 }
1659
1660 /* Scan the insn-chain starting with INSN for refs to VAR
1661 and fix them up. TOPLEVEL is nonzero if this chain is the
1662 main chain of insns for the current function. */
1663
1664 static void
1665 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1666 rtx var;
1667 enum machine_mode promoted_mode;
1668 int unsignedp;
1669 rtx insn;
1670 int toplevel;
1671 {
1672 rtx call_dest = 0;
1673
1674 while (insn)
1675 {
1676 rtx next = NEXT_INSN (insn);
1677 rtx set, prev, prev_set;
1678 rtx note;
1679
1680 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1681 {
1682 /* If this is a CLOBBER of VAR, delete it.
1683
1684 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1685 and REG_RETVAL notes too. */
1686 if (GET_CODE (PATTERN (insn)) == CLOBBER
1687 && (XEXP (PATTERN (insn), 0) == var
1688 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1689 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1690 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1691 {
1692 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1693 /* The REG_LIBCALL note will go away since we are going to
1694 turn INSN into a NOTE, so just delete the
1695 corresponding REG_RETVAL note. */
1696 remove_note (XEXP (note, 0),
1697 find_reg_note (XEXP (note, 0), REG_RETVAL,
1698 NULL_RTX));
1699
1700 /* In unoptimized compilation, we shouldn't call delete_insn
1701 except in jump.c when doing warnings. */
1702 PUT_CODE (insn, NOTE);
1703 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1704 NOTE_SOURCE_FILE (insn) = 0;
1705 }
1706
1707 /* The insn to load VAR from a home in the arglist
1708 is now a no-op. When we see it, just delete it.
1709 Similarly if this is storing VAR from a register from which
1710 it was loaded in the previous insn. This will occur
1711 when an ADDRESSOF was made for an arglist slot. */
1712 else if (toplevel
1713 && (set = single_set (insn)) != 0
1714 && SET_DEST (set) == var
1715 /* If this represents the result of an insn group,
1716 don't delete the insn. */
1717 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1718 && (rtx_equal_p (SET_SRC (set), var)
1719 || (GET_CODE (SET_SRC (set)) == REG
1720 && (prev = prev_nonnote_insn (insn)) != 0
1721 && (prev_set = single_set (prev)) != 0
1722 && SET_DEST (prev_set) == SET_SRC (set)
1723 && rtx_equal_p (SET_SRC (prev_set), var))))
1724 {
1725 /* In unoptimized compilation, we shouldn't call delete_insn
1726 except in jump.c when doing warnings. */
1727 PUT_CODE (insn, NOTE);
1728 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1729 NOTE_SOURCE_FILE (insn) = 0;
1730 if (insn == last_parm_insn)
1731 last_parm_insn = PREV_INSN (next);
1732 }
1733 else
1734 {
1735 struct fixup_replacement *replacements = 0;
1736 rtx next_insn = NEXT_INSN (insn);
1737
1738 if (SMALL_REGISTER_CLASSES)
1739 {
1740 /* If the insn that copies the results of a CALL_INSN
1741 into a pseudo now references VAR, we have to use an
1742 intermediate pseudo since we want the life of the
1743 return value register to be only a single insn.
1744
1745 If we don't use an intermediate pseudo, such things as
1746 address computations to make the address of VAR valid
1747 (if it is not already) could be placed between the CALL_INSN and INSN.
1748
1749 To make sure this doesn't happen, we record the destination
1750 of the CALL_INSN and see if the next insn uses both that
1751 and VAR. */
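		  /* For illustration (a sketch; R0 stands for the return
		     value hard register and TMP is hypothetical): given

			(call_insn ... (set (reg r0) (call ...)))
			(insn (set (reg pseudo) (plus (reg r0) (mem VAR-slot))))

		     we emit (set (reg tmp) (reg r0)) just before the second
		     insn and rewrite it to use TMP, so that R0 stays live
		     only across the single copy insn.  */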
1752
1753 if (call_dest != 0 && GET_CODE (insn) == INSN
1754 && reg_mentioned_p (var, PATTERN (insn))
1755 && reg_mentioned_p (call_dest, PATTERN (insn)))
1756 {
1757 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1758
1759 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1760
1761 PATTERN (insn) = replace_rtx (PATTERN (insn),
1762 call_dest, temp);
1763 }
1764
1765 if (GET_CODE (insn) == CALL_INSN
1766 && GET_CODE (PATTERN (insn)) == SET)
1767 call_dest = SET_DEST (PATTERN (insn));
1768 else if (GET_CODE (insn) == CALL_INSN
1769 && GET_CODE (PATTERN (insn)) == PARALLEL
1770 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1771 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1772 else
1773 call_dest = 0;
1774 }
1775
1776 /* See if we have to do anything to INSN now that VAR is in
1777 memory. If it needs to be loaded into a pseudo, use a single
1778 pseudo for the entire insn in case there is a MATCH_DUP
1779 between two operands. We pass a pointer to the head of
1780 a list of struct fixup_replacements. If fixup_var_refs_1
1781 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1782 it will record them in this list.
1783
1784 If it allocated a pseudo for any replacement, we copy into
1785 it here. */
1786
1787 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1788 &replacements);
1789
1790 /* If this is last_parm_insn, and any instructions were output
1791 after it to fix it up, then we must set last_parm_insn to
1792 the last such instruction emitted. */
1793 if (insn == last_parm_insn)
1794 last_parm_insn = PREV_INSN (next_insn);
1795
1796 while (replacements)
1797 {
1798 if (GET_CODE (replacements->new) == REG)
1799 {
1800 rtx insert_before;
1801 rtx seq;
1802
1803 /* OLD might be a (subreg (mem)). */
1804 if (GET_CODE (replacements->old) == SUBREG)
1805 replacements->old
1806 = fixup_memory_subreg (replacements->old, insn, 0);
1807 else
1808 replacements->old
1809 = fixup_stack_1 (replacements->old, insn);
1810
1811 insert_before = insn;
1812
1813 /* If we are changing the mode, do a conversion.
1814 This might be wasteful, but combine.c will
1815 eliminate much of the waste. */
1816
1817 if (GET_MODE (replacements->new)
1818 != GET_MODE (replacements->old))
1819 {
1820 start_sequence ();
1821 convert_move (replacements->new,
1822 replacements->old, unsignedp);
1823 seq = gen_sequence ();
1824 end_sequence ();
1825 }
1826 else
1827 seq = gen_move_insn (replacements->new,
1828 replacements->old);
1829
1830 emit_insn_before (seq, insert_before);
1831 }
1832
1833 replacements = replacements->next;
1834 }
1835 }
1836
1837 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1838 But don't touch other insns referred to by reg-notes;
1839 we will get them elsewhere. */
1840 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1841 if (GET_CODE (note) != INSN_LIST)
1842 XEXP (note, 0)
1843 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1844 }
1845 insn = next;
1846 }
1847 }
1848 \f
1849 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1850 See if the rtx expression at *LOC in INSN needs to be changed.
1851
1852 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1853 contain a list of original rtx's and replacements. If we find that we need
1854 to modify this insn by replacing a memory reference with a pseudo or by
1855 making a new MEM to implement a SUBREG, we consult that list to see if
1856 we have already chosen a replacement. If none has already been allocated,
1857 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1858 or the SUBREG, as appropriate, to the pseudo. */
1859
1860 static void
1861 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1862 register rtx var;
1863 enum machine_mode promoted_mode;
1864 register rtx *loc;
1865 rtx insn;
1866 struct fixup_replacement **replacements;
1867 {
1868 register int i;
1869 register rtx x = *loc;
1870 RTX_CODE code = GET_CODE (x);
1871 register char *fmt;
1872 register rtx tem, tem1;
1873 struct fixup_replacement *replacement;
1874
1875 switch (code)
1876 {
1877 case ADDRESSOF:
1878 if (XEXP (x, 0) == var)
1879 {
1880 /* Prevent sharing of rtl that might lose. */
1881 rtx sub = copy_rtx (XEXP (var, 0));
1882
1883 start_sequence ();
1884
1885 if (! validate_change (insn, loc, sub, 0))
1886 {
1887 rtx y = force_operand (sub, NULL_RTX);
1888
1889 if (! validate_change (insn, loc, y, 0))
1890 *loc = copy_to_reg (y);
1891 }
1892
1893 emit_insn_before (gen_sequence (), insn);
1894 end_sequence ();
1895 }
1896 return;
1897
1898 case MEM:
1899 if (var == x)
1900 {
1901 /* If we already have a replacement, use it. Otherwise,
1902 try to fix up this address in case it is invalid. */
1903
1904 replacement = find_fixup_replacement (replacements, var);
1905 if (replacement->new)
1906 {
1907 *loc = replacement->new;
1908 return;
1909 }
1910
1911 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1912
1913 /* Unless we are forcing memory to register or we changed the mode,
1914 we can leave things the way they are if the insn is valid. */
1915
1916 INSN_CODE (insn) = -1;
1917 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1918 && recog_memoized (insn) >= 0)
1919 return;
1920
1921 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1922 return;
1923 }
1924
1925 /* If X contains VAR, we need to unshare it here so that we update
1926 each occurrence separately. But all identical MEMs in one insn
1927 must be replaced with the same rtx because of the possibility of
1928 MATCH_DUPs. */
1929
1930 if (reg_mentioned_p (var, x))
1931 {
1932 replacement = find_fixup_replacement (replacements, x);
1933 if (replacement->new == 0)
1934 replacement->new = copy_most_rtx (x, var);
1935
1936 *loc = x = replacement->new;
1937 }
1938 break;
1939
1940 case REG:
1941 case CC0:
1942 case PC:
1943 case CONST_INT:
1944 case CONST:
1945 case SYMBOL_REF:
1946 case LABEL_REF:
1947 case CONST_DOUBLE:
1948 return;
1949
1950 case SIGN_EXTRACT:
1951 case ZERO_EXTRACT:
1952 /* Note that in some cases those types of expressions are altered
1953 by optimize_bit_field, and do not survive to get here. */
1954 if (XEXP (x, 0) == var
1955 || (GET_CODE (XEXP (x, 0)) == SUBREG
1956 && SUBREG_REG (XEXP (x, 0)) == var))
1957 {
1958 /* Get TEM as a valid MEM in the mode presently in the insn.
1959
1960 We don't worry about the possibility of MATCH_DUP here; it
1961 is highly unlikely and would be tricky to handle. */
1962
1963 tem = XEXP (x, 0);
1964 if (GET_CODE (tem) == SUBREG)
1965 {
1966 if (GET_MODE_BITSIZE (GET_MODE (tem))
1967 > GET_MODE_BITSIZE (GET_MODE (var)))
1968 {
1969 replacement = find_fixup_replacement (replacements, var);
1970 if (replacement->new == 0)
1971 replacement->new = gen_reg_rtx (GET_MODE (var));
1972 SUBREG_REG (tem) = replacement->new;
1973 }
1974 else
1975 tem = fixup_memory_subreg (tem, insn, 0);
1976 }
1977 else
1978 tem = fixup_stack_1 (tem, insn);
1979
1980 /* Unless we want to load from memory, get TEM into the proper mode
1981 for an extract from memory. This can only be done if the
1982 extract is at a constant position and length. */
1983
1984 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1985 && GET_CODE (XEXP (x, 2)) == CONST_INT
1986 && ! mode_dependent_address_p (XEXP (tem, 0))
1987 && ! MEM_VOLATILE_P (tem))
1988 {
1989 enum machine_mode wanted_mode = VOIDmode;
1990 enum machine_mode is_mode = GET_MODE (tem);
1991 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1992
1993 #ifdef HAVE_extzv
1994 if (GET_CODE (x) == ZERO_EXTRACT)
1995 {
1996 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1997 if (wanted_mode == VOIDmode)
1998 wanted_mode = word_mode;
1999 }
2000 #endif
2001 #ifdef HAVE_extv
2002 if (GET_CODE (x) == SIGN_EXTRACT)
2003 {
2004 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
2005 if (wanted_mode == VOIDmode)
2006 wanted_mode = word_mode;
2007 }
2008 #endif
2009 /* If we have a narrower mode, we can do something. */
2010 if (wanted_mode != VOIDmode
2011 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2012 {
2013 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2014 rtx old_pos = XEXP (x, 2);
2015 rtx newmem;
2016
2017 /* If the bytes and bits are counted differently, we
2018 must adjust the offset. */
2019 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2020 offset = (GET_MODE_SIZE (is_mode)
2021 - GET_MODE_SIZE (wanted_mode) - offset);
2022
2023 pos %= GET_MODE_BITSIZE (wanted_mode);
2024
2025 newmem = gen_rtx_MEM (wanted_mode,
2026 plus_constant (XEXP (tem, 0), offset));
2027 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2028 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2029 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2030
2031 /* Make the change and see if the insn remains valid. */
2032 INSN_CODE (insn) = -1;
2033 XEXP (x, 0) = newmem;
2034 XEXP (x, 2) = GEN_INT (pos);
2035
2036 if (recog_memoized (insn) >= 0)
2037 return;
2038
2039 /* Otherwise, restore old position. XEXP (x, 0) will be
2040 restored later. */
2041 XEXP (x, 2) = old_pos;
2042 }
2043 }
2044
2045 /* If we get here, the bitfield extract insn can't accept a memory
2046 reference. Copy the input into a register. */
2047
2048 tem1 = gen_reg_rtx (GET_MODE (tem));
2049 emit_insn_before (gen_move_insn (tem1, tem), insn);
2050 XEXP (x, 0) = tem1;
2051 return;
2052 }
2053 break;
2054
2055 case SUBREG:
2056 if (SUBREG_REG (x) == var)
2057 {
2058 /* If this is a special SUBREG made because VAR was promoted
2059 from a wider mode, replace it with VAR and call ourself
2060 recursively, this time saying that the object previously
2061 had its current mode (by virtue of the SUBREG). */
2062
2063 if (SUBREG_PROMOTED_VAR_P (x))
2064 {
2065 *loc = var;
2066 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2067 return;
2068 }
2069
2070 /* If this SUBREG makes VAR wider, it has become a paradoxical
2071 SUBREG with VAR in memory, but these aren't allowed at this
2072 stage of the compilation. So load VAR into a pseudo and take
2073 a SUBREG of that pseudo. */
2074 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2075 {
2076 replacement = find_fixup_replacement (replacements, var);
2077 if (replacement->new == 0)
2078 replacement->new = gen_reg_rtx (GET_MODE (var));
2079 SUBREG_REG (x) = replacement->new;
2080 return;
2081 }
2082
2083 /* See if we have already found a replacement for this SUBREG.
2084 If so, use it. Otherwise, make a MEM and see if the insn
2085 is recognized. If not, or if we should force MEM into a register,
2086 make a pseudo for this SUBREG. */
2087 replacement = find_fixup_replacement (replacements, x);
2088 if (replacement->new)
2089 {
2090 *loc = replacement->new;
2091 return;
2092 }
2093
2094 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2095
2096 INSN_CODE (insn) = -1;
2097 if (! flag_force_mem && recog_memoized (insn) >= 0)
2098 return;
2099
2100 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2101 return;
2102 }
2103 break;
2104
2105 case SET:
2106 /* First do special simplification of bit-field references. */
2107 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2108 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2109 optimize_bit_field (x, insn, 0);
2110 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2111 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2112 optimize_bit_field (x, insn, NULL_PTR);
2113
2114 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2115 into a register and then store it back out. */
2116 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2117 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2118 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2119 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2120 > GET_MODE_SIZE (GET_MODE (var))))
2121 {
2122 replacement = find_fixup_replacement (replacements, var);
2123 if (replacement->new == 0)
2124 replacement->new = gen_reg_rtx (GET_MODE (var));
2125
2126 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2127 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2128 }
2129
2130 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2131 insn into a pseudo and store the low part of the pseudo into VAR. */
2132 if (GET_CODE (SET_DEST (x)) == SUBREG
2133 && SUBREG_REG (SET_DEST (x)) == var
2134 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2135 > GET_MODE_SIZE (GET_MODE (var))))
2136 {
2137 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2138 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2139 tem)),
2140 insn);
2141 break;
2142 }
2143
2144 {
2145 rtx dest = SET_DEST (x);
2146 rtx src = SET_SRC (x);
2147 #ifdef HAVE_insv
2148 rtx outerdest = dest;
2149 #endif
2150
2151 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2152 || GET_CODE (dest) == SIGN_EXTRACT
2153 || GET_CODE (dest) == ZERO_EXTRACT)
2154 dest = XEXP (dest, 0);
2155
2156 if (GET_CODE (src) == SUBREG)
2157 src = XEXP (src, 0);
2158
2159 /* If VAR does not appear at the top level of the SET,
2160 just scan the lower levels of the tree. */
2161
2162 if (src != var && dest != var)
2163 break;
2164
2165 /* We will need to rerecognize this insn. */
2166 INSN_CODE (insn) = -1;
2167
2168 #ifdef HAVE_insv
2169 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2170 {
2171 /* Since this case will return, ensure we fixup all the
2172 operands here. */
2173 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2174 insn, replacements);
2175 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2176 insn, replacements);
2177 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2178 insn, replacements);
2179
2180 tem = XEXP (outerdest, 0);
2181
2182 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2183 that may appear inside a ZERO_EXTRACT.
2184 This was legitimate when the MEM was a REG. */
2185 if (GET_CODE (tem) == SUBREG
2186 && SUBREG_REG (tem) == var)
2187 tem = fixup_memory_subreg (tem, insn, 0);
2188 else
2189 tem = fixup_stack_1 (tem, insn);
2190
2191 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2192 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2193 && ! mode_dependent_address_p (XEXP (tem, 0))
2194 && ! MEM_VOLATILE_P (tem))
2195 {
2196 enum machine_mode wanted_mode;
2197 enum machine_mode is_mode = GET_MODE (tem);
2198 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2199
2200 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2201 if (wanted_mode == VOIDmode)
2202 wanted_mode = word_mode;
2203
2204 /* If we have a narrower mode, we can do something. */
2205 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2206 {
2207 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2208 rtx old_pos = XEXP (outerdest, 2);
2209 rtx newmem;
2210
2211 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2212 offset = (GET_MODE_SIZE (is_mode)
2213 - GET_MODE_SIZE (wanted_mode) - offset);
2214
2215 pos %= GET_MODE_BITSIZE (wanted_mode);
2216
2217 newmem = gen_rtx_MEM (wanted_mode,
2218 plus_constant (XEXP (tem, 0), offset));
2219 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2220 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2221 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2222
2223 /* Make the change and see if the insn remains valid. */
2224 INSN_CODE (insn) = -1;
2225 XEXP (outerdest, 0) = newmem;
2226 XEXP (outerdest, 2) = GEN_INT (pos);
2227
2228 if (recog_memoized (insn) >= 0)
2229 return;
2230
2231 /* Otherwise, restore old position. XEXP (outerdest, 0) will be
2232 restored later. */
2233 XEXP (outerdest, 2) = old_pos;
2234 }
2235 }
2236
2237 /* If we get here, the bit-field store doesn't allow memory
2238 or isn't located at a constant position. Load the value into
2239 a register, do the store, and put it back into memory. */
2240
2241 tem1 = gen_reg_rtx (GET_MODE (tem));
2242 emit_insn_before (gen_move_insn (tem1, tem), insn);
2243 emit_insn_after (gen_move_insn (tem, tem1), insn);
2244 XEXP (outerdest, 0) = tem1;
2245 return;
2246 }
2247 #endif
2248
2249 /* STRICT_LOW_PART is a no-op on memory references
2250 and it can cause combinations to be unrecognizable,
2251 so eliminate it. */
2252
2253 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2254 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2255
2256 /* A valid insn to copy VAR into or out of a register
2257 must be left alone, to avoid an infinite loop here.
2258 If the reference to VAR is by a subreg, fix that up,
2259 since SUBREG is not valid for a memref.
2260 Also fix up the address of the stack slot.
2261
2262 Note that we must not try to recognize the insn until
2263 after we know that we have valid addresses and no
2264 (subreg (mem ...) ...) constructs, since these interfere
2265 with determining the validity of the insn. */
2266
2267 if ((SET_SRC (x) == var
2268 || (GET_CODE (SET_SRC (x)) == SUBREG
2269 && SUBREG_REG (SET_SRC (x)) == var))
2270 && (GET_CODE (SET_DEST (x)) == REG
2271 || (GET_CODE (SET_DEST (x)) == SUBREG
2272 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2273 && GET_MODE (var) == promoted_mode
2274 && x == single_set (insn))
2275 {
2276 rtx pat;
2277
2278 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2279 if (replacement->new)
2280 SET_SRC (x) = replacement->new;
2281 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2282 SET_SRC (x) = replacement->new
2283 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2284 else
2285 SET_SRC (x) = replacement->new
2286 = fixup_stack_1 (SET_SRC (x), insn);
2287
2288 if (recog_memoized (insn) >= 0)
2289 return;
2290
2291 /* INSN is not valid, but we know that we want to
2292 copy SET_SRC (x) to SET_DEST (x) in some way. So
2293 we generate the move and see whether it requires more
2294 than one insn. If it does, we emit those insns and
2295 delete INSN. Otherwise, we can just replace the pattern
2296 of INSN; we have already verified above that INSN has
2297 no other function than to do X. */
2298
2299 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2300 if (GET_CODE (pat) == SEQUENCE)
2301 {
2302 emit_insn_after (pat, insn);
2303 PUT_CODE (insn, NOTE);
2304 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2305 NOTE_SOURCE_FILE (insn) = 0;
2306 }
2307 else
2308 PATTERN (insn) = pat;
2309
2310 return;
2311 }
2312
2313 if ((SET_DEST (x) == var
2314 || (GET_CODE (SET_DEST (x)) == SUBREG
2315 && SUBREG_REG (SET_DEST (x)) == var))
2316 && (GET_CODE (SET_SRC (x)) == REG
2317 || (GET_CODE (SET_SRC (x)) == SUBREG
2318 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2319 && GET_MODE (var) == promoted_mode
2320 && x == single_set (insn))
2321 {
2322 rtx pat;
2323
2324 if (GET_CODE (SET_DEST (x)) == SUBREG)
2325 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2326 else
2327 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2328
2329 if (recog_memoized (insn) >= 0)
2330 return;
2331
2332 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2333 if (GET_CODE (pat) == SEQUENCE)
2334 {
2335 emit_insn_after (pat, insn);
2336 PUT_CODE (insn, NOTE);
2337 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2338 NOTE_SOURCE_FILE (insn) = 0;
2339 }
2340 else
2341 PATTERN (insn) = pat;
2342
2343 return;
2344 }
2345
2346 /* Otherwise, storing into VAR must be handled specially
2347 by storing into a temporary and copying that into VAR
2348 with a new insn after this one. Note that this case
2349 will be used when storing into a promoted scalar since
2350 the insn will now have different modes on the input
2351 and output and hence will be invalid (except for the case
2352 of setting it to a constant, which does not need any
2353 change if it is valid). We generate extra code in that case,
2354 but combine.c will eliminate it. */
2355
2356 if (dest == var)
2357 {
2358 rtx temp;
2359 rtx fixeddest = SET_DEST (x);
2360
2361 /* A STRICT_LOW_PART around a MEM can be discarded. */
2362 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2363 fixeddest = XEXP (fixeddest, 0);
2364 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2365 if (GET_CODE (fixeddest) == SUBREG)
2366 {
2367 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2368 promoted_mode = GET_MODE (fixeddest);
2369 }
2370 else
2371 fixeddest = fixup_stack_1 (fixeddest, insn);
2372
2373 temp = gen_reg_rtx (promoted_mode);
2374
2375 emit_insn_after (gen_move_insn (fixeddest,
2376 gen_lowpart (GET_MODE (fixeddest),
2377 temp)),
2378 insn);
2379
2380 SET_DEST (x) = temp;
2381 }
2382 }
2383
2384 default:
2385 break;
2386 }
2387
2388 /* Nothing special about this RTX; fix its operands. */
2389
2390 fmt = GET_RTX_FORMAT (code);
2391 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2392 {
2393 if (fmt[i] == 'e')
2394 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2395 if (fmt[i] == 'E')
2396 {
2397 register int j;
2398 for (j = 0; j < XVECLEN (x, i); j++)
2399 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2400 insn, replacements);
2401 }
2402 }
2403 }
2404 \f
2405 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2406 return an rtx (MEM:m1 newaddr) which is equivalent.
2407 If any insns must be emitted to compute NEWADDR, put them before INSN.
2408
2409 UNCRITICAL nonzero means accept paradoxical subregs.
2410 This is used for subregs found inside REG_NOTES. */
2411
2412 static rtx
2413 fixup_memory_subreg (x, insn, uncritical)
2414 rtx x;
2415 rtx insn;
2416 int uncritical;
2417 {
2418 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2419 rtx addr = XEXP (SUBREG_REG (x), 0);
2420 enum machine_mode mode = GET_MODE (x);
2421 rtx result;
2422
2423 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2424 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2425 && ! uncritical)
2426 abort ();
2427
2428 if (BYTES_BIG_ENDIAN)
2429 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2430 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2431 addr = plus_constant (addr, offset);
2432 if (!flag_force_addr && memory_address_p (mode, addr))
2433 /* Shortcut if no insns need be emitted. */
2434 return change_address (SUBREG_REG (x), mode, addr);
2435 start_sequence ();
2436 result = change_address (SUBREG_REG (x), mode, addr);
2437 emit_insn_before (gen_sequence (), insn);
2438 end_sequence ();
2439 return result;
2440 }
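
/* For illustration (a sketch assuming UNITS_PER_WORD is 4): given

	(subreg:SI (mem:DI addr) 1)

   fixup_memory_subreg returns

	(mem:SI (plus addr (const_int 4)))

   emitting before INSN any insns needed to compute the new address.  */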
2441
2442 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2443 Replace subexpressions of X in place.
2444 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2445 Otherwise return X, with its contents possibly altered.
2446
2447 If any insns must be emitted to compute NEWADDR, put them before INSN.
2448
2449 UNCRITICAL is as in fixup_memory_subreg. */
2450
2451 static rtx
2452 walk_fixup_memory_subreg (x, insn, uncritical)
2453 register rtx x;
2454 rtx insn;
2455 int uncritical;
2456 {
2457 register enum rtx_code code;
2458 register char *fmt;
2459 register int i;
2460
2461 if (x == 0)
2462 return 0;
2463
2464 code = GET_CODE (x);
2465
2466 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2467 return fixup_memory_subreg (x, insn, uncritical);
2468
2469 /* Nothing special about this RTX; fix its operands. */
2470
2471 fmt = GET_RTX_FORMAT (code);
2472 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2473 {
2474 if (fmt[i] == 'e')
2475 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2476 if (fmt[i] == 'E')
2477 {
2478 register int j;
2479 for (j = 0; j < XVECLEN (x, i); j++)
2480 XVECEXP (x, i, j)
2481 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2482 }
2483 }
2484 return x;
2485 }
2486 \f
2487 /* For each memory ref within X, if it refers to a stack slot
2488 with an out of range displacement, put the address in a temp register
2489 (emitting new insns before INSN to load these registers)
2490 and alter the memory ref to use that register.
2491 Replace each such MEM rtx with a copy, to avoid clobberage. */
2492
2493 static rtx
2494 fixup_stack_1 (x, insn)
2495 rtx x;
2496 rtx insn;
2497 {
2498 register int i;
2499 register RTX_CODE code = GET_CODE (x);
2500 register char *fmt;
2501
2502 if (code == MEM)
2503 {
2504 register rtx ad = XEXP (x, 0);
2505 /* If we have address of a stack slot but it's not valid
2506 (displacement is too large), compute the sum in a register. */
2507 if (GET_CODE (ad) == PLUS
2508 && GET_CODE (XEXP (ad, 0)) == REG
2509 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2510 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2511 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2512 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2513 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2514 #endif
2515 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2516 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2517 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2518 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2519 {
2520 rtx temp, seq;
2521 if (memory_address_p (GET_MODE (x), ad))
2522 return x;
2523
2524 start_sequence ();
2525 temp = copy_to_reg (ad);
2526 seq = gen_sequence ();
2527 end_sequence ();
2528 emit_insn_before (seq, insn);
2529 return change_address (x, VOIDmode, temp);
2530 }
2531 return x;
2532 }
2533
2534 fmt = GET_RTX_FORMAT (code);
2535 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2536 {
2537 if (fmt[i] == 'e')
2538 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2539 if (fmt[i] == 'E')
2540 {
2541 register int j;
2542 for (j = 0; j < XVECLEN (x, i); j++)
2543 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2544 }
2545 }
2546 return x;
2547 }
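
/* For illustration (the displacement 40000 is hypothetical): on a machine
   whose addressing modes accept only small displacements,

	(mem:SI (plus (reg fp) (const_int 40000)))

   fails memory_address_p, so the sum is copied into a new pseudo before
   INSN and the result is (mem:SI (reg tmp)) instead.  */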
2548 \f
2549 /* Optimization: a bit-field instruction whose field
2550 happens to be a byte or halfword in memory
2551 can be changed to a move instruction.
2552
2553 We call here when INSN is an insn to examine or store into a bit-field.
2554 BODY is the SET-rtx to be altered.
2555
2556 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2557 (Currently this is called only from function.c, and EQUIV_MEM
2558 is always 0.) */
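
/* For illustration (a sketch assuming 8-bit bytes and matching bit and
   byte endianness): an extraction such as

	(set (reg:SI r) (zero_extract:SI (mem:SI addr)
					 (const_int 8) (const_int 8)))

   refers to one whole, aligned byte, so it can become a plain QImode
   load from

	(mem:QI (plus addr (const_int 1)))

   followed, if the destination is wider, by a conversion into its mode.  */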
2559
2560 static void
2561 optimize_bit_field (body, insn, equiv_mem)
2562 rtx body;
2563 rtx insn;
2564 rtx *equiv_mem;
2565 {
2566 register rtx bitfield;
2567 int destflag;
2568 rtx seq = 0;
2569 enum machine_mode mode;
2570
2571 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2572 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2573 bitfield = SET_DEST (body), destflag = 1;
2574 else
2575 bitfield = SET_SRC (body), destflag = 0;
2576
2577 /* First check that the field being stored has constant size and position
2578 and is in fact a byte or halfword suitably aligned. */
2579
2580 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2581 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2582 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2583 != BLKmode)
2584 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2585 {
2586 register rtx memref = 0;
2587
2588 /* Now check that the containing word is memory, not a register,
2589 and that it is safe to change the machine mode. */
2590
2591 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2592 memref = XEXP (bitfield, 0);
2593 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2594 && equiv_mem != 0)
2595 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2596 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2597 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2598 memref = SUBREG_REG (XEXP (bitfield, 0));
2599 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2600 && equiv_mem != 0
2601 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2602 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2603
2604 if (memref
2605 && ! mode_dependent_address_p (XEXP (memref, 0))
2606 && ! MEM_VOLATILE_P (memref))
2607 {
2608 /* Now adjust the address, first for any subreg'ing
2609 that we are now getting rid of,
2610 and then for which byte of the word is wanted. */
2611
2612 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2613 rtx insns;
2614
2615 /* Adjust OFFSET to count bits from low-address byte. */
2616 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2617 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2618 - offset - INTVAL (XEXP (bitfield, 1)));
2619
2620 /* Adjust OFFSET to count bytes from low-address byte. */
2621 offset /= BITS_PER_UNIT;
2622 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2623 {
2624 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2625 if (BYTES_BIG_ENDIAN)
2626 offset -= (MIN (UNITS_PER_WORD,
2627 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2628 - MIN (UNITS_PER_WORD,
2629 GET_MODE_SIZE (GET_MODE (memref))));
2630 }
2631
2632 start_sequence ();
2633 memref = change_address (memref, mode,
2634 plus_constant (XEXP (memref, 0), offset));
2635 insns = get_insns ();
2636 end_sequence ();
2637 emit_insns_before (insns, insn);
2638
2639 /* Store this memory reference where
2640 we found the bit field reference. */
2641
2642 if (destflag)
2643 {
2644 validate_change (insn, &SET_DEST (body), memref, 1);
2645 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2646 {
2647 rtx src = SET_SRC (body);
2648 while (GET_CODE (src) == SUBREG
2649 && SUBREG_WORD (src) == 0)
2650 src = SUBREG_REG (src);
2651 if (GET_MODE (src) != GET_MODE (memref))
2652 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2653 validate_change (insn, &SET_SRC (body), src, 1);
2654 }
2655 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2656 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2657 /* This shouldn't happen because anything that didn't have
2658 one of these modes should have got converted explicitly
2659 and then referenced through a subreg.
2660 This is so because the original bit-field was
2661 handled by agg_mode and so its tree structure had
2662 the same mode that memref now has. */
2663 abort ();
2664 }
2665 else
2666 {
2667 rtx dest = SET_DEST (body);
2668
2669 while (GET_CODE (dest) == SUBREG
2670 && SUBREG_WORD (dest) == 0
2671 && (GET_MODE_CLASS (GET_MODE (dest))
2672 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2673 dest = SUBREG_REG (dest);
2674
2675 validate_change (insn, &SET_DEST (body), dest, 1);
2676
2677 if (GET_MODE (dest) == GET_MODE (memref))
2678 validate_change (insn, &SET_SRC (body), memref, 1);
2679 else
2680 {
2681 /* Convert the mem ref to the destination mode. */
2682 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2683
2684 start_sequence ();
2685 convert_move (newreg, memref,
2686 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2687 seq = get_insns ();
2688 end_sequence ();
2689
2690 validate_change (insn, &SET_SRC (body), newreg, 1);
2691 }
2692 }
2693
2694 /* See if we can convert this extraction or insertion into
2695 a simple move insn. We might not be able to do so if this
2696 was, for example, part of a PARALLEL.
2697
2698 If we succeed, write out any needed conversions. If we fail,
2699 it is hard to guess why we failed, so don't do anything
2700 special; just let the optimization be suppressed. */
2701
2702 if (apply_change_group () && seq)
2703 emit_insns_before (seq, insn);
2704 }
2705 }
2706 }
2707 \f
2708 /* These routines are responsible for converting virtual register references
2709 to the actual hard register references once RTL generation is complete.
2710
2711 The following five variables are used for communication between the
2712 routines. They contain the offsets of the virtual registers from their
2713 respective hard registers. */
2714
2715 static int in_arg_offset;
2716 static int var_offset;
2717 static int dynamic_offset;
2718 static int out_arg_offset;
2719 static int cfa_offset;
2720
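/* For illustration (a sketch): a frame reference created during RTL
   generation as

	(mem:SI (plus (reg virtual-stack-vars) (const_int 8)))

   is rewritten here, once the frame layout is known, into

	(mem:SI (plus (reg frame-pointer) (const_int 8 + var_offset)))

   and likewise for the other virtual registers and their offsets.  */
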
2721 /* On most machines, the stack pointer register is equivalent to the bottom
2722 of the stack. */
2723
2724 #ifndef STACK_POINTER_OFFSET
2725 #define STACK_POINTER_OFFSET 0
2726 #endif
2727
2728 /* If not defined, pick an appropriate default for the offset of dynamically
2729 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2730 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2731
2732 #ifndef STACK_DYNAMIC_OFFSET
2733
2734 #ifdef ACCUMULATE_OUTGOING_ARGS
2735 /* The bottom of the stack points to the actual arguments. If
2736 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2737 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2738 stack space for register parameters is not pushed by the caller, but is
2739 instead part of the fixed stack areas and hence not included in
2740 `current_function_outgoing_args_size'. Nevertheless, we must allow
2741 for it when allocating dynamic stack objects. */
2742
2743 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2744 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2745 (current_function_outgoing_args_size \
2746 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2747
2748 #else
2749 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2750 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2751 #endif
2752
2753 #else
2754 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2755 #endif
2756 #endif
2757
2758 /* On a few machines, the CFA coincides with the arg pointer. */
2759
2760 #ifndef ARG_POINTER_CFA_OFFSET
2761 #define ARG_POINTER_CFA_OFFSET 0
2762 #endif
2763
2764
2765 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2766 its address taken. DECL is the decl for the object stored in the
2767 register, for later use if we do need to force REG into the stack.
2768 REG is overwritten by the MEM, as in put_reg_into_stack. */
2769
2770 rtx
2771 gen_mem_addressof (reg, decl)
2772 rtx reg;
2773 tree decl;
2774 {
2775 tree type = TREE_TYPE (decl);
2776 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2777 SET_ADDRESSOF_DECL (r, decl);
2778 /* If the original REG was a user-variable, then so is the REG whose
2779 address is being taken. */
2780 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2781
2782 XEXP (reg, 0) = r;
2783 PUT_CODE (reg, MEM);
2784 PUT_MODE (reg, DECL_MODE (decl));
2785 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2786 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2787 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2788
2789 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2790 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2791
2792 return reg;
2793 }
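
/* For illustration: after gen_mem_addressof, DECL_RTL for the variable
   has the shape

	(mem (addressof:Pmode (reg NEW-PSEUDO) ORIG-REGNO))

   where the MEM has DECL_MODE (decl) and ORIG-REGNO remembers the
   register that was overwritten, for put_addressof_into_stack to use if
   the address is later forced into memory.  */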
2794
2795 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2796
2797 void
2798 flush_addressof (decl)
2799 tree decl;
2800 {
2801 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2802 && DECL_RTL (decl) != 0
2803 && GET_CODE (DECL_RTL (decl)) == MEM
2804 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2805 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2806 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2807 }
2808
2809 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2810
2811 static void
2812 put_addressof_into_stack (r)
2813 rtx r;
2814 {
2815 tree decl = ADDRESSOF_DECL (r);
2816 rtx reg = XEXP (r, 0);
2817
2818 if (GET_CODE (reg) != REG)
2819 abort ();
2820
2821 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2822 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2823 ADDRESSOF_REGNO (r),
2824 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2825 }
2826
2827 /* List of replacements made below in purge_addressof_1 when creating
2828 bitfield insertions. */
2829 static rtx purge_addressof_replacements;
2830
2831 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2832 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs
2833 into the stack. STORE is nonzero if *LOC is the destination of a SET. */
2834
2835 static void
2836 purge_addressof_1 (loc, insn, force, store)
2837 rtx *loc;
2838 rtx insn;
2839 int force, store;
2840 {
2841 rtx x;
2842 RTX_CODE code;
2843 int i, j;
2844 char *fmt;
2845
2846 /* Re-start here to avoid recursion in common cases. */
2847 restart:
2848
2849 x = *loc;
2850 if (x == 0)
2851 return;
2852
2853 code = GET_CODE (x);
2854
2855 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2856 {
2857 rtx insns;
2858 /* We must create a copy of the rtx because it was created by
2859 overwriting a REG rtx which is always shared. */
2860 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2861
2862 if (validate_change (insn, loc, sub, 0))
2863 return;
2864
2865 start_sequence ();
2866 if (! validate_change (insn, loc,
2867 force_operand (sub, NULL_RTX),
2868 0))
2869 abort ();
2870
2871 insns = gen_sequence ();
2872 end_sequence ();
2873 emit_insn_before (insns, insn);
2874 return;
2875 }
2876 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2877 {
2878 rtx sub = XEXP (XEXP (x, 0), 0);
2879
2880 if (GET_CODE (sub) == MEM)
2881 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2882
2883 if (GET_CODE (sub) == REG
2884 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2885 {
2886 put_addressof_into_stack (XEXP (x, 0));
2887 return;
2888 }
2889 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2890 {
2891 int size_x, size_sub;
2892
2893 if (!insn)
2894 {
2895 /* When processing REG_NOTES look at the list of
2896 replacements done on the insn to find the register that X
2897 was replaced by. */
2898 rtx tem;
2899
2900 for (tem = purge_addressof_replacements; tem != NULL_RTX;
2901 tem = XEXP (XEXP (tem, 1), 1))
2902 if (rtx_equal_p (x, XEXP (tem, 0)))
2903 {
2904 *loc = XEXP (XEXP (tem, 1), 0);
2905 return;
2906 }
2907
2908 /* There should always be such a replacement. */
2909 abort ();
2910 }
2911
2912 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2913 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
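
	  /* For illustration: if X is (mem:QI (addressof (reg:SI N))), SUB
	     is (reg:SI N), so SIZE_X is 8 and SIZE_SUB is 32; we mimic the
	     byte-sized memory access with an 8-bit bit-field extract or
	     insert at bit position 0 of the register.  */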
2914
2915 /* Don't even consider working with paradoxical subregs,
2916 or the moral equivalent seen here. */
2917 if (size_x <= size_sub
2918 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2919 {
2920 /* Do a bitfield insertion to mirror what would happen
2921 in memory. */
2922
2923 rtx val, seq;
2924
2925 if (store)
2926 {
2927 rtx p;
2928
2929 start_sequence ();
2930 val = gen_reg_rtx (GET_MODE (x));
2931 if (! validate_change (insn, loc, val, 0))
2932 {
2933 /* Discard the current sequence and put the
2934 ADDRESSOF on stack. */
2935 end_sequence ();
2936 goto give_up;
2937 }
2938 seq = gen_sequence ();
2939 end_sequence ();
2940 emit_insn_before (seq, insn);
2941
2942 start_sequence ();
2943 store_bit_field (sub, size_x, 0, GET_MODE (x),
2944 val, GET_MODE_SIZE (GET_MODE (sub)),
2945 GET_MODE_SIZE (GET_MODE (sub)));
2946
2947 /* Make sure to unshare any shared rtl that store_bit_field
2948 might have created. */
2949 for (p = get_insns(); p; p = NEXT_INSN (p))
2950 {
2951 reset_used_flags (PATTERN (p));
2952 reset_used_flags (REG_NOTES (p));
2953 reset_used_flags (LOG_LINKS (p));
2954 }
2955 unshare_all_rtl (get_insns ());
2956
2957 seq = gen_sequence ();
2958 end_sequence ();
2959 emit_insn_after (seq, insn);
2960 }
2961 else
2962 {
2963 start_sequence ();
2964 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2965 GET_MODE (x), GET_MODE (x),
2966 GET_MODE_SIZE (GET_MODE (sub)),
2967 GET_MODE_SIZE (GET_MODE (sub)));
2968
2969 if (! validate_change (insn, loc, val, 0))
2970 {
2971 /* Discard the current sequence and put the
2972 ADDRESSOF on stack. */
2973 end_sequence ();
2974 goto give_up;
2975 }
2976
2977 seq = gen_sequence ();
2978 end_sequence ();
2979 emit_insn_before (seq, insn);
2980 }
2981
2982 /* Remember the replacement so that the same one can be done
2983 on the REG_NOTES. */
2984 purge_addressof_replacements
2985 = gen_rtx_EXPR_LIST (VOIDmode, x,
2986 gen_rtx_EXPR_LIST (VOIDmode, val,
2987 purge_addressof_replacements));
2988
2989 /* We replaced with a reg -- all done. */
2990 return;
2991 }
2992 }
2993 else if (validate_change (insn, loc, sub, 0))
2994 goto restart;
2995 give_up:;
2996 /* Else give up and put it into the stack. */
2997 }
2998 else if (code == ADDRESSOF)
2999 {
3000 put_addressof_into_stack (x);
3001 return;
3002 }
3003 else if (code == SET)
3004 {
3005 purge_addressof_1 (&SET_DEST (x), insn, force, 1);
3006 purge_addressof_1 (&SET_SRC (x), insn, force, 0);
3007 return;
3008 }
3009
3010 /* Scan all subexpressions. */
3011 fmt = GET_RTX_FORMAT (code);
3012 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3013 {
3014 if (*fmt == 'e')
3015 purge_addressof_1 (&XEXP (x, i), insn, force, 0);
3016 else if (*fmt == 'E')
3017 for (j = 0; j < XVECLEN (x, i); j++)
3018 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
3019 }
3020 }
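
/* For illustration (register number 117 is hypothetical): in the common
   case an operand

	(mem:SI (addressof:SI (reg:SI 117) 117))

   whose mode matches the register's simply becomes (reg:SI 117) via
   validate_change, with no new insns; only the harder cases above fall
   back to putting the register on the stack.  */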
3021
3022 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3023 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3024 stack. */
3025
3026 void
3027 purge_addressof (insns)
3028 rtx insns;
3029 {
3030 rtx insn;
3031 for (insn = insns; insn; insn = NEXT_INSN (insn))
3032 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3033 || GET_CODE (insn) == CALL_INSN)
3034 {
3035 purge_addressof_1 (&PATTERN (insn), insn,
3036 asm_noperands (PATTERN (insn)) > 0, 0);
3037 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
3038 }
3039 purge_addressof_replacements = 0;
3040 }
3041 \f
3042 /* Pass through the INSNS of function FNDECL and convert virtual register
3043 references to hard register references. */
3044
3045 void
3046 instantiate_virtual_regs (fndecl, insns)
3047 tree fndecl;
3048 rtx insns;
3049 {
3050 rtx insn;
3051 int i;
3052
3053 /* Compute the offsets to use for this function. */
3054 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3055 var_offset = STARTING_FRAME_OFFSET;
3056 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3057 out_arg_offset = STACK_POINTER_OFFSET;
3058 cfa_offset = ARG_POINTER_CFA_OFFSET;
3059
3060 /* Scan all variables and parameters of this function. For each that is
3061 in memory, instantiate all virtual registers if the result is a valid
3062 address. If not, we do it later. That will handle most uses of virtual
3063 regs on many machines. */
3064 instantiate_decls (fndecl, 1);
3065
3066 /* Initialize recognition, indicating that volatile is OK. */
3067 init_recog ();
3068
3069 /* Scan through all the insns, instantiating every virtual register still
3070 present. */
3071 for (insn = insns; insn; insn = NEXT_INSN (insn))
3072 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3073 || GET_CODE (insn) == CALL_INSN)
3074 {
3075 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3076 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3077 }
3078
3079 /* Instantiate the stack slots for the parm registers, for later use in
3080 addressof elimination. */
3081 for (i = 0; i < max_parm_reg; ++i)
3082 if (parm_reg_stack_loc[i])
3083 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3084
3085 /* Now instantiate the remaining register equivalences for debugging info.
3086 These will not be valid addresses. */
3087 instantiate_decls (fndecl, 0);
3088
3089 /* Indicate that, from now on, assign_stack_local should use
3090 frame_pointer_rtx. */
3091 virtuals_instantiated = 1;
3092 }
3093
3094 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3095 all virtual registers in their DECL_RTL's.
3096
3097 If VALID_ONLY, do this only if the resulting address is still valid.
3098 Otherwise, always do it. */
3099
3100 static void
3101 instantiate_decls (fndecl, valid_only)
3102 tree fndecl;
3103 int valid_only;
3104 {
3105 tree decl;
3106
3107 if (DECL_SAVED_INSNS (fndecl))
3108 /* When compiling an inline function, the obstack used for
3109 rtl allocation is the maybepermanent_obstack. Calling
3110 `resume_temporary_allocation' switches us back to that
3111 obstack while we process this function's parameters. */
3112 resume_temporary_allocation ();
3113
3114 /* Process all parameters of the function. */
3115 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3116 {
3117 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3118
3119 instantiate_decl (DECL_RTL (decl), size, valid_only);
3120
3121 /* If the parameter was promoted, then the incoming RTL mode may be
3122 larger than the declared type size. We must use the larger of
3123 the two sizes. */
3124 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3125 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3126 }
3127
3128 /* Now process all variables defined in the function or its subblocks. */
3129 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3130
3131 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3132 {
3133 /* Save all rtl allocated for this function by raising the
3134 high-water mark on the maybepermanent_obstack. */
3135 preserve_data ();
3136 /* All further rtl allocation is now done in the current_obstack. */
3137 rtl_in_current_obstack ();
3138 }
3139 }
3140
3141 /* Subroutine of instantiate_decls: Process all decls in the given
3142 BLOCK node and all its subblocks. */
3143
3144 static void
3145 instantiate_decls_1 (let, valid_only)
3146 tree let;
3147 int valid_only;
3148 {
3149 tree t;
3150
3151 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3152 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3153 valid_only);
3154
3155 /* Process all subblocks. */
3156 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3157 instantiate_decls_1 (t, valid_only);
3158 }
3159
3160 /* Subroutine of the preceding procedures: Given RTL representing a
3161 decl and the size of the object, do any instantiation required.
3162
3163 If VALID_ONLY is non-zero, it means that the RTL should only be
3164 changed if the new address is valid. */
3165
3166 static void
3167 instantiate_decl (x, size, valid_only)
3168 rtx x;
3169 int size;
3170 int valid_only;
3171 {
3172 enum machine_mode mode;
3173 rtx addr;
3174
3175 /* If this is not a MEM, no need to do anything. Similarly if the
3176 address is a constant or a register that is not a virtual register. */
3177
3178 if (x == 0 || GET_CODE (x) != MEM)
3179 return;
3180
3181 addr = XEXP (x, 0);
3182 if (CONSTANT_P (addr)
3183 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3184 || (GET_CODE (addr) == REG
3185 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3186 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3187 return;
3188
3189 /* If we should only do this if the address is valid, copy the address.
3190 We need to do this so we can undo any changes that might make the
3191 address invalid. This copy is unfortunate, but probably can't be
3192 avoided. */
3193
3194 if (valid_only)
3195 addr = copy_rtx (addr);
3196
3197 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3198
3199 if (valid_only)
3200 {
3201 /* Now verify that the resulting address is valid for every integer or
3202 floating-point mode up to and including SIZE bytes long. We do this
3203 since the object might be accessed in any mode and frame addresses
3204 are shared. */
3205
3206 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3207 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3208 mode = GET_MODE_WIDER_MODE (mode))
3209 if (! memory_address_p (mode, addr))
3210 return;
3211
3212 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3213 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3214 mode = GET_MODE_WIDER_MODE (mode))
3215 if (! memory_address_p (mode, addr))
3216 return;
3217 }
3218
3219 /* Put back the address now that we have updated it and we either know
3220 it is valid or we don't care whether it is valid. */
3221
3222 XEXP (x, 0) = addr;
3223 }
3224 \f
3225 /* Given a pointer to a piece of rtx and an optional pointer to the
3226 containing object, instantiate any virtual registers present in it.
3227
3228 If EXTRA_INSNS, we always do the replacement and generate
3229 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3230 is not valid.
3231
3232 Return 1 if we either had nothing to do or if we were able to do the
3233 needed replacement. Return 0 otherwise; we only return zero if
3234 EXTRA_INSNS is zero.
3235
3236 We first try some simple transformations to avoid the creation of extra
3237 pseudos. */
3238
3239 static int
3240 instantiate_virtual_regs_1 (loc, object, extra_insns)
3241 rtx *loc;
3242 rtx object;
3243 int extra_insns;
3244 {
3245 rtx x;
3246 RTX_CODE code;
3247 rtx new = 0;
3248 HOST_WIDE_INT offset;
3249 rtx temp;
3250 rtx seq;
3251 int i, j;
3252 char *fmt;
3253
3254 /* Re-start here to avoid recursion in common cases. */
3255 restart:
3256
3257 x = *loc;
3258 if (x == 0)
3259 return 1;
3260
3261 code = GET_CODE (x);
3262
3263 /* Check for some special cases. */
3264 switch (code)
3265 {
3266 case CONST_INT:
3267 case CONST_DOUBLE:
3268 case CONST:
3269 case SYMBOL_REF:
3270 case CODE_LABEL:
3271 case PC:
3272 case CC0:
3273 case ASM_INPUT:
3274 case ADDR_VEC:
3275 case ADDR_DIFF_VEC:
3276 case RETURN:
3277 return 1;
3278
3279 case SET:
3280 /* We are allowed to set the virtual registers. This means that
3281 the actual register should receive the source minus the
3282 appropriate offset. This is used, for example, in the handling
3283 of non-local gotos. */
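	 /* For illustration: (set (reg virtual-stack-vars) (reg y)) becomes,
	    conceptually, (set (reg fp) (plus (reg y) (const_int -var_offset))),
	    with the insns that form the adjusted source emitted before
	    OBJECT.  */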
3284 if (SET_DEST (x) == virtual_incoming_args_rtx)
3285 new = arg_pointer_rtx, offset = - in_arg_offset;
3286 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3287 new = frame_pointer_rtx, offset = - var_offset;
3288 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3289 new = stack_pointer_rtx, offset = - dynamic_offset;
3290 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3291 new = stack_pointer_rtx, offset = - out_arg_offset;
3292 else if (SET_DEST (x) == virtual_cfa_rtx)
3293 new = arg_pointer_rtx, offset = - cfa_offset;
3294
3295 if (new)
3296 {
3297 /* The only valid sources here are PLUS or REG. Just do
3298 the simplest possible thing to handle them. */
3299 if (GET_CODE (SET_SRC (x)) != REG
3300 && GET_CODE (SET_SRC (x)) != PLUS)
3301 abort ();
3302
3303 start_sequence ();
3304 if (GET_CODE (SET_SRC (x)) != REG)
3305 temp = force_operand (SET_SRC (x), NULL_RTX);
3306 else
3307 temp = SET_SRC (x);
3308 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3309 seq = get_insns ();
3310 end_sequence ();
3311
3312 emit_insns_before (seq, object);
3313 SET_DEST (x) = new;
3314
3315 if (! validate_change (object, &SET_SRC (x), temp, 0)
3316 || ! extra_insns)
3317 abort ();
3318
3319 return 1;
3320 }
3321
3322 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3323 loc = &SET_SRC (x);
3324 goto restart;
3325
3326 case PLUS:
3327 /* Handle special case of virtual register plus constant. */
3328 if (CONSTANT_P (XEXP (x, 1)))
3329 {
3330 rtx old, new_offset;
3331
3332 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3333 if (GET_CODE (XEXP (x, 0)) == PLUS)
3334 {
3335 rtx inner = XEXP (XEXP (x, 0), 0);
3336
3337 if (inner == virtual_incoming_args_rtx)
3338 new = arg_pointer_rtx, offset = in_arg_offset;
3339 else if (inner == virtual_stack_vars_rtx)
3340 new = frame_pointer_rtx, offset = var_offset;
3341 else if (inner == virtual_stack_dynamic_rtx)
3342 new = stack_pointer_rtx, offset = dynamic_offset;
3343 else if (inner == virtual_outgoing_args_rtx)
3344 new = stack_pointer_rtx, offset = out_arg_offset;
3345 else if (inner == virtual_cfa_rtx)
3346 new = arg_pointer_rtx, offset = cfa_offset;
3347 else
3348 {
3349 loc = &XEXP (x, 0);
3350 goto restart;
3351 }
3352
3353 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3354 extra_insns);
3355 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3356 }
3357
3358 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3359 new = arg_pointer_rtx, offset = in_arg_offset;
3360 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3361 new = frame_pointer_rtx, offset = var_offset;
3362 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3363 new = stack_pointer_rtx, offset = dynamic_offset;
3364 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3365 new = stack_pointer_rtx, offset = out_arg_offset;
3366 else if (XEXP (x, 0) == virtual_cfa_rtx)
3367 new = arg_pointer_rtx, offset = cfa_offset;
3368 else
3369 {
3370 /* We know the second operand is a constant. Unless the
3371 first operand is a REG (which has already been checked),
3372 it needs to be checked. */
3373 if (GET_CODE (XEXP (x, 0)) != REG)
3374 {
3375 loc = &XEXP (x, 0);
3376 goto restart;
3377 }
3378 return 1;
3379 }
3380
3381 new_offset = plus_constant (XEXP (x, 1), offset);
3382
3383 /* If the new constant is zero, try to replace the sum with just
3384 the register. */
3385 if (new_offset == const0_rtx
3386 && validate_change (object, loc, new, 0))
3387 return 1;
3388
3389 /* Next try to replace the register and new offset.
3390 There are two changes to validate here and we can't assume that,
3391 when the old offset equals the new one, just changing the register
3392 will yield a valid insn. In the interests of a little efficiency,
3393 however, we only call validate_change once (we don't queue up the
3394 changes and then call apply_change_group). */
3395
3396 old = XEXP (x, 0);
3397 if (offset == 0
3398 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3399 : (XEXP (x, 0) = new,
3400 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3401 {
3402 if (! extra_insns)
3403 {
3404 XEXP (x, 0) = old;
3405 return 0;
3406 }
3407
3408 /* Otherwise copy the new constant into a register and replace the
3409 constant with that register. */
3410 temp = gen_reg_rtx (Pmode);
3411 XEXP (x, 0) = new;
3412 if (validate_change (object, &XEXP (x, 1), temp, 0))
3413 emit_insn_before (gen_move_insn (temp, new_offset), object);
3414 else
3415 {
3416 /* If that didn't work, replace this expression with a
3417 register containing the sum. */
3418
3419 XEXP (x, 0) = old;
3420 new = gen_rtx_PLUS (Pmode, new, new_offset);
3421
3422 start_sequence ();
3423 temp = force_operand (new, NULL_RTX);
3424 seq = get_insns ();
3425 end_sequence ();
3426
3427 emit_insns_before (seq, object);
3428 if (! validate_change (object, loc, temp, 0)
3429 && ! validate_replace_rtx (x, temp, object))
3430 abort ();
3431 }
3432 }
3433
3434 return 1;
3435 }
3436
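/* For illustration: with a hypothetical var_offset of 16, the code above
   rewrites (plus (reg virtual-stack-vars) (const_int 8)) into
   (plus (reg frame-pointer) (const_int 24)), validating each change
   against the insn that contains it.  A minimal sketch of that
   validate-or-emit-fixup pattern, using hypothetical names and eliding
   the two-operand bookkeeping done above:  */
#if 0
static int
replace_with_validated_sum (object, loc, base, offset)
     rtx object, *loc, base;
     HOST_WIDE_INT offset;
{
  rtx sum = plus_constant (base, offset);

  /* First see whether the insn accepts the rewritten address as is.  */
  if (validate_change (object, loc, sum, 0))
    return 1;

  /* Otherwise compute the sum into a fresh pseudo before OBJECT and
     substitute that register for the expression instead.  */
  {
    rtx temp = gen_reg_rtx (Pmode);
    emit_insn_before (gen_move_insn (temp, sum), object);
    return validate_change (object, loc, temp, 0);
  }
}
#endif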
3437 /* Fall through to generic two-operand expression case. */
3438 case EXPR_LIST:
3439 case CALL:
3440 case COMPARE:
3441 case MINUS:
3442 case MULT:
3443 case DIV: case UDIV:
3444 case MOD: case UMOD:
3445 case AND: case IOR: case XOR:
3446 case ROTATERT: case ROTATE:
3447 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3448 case NE: case EQ:
3449 case GE: case GT: case GEU: case GTU:
3450 case LE: case LT: case LEU: case LTU:
3451 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3452 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3453 loc = &XEXP (x, 0);
3454 goto restart;
3455
3456 case MEM:
3457 /* Most cases of MEM that convert to valid addresses have already been
3458 handled by our scan of decls. The only special handling we
3459 need here is to make a copy of the rtx to ensure it isn't being
3460 shared if we have to change it to a pseudo.
3461
3462 If the rtx is a simple reference to an address via a virtual register,
3463 it can potentially be shared. In such cases, first try to make it
3464 a valid address, which can also be shared. Otherwise, copy it and
3465 proceed normally.
3466
3467 First check for common cases that need no processing. These are
3468 usually due to instantiation already being done on a previous instance
3469 of a shared rtx. */
3470
3471 temp = XEXP (x, 0);
3472 if (CONSTANT_ADDRESS_P (temp)
3473 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3474 || temp == arg_pointer_rtx
3475 #endif
3476 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3477 || temp == hard_frame_pointer_rtx
3478 #endif
3479 || temp == frame_pointer_rtx)
3480 return 1;
3481
3482 if (GET_CODE (temp) == PLUS
3483 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3484 && (XEXP (temp, 0) == frame_pointer_rtx
3485 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3486 || XEXP (temp, 0) == hard_frame_pointer_rtx
3487 #endif
3488 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3489 || XEXP (temp, 0) == arg_pointer_rtx
3490 #endif
3491 ))
3492 return 1;
3493
3494 if (temp == virtual_stack_vars_rtx
3495 || temp == virtual_incoming_args_rtx
3496 || (GET_CODE (temp) == PLUS
3497 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3498 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3499 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3500 {
3501 /* This MEM may be shared. If the substitution can be done without
3502 the need to generate new pseudos, we want to do it in place
3503 so all copies of the shared rtx benefit. The call below will
3504 only make substitutions if the resulting address is still
3505 valid.
3506
3507 Note that we cannot pass X as the object in the recursive call
3508 since the insn being processed may not allow all valid
3509 addresses. However, if we were not passed an object, we can
3510 only modify X without copying it if X will have a valid
3511 address.
3512
3513 ??? Also note that this can still lose if OBJECT is an insn that
3514 has fewer restrictions on an address than some other insn.
3515 In that case, we will modify the shared address. This case
3516 doesn't seem very likely, though. One case where this could
3517 happen is in the case of a USE or CLOBBER reference, but we
3518 take care of that below. */
3519
3520 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3521 object ? object : x, 0))
3522 return 1;
3523
3524 /* Otherwise make a copy and process that copy. We copy the entire
3525 RTL expression since it might be a PLUS which could also be
3526 shared. */
3527 *loc = x = copy_rtx (x);
3528 }
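/* For illustration (hypothetical, not tied to any target): if several
   insns share the rtx (mem (plus (reg virtual-stack-vars) (const_int 4))),
   the in-place call above fixes every user at once; only when the
   resulting address would be invalid for this particular insn do we pay
   for the copy_rtx and patch the copy alone.  */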
3529
3530 /* Fall through to generic unary operation case. */
3531 case SUBREG:
3532 case STRICT_LOW_PART:
3533 case NEG: case NOT:
3534 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3535 case SIGN_EXTEND: case ZERO_EXTEND:
3536 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3537 case FLOAT: case FIX:
3538 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3539 case ABS:
3540 case SQRT:
3541 case FFS:
3542 /* These cases either have just one operand, or we know that we need not
3543 check the rest of the operands. */
3544 loc = &XEXP (x, 0);
3545 goto restart;
3546
3547 case USE:
3548 case CLOBBER:
3549 /* If the operand is a MEM, see if the change results in a valid MEM. If
3550 not, go ahead and make the invalid change, but do it to a copy. For a REG,
3551 just make the recursive call, since there's no chance of a problem. */
3552
3553 if ((GET_CODE (XEXP (x, 0)) == MEM
3554 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3555 0))
3556 || (GET_CODE (XEXP (x, 0)) == REG
3557 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3558 return 1;
3559
3560 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3561 loc = &XEXP (x, 0);
3562 goto restart;
3563
3564 case REG:
3565 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3566 in front of this insn and substitute the temporary. */
3567 if (x == virtual_incoming_args_rtx)
3568 new = arg_pointer_rtx, offset = in_arg_offset;
3569 else if (x == virtual_stack_vars_rtx)
3570 new = frame_pointer_rtx, offset = var_offset;
3571 else if (x == virtual_stack_dynamic_rtx)
3572 new = stack_pointer_rtx, offset = dynamic_offset;
3573 else if (x == virtual_outgoing_args_rtx)
3574 new = stack_pointer_rtx, offset = out_arg_offset;
3575 else if (x == virtual_cfa_rtx)
3576 new = arg_pointer_rtx, offset = cfa_offset;
3577
3578 if (new)
3579 {
3580 temp = plus_constant (new, offset);
3581 if (!validate_change (object, loc, temp, 0))
3582 {
3583 if (! extra_insns)
3584 return 0;
3585
3586 start_sequence ();
3587 temp = force_operand (temp, NULL_RTX);
3588 seq = get_insns ();
3589 end_sequence ();
3590
3591 emit_insns_before (seq, object);
3592 if (! validate_change (object, loc, temp, 0)
3593 && ! validate_replace_rtx (x, temp, object))
3594 abort ();
3595 }
3596 }
3597
3598 return 1;
3599
3600 case ADDRESSOF:
3601 if (GET_CODE (XEXP (x, 0)) == REG)
3602 return 1;
3603
3604 else if (GET_CODE (XEXP (x, 0)) == MEM)
3605 {
3606 /* If we have a (addressof (mem ..)), do any instantiation inside
3607 since we know we'll be making the inside valid when we finally
3608 remove the ADDRESSOF. */
3609 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3610 return 1;
3611 }
3612 break;
3613
3614 default:
3615 break;
3616 }
3617
3618 /* Scan all subexpressions. */
3619 fmt = GET_RTX_FORMAT (code);
3620 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3621 if (*fmt == 'e')
3622 {
3623 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3624 return 0;
3625 }
3626 else if (*fmt == 'E')
3627 for (j = 0; j < XVECLEN (x, i); j++)
3628 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3629 extra_insns))
3630 return 0;
3631
3632 return 1;
3633 }
3634 \f
3635 /* Optimization: assuming this function does not receive nonlocal gotos,
3636 delete the handlers for such, as well as the insns to establish
3637 and disestablish them. */
3638
3639 static void
3640 delete_handlers ()
3641 {
3642 rtx insn;
3643 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3644 {
3645 /* Delete the handler by turning off the flag that would
3646 prevent jump_optimize from deleting it.
3647 Also permit deletion of the nonlocal labels themselves
3648 if nothing local refers to them. */
3649 if (GET_CODE (insn) == CODE_LABEL)
3650 {
3651 tree t, last_t;
3652
3653 LABEL_PRESERVE_P (insn) = 0;
3654
3655 /* Remove it from the nonlocal_label list, to avoid confusing
3656 flow. */
3657 for (t = nonlocal_labels, last_t = 0; t;
3658 last_t = t, t = TREE_CHAIN (t))
3659 if (DECL_RTL (TREE_VALUE (t)) == insn)
3660 break;
3661 if (t)
3662 {
3663 if (! last_t)
3664 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3665 else
3666 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3667 }
3668 }
3669 if (GET_CODE (insn) == INSN)
3670 {
3671 int can_delete = 0;
3672 rtx t;
3673 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3674 if (reg_mentioned_p (t, PATTERN (insn)))
3675 {
3676 can_delete = 1;
3677 break;
3678 }
3679 if (can_delete
3680 || (nonlocal_goto_stack_level != 0
3681 && reg_mentioned_p (nonlocal_goto_stack_level,
3682 PATTERN (insn))))
3683 delete_insn (insn);
3684 }
3685 }
3686 }
3687
3688 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3689 of the current function. */
3690
3691 rtx
3692 nonlocal_label_rtx_list ()
3693 {
3694 tree t;
3695 rtx x = 0;
3696
3697 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3698 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3699
3700 return x;
3701 }
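/* For illustration: the loop above walks NONLOCAL_LABELS front to back,
   consing each label's rtx onto X, so the resulting EXPR_LIST chain ends
   up in reverse order of the source-level list.  (Schematic only.)  */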
3702 \f
3703 /* Output a USE for any register use in RTL.
3704 This is used with -noreg to mark the extent of the lifespan
3705 of any registers used in a user-visible variable's DECL_RTL. */
3706
3707 void
3708 use_variable (rtl)
3709 rtx rtl;
3710 {
3711 if (GET_CODE (rtl) == REG)
3712 /* This is a register variable. */
3713 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3714 else if (GET_CODE (rtl) == MEM
3715 && GET_CODE (XEXP (rtl, 0)) == REG
3716 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3717 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3718 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3719 /* This is a variable-sized structure. */
3720 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3721 }
3722
3723 /* Like use_variable except that it outputs the USEs after INSN
3724 instead of at the end of the insn-chain. */
3725
3726 void
3727 use_variable_after (rtl, insn)
3728 rtx rtl, insn;
3729 {
3730 if (GET_CODE (rtl) == REG)
3731 /* This is a register variable. */
3732 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3733 else if (GET_CODE (rtl) == MEM
3734 && GET_CODE (XEXP (rtl, 0)) == REG
3735 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3736 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3737 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3738 /* This is a variable-sized structure. */
3739 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3740 }
3741 \f
3742 int
3743 max_parm_reg_num ()
3744 {
3745 return max_parm_reg;
3746 }
3747
3748 /* Return the first insn following those generated by `assign_parms'. */
3749
3750 rtx
3751 get_first_nonparm_insn ()
3752 {
3753 if (last_parm_insn)
3754 return NEXT_INSN (last_parm_insn);
3755 return get_insns ();
3756 }
3757
3758 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3759 Crash if there is none. */
3760
3761 rtx
3762 get_first_block_beg ()
3763 {
3764 register rtx searcher;
3765 register rtx insn = get_first_nonparm_insn ();
3766
3767 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3768 if (GET_CODE (searcher) == NOTE
3769 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3770 return searcher;
3771
3772 abort (); /* Invalid call to this function. (See comments above.) */
3773 return NULL_RTX;
3774 }
3775
3776 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3777 This means a type for which function calls must pass an address to the
3778 function or get an address back from the function.
3779 EXP may be a type node or an expression (whose type is tested). */
3780
3781 int
3782 aggregate_value_p (exp)
3783 tree exp;
3784 {
3785 int i, regno, nregs;
3786 rtx reg;
3787 tree type;
3788 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3789 type = exp;
3790 else
3791 type = TREE_TYPE (exp);
3792
3793 if (RETURN_IN_MEMORY (type))
3794 return 1;
3795 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3796 and thus can't be returned in registers. */
3797 if (TREE_ADDRESSABLE (type))
3798 return 1;
3799 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3800 return 1;
3801 /* Make sure we have suitable call-clobbered regs to return
3802 the value in; if not, we must return it in memory. */
3803 reg = hard_function_value (type, 0);
3804
3805 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3806 it is OK. */
3807 if (GET_CODE (reg) != REG)
3808 return 0;
3809
3810 regno = REGNO (reg);
3811 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3812 for (i = 0; i < nregs; i++)
3813 if (! call_used_regs[regno + i])
3814 return 1;
3815 return 0;
3816 }
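/* Usage sketch (hypothetical caller, mirroring the check made near the
   top of assign_parms below): a front end deciding whether a function's
   value is returned through a hidden pointer would write something like
       if (aggregate_value_p (DECL_RESULT (fndecl)))
         ... arrange for the return slot's address to arrive as an
             implicit first argument ...
   rather than testing RETURN_IN_MEMORY directly, since this function
   also accounts for TREE_ADDRESSABLE types, -fpcc-struct-return, and
   the availability of call-clobbered return registers.  */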
3817 \f
3818 /* Assign RTL expressions to the function's parameters.
3819 This may involve copying them into registers and using
3820 those registers as the RTL for them.
3821
3822 If SECOND_TIME is non-zero it means that this function is being
3823 called a second time. This is done by integrate.c when a function's
3824 compilation is deferred. We need to come back here in case the
3825 FUNCTION_ARG macro computes items needed for the rest of the compilation
3826 (such as changing which registers are fixed or caller-saved). But suppress
3827 writing any insns or setting DECL_RTL of anything in this case. */
3828
3829 void
3830 assign_parms (fndecl, second_time)
3831 tree fndecl;
3832 int second_time;
3833 {
3834 register tree parm;
3835 register rtx entry_parm = 0;
3836 register rtx stack_parm = 0;
3837 CUMULATIVE_ARGS args_so_far;
3838 enum machine_mode promoted_mode, passed_mode;
3839 enum machine_mode nominal_mode, promoted_nominal_mode;
3840 int unsignedp;
3841 /* Total space needed so far for args on the stack,
3842 given as a constant and a tree-expression. */
3843 struct args_size stack_args_size;
3844 tree fntype = TREE_TYPE (fndecl);
3845 tree fnargs = DECL_ARGUMENTS (fndecl);
3846 /* This is used for the arg pointer when referring to stack args. */
3847 rtx internal_arg_pointer;
3848 /* This is a dummy PARM_DECL that we used for the function result if
3849 the function returns a structure. */
3850 tree function_result_decl = 0;
3851 int varargs_setup = 0;
3852 rtx conversion_insns = 0;
3853
3854 /* Nonzero if the last arg is named `__builtin_va_alist',
3855 which is used on some machines for old-fashioned non-ANSI varargs.h;
3856 this should be stuck onto the stack as if it had arrived there. */
3857 int hide_last_arg
3858 = (current_function_varargs
3859 && fnargs
3860 && (parm = tree_last (fnargs)) != 0
3861 && DECL_NAME (parm)
3862 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3863 "__builtin_va_alist")));
3864
3865 /* Nonzero if function takes extra anonymous args.
3866 This means the last named arg must be on the stack
3867 right before the anonymous ones. */
3868 int stdarg
3869 = (TYPE_ARG_TYPES (fntype) != 0
3870 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3871 != void_type_node));
3872
3873 current_function_stdarg = stdarg;
3874
3875 /* If the reg that the virtual arg pointer will be translated into is
3876 not a fixed reg or is the stack pointer, make a copy of the virtual
3877 arg pointer, and address parms via the copy. The frame pointer is
3878 considered fixed even though it is not marked as such.
3879
3880 The second time through, simply use ap to avoid generating rtx. */
3881
3882 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3883 || ! (fixed_regs[ARG_POINTER_REGNUM]
3884 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3885 && ! second_time)
3886 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3887 else
3888 internal_arg_pointer = virtual_incoming_args_rtx;
3889 current_function_internal_arg_pointer = internal_arg_pointer;
3890
3891 stack_args_size.constant = 0;
3892 stack_args_size.var = 0;
3893
3894 /* If struct value address is treated as the first argument, make it so. */
3895 if (aggregate_value_p (DECL_RESULT (fndecl))
3896 && ! current_function_returns_pcc_struct
3897 && struct_value_incoming_rtx == 0)
3898 {
3899 tree type = build_pointer_type (TREE_TYPE (fntype));
3900
3901 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3902
3903 DECL_ARG_TYPE (function_result_decl) = type;
3904 TREE_CHAIN (function_result_decl) = fnargs;
3905 fnargs = function_result_decl;
3906 }
3907
3908 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3909 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3910 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3911
3912 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3913 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3914 #else
3915 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3916 #endif
3917
3918 /* We haven't yet found an argument that we must push and pretend the
3919 caller did. */
3920 current_function_pretend_args_size = 0;
3921
3922 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3923 {
3924 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3925 struct args_size stack_offset;
3926 struct args_size arg_size;
3927 int passed_pointer = 0;
3928 int did_conversion = 0;
3929 tree passed_type = DECL_ARG_TYPE (parm);
3930 tree nominal_type = TREE_TYPE (parm);
3931
3932 /* Set LAST_NAMED if this is the last named arg before some
3933 anonymous args. */
3934 int last_named = ((TREE_CHAIN (parm) == 0
3935 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3936 && (stdarg || current_function_varargs));
3937 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3938 most machines, if this is a varargs/stdarg function, then we treat
3939 the last named arg as if it were anonymous too. */
3940 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3941
3942 if (TREE_TYPE (parm) == error_mark_node
3943 /* This can happen after weird syntax errors
3944 or if an enum type is defined among the parms. */
3945 || TREE_CODE (parm) != PARM_DECL
3946 || passed_type == NULL)
3947 {
3948 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3949 = gen_rtx_MEM (BLKmode, const0_rtx);
3950 TREE_USED (parm) = 1;
3951 continue;
3952 }
3953
3954 /* For varargs.h function, save info about regs and stack space
3955 used by the individual args, not including the va_alist arg. */
3956 if (hide_last_arg && last_named)
3957 current_function_args_info = args_so_far;
3958
3959 /* Find mode of arg as it is passed, and mode of arg
3960 as it should be during execution of this function. */
3961 passed_mode = TYPE_MODE (passed_type);
3962 nominal_mode = TYPE_MODE (nominal_type);
3963
3964 /* If the parm's mode is VOID, its value doesn't matter, so avoid
3965 the usual things like emit_move_insn that could crash. */
3966 if (nominal_mode == VOIDmode)
3967 {
3968 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3969 continue;
3970 }
3971
3972 /* If the parm is to be passed as a transparent union, use the
3973 type of the first field for the tests below. We have already
3974 verified that the modes are the same. */
3975 if (DECL_TRANSPARENT_UNION (parm)
3976 || TYPE_TRANSPARENT_UNION (passed_type))
3977 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3978
3979 /* See if this arg was passed by invisible reference. It is if
3980 it is an object whose size depends on the contents of the
3981 object itself or if the machine requires these objects be passed
3982 that way. */
3983
3984 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3985 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3986 || TREE_ADDRESSABLE (passed_type)
3987 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3988 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3989 passed_type, named_arg)
3990 #endif
3991 )
3992 {
3993 passed_type = nominal_type = build_pointer_type (passed_type);
3994 passed_pointer = 1;
3995 passed_mode = nominal_mode = Pmode;
3996 }
3997
3998 promoted_mode = passed_mode;
3999
4000 #ifdef PROMOTE_FUNCTION_ARGS
4001 /* Compute the mode to which the arg is actually extended. */
4002 unsignedp = TREE_UNSIGNED (passed_type);
4003 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4004 #endif
4005
4006 /* Let machine desc say which reg (if any) the parm arrives in.
4007 0 means it arrives on the stack. */
4008 #ifdef FUNCTION_INCOMING_ARG
4009 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4010 passed_type, named_arg);
4011 #else
4012 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4013 passed_type, named_arg);
4014 #endif
4015
4016 if (entry_parm == 0)
4017 promoted_mode = passed_mode;
4018
4019 #ifdef SETUP_INCOMING_VARARGS
4020 /* If this is the last named parameter, do any required setup for
4021 varargs or stdargs. We need to know about the case of this being an
4022 addressable type, in which case we skip the registers it
4023 would have arrived in.
4024
4025 For stdargs, LAST_NAMED will be set for two parameters, the one that
4026 is actually the last named, and the dummy parameter. We only
4027 want to do this action once.
4028
4029 Also, indicate when RTL generation is to be suppressed. */
4030 if (last_named && !varargs_setup)
4031 {
4032 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4033 current_function_pretend_args_size,
4034 second_time);
4035 varargs_setup = 1;
4036 }
4037 #endif
4038
4039 /* Determine parm's home in the stack,
4040 in case it arrives in the stack or we should pretend it did.
4041
4042 Compute the stack position and rtx where the argument arrives
4043 and its size.
4044
4045 There is one complexity here: If this was a parameter that would
4046 have been passed in registers, but wasn't, only because it is
4047 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4048 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4049 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4050 0 as it was the previous time. */
4051
4052 locate_and_pad_parm (promoted_mode, passed_type,
4053 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4054 1,
4055 #else
4056 #ifdef FUNCTION_INCOMING_ARG
4057 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4058 passed_type,
4059 (named_arg
4060 || varargs_setup)) != 0,
4061 #else
4062 FUNCTION_ARG (args_so_far, promoted_mode,
4063 passed_type,
4064 named_arg || varargs_setup) != 0,
4065 #endif
4066 #endif
4067 fndecl, &stack_args_size, &stack_offset, &arg_size);
4068
4069 if (! second_time)
4070 {
4071 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4072
4073 if (offset_rtx == const0_rtx)
4074 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4075 else
4076 stack_parm = gen_rtx_MEM (promoted_mode,
4077 gen_rtx_PLUS (Pmode,
4078 internal_arg_pointer,
4079 offset_rtx));
4080
4081 /* If this is a memory ref that contains aggregate components,
4082 mark it as such for cse and loop optimize. Likewise if it
4083 is readonly. */
4084 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4085 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4086 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4087 }
4088
4089 /* If this parameter was passed both in registers and in the stack,
4090 use the copy on the stack. */
4091 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4092 entry_parm = 0;
4093
4094 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4095 /* If this parm was passed part in regs and part in memory,
4096 pretend it arrived entirely in memory
4097 by pushing the register-part onto the stack.
4098
4099 In the special case of a DImode or DFmode that is split,
4100 we could put it together in a pseudoreg directly,
4101 but for now that's not worth bothering with. */
4102
4103 if (entry_parm)
4104 {
4105 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4106 passed_type, named_arg);
4107
4108 if (nregs > 0)
4109 {
4110 current_function_pretend_args_size
4111 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4112 / (PARM_BOUNDARY / BITS_PER_UNIT)
4113 * (PARM_BOUNDARY / BITS_PER_UNIT));
4114
4115 if (! second_time)
4116 {
4117 /* Handle calls that pass values in multiple non-contiguous
4118 locations. The Irix 6 ABI has examples of this. */
4119 if (GET_CODE (entry_parm) == PARALLEL)
4120 emit_group_store (validize_mem (stack_parm), entry_parm,
4121 int_size_in_bytes (TREE_TYPE (parm)),
4122 (TYPE_ALIGN (TREE_TYPE (parm))
4123 / BITS_PER_UNIT));
4124 else
4125 move_block_from_reg (REGNO (entry_parm),
4126 validize_mem (stack_parm), nregs,
4127 int_size_in_bytes (TREE_TYPE (parm)));
4128 }
4129 entry_parm = stack_parm;
4130 }
4131 }
4132 #endif
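/* Worked example for the pretend-args computation above (assuming
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64): a parm with nregs == 3
   occupies 12 bytes of registers, and 12 rounded up to the 8-byte parm
   boundary gives a pretend_args_size of 16.  */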
4133
4134 /* If we didn't decide this parm came in a register,
4135 by default it came on the stack. */
4136 if (entry_parm == 0)
4137 entry_parm = stack_parm;
4138
4139 /* Record permanently how this parm was passed. */
4140 if (! second_time)
4141 DECL_INCOMING_RTL (parm) = entry_parm;
4142
4143 /* If there is actually space on the stack for this parm,
4144 count it in stack_args_size; otherwise set stack_parm to 0
4145 to indicate there is no preallocated stack slot for the parm. */
4146
4147 if (entry_parm == stack_parm
4148 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4149 /* On some machines, even if a parm value arrives in a register
4150 there is still an (uninitialized) stack slot allocated for it.
4151
4152 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4153 whether this parameter already has a stack slot allocated,
4154 because an arg block exists only if current_function_args_size
4155 is larger than some threshold, and we haven't calculated that
4156 yet. So, for now, we just assume that stack slots never exist
4157 in this case. */
4158 || REG_PARM_STACK_SPACE (fndecl) > 0
4159 #endif
4160 )
4161 {
4162 stack_args_size.constant += arg_size.constant;
4163 if (arg_size.var)
4164 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4165 }
4166 else
4167 /* No stack slot was pushed for this parm. */
4168 stack_parm = 0;
4169
4170 /* Update info on where next arg arrives in registers. */
4171
4172 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4173 passed_type, named_arg);
4174
4175 /* If this is our second time through, we are done with this parm. */
4176 if (second_time)
4177 continue;
4178
4179 /* If we can't trust the parm stack slot to be aligned enough
4180 for its ultimate type, don't use that slot after entry.
4181 We'll make another stack slot, if we need one. */
4182 {
4183 int thisparm_boundary
4184 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4185
4186 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4187 stack_parm = 0;
4188 }
4189
4190 /* If parm was passed in memory, and we need to convert it on entry,
4191 don't store it back in that same slot. */
4192 if (entry_parm != 0
4193 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4194 stack_parm = 0;
4195
4196 #if 0
4197 /* Now adjust STACK_PARM to the mode and precise location
4198 where this parameter should live during execution,
4199 if we discover that it must live in the stack during execution.
4200 To make debuggers happier on big-endian machines, we store
4201 the value in the last bytes of the space available. */
4202
4203 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4204 && stack_parm != 0)
4205 {
4206 rtx offset_rtx;
4207
4208 if (BYTES_BIG_ENDIAN
4209 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4210 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4211 - GET_MODE_SIZE (nominal_mode));
4212
4213 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4214 if (offset_rtx == const0_rtx)
4215 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4216 else
4217 stack_parm = gen_rtx_MEM (nominal_mode,
4218 gen_rtx_PLUS (Pmode,
4219 internal_arg_pointer,
4220 offset_rtx));
4221
4222 /* If this is a memory ref that contains aggregate components,
4223 mark it as such for cse and loop optimize. */
4224 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4225 }
4226 #endif /* 0 */
4227
4228 #ifdef STACK_REGS
4229 /* We need this "use" info, because the gcc-register->stack-register
4230 converter in reg-stack.c needs to know which registers are active
4231 at the start of the function call. The actual parameter loading
4232 instructions are not always available by then, since they might
4233 have been optimized away. */
4234
4235 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4236 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4237 #endif
4238
4239 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4240 in the mode in which it arrives.
4241 STACK_PARM is an RTX for a stack slot where the parameter can live
4242 during the function (in case we want to put it there).
4243 STACK_PARM is 0 if no stack slot was pushed for it.
4244
4245 Now output code if necessary to convert ENTRY_PARM to
4246 the type in which this function declares it,
4247 and store that result in an appropriate place,
4248 which may be a pseudo reg, may be STACK_PARM,
4249 or may be a local stack slot if STACK_PARM is 0.
4250
4251 Set DECL_RTL to that place. */
4252
4253 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4254 {
4255 /* If a BLKmode arrives in registers, copy it to a stack slot.
4256 Handle calls that pass values in multiple non-contiguous
4257 locations. The Irix 6 ABI has examples of this. */
4258 if (GET_CODE (entry_parm) == REG
4259 || GET_CODE (entry_parm) == PARALLEL)
4260 {
4261 int size_stored
4262 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4263 UNITS_PER_WORD);
4264
4265 /* Note that we will be storing an integral number of words.
4266 So we have to be careful to ensure that we allocate an
4267 integral number of words. We do this below in the
4268 assign_stack_local if space was not allocated in the argument
4269 list. If it was, this will not work if PARM_BOUNDARY is not
4270 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4271 if it becomes a problem. */
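/* Worked example (assuming UNITS_PER_WORD == 4): a 10-byte BLKmode
   parm arriving in registers gets size_stored = CEIL_ROUND (10, 4)
   == 12, so the slot allocated below holds an integral number of
   words.  */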
4272
4273 if (stack_parm == 0)
4274 {
4275 stack_parm
4276 = assign_stack_local (GET_MODE (entry_parm),
4277 size_stored, 0);
4278
4279 /* If this is a memory ref that contains aggregate
4280 components, mark it as such for cse and loop optimize. */
4281 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4282 }
4283
4284 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4285 abort ();
4286
4287 if (TREE_READONLY (parm))
4288 RTX_UNCHANGING_P (stack_parm) = 1;
4289
4290 /* Handle calls that pass values in multiple non-contiguous
4291 locations. The Irix 6 ABI has examples of this. */
4292 if (GET_CODE (entry_parm) == PARALLEL)
4293 emit_group_store (validize_mem (stack_parm), entry_parm,
4294 int_size_in_bytes (TREE_TYPE (parm)),
4295 (TYPE_ALIGN (TREE_TYPE (parm))
4296 / BITS_PER_UNIT));
4297 else
4298 move_block_from_reg (REGNO (entry_parm),
4299 validize_mem (stack_parm),
4300 size_stored / UNITS_PER_WORD,
4301 int_size_in_bytes (TREE_TYPE (parm)));
4302 }
4303 DECL_RTL (parm) = stack_parm;
4304 }
4305 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4306 && ! DECL_INLINE (fndecl))
4307 /* layout_decl may set this. */
4308 || TREE_ADDRESSABLE (parm)
4309 || TREE_SIDE_EFFECTS (parm)
4310 /* If -ffloat-store specified, don't put explicit
4311 float variables into registers. */
4312 || (flag_float_store
4313 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4314 /* Always assign pseudo to structure return or item passed
4315 by invisible reference. */
4316 || passed_pointer || parm == function_result_decl)
4317 {
4318 /* Store the parm in a pseudoregister during the function, but we
4319 may need to do it in a wider mode. */
4320
4321 register rtx parmreg;
4322 int regno, regnoi = 0, regnor = 0;
4323
4324 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4325
4326 promoted_nominal_mode
4327 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4328
4329 parmreg = gen_reg_rtx (promoted_nominal_mode);
4330 mark_user_reg (parmreg);
4331
4332 /* If this was an item that we received a pointer to, set DECL_RTL
4333 appropriately. */
4334 if (passed_pointer)
4335 {
4336 DECL_RTL (parm)
4337 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4338 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4339 }
4340 else
4341 DECL_RTL (parm) = parmreg;
4342
4343 /* Copy the value into the register. */
4344 if (nominal_mode != passed_mode
4345 || promoted_nominal_mode != promoted_mode)
4346 {
4347 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4348 mode, by the caller. We now have to convert it to
4349 NOMINAL_MODE, if different. However, PARMREG may be in
4350 a different mode than NOMINAL_MODE if it is being stored
4351 promoted.
4352
4353 If ENTRY_PARM is a hard register, it might be in a register
4354 not valid for operating in its mode (e.g., an odd-numbered
4355 register for a DFmode). In that case, moves are the only
4356 thing valid, so we can't do a convert from there. This
4357 occurs when the calling sequence allow such misaligned
4358 usages.
4359
4360 In addition, the conversion may involve a call, which could
4361 clobber parameters which haven't been copied to pseudo
4362 registers yet. Therefore, we must first copy the parm to
4363 a pseudo reg here, and save the conversion until after all
4364 parameters have been moved. */
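/* Note for readers: every such deferred conversion is queued on
   CONVERSION_INSNS via push_to_sequence/end_sequence and emitted in
   one batch by the emit_insns (conversion_insns) call at the end of
   assign_parms, once all parms are out of their hard registers.  */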
4365
4366 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4367
4368 emit_move_insn (tempreg, validize_mem (entry_parm));
4369
4370 push_to_sequence (conversion_insns);
4371 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4372
4373 expand_assignment (parm,
4374 make_tree (nominal_type, tempreg), 0, 0);
4375 conversion_insns = get_insns ();
4376 did_conversion = 1;
4377 end_sequence ();
4378 }
4379 else
4380 emit_move_insn (parmreg, validize_mem (entry_parm));
4381
4382 /* If we were passed a pointer but the actual value
4383 can safely live in a register, put it in one. */
4384 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4385 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4386 && ! DECL_INLINE (fndecl))
4387 /* layout_decl may set this. */
4388 || TREE_ADDRESSABLE (parm)
4389 || TREE_SIDE_EFFECTS (parm)
4390 /* If -ffloat-store specified, don't put explicit
4391 float variables into registers. */
4392 || (flag_float_store
4393 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4394 {
4395 /* We can't use nominal_mode, because it will have been set to
4396 Pmode above. We must use the actual mode of the parm. */
4397 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4398 mark_user_reg (parmreg);
4399 emit_move_insn (parmreg, DECL_RTL (parm));
4400 DECL_RTL (parm) = parmreg;
4401 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4402 now the parm. */
4403 stack_parm = 0;
4404 }
4405 #ifdef FUNCTION_ARG_CALLEE_COPIES
4406 /* If we are passed an arg by reference and it is our responsibility
4407 to make a copy, do it now.
4408 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4409 original argument, so we must recreate them in the call to
4410 FUNCTION_ARG_CALLEE_COPIES. */
4411 /* ??? Later add code to handle the case where the argument isn't
4412 modified, so the copy can be omitted. */
4413
4414 else if (passed_pointer
4415 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4416 TYPE_MODE (DECL_ARG_TYPE (parm)),
4417 DECL_ARG_TYPE (parm),
4418 named_arg)
4419 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4420 {
4421 rtx copy;
4422 tree type = DECL_ARG_TYPE (parm);
4423
4424 /* This sequence may involve a library call perhaps clobbering
4425 registers that haven't been copied to pseudos yet. */
4426
4427 push_to_sequence (conversion_insns);
4428
4429 if (TYPE_SIZE (type) == 0
4430 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4431 /* This is a variable sized object. */
4432 copy = gen_rtx_MEM (BLKmode,
4433 allocate_dynamic_stack_space
4434 (expr_size (parm), NULL_RTX,
4435 TYPE_ALIGN (type)));
4436 else
4437 copy = assign_stack_temp (TYPE_MODE (type),
4438 int_size_in_bytes (type), 1);
4439 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4440 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4441
4442 store_expr (parm, copy, 0);
4443 emit_move_insn (parmreg, XEXP (copy, 0));
4444 if (current_function_check_memory_usage)
4445 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4446 XEXP (copy, 0), ptr_mode,
4447 GEN_INT (int_size_in_bytes (type)),
4448 TYPE_MODE (sizetype),
4449 GEN_INT (MEMORY_USE_RW),
4450 TYPE_MODE (integer_type_node));
4451 conversion_insns = get_insns ();
4452 did_conversion = 1;
4453 end_sequence ();
4454 }
4455 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4456
4457 /* In any case, record the parm's desired stack location
4458 in case we later discover it must live in the stack.
4459
4460 If it is a COMPLEX value, store the stack location for both
4461 halves. */
4462
4463 if (GET_CODE (parmreg) == CONCAT)
4464 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4465 else
4466 regno = REGNO (parmreg);
4467
4468 if (regno >= max_parm_reg)
4469 {
4470 rtx *new;
4471 int old_max_parm_reg = max_parm_reg;
4472
4473 /* It's slow to expand this one register at a time,
4474 but it's also rare and we need max_parm_reg to be
4475 precisely correct. */
4476 max_parm_reg = regno + 1;
4477 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4478 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4479 old_max_parm_reg * sizeof (rtx));
4480 bzero ((char *) (new + old_max_parm_reg),
4481 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4482 parm_reg_stack_loc = new;
4483 }
4484
4485 if (GET_CODE (parmreg) == CONCAT)
4486 {
4487 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4488
4489 regnor = REGNO (gen_realpart (submode, parmreg));
4490 regnoi = REGNO (gen_imagpart (submode, parmreg));
4491
4492 if (stack_parm != 0)
4493 {
4494 parm_reg_stack_loc[regnor]
4495 = gen_realpart (submode, stack_parm);
4496 parm_reg_stack_loc[regnoi]
4497 = gen_imagpart (submode, stack_parm);
4498 }
4499 else
4500 {
4501 parm_reg_stack_loc[regnor] = 0;
4502 parm_reg_stack_loc[regnoi] = 0;
4503 }
4504 }
4505 else
4506 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4507
4508 /* Mark the register as eliminable if we did no conversion
4509 and it was copied from memory at a fixed offset,
4510 and the arg pointer was not copied to a pseudo-reg.
4511 If the arg pointer is a pseudo reg or the offset formed
4512 an invalid address, such memory-equivalences
4513 as we make here would screw up life analysis for it. */
4514 if (nominal_mode == passed_mode
4515 && ! did_conversion
4516 && stack_parm != 0
4517 && GET_CODE (stack_parm) == MEM
4518 && stack_offset.var == 0
4519 && reg_mentioned_p (virtual_incoming_args_rtx,
4520 XEXP (stack_parm, 0)))
4521 {
4522 rtx linsn = get_last_insn ();
4523 rtx sinsn, set;
4524
4525 /* Mark complex types separately. */
4526 if (GET_CODE (parmreg) == CONCAT)
4527 /* Scan backwards for the set of the real and
4528 imaginary parts. */
4529 for (sinsn = linsn; sinsn != 0;
4530 sinsn = prev_nonnote_insn (sinsn))
4531 {
4532 set = single_set (sinsn);
4533 if (set != 0
4534 && SET_DEST (set) == regno_reg_rtx [regnoi])
4535 REG_NOTES (sinsn)
4536 = gen_rtx_EXPR_LIST (REG_EQUIV,
4537 parm_reg_stack_loc[regnoi],
4538 REG_NOTES (sinsn));
4539 else if (set != 0
4540 && SET_DEST (set) == regno_reg_rtx [regnor])
4541 REG_NOTES (sinsn)
4542 = gen_rtx_EXPR_LIST (REG_EQUIV,
4543 parm_reg_stack_loc[regnor],
4544 REG_NOTES (sinsn));
4545 }
4546 else if ((set = single_set (linsn)) != 0
4547 && SET_DEST (set) == parmreg)
4548 REG_NOTES (linsn)
4549 = gen_rtx_EXPR_LIST (REG_EQUIV,
4550 stack_parm, REG_NOTES (linsn));
4551 }
4552
4553 /* For pointer data type, suggest pointer register. */
4554 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4555 mark_reg_pointer (parmreg,
4556 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4557 / BITS_PER_UNIT));
4558 }
4559 else
4560 {
4561 /* Value must be stored in the stack slot STACK_PARM
4562 during function execution. */
4563
4564 if (promoted_mode != nominal_mode)
4565 {
4566 /* Conversion is required. */
4567 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4568
4569 emit_move_insn (tempreg, validize_mem (entry_parm));
4570
4571 push_to_sequence (conversion_insns);
4572 entry_parm = convert_to_mode (nominal_mode, tempreg,
4573 TREE_UNSIGNED (TREE_TYPE (parm)));
4574 if (stack_parm)
4575 {
4576 /* ??? This may need a big-endian conversion on sparc64. */
4577 stack_parm = change_address (stack_parm, nominal_mode,
4578 NULL_RTX);
4579 }
4580 conversion_insns = get_insns ();
4581 did_conversion = 1;
4582 end_sequence ();
4583 }
4584
4585 if (entry_parm != stack_parm)
4586 {
4587 if (stack_parm == 0)
4588 {
4589 stack_parm
4590 = assign_stack_local (GET_MODE (entry_parm),
4591 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4592 /* If this is a memory ref that contains aggregate components,
4593 mark it as such for cse and loop optimize. */
4594 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4595 }
4596
4597 if (promoted_mode != nominal_mode)
4598 {
4599 push_to_sequence (conversion_insns);
4600 emit_move_insn (validize_mem (stack_parm),
4601 validize_mem (entry_parm));
4602 conversion_insns = get_insns ();
4603 end_sequence ();
4604 }
4605 else
4606 emit_move_insn (validize_mem (stack_parm),
4607 validize_mem (entry_parm));
4608 }
4609 if (current_function_check_memory_usage)
4610 {
4611 push_to_sequence (conversion_insns);
4612 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4613 XEXP (stack_parm, 0), ptr_mode,
4614 GEN_INT (GET_MODE_SIZE (GET_MODE
4615 (entry_parm))),
4616 TYPE_MODE (sizetype),
4617 GEN_INT (MEMORY_USE_RW),
4618 TYPE_MODE (integer_type_node));
4619
4620 conversion_insns = get_insns ();
4621 end_sequence ();
4622 }
4623 DECL_RTL (parm) = stack_parm;
4624 }
4625
4626 /* If this "parameter" was the place where we are receiving the
4627 function's incoming structure pointer, set up the result. */
4628 if (parm == function_result_decl)
4629 {
4630 tree result = DECL_RESULT (fndecl);
4631 tree restype = TREE_TYPE (result);
4632
4633 DECL_RTL (result)
4634 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4635
4636 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4637 }
4638
4639 if (TREE_THIS_VOLATILE (parm))
4640 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4641 if (TREE_READONLY (parm))
4642 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4643 }
4644
4645 /* Output all parameter conversion instructions (possibly including calls)
4646 now that all parameters have been copied out of hard registers. */
4647 emit_insns (conversion_insns);
4648
4649 last_parm_insn = get_last_insn ();
4650
4651 current_function_args_size = stack_args_size.constant;
4652
4653 /* Adjust function incoming argument size for alignment and
4654 minimum length. */
4655
4656 #ifdef REG_PARM_STACK_SPACE
4657 #ifndef MAYBE_REG_PARM_STACK_SPACE
4658 current_function_args_size = MAX (current_function_args_size,
4659 REG_PARM_STACK_SPACE (fndecl));
4660 #endif
4661 #endif
4662
4663 #ifdef PREFERRED_STACK_BOUNDARY
4664 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
4665
4666 current_function_args_size
4667 = ((current_function_args_size + STACK_BYTES - 1)
4668 / STACK_BYTES) * STACK_BYTES;
4669 #endif
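/* Worked example: with PREFERRED_STACK_BOUNDARY == 64, STACK_BYTES is 8,
   so an args size of 13 bytes is rounded up to 16 here.  */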
4670
4671 #ifdef ARGS_GROW_DOWNWARD
4672 current_function_arg_offset_rtx
4673 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4674 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4675 size_int (-stack_args_size.constant)),
4676 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4677 #else
4678 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4679 #endif
4680
4681 /* See how many bytes, if any, of its args a function should try to pop
4682 on return. */
4683
4684 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4685 current_function_args_size);
4686
4687 /* For stdarg.h function, save info about
4688 regs and stack space used by the named args. */
4689
4690 if (!hide_last_arg)
4691 current_function_args_info = args_so_far;
4692
4693 /* Set the rtx used for the function return value. Put this in its
4694 own variable so any optimizers that need this information don't have
4695 to include tree.h. Do this here so it gets done when an inlined
4696 function gets output. */
4697
4698 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4699 }
4700 \f
4701 /* Indicate whether REGNO is an incoming argument to the current function
4702 that was promoted to a wider mode. If so, return the RTX for the
4703 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4704 that REGNO is promoted from and whether the promotion was signed or
4705 unsigned. */
4706
4707 #ifdef PROMOTE_FUNCTION_ARGS
4708
4709 rtx
4710 promoted_input_arg (regno, pmode, punsignedp)
4711 int regno;
4712 enum machine_mode *pmode;
4713 int *punsignedp;
4714 {
4715 tree arg;
4716
4717 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4718 arg = TREE_CHAIN (arg))
4719 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4720 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4721 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4722 {
4723 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4724 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4725
4726 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4727 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4728 && mode != DECL_MODE (arg))
4729 {
4730 *pmode = DECL_MODE (arg);
4731 *punsignedp = unsignedp;
4732 return DECL_INCOMING_RTL (arg);
4733 }
4734 }
4735
4736 return 0;
4737 }
4738
4739 #endif
4740 \f
4741 /* Compute the size and offset from the start of the stacked arguments for a
4742 parm passed in mode PASSED_MODE and with type TYPE.
4743
4744 INITIAL_OFFSET_PTR points to the current offset into the stacked
4745 arguments.
4746
4747 The starting offset and size for this parm are returned in *OFFSET_PTR
4748 and *ARG_SIZE_PTR, respectively.
4749
4750 IN_REGS is non-zero if the argument will be passed in registers. It will
4751 never be set if REG_PARM_STACK_SPACE is not defined.
4752
4753 FNDECL is the function in which the argument was defined.
4754
4755 There are two types of rounding that are done. The first, controlled by
4756 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4757 list to be aligned to the specific boundary (in bits). This rounding
4758 affects the initial and starting offsets, but not the argument size.
4759
4760 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4761 optionally rounds the size of the parm to PARM_BOUNDARY. The
4762 initial offset is not affected by this rounding, while the size always
4763 is and the starting offset may be. */
4764
4765 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4766 initial_offset_ptr is positive because locate_and_pad_parm's
4767 callers pass in the total size of args so far as
4768 initial_offset_ptr. arg_size_ptr is always positive. */
4769
4770 void
4771 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4772 initial_offset_ptr, offset_ptr, arg_size_ptr)
4773 enum machine_mode passed_mode;
4774 tree type;
4775 int in_regs;
4776 tree fndecl;
4777 struct args_size *initial_offset_ptr;
4778 struct args_size *offset_ptr;
4779 struct args_size *arg_size_ptr;
4780 {
4781 tree sizetree
4782 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4783 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4784 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4785
4786 #ifdef REG_PARM_STACK_SPACE
4787 /* If we have found a stack parm before we reach the end of the
4788 area reserved for registers, skip that area. */
4789 if (! in_regs)
4790 {
4791 int reg_parm_stack_space = 0;
4792
4793 #ifdef MAYBE_REG_PARM_STACK_SPACE
4794 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4795 #else
4796 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4797 #endif
4798 if (reg_parm_stack_space > 0)
4799 {
4800 if (initial_offset_ptr->var)
4801 {
4802 initial_offset_ptr->var
4803 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4804 size_int (reg_parm_stack_space));
4805 initial_offset_ptr->constant = 0;
4806 }
4807 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4808 initial_offset_ptr->constant = reg_parm_stack_space;
4809 }
4810 }
4811 #endif /* REG_PARM_STACK_SPACE */
4812
4813 arg_size_ptr->var = 0;
4814 arg_size_ptr->constant = 0;
4815
4816 #ifdef ARGS_GROW_DOWNWARD
4817 if (initial_offset_ptr->var)
4818 {
4819 offset_ptr->constant = 0;
4820 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4821 initial_offset_ptr->var);
4822 }
4823 else
4824 {
4825 offset_ptr->constant = - initial_offset_ptr->constant;
4826 offset_ptr->var = 0;
4827 }
4828 if (where_pad != none
4829 && (TREE_CODE (sizetree) != INTEGER_CST
4830 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4831 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4832 SUB_PARM_SIZE (*offset_ptr, sizetree);
4833 if (where_pad != downward)
4834 pad_to_arg_alignment (offset_ptr, boundary);
4835 if (initial_offset_ptr->var)
4836 {
4837 arg_size_ptr->var = size_binop (MINUS_EXPR,
4838 size_binop (MINUS_EXPR,
4839 integer_zero_node,
4840 initial_offset_ptr->var),
4841 offset_ptr->var);
4842 }
4843 else
4844 {
4845 arg_size_ptr->constant = (- initial_offset_ptr->constant
4846 - offset_ptr->constant);
4847 }
4848 #else /* !ARGS_GROW_DOWNWARD */
4849 pad_to_arg_alignment (initial_offset_ptr, boundary);
4850 *offset_ptr = *initial_offset_ptr;
4851
4852 #ifdef PUSH_ROUNDING
4853 if (passed_mode != BLKmode)
4854 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4855 #endif
4856
4857 /* Pad_below needs the pre-rounded size to know how much to pad below,
4858 so this must be done before rounding up. */
4859 if (where_pad == downward
4860 /* However, BLKmode args passed in regs have their padding done elsewhere.
4861 The stack slot must be able to hold the entire register. */
4862 && !(in_regs && passed_mode == BLKmode))
4863 pad_below (offset_ptr, passed_mode, sizetree);
4864
4865 if (where_pad != none
4866 && (TREE_CODE (sizetree) != INTEGER_CST
4867 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4868 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4869
4870 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4871 #endif /* ARGS_GROW_DOWNWARD */
4872 }
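/* Worked example (upward-growing args, no reserved register area,
   illustrative numbers only): for a parm whose FUNCTION_ARG_BOUNDARY
   is 64 bits and whose size is 8 bytes, an incoming initial offset of
   4 is first padded to 8 by pad_to_arg_alignment; the parm then
   occupies bytes 8..15, so the caller's running offset advances to 16.  */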
4873
4874 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4875 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4876
4877 static void
4878 pad_to_arg_alignment (offset_ptr, boundary)
4879 struct args_size *offset_ptr;
4880 int boundary;
4881 {
4882 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4883
4884 if (boundary > BITS_PER_UNIT)
4885 {
4886 if (offset_ptr->var)
4887 {
4888 offset_ptr->var =
4889 #ifdef ARGS_GROW_DOWNWARD
4890 round_down
4891 #else
4892 round_up
4893 #endif
4894 (ARGS_SIZE_TREE (*offset_ptr),
4895 boundary / BITS_PER_UNIT);
4896 offset_ptr->constant = 0; /*?*/
4897 }
4898 else
4899 offset_ptr->constant =
4900 #ifdef ARGS_GROW_DOWNWARD
4901 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4902 #else
4903 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4904 #endif
4905 }
4906 }
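/* Worked example: with BOUNDARY == 32 bits (4 bytes), a constant offset
   of 6 becomes CEIL_ROUND (6, 4) == 8; under ARGS_GROW_DOWNWARD the
   offset is negative and is rounded with FLOOR_ROUND instead, so -6
   becomes -8.  */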
4907
4908 #ifndef ARGS_GROW_DOWNWARD
4909 static void
4910 pad_below (offset_ptr, passed_mode, sizetree)
4911 struct args_size *offset_ptr;
4912 enum machine_mode passed_mode;
4913 tree sizetree;
4914 {
4915 if (passed_mode != BLKmode)
4916 {
4917 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4918 offset_ptr->constant
4919 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4920 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4921 - GET_MODE_SIZE (passed_mode));
4922 }
4923 else
4924 {
4925 if (TREE_CODE (sizetree) != INTEGER_CST
4926 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4927 {
4928 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4929 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4930 /* Add it in. */
4931 ADD_PARM_SIZE (*offset_ptr, s2);
4932 SUB_PARM_SIZE (*offset_ptr, sizetree);
4933 }
4934 }
4935 }
4936 #endif
4937
4938 #ifdef ARGS_GROW_DOWNWARD
4939 static tree
4940 round_down (value, divisor)
4941 tree value;
4942 int divisor;
4943 {
4944 return size_binop (MULT_EXPR,
4945 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4946 size_int (divisor));
4947 }
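/* Worked example: applied to a size tree of 14 with divisor 8, the
   expression above yields FLOOR_DIV (14, 8) * 8 == 8 -- the tree-level
   analogue of the FLOOR_ROUND macro used for constant offsets.  */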
4948 #endif
4949 \f
4950 /* Walk the tree of blocks describing the binding levels within a function
4951 and warn about uninitialized variables.
4952 This is done after calling flow_analysis and before global_alloc
4953 clobbers the pseudo-regs to hard regs. */
4954
4955 void
4956 uninitialized_vars_warning (block)
4957 tree block;
4958 {
4959 register tree decl, sub;
4960 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4961 {
4962 if (TREE_CODE (decl) == VAR_DECL
4963 /* These warnings are unreliable for aggregates
4964 because assigning the fields one by one can fail to convince
4965 flow.c that the entire aggregate was initialized.
4966 Unions are troublesome because members may be shorter. */
4967 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4968 && DECL_RTL (decl) != 0
4969 && GET_CODE (DECL_RTL (decl)) == REG
4970 /* Global optimizations can make it difficult to determine if a
4971 particular variable has been initialized. However, a VAR_DECL
4972 with a nonzero DECL_INITIAL had an initializer, so do not
4973 claim it is potentially uninitialized.
4974
4975 We do not care about the actual value in DECL_INITIAL, so we do
4976 not worry that it may be a dangling pointer. */
4977 && DECL_INITIAL (decl) == NULL_TREE
4978 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4979 warning_with_decl (decl,
4980 "`%s' might be used uninitialized in this function");
4981 if (TREE_CODE (decl) == VAR_DECL
4982 && DECL_RTL (decl) != 0
4983 && GET_CODE (DECL_RTL (decl)) == REG
4984 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4985 warning_with_decl (decl,
4986 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4987 }
4988 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4989 uninitialized_vars_warning (sub);
4990 }
4991
4992 /* Do the appropriate part of uninitialized_vars_warning
4993 but for arguments instead of local variables. */
4994
4995 void
4996 setjmp_args_warning ()
4997 {
4998 register tree decl;
4999 for (decl = DECL_ARGUMENTS (current_function_decl);
5000 decl; decl = TREE_CHAIN (decl))
5001 if (DECL_RTL (decl) != 0
5002 && GET_CODE (DECL_RTL (decl)) == REG
5003 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5004 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5005 }
5006
5007 /* If this function calls setjmp, put all vars into the stack
5008 unless they were declared `register'. */
5009
5010 void
5011 setjmp_protect (block)
5012 tree block;
5013 {
5014 register tree decl, sub;
5015 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5016 if ((TREE_CODE (decl) == VAR_DECL
5017 || TREE_CODE (decl) == PARM_DECL)
5018 && DECL_RTL (decl) != 0
5019 && (GET_CODE (DECL_RTL (decl)) == REG
5020 || (GET_CODE (DECL_RTL (decl)) == MEM
5021 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5022 /* If this variable came from an inline function, it must be
5023 that its life doesn't overlap the setjmp. If there was a
5024 setjmp in the function, it would already be in memory. We
5025 must exclude such variables because their DECL_RTL might be
5026 set to strange things such as virtual_stack_vars_rtx. */
5027 && ! DECL_FROM_INLINE (decl)
5028 && (
5029 #ifdef NON_SAVING_SETJMP
5030 /* If longjmp doesn't restore the registers,
5031 don't put anything in them. */
5032 NON_SAVING_SETJMP
5033 ||
5034 #endif
5035 ! DECL_REGISTER (decl)))
5036 put_var_into_stack (decl);
5037 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5038 setjmp_protect (sub);
5039 }
5040 \f
5041 /* Like the previous function, but for args instead of local variables. */
5042
5043 void
5044 setjmp_protect_args ()
5045 {
5046 register tree decl;
5047 for (decl = DECL_ARGUMENTS (current_function_decl);
5048 decl; decl = TREE_CHAIN (decl))
5049 if ((TREE_CODE (decl) == VAR_DECL
5050 || TREE_CODE (decl) == PARM_DECL)
5051 && DECL_RTL (decl) != 0
5052 && (GET_CODE (DECL_RTL (decl)) == REG
5053 || (GET_CODE (DECL_RTL (decl)) == MEM
5054 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5055 && (
5056 /* If longjmp doesn't restore the registers,
5057 don't put anything in them. */
5058 #ifdef NON_SAVING_SETJMP
5059 NON_SAVING_SETJMP
5060 ||
5061 #endif
5062 ! DECL_REGISTER (decl)))
5063 put_var_into_stack (decl);
5064 }
5065 \f
5066 /* Return the context-pointer register corresponding to DECL,
5067 or 0 if it does not need one. */
5068
5069 rtx
5070 lookup_static_chain (decl)
5071 tree decl;
5072 {
5073 tree context = decl_function_context (decl);
5074 tree link;
5075
5076 if (context == 0
5077 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5078 return 0;
5079
5080 /* We treat inline_function_decl as an alias for the current function
5081 because that is the inline function whose vars, types, etc.
5082 are being merged into the current function.
5083 See expand_inline_function. */
5084 if (context == current_function_decl || context == inline_function_decl)
5085 return virtual_stack_vars_rtx;
5086
5087 for (link = context_display; link; link = TREE_CHAIN (link))
5088 if (TREE_PURPOSE (link) == context)
5089 return RTL_EXPR_RTL (TREE_VALUE (link));
5090
5091 abort ();
5092 }
5093 \f
5094 /* Convert a stack slot address ADDR for variable VAR
5095 (from a containing function)
5096 into an address valid in this function (using a static chain). */
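/* Illustrative example: an ADDR of
       (plus (reg virtual_stack_vars_rtx) (const_int -12))
   decodes below into basereg == virtual_stack_vars_rtx and
   displacement == -12; the result is the containing function's frame
   base, fetched through the display, plus that same -12.  */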
5097
5098 rtx
5099 fix_lexical_addr (addr, var)
5100 rtx addr;
5101 tree var;
5102 {
5103 rtx basereg;
5104 HOST_WIDE_INT displacement;
5105 tree context = decl_function_context (var);
5106 struct function *fp;
5107 rtx base = 0;
5108
5109 /* If this is the present function, we need not do anything. */
5110 if (context == current_function_decl || context == inline_function_decl)
5111 return addr;
5112
5113 for (fp = outer_function_chain; fp; fp = fp->next)
5114 if (fp->decl == context)
5115 break;
5116
5117 if (fp == 0)
5118 abort ();
5119
5120 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5121 addr = XEXP (XEXP (addr, 0), 0);
5122
5123 /* Decode given address as base reg plus displacement. */
5124 if (GET_CODE (addr) == REG)
5125 basereg = addr, displacement = 0;
5126 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5127 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5128 else
5129 abort ();
5130
5131 /* We accept vars reached via the containing function's
5132 incoming arg pointer and via its stack variables pointer. */
5133 if (basereg == fp->internal_arg_pointer)
5134 {
5135 /* If reached via arg pointer, get the arg pointer value
5136 out of that function's stack frame.
5137
5138 There are two cases: If a separate ap is needed, allocate a
5139 slot in the outer function for it and dereference it that way.
5140 This is correct even if the real ap is actually a pseudo.
5141 Otherwise, just adjust the offset from the frame pointer to
5142 compensate. */
5143
5144 #ifdef NEED_SEPARATE_AP
5145 rtx addr;
5146
5147 if (fp->arg_pointer_save_area == 0)
5148 fp->arg_pointer_save_area
5149 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5150
5151 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5152 addr = memory_address (Pmode, addr);
5153
5154 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5155 #else
5156 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5157 base = lookup_static_chain (var);
5158 #endif
5159 }
5160
5161 else if (basereg == virtual_stack_vars_rtx)
5162 {
5163 /* This is the same code as lookup_static_chain, duplicated here to
5164 avoid an extra call to decl_function_context. */
5165 tree link;
5166
5167 for (link = context_display; link; link = TREE_CHAIN (link))
5168 if (TREE_PURPOSE (link) == context)
5169 {
5170 base = RTL_EXPR_RTL (TREE_VALUE (link));
5171 break;
5172 }
5173 }
5174
5175 if (base == 0)
5176 abort ();
5177
5178 /* Use same offset, relative to appropriate static chain or argument
5179 pointer. */
5180 return plus_constant (base, displacement);
5181 }
5182 \f
5183 /* Return the address of the trampoline for entering nested fn FUNCTION.
5184 If necessary, allocate a trampoline (in the stack frame)
5185 and emit rtl to initialize its contents (at entry to this function). */
5186
5187 rtx
5188 trampoline_address (function)
5189 tree function;
5190 {
5191 tree link;
5192 tree rtlexp;
5193 rtx tramp;
5194 struct function *fp;
5195 tree fn_context;
5196
5197 /* Find an existing trampoline and return it. */
5198 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5199 if (TREE_PURPOSE (link) == function)
5200 return
5201 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5202
5203 for (fp = outer_function_chain; fp; fp = fp->next)
5204 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5205 if (TREE_PURPOSE (link) == function)
5206 {
5207 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5208 function);
5209 return round_trampoline_addr (tramp);
5210 }
5211
5212 /* None exists; we must make one. */
5213
5214 /* Find the `struct function' for the function containing FUNCTION. */
5215 fp = 0;
5216 fn_context = decl_function_context (function);
5217 if (fn_context != current_function_decl
5218 && fn_context != inline_function_decl)
5219 for (fp = outer_function_chain; fp; fp = fp->next)
5220 if (fp->decl == fn_context)
5221 break;
5222
5223 /* Allocate run-time space for this trampoline
5224 (usually in the defining function's stack frame). */
5225 #ifdef ALLOCATE_TRAMPOLINE
5226 tramp = ALLOCATE_TRAMPOLINE (fp);
5227 #else
5228 /* If rounding is needed, allocate extra space
5229 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5230 #ifdef TRAMPOLINE_ALIGNMENT
5231 #define TRAMPOLINE_REAL_SIZE \
5232 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5233 #else
5234 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5235 #endif
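/* For example (illustrative values): with TRAMPOLINE_SIZE == 10,
   TRAMPOLINE_ALIGNMENT == 32 and BITS_PER_UNIT == 8,
   TRAMPOLINE_REAL_SIZE is 10 + 4 - 1 == 13 bytes, leaving at least
   10 usable bytes after the start address is rounded up to a 4-byte
   boundary.  */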
5236 if (fp != 0)
5237 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5238 else
5239 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5240 #endif
5241
5242 /* Record the trampoline for reuse and note it for later initialization
5243 by expand_function_end. */
5244 if (fp != 0)
5245 {
5246 push_obstacks (fp->function_maybepermanent_obstack,
5247 fp->function_maybepermanent_obstack);
5248 rtlexp = make_node (RTL_EXPR);
5249 RTL_EXPR_RTL (rtlexp) = tramp;
5250 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5251 pop_obstacks ();
5252 }
5253 else
5254 {
5255 /* Make the RTL_EXPR node temporary, not momentary, so that the
5256 trampoline_list doesn't become garbage. */
5257 int momentary = suspend_momentary ();
5258 rtlexp = make_node (RTL_EXPR);
5259 resume_momentary (momentary);
5260
5261 RTL_EXPR_RTL (rtlexp) = tramp;
5262 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5263 }
5264
5265 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5266 return round_trampoline_addr (tramp);
5267 }
5268
5269 /* Given a trampoline address,
5270 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
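/* Illustrative arithmetic, assuming TRAMPOLINE_ALIGNMENT == 32 and
   BITS_PER_UNIT == 8: the expand_binop calls below compute, at run
   time, tramp = (tramp + 3) & -4, so 0x1001 rounds up to 0x1004.  */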
5271
5272 static rtx
5273 round_trampoline_addr (tramp)
5274 rtx tramp;
5275 {
5276 #ifdef TRAMPOLINE_ALIGNMENT
5277 /* Round address up to desired boundary. */
5278 rtx temp = gen_reg_rtx (Pmode);
5279 temp = expand_binop (Pmode, add_optab, tramp,
5280 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5281 temp, 0, OPTAB_LIB_WIDEN);
5282 tramp = expand_binop (Pmode, and_optab, temp,
5283 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5284 temp, 0, OPTAB_LIB_WIDEN);
5285 #endif
5286 return tramp;
5287 }
5288 \f
5289 /* The functions identify_blocks and reorder_blocks provide a way to
5290 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5291 duplicate portions of the RTL code. Call identify_blocks before
5292 changing the RTL, and call reorder_blocks after. */
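/* A minimal usage sketch (illustrative only):

       tree *vec = identify_blocks (block, insns);
       ... reshuffle or duplicate portions of the insn chain ...
       block = reorder_blocks (vec, block, insns);
       free (vec);

   identify_blocks numbers the block notes; reorder_blocks uses those
   numbers to rebuild the BLOCK tree in the new insn order.  */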
5293
5294 /* Put all this function's BLOCK nodes, including those that are chained
5295 onto the first block, into a vector, and return it.
5296 Also store, in each NOTE for the beginning or end of a block,
5297 the index of that block in the vector.
5298 The arguments are BLOCK, the chain of top-level blocks of the function,
5299 and INSNS, the insn chain of the function. */
5300
5301 tree *
5302 identify_blocks (block, insns)
5303 tree block;
5304 rtx insns;
5305 {
5306 int n_blocks;
5307 tree *block_vector;
5308 int *block_stack;
5309 int depth = 0;
5310 int next_block_number = 1;
5311 int current_block_number = 1;
5312 rtx insn;
5313
5314 if (block == 0)
5315 return 0;
5316
5317 n_blocks = all_blocks (block, 0);
5318 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5319 block_stack = (int *) alloca (n_blocks * sizeof (int));
5320
5321 all_blocks (block, block_vector);
5322
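/* Walk the insns, keeping a stack of enclosing block numbers:
   each BLOCK_BEG note is assigned the next sequential number, and
   each BLOCK_END note restores its parent's number.  */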
5323 for (insn = insns; insn; insn = NEXT_INSN (insn))
5324 if (GET_CODE (insn) == NOTE)
5325 {
5326 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5327 {
5328 block_stack[depth++] = current_block_number;
5329 current_block_number = next_block_number;
5330 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5331 }
5332 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5333 {
5334 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5335 current_block_number = block_stack[--depth];
5336 }
5337 }
5338
5339 if (n_blocks != next_block_number)
5340 abort ();
5341
5342 return block_vector;
5343 }
5344
5345 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5346 and a revised instruction chain, rebuild the tree structure
5347 of BLOCK nodes to correspond to the new order of RTL.
5348 The new block tree is inserted below BLOCK.
5349 Returns the current top-level block. */
5350
5351 tree
5352 reorder_blocks (block_vector, block, insns)
5353 tree *block_vector;
5354 tree block;
5355 rtx insns;
5356 {
5357 tree current_block = block;
5358 rtx insn;
5359
5360 if (block_vector == 0)
5361 return block;
5362
5363 /* Prune the old trees away, so that they don't get in the way. */
5364 BLOCK_SUBBLOCKS (current_block) = 0;
5365 BLOCK_CHAIN (current_block) = 0;
5366
5367 for (insn = insns; insn; insn = NEXT_INSN (insn))
5368 if (GET_CODE (insn) == NOTE)
5369 {
5370 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5371 {
5372 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5373 /* If we have seen this block before, copy it. */
5374 if (TREE_ASM_WRITTEN (block))
5375 block = copy_node (block);
5376 BLOCK_SUBBLOCKS (block) = 0;
5377 TREE_ASM_WRITTEN (block) = 1;
5378 BLOCK_SUPERCONTEXT (block) = current_block;
5379 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5380 BLOCK_SUBBLOCKS (current_block) = block;
5381 current_block = block;
5382 NOTE_SOURCE_FILE (insn) = 0;
5383 }
5384 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5385 {
5386 BLOCK_SUBBLOCKS (current_block)
5387 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5388 current_block = BLOCK_SUPERCONTEXT (current_block);
5389 NOTE_SOURCE_FILE (insn) = 0;
5390 }
5391 }
5392
5393 BLOCK_SUBBLOCKS (current_block)
5394 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5395 return current_block;
5396 }
5397
5398 /* Reverse the order of elements in the chain T of blocks,
5399 and return the new head of the chain (old last element). */
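/* E.g. a chain A -> B -> C comes back as C -> B -> A.  */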
5400
5401 static tree
5402 blocks_nreverse (t)
5403 tree t;
5404 {
5405 register tree prev = 0, decl, next;
5406 for (decl = t; decl; decl = next)
5407 {
5408 next = BLOCK_CHAIN (decl);
5409 BLOCK_CHAIN (decl) = prev;
5410 prev = decl;
5411 }
5412 return prev;
5413 }
5414
5415 /* Count the blocks in the chain starting with BLOCK, together with
5416 all their subblocks, and store them into the vector VECTOR.
5417 Also clear TREE_ASM_WRITTEN in all blocks. */
5418
5419 static int
5420 all_blocks (block, vector)
5421 tree block;
5422 tree *vector;
5423 {
5424 int n_blocks = 0;
5425
5426 while (block)
5427 {
5428 TREE_ASM_WRITTEN (block) = 0;
5429
5430 /* Record this block. */
5431 if (vector)
5432 vector[n_blocks] = block;
5433
5434 ++n_blocks;
5435
5436 /* Record the subblocks, and their subblocks... */
5437 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5438 vector ? vector + n_blocks : 0);
5439 block = BLOCK_CHAIN (block);
5440 }
5441
5442 return n_blocks;
5443 }
5444 \f
5445 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5446 and initialize static variables for generating RTL for the statements
5447 of the function. */
5448
5449 void
5450 init_function_start (subr, filename, line)
5451 tree subr;
5452 char *filename;
5453 int line;
5454 {
5455 init_stmt_for_function ();
5456
5457 cse_not_expected = ! optimize;
5458
5459 /* Caller save not needed yet. */
5460 caller_save_needed = 0;
5461
5462 /* No stack slots have been made yet. */
5463 stack_slot_list = 0;
5464
5465 /* There is no stack slot for handling nonlocal gotos. */
5466 nonlocal_goto_handler_slots = 0;
5467 nonlocal_goto_stack_level = 0;
5468
5469 /* No labels have been declared for nonlocal use. */
5470 nonlocal_labels = 0;
5471
5472 /* No function calls so far in this function. */
5473 function_call_count = 0;
5474
5475 /* No parm regs have been allocated.
5476 (This is important for output_inline_function.) */
5477 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5478
5479 /* Initialize the RTL mechanism. */
5480 init_emit ();
5481
5482 /* Initialize the queue of pending postincrement and postdecrements,
5483 and some other info in expr.c. */
5484 init_expr ();
5485
5486 /* We haven't done register allocation yet. */
5487 reg_renumber = 0;
5488
5489 init_const_rtx_hash_table ();
5490
5491 current_function_name = (*decl_printable_name) (subr, 2);
5492
5493 /* Nonzero if this is a nested function that uses a static chain. */
5494
5495 current_function_needs_context
5496 = (decl_function_context (current_function_decl) != 0
5497 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5498
5499 /* Set if a call to setjmp is seen. */
5500 current_function_calls_setjmp = 0;
5501
5502 /* Set if a call to longjmp is seen. */
5503 current_function_calls_longjmp = 0;
5504
5505 current_function_calls_alloca = 0;
5506 current_function_has_nonlocal_label = 0;
5507 current_function_has_nonlocal_goto = 0;
5508 current_function_contains_functions = 0;
5509 current_function_sp_is_unchanging = 0;
5510 current_function_is_thunk = 0;
5511
5512 current_function_returns_pcc_struct = 0;
5513 current_function_returns_struct = 0;
5514 current_function_epilogue_delay_list = 0;
5515 current_function_uses_const_pool = 0;
5516 current_function_uses_pic_offset_table = 0;
5517 current_function_cannot_inline = 0;
5518
5519 /* We have not yet needed to make a label to jump to for tail-recursion. */
5520 tail_recursion_label = 0;
5521
5522 /* We haven't had a need to make a save area for ap yet. */
5523
5524 arg_pointer_save_area = 0;
5525
5526 /* No stack slots allocated yet. */
5527 frame_offset = 0;
5528
5529 /* No SAVE_EXPRs in this function yet. */
5530 save_expr_regs = 0;
5531
5532 /* No RTL_EXPRs in this function yet. */
5533 rtl_expr_chain = 0;
5534
5535 /* Set up to allocate temporaries. */
5536 init_temp_slots ();
5537
5538 /* Within function body, compute a type's size as soon as it is laid out. */
5539 immediate_size_expand++;
5540
5541 /* We haven't made any trampolines for this function yet. */
5542 trampoline_list = 0;
5543
5544 init_pending_stack_adjust ();
5545 inhibit_defer_pop = 0;
5546
5547 current_function_outgoing_args_size = 0;
5548
5549 /* Prevent ever trying to delete the first instruction of a function.
5550 Also tell final how to output a linenum before the function prologue.
5551 Note linenums could be missing, e.g. when compiling a Java .class file. */
5552 if (line > 0)
5553 emit_line_note (filename, line);
5554
5555 /* Make sure first insn is a note even if we don't want linenums.
5556 This makes sure the first insn will never be deleted.
5557 Also, final expects a note to appear there. */
5558 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5559
5560 /* Set flags used by final.c. */
5561 if (aggregate_value_p (DECL_RESULT (subr)))
5562 {
5563 #ifdef PCC_STATIC_STRUCT_RETURN
5564 current_function_returns_pcc_struct = 1;
5565 #endif
5566 current_function_returns_struct = 1;
5567 }
5568
5569 /* Warn if this value is an aggregate type,
5570 regardless of which calling convention we are using for it. */
5571 if (warn_aggregate_return
5572 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5573 warning ("function returns an aggregate");
5574
5575 current_function_returns_pointer
5576 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5577
5578 /* Indicate that we need to distinguish between the return value of the
5579 present function and the return value of a function being called. */
5580 rtx_equal_function_value_matters = 1;
5581
5582 /* Indicate that we have not instantiated virtual registers yet. */
5583 virtuals_instantiated = 0;
5584
5585 /* Indicate we have no need of a frame pointer yet. */
5586 frame_pointer_needed = 0;
5587
5588 /* By default assume not varargs or stdarg. */
5589 current_function_varargs = 0;
5590 current_function_stdarg = 0;
5591 }
5592
5593 /* Indicate that the current function uses extra args
5594 not explicitly mentioned in the argument list in any fashion. */
5595
5596 void
5597 mark_varargs ()
5598 {
5599 current_function_varargs = 1;
5600 }
5601
5602 /* Expand a call to __main at the beginning of a possible main function. */
5603
5604 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5605 #undef HAS_INIT_SECTION
5606 #define HAS_INIT_SECTION
5607 #endif
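/* On targets without an init section this amounts to emitting, in
   effect, a call
       __main ();
   ahead of the user's code, using the NAME__MAIN symbol defined
   earlier in this file (illustrative description).  */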
5608
5609 void
5610 expand_main_function ()
5611 {
5612 #if !defined (HAS_INIT_SECTION)
5613 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5614 VOIDmode, 0);
5615 #endif /* not HAS_INIT_SECTION */
5616 }
5617 \f
5618 extern struct obstack permanent_obstack;
5619
5620 /* Start the RTL for a new function, and set variables used for
5621 emitting RTL.
5622 SUBR is the FUNCTION_DECL node.
5623 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5624 the function's parameters, which must be run at any return statement. */
5625
5626 void
5627 expand_function_start (subr, parms_have_cleanups)
5628 tree subr;
5629 int parms_have_cleanups;
5630 {
5631 register int i;
5632 tree tem;
5633 rtx last_ptr = NULL_RTX;
5634
5635 /* Make sure volatile mem refs aren't considered
5636 valid operands of arithmetic insns. */
5637 init_recog_no_volatile ();
5638
5639 /* Set this before generating any memory accesses. */
5640 current_function_check_memory_usage
5641 = (flag_check_memory_usage
5642 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5643
5644 current_function_instrument_entry_exit
5645 = (flag_instrument_function_entry_exit
5646 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5647
5648 /* If function gets a static chain arg, store it in the stack frame.
5649 Do this first, so it gets the first stack slot offset. */
5650 if (current_function_needs_context)
5651 {
5652 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5653
5654 /* Delay copying static chain if it is not a register to avoid
5655 conflicts with regs used for parameters. */
5656 if (! SMALL_REGISTER_CLASSES
5657 || GET_CODE (static_chain_incoming_rtx) == REG)
5658 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5659 }
5660
5661 /* If the parameters of this function need cleaning up, get a label
5662 for the beginning of the code which executes those cleanups. This must
5663 be done before doing anything with return_label. */
5664 if (parms_have_cleanups)
5665 cleanup_label = gen_label_rtx ();
5666 else
5667 cleanup_label = 0;
5668
5669 /* Make the label for return statements to jump to, if this machine
5670 does not have a one-instruction return and uses an epilogue,
5671 or if it returns a structure, or if it has parm cleanups. */
5672 #ifdef HAVE_return
5673 if (cleanup_label == 0 && HAVE_return
5674 && ! current_function_instrument_entry_exit
5675 && ! current_function_returns_pcc_struct
5676 && ! (current_function_returns_struct && ! optimize))
5677 return_label = 0;
5678 else
5679 return_label = gen_label_rtx ();
5680 #else
5681 return_label = gen_label_rtx ();
5682 #endif
5683
5684 /* Initialize rtx used to return the value. */
5685 /* Do this before assign_parms so that we copy the struct value address
5686 before any library calls that assign parms might generate. */
5687
5688 /* Decide whether to return the value in memory or in a register. */
5689 if (aggregate_value_p (DECL_RESULT (subr)))
5690 {
5691 /* Returning something that won't go in a register. */
5692 register rtx value_address = 0;
5693
5694 #ifdef PCC_STATIC_STRUCT_RETURN
5695 if (current_function_returns_pcc_struct)
5696 {
5697 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5698 value_address = assemble_static_space (size);
5699 }
5700 else
5701 #endif
5702 {
5703 /* Expect to be passed the address of a place to store the value.
5704 If it is passed as an argument, assign_parms will take care of
5705 it. */
5706 if (struct_value_incoming_rtx)
5707 {
5708 value_address = gen_reg_rtx (Pmode);
5709 emit_move_insn (value_address, struct_value_incoming_rtx);
5710 }
5711 }
5712 if (value_address)
5713 {
5714 DECL_RTL (DECL_RESULT (subr))
5715 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5716 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5717 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5718 }
5719 }
5720 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5721 /* If return mode is void, this decl rtl should not be used. */
5722 DECL_RTL (DECL_RESULT (subr)) = 0;
5723 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5724 {
5725 /* If function will end with cleanup code for parms,
5726 compute the return value into a pseudo reg,
5727 which we will copy into the true return register
5728 after the cleanups are done. */
5729
5730 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5731
5732 #ifdef PROMOTE_FUNCTION_RETURN
5733 tree type = TREE_TYPE (DECL_RESULT (subr));
5734 int unsignedp = TREE_UNSIGNED (type);
5735
5736 mode = promote_mode (type, mode, &unsignedp, 1);
5737 #endif
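/* E.g. on a port that defines PROMOTE_FUNCTION_RETURN, a `short'
   result may be computed here in a full-word pseudo (illustrative).  */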
5738
5739 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5740 }
5741 else
5742 /* Scalar, returned in a register. */
5743 {
5744 #ifdef FUNCTION_OUTGOING_VALUE
5745 DECL_RTL (DECL_RESULT (subr))
5746 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5747 #else
5748 DECL_RTL (DECL_RESULT (subr))
5749 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5750 #endif
5751
5752 /* Mark this reg as the function's return value. */
5753 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5754 {
5755 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5756 /* Needed because we may need to move this to memory
5757 in case it's a named return value whose address is taken. */
5758 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5759 }
5760 }
5761
5762 /* Initialize rtx for parameters and local variables.
5763 In some cases this requires emitting insns. */
5764
5765 assign_parms (subr, 0);
5766
5767 /* Copy the static chain now if it wasn't a register. The delay is to
5768 avoid conflicts with the parameter passing registers. */
5769
5770 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5771 if (GET_CODE (static_chain_incoming_rtx) != REG)
5772 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5773
5774 /* The following was moved from init_function_start.
5775 The move is supposed to make sdb output more accurate. */
5776 /* Indicate the beginning of the function body,
5777 as opposed to parm setup. */
5778 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5779
5780 /* If doing stupid allocation, mark parms as born here. */
5781
5782 if (GET_CODE (get_last_insn ()) != NOTE)
5783 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5784 parm_birth_insn = get_last_insn ();
5785
5786 if (obey_regdecls)
5787 {
5788 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5789 use_variable (regno_reg_rtx[i]);
5790
5791 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5792 use_variable (current_function_internal_arg_pointer);
5793 }
5794
5795 context_display = 0;
5796 if (current_function_needs_context)
5797 {
5798 /* Fetch static chain values for containing functions. */
5799 tem = decl_function_context (current_function_decl);
5800 /* If not doing stupid register allocation, copy the static chain
5801 pointer into a pseudo. If we have small register classes, copy
5802 the value from memory if static_chain_incoming_rtx is a REG. If
5803 we do stupid register allocation, we use the stack address
5804 generated above. */
5805 if (tem && ! obey_regdecls)
5806 {
5807 /* If the static chain originally came in a register, put it back
5808 there, then move it out in the next insn. The reason for
5809 this peculiar code is to satisfy function integration. */
5810 if (SMALL_REGISTER_CLASSES
5811 && GET_CODE (static_chain_incoming_rtx) == REG)
5812 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5813 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5814 }
5815
5816 while (tem)
5817 {
5818 tree rtlexp = make_node (RTL_EXPR);
5819
5820 RTL_EXPR_RTL (rtlexp) = last_ptr;
5821 context_display = tree_cons (tem, rtlexp, context_display);
5822 tem = decl_function_context (tem);
5823 if (tem == 0)
5824 break;
5825 /* Chain through stack frames, assuming pointer to next lexical frame
5826 is found at the place we always store it. */
5827 #ifdef FRAME_GROWS_DOWNWARD
5828 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5829 #endif
5830 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5831 memory_address (Pmode, last_ptr)));
5832
5833 /* If we are not optimizing, ensure that we know that this
5834 piece of context is live over the entire function. */
5835 if (! optimize)
5836 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5837 save_expr_regs);
5838 }
5839 }
5840
5841 if (current_function_instrument_entry_exit)
5842 {
5843 rtx fun = DECL_RTL (current_function_decl);
5844 if (GET_CODE (fun) == MEM)
5845 fun = XEXP (fun, 0);
5846 else
5847 abort ();
5848 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
5849 fun, Pmode,
5850 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5851 0,
5852 hard_frame_pointer_rtx),
5853 Pmode);
5854 }
5855
5856 /* After the display initializations is where the tail-recursion label
5857 should go, if we end up needing one. Ensure we have a NOTE here
5858 since some things (like trampolines) get placed before this. */
5859 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5860
5861 /* Evaluate now the sizes of any types declared among the arguments. */
5862 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5863 {
5864 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5865 EXPAND_MEMORY_USE_BAD);
5866 /* Flush the queue in case this parameter declaration has
5867 side-effects. */
5868 emit_queue ();
5869 }
5870
5871 /* Make sure there is a line number after the function entry setup code. */
5872 force_next_line_note ();
5873 }
5874 \f
5875 /* Generate RTL for the end of the current function.
5876 FILENAME and LINE are the current position in the source file.
5877
5878 It is up to language-specific callers to do cleanups for parameters,
5879 or else supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5880
5881 void
5882 expand_function_end (filename, line, end_bindings)
5883 char *filename;
5884 int line;
5885 int end_bindings;
5886 {
5887 register int i;
5888 tree link;
5889
5890 #ifdef TRAMPOLINE_TEMPLATE
5891 static rtx initial_trampoline;
5892 #endif
5893
5894 #ifdef NON_SAVING_SETJMP
5895 /* Don't put any variables in registers if we call setjmp
5896 on a machine that fails to restore the registers. */
5897 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5898 {
5899 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5900 setjmp_protect (DECL_INITIAL (current_function_decl));
5901
5902 setjmp_protect_args ();
5903 }
5904 #endif
5905
5906 /* Save the argument pointer if a save area was made for it. */
5907 if (arg_pointer_save_area)
5908 {
5909 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5910 emit_insn_before (x, tail_recursion_reentry);
5911 }
5912
5913 /* Initialize any trampolines required by this function. */
5914 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5915 {
5916 tree function = TREE_PURPOSE (link);
5917 rtx context = lookup_static_chain (function);
5918 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5919 #ifdef TRAMPOLINE_TEMPLATE
5920 rtx blktramp;
5921 #endif
5922 rtx seq;
5923
5924 #ifdef TRAMPOLINE_TEMPLATE
5925 /* First make sure this compilation has a template for
5926 initializing trampolines. */
5927 if (initial_trampoline == 0)
5928 {
5929 end_temporary_allocation ();
5930 initial_trampoline
5931 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5932 resume_temporary_allocation ();
5933 }
5934 #endif
5935
5936 /* Generate insns to initialize the trampoline. */
5937 start_sequence ();
5938 tramp = round_trampoline_addr (XEXP (tramp, 0));
5939 #ifdef TRAMPOLINE_TEMPLATE
5940 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5941 emit_block_move (blktramp, initial_trampoline,
5942 GEN_INT (TRAMPOLINE_SIZE),
5943 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5944 #endif
5945 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5946 seq = get_insns ();
5947 end_sequence ();
5948
5949 /* Put those insns at entry to the containing function (this one). */
5950 emit_insns_before (seq, tail_recursion_reentry);
5951 }
5952
5953 /* If we are doing stack checking and this function makes calls,
5954 do a stack probe at the start of the function to ensure we have enough
5955 space for another stack frame. */
5956 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5957 {
5958 rtx insn, seq;
5959
5960 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5961 if (GET_CODE (insn) == CALL_INSN)
5962 {
5963 start_sequence ();
5964 probe_stack_range (STACK_CHECK_PROTECT,
5965 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5966 seq = get_insns ();
5967 end_sequence ();
5968 emit_insns_before (seq, tail_recursion_reentry);
5969 break;
5970 }
5971 }
5972
5973 /* Warn about unused parms if extra warnings were specified. */
5974 if (warn_unused && extra_warnings)
5975 {
5976 tree decl;
5977
5978 for (decl = DECL_ARGUMENTS (current_function_decl);
5979 decl; decl = TREE_CHAIN (decl))
5980 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5981 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5982 warning_with_decl (decl, "unused parameter `%s'");
5983 }
5984
5985 /* Delete handlers for nonlocal gotos if nothing uses them. */
5986 if (nonlocal_goto_handler_slots != 0
5987 && ! current_function_has_nonlocal_label)
5988 delete_handlers ();
5989
5990 /* End any sequences that failed to be closed due to syntax errors. */
5991 while (in_sequence_p ())
5992 end_sequence ();
5993
5994 /* Outside function body, can't compute type's actual size
5995 until next function's body starts. */
5996 immediate_size_expand--;
5997
5998 /* If doing stupid register allocation,
5999 mark register parms as dying here. */
6000
6001 if (obey_regdecls)
6002 {
6003 rtx tem;
6004 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6005 use_variable (regno_reg_rtx[i]);
6006
6007 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6008
6009 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6010 {
6011 use_variable (XEXP (tem, 0));
6012 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6013 }
6014
6015 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6016 use_variable (current_function_internal_arg_pointer);
6017 }
6018
6019 clear_pending_stack_adjust ();
6020 do_pending_stack_adjust ();
6021
6022 /* Mark the end of the function body.
6023 If control reaches this insn, the function can drop through
6024 without returning a value. */
6025 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6026
6027 /* Must mark the last line number note in the function, so that the test
6028 coverage code can avoid counting the last line twice. This just tells
6029 the code to ignore the immediately following line note, since there
6030 already exists a copy of this note somewhere above. This line number
6031 note is still needed for debugging though, so we can't delete it. */
6032 if (flag_test_coverage)
6033 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6034
6035 /* Output a linenumber for the end of the function.
6036 SDB depends on this. */
6037 emit_line_note_force (filename, line);
6038
6039 /* Output the label for the actual return from the function,
6040 if one is expected. This happens either because a function epilogue
6041 is used instead of a return instruction, or because a return was done
6042 with a goto in order to run local cleanups, or because of pcc-style
6043 structure returning. */
6044
6045 if (return_label)
6046 emit_label (return_label);
6047
6048 /* C++ uses this. */
6049 if (end_bindings)
6050 expand_end_bindings (0, 0, 0);
6051
6052 /* Now handle any leftover exception regions that may have been
6053 created for the parameters. */
6054 {
6055 rtx last = get_last_insn ();
6056 rtx label;
6057
6058 expand_leftover_cleanups ();
6059
6060 /* If the above emitted any code, make sure we jump around it. */
6061 if (last != get_last_insn ())
6062 {
6063 label = gen_label_rtx ();
6064 last = emit_jump_insn_after (gen_jump (label), last);
6065 last = emit_barrier_after (last);
6066 emit_label (label);
6067 }
6068 }
6069
6070 if (current_function_instrument_entry_exit)
6071 {
6072 rtx fun = DECL_RTL (current_function_decl);
6073 if (GET_CODE (fun) == MEM)
6074 fun = XEXP (fun, 0);
6075 else
6076 abort ();
6077 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6078 fun, Pmode,
6079 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6080 0,
6081 hard_frame_pointer_rtx),
6082 Pmode);
6083 }
6084
6085 /* If we had calls to alloca, and this machine needs
6086 an accurate stack pointer to exit the function,
6087 insert some code to save and restore the stack pointer. */
6088 #ifdef EXIT_IGNORE_STACK
6089 if (! EXIT_IGNORE_STACK)
6090 #endif
6091 if (current_function_calls_alloca)
6092 {
6093 rtx tem = 0;
6094
6095 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6096 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6097 }
6098
6099 /* If scalar return value was computed in a pseudo-reg,
6100 copy that to the hard return register. */
6101 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6102 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6103 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6104 >= FIRST_PSEUDO_REGISTER))
6105 {
6106 rtx real_decl_result;
6107
6108 #ifdef FUNCTION_OUTGOING_VALUE
6109 real_decl_result
6110 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6111 current_function_decl);
6112 #else
6113 real_decl_result
6114 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6115 current_function_decl);
6116 #endif
6117 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6118 /* If this is a BLKmode structure being returned in registers, then use
6119 the mode computed in expand_return. */
6120 if (GET_MODE (real_decl_result) == BLKmode)
6121 PUT_MODE (real_decl_result,
6122 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6123 emit_move_insn (real_decl_result,
6124 DECL_RTL (DECL_RESULT (current_function_decl)));
6125 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6126
6127 /* The delay slot scheduler assumes that current_function_return_rtx
6128 holds the hard register containing the return value, not a temporary
6129 pseudo. */
6130 current_function_return_rtx = real_decl_result;
6131 }
6132
6133 /* If returning a structure, arrange to return the address of the value
6134 in a place where debuggers expect to find it.
6135
6136 If returning a structure PCC style,
6137 the caller also depends on this value.
6138 And current_function_returns_pcc_struct is not necessarily set. */
6139 if (current_function_returns_struct
6140 || current_function_returns_pcc_struct)
6141 {
6142 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6143 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6144 #ifdef FUNCTION_OUTGOING_VALUE
6145 rtx outgoing
6146 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6147 current_function_decl);
6148 #else
6149 rtx outgoing
6150 = FUNCTION_VALUE (build_pointer_type (type),
6151 current_function_decl);
6152 #endif
6153
6154 /* Mark this as a function return value so integrate will delete the
6155 assignment and USE below when inlining this function. */
6156 REG_FUNCTION_VALUE_P (outgoing) = 1;
6157
6158 emit_move_insn (outgoing, value_address);
6159 use_variable (outgoing);
6160 }
6161
6162 /* If this is an implementation of __throw, do what's necessary to
6163 communicate between __builtin_eh_return and the epilogue. */
6164 expand_eh_return ();
6165
6166 /* Output a return insn if we are using one.
6167 Otherwise, let the rtl chain end here, to drop through
6168 into the epilogue. */
6169
6170 #ifdef HAVE_return
6171 if (HAVE_return)
6172 {
6173 emit_jump_insn (gen_return ());
6174 emit_barrier ();
6175 }
6176 #endif
6177
6178 /* Fix up any gotos that jumped out to the outermost
6179 binding level of the function.
6180 Must follow emitting RETURN_LABEL. */
6181
6182 /* If you have any cleanups to do at this point,
6183 and they need to create temporary variables,
6184 then you will lose. */
6185 expand_fixups (get_insns ());
6186 }
6187 \f
6188 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
6189
6190 static int *prologue;
6191 static int *epilogue;
6192
6193 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6194 or a single insn). */
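/* For instance (illustrative), a three-insn SEQUENCE whose insns have
   UIDs 10, 11 and 12 yields the zero-terminated vector {10, 11, 12, 0}.  */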
6195
6196 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6197 static int *
6198 record_insns (insns)
6199 rtx insns;
6200 {
6201 int *vec;
6202
6203 if (GET_CODE (insns) == SEQUENCE)
6204 {
6205 int len = XVECLEN (insns, 0);
6206 vec = (int *) oballoc ((len + 1) * sizeof (int));
6207 vec[len] = 0;
6208 while (--len >= 0)
6209 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6210 }
6211 else
6212 {
6213 vec = (int *) oballoc (2 * sizeof (int));
6214 vec[0] = INSN_UID (insns);
6215 vec[1] = 0;
6216 }
6217 return vec;
6218 }
6219
6220 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6221
6222 static int
6223 contains (insn, vec)
6224 rtx insn;
6225 int *vec;
6226 {
6227 register int i, j;
6228
6229 if (GET_CODE (insn) == INSN
6230 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6231 {
6232 int count = 0;
6233 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6234 for (j = 0; vec[j]; j++)
6235 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6236 count++;
6237 return count;
6238 }
6239 else
6240 {
6241 for (j = 0; vec[j]; j++)
6242 if (INSN_UID (insn) == vec[j])
6243 return 1;
6244 }
6245 return 0;
6246 }
6247 #endif /* HAVE_prologue || HAVE_epilogue */
6248
6249 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6250 this into place with notes indicating where the prologue ends and where
6251 the epilogue begins. Update the basic block information when possible. */
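/* Illustrative layout after threading, when both patterns exist:

       NOTE_INSN_DELETED              <- first insn of the function
       prologue insns
       NOTE_INSN_PROLOGUE_END
       ... function body ...
       NOTE_INSN_EPILOGUE_BEG
       epilogue insns (ending in a return)
       barrier                                                        */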
6252
6253 void
6254 thread_prologue_and_epilogue_insns (f)
6255 rtx f ATTRIBUTE_UNUSED;
6256 {
6257 #ifdef HAVE_prologue
6258 if (HAVE_prologue)
6259 {
6260 rtx head, seq;
6261
6262 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6263 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6264 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6265 seq = gen_prologue ();
6266 head = emit_insn_after (seq, f);
6267
6268 /* Include the new prologue insns in the first block. Ignore them
6269 if they form a basic block unto themselves. */
6270 if (x_basic_block_head && n_basic_blocks
6271 && GET_CODE (BLOCK_HEAD (0)) != CODE_LABEL)
6272 BLOCK_HEAD (0) = NEXT_INSN (f);
6273
6274 /* Retain a map of the prologue insns. */
6275 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6276 }
6277 else
6278 #endif
6279 prologue = 0;
6280
6281 #ifdef HAVE_epilogue
6282 if (HAVE_epilogue)
6283 {
6284 rtx insn = get_last_insn ();
6285 rtx prev = prev_nonnote_insn (insn);
6286
6287 /* If we end with a BARRIER, we don't need an epilogue. */
6288 if (! (prev && GET_CODE (prev) == BARRIER))
6289 {
6290 rtx tail, seq, tem;
6291 rtx first_use = 0;
6292 rtx last_use = 0;
6293
6294 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6295 epilogue insns, the USE insns at the end of a function,
6296 the jump insn that returns, and then a BARRIER. */
6297
6298 /* Move the USE insns at the end of a function onto a list. */
6299 while (prev
6300 && GET_CODE (prev) == INSN
6301 && GET_CODE (PATTERN (prev)) == USE)
6302 {
6303 tem = prev;
6304 prev = prev_nonnote_insn (prev);
6305
6306 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6307 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6308 if (first_use)
6309 {
6310 NEXT_INSN (tem) = first_use;
6311 PREV_INSN (first_use) = tem;
6312 }
6313 first_use = tem;
6314 if (!last_use)
6315 last_use = tem;
6316 }
6317
6318 emit_barrier_after (insn);
6319
6320 seq = gen_epilogue ();
6321 tail = emit_jump_insn_after (seq, insn);
6322
6323 /* Insert the USE insns immediately before the return insn, which
6324 must be the first instruction before the final barrier. */
6325 if (first_use)
6326 {
6327 tem = prev_nonnote_insn (get_last_insn ());
6328 NEXT_INSN (PREV_INSN (tem)) = first_use;
6329 PREV_INSN (first_use) = PREV_INSN (tem);
6330 PREV_INSN (tem) = last_use;
6331 NEXT_INSN (last_use) = tem;
6332 }
6333
6334 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6335
6336 /* Include the new epilogue insns in the last block. Ignore
6337 them if they form a basic block unto themselves. */
6338 if (x_basic_block_end && n_basic_blocks
6339 && GET_CODE (BLOCK_END (n_basic_blocks - 1)) != JUMP_INSN)
6340 BLOCK_END (n_basic_blocks - 1) = tail;
6341
6342 /* Retain a map of the epilogue insns. */
6343 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6344 return;
6345 }
6346 }
6347 #endif
6348 epilogue = 0;
6349 }
6350
6351 /* Reposition the prologue-end and epilogue-begin notes after instruction
6352 scheduling and delayed branch scheduling. */
6353
6354 void
6355 reposition_prologue_and_epilogue_notes (f)
6356 rtx f ATTRIBUTE_UNUSED;
6357 {
6358 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6359 /* Reposition the prologue and epilogue notes. */
6360 if (n_basic_blocks)
6361 {
6362 rtx next, prev;
6363 int len;
6364
6365 if (prologue)
6366 {
6367 register rtx insn, note = 0;
6368
6369 /* Scan from the beginning until we reach the last prologue insn.
6370 We apparently can't depend on basic_block_{head,end} after
6371 reorg has run. */
6372 for (len = 0; prologue[len]; len++)
6373 ;
6374 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6375 {
6376 if (GET_CODE (insn) == NOTE)
6377 {
6378 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6379 note = insn;
6380 }
6381 else if ((len -= contains (insn, prologue)) == 0)
6382 {
6383 /* Find the prologue-end note if we haven't already, and
6384 move it to just after the last prologue insn. */
6385 if (note == 0)
6386 {
6387 for (note = insn; (note = NEXT_INSN (note));)
6388 if (GET_CODE (note) == NOTE
6389 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6390 break;
6391 }
6392
6393 next = NEXT_INSN (note);
6394 prev = PREV_INSN (note);
6395 if (prev)
6396 NEXT_INSN (prev) = next;
6397 if (next)
6398 PREV_INSN (next) = prev;
6399
6400 /* Whether or not we can depend on BLOCK_HEAD,
6401 attempt to keep it up-to-date. */
6402 if (BLOCK_HEAD (0) == note)
6403 BLOCK_HEAD (0) = next;
6404
6405 add_insn_after (note, insn);
6406 }
6407 }
6408 }
6409
6410 if (epilogue)
6411 {
6412 register rtx insn, note = 0;
6413
6414 /* Scan from the end until we reach the first epilogue insn.
6415 We apparently can't depend on basic_block_{head,end} after
6416 reorg has run. */
6417 for (len = 0; epilogue[len]; len++)
6418 ;
6419 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6420 {
6421 if (GET_CODE (insn) == NOTE)
6422 {
6423 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6424 note = insn;
6425 }
6426 else if ((len -= contains (insn, epilogue)) == 0)
6427 {
6428 /* Find the epilogue-begin note if we haven't already, and
6429 move it to just before the first epilogue insn. */
6430 if (note == 0)
6431 {
6432 for (note = insn; (note = PREV_INSN (note));)
6433 if (GET_CODE (note) == NOTE
6434 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6435 break;
6436 }
6437 next = NEXT_INSN (note);
6438 prev = PREV_INSN (note);
6439 if (prev)
6440 NEXT_INSN (prev) = next;
6441 if (next)
6442 PREV_INSN (next) = prev;
6443
6444 /* Whether or not we can depend on BLOCK_HEAD,
6445 attempt to keep it up-to-date. */
6446 if (n_basic_blocks
6447 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6448 BLOCK_HEAD (n_basic_blocks-1) = note;
6449
6450 add_insn_before (note, insn);
6451 }
6452 }
6453 }
6454 }
6455 #endif /* HAVE_prologue or HAVE_epilogue */
6456 }