function.c (purge_addressof_1): Force the first argument of a CALL insn to memory.
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
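
/* A front end drives this file roughly as follows (an illustrative
   sketch; argument lists are elided):

     expand_function_start (...);
     ... generate RTL for the function body, calling
         assign_stack_local whenever a local needs a frame slot ...
     expand_function_end (...);  */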

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest multiple of the alignment that
   is not less than the value.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
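
/* For instance, with ALIGN == 8 (illustrative values):
   FLOOR_ROUND (-5, 8) == -8, FLOOR_ROUND (5, 8) == 0,
   CEIL_ROUND (5, 8) == 8, and CEIL_ROUND (8, 8) == 8.  */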

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if this function has a computed goto.

   It is computed during find_basic_blocks or during stupid life
   analysis.  */

int current_function_has_computed_jump;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */

int current_function_sp_is_unchanging;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* Nonzero if instrumentation calls for function entry and exit should be
   generated.  */
int current_function_instrument_entry_exit;

/* Nonzero if memory access checking should be enabled in the current
   function.  */
int current_function_check_memory_usage;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
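
/* For example (illustrative): if VAR occurs twice in an insn whose
   constraints require those two operands to match, both occurrences
   must be rewritten to the same replacement rtx; the list above is
   what makes that possible.  */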

/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
					    int, struct function *));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack	PROTO((struct function *, rtx, tree,
				       enum machine_mode, enum machine_mode,
				       int, int, int));
static void fixup_var_refs	PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement	PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
					rtx, int));
static void fixup_var_refs_1	PROTO((rtx, enum machine_mode, rtx *, rtx,
				       struct fixup_replacement **));
static rtx fixup_memory_subreg	PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1	PROTO((rtx, rtx));
static void optimize_bit_field	PROTO((rtx, rtx, rtx *));
static void instantiate_decls	PROTO((tree, int));
static void instantiate_decls_1	PROTO((tree, int));
static void instantiate_decl	PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers	PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below		PROTO((struct args_size *, enum machine_mode,
				       tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down		PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse	PROTO((tree));
static int all_blocks		PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns	PROTO((rtx));
static int contains		PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1	PROTO((rtx *, rtx, int, int));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;
  p->instrument_entry_exit = current_function_instrument_entry_exit;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;
  current_function_instrument_entry_exit = p->instrument_entry_exit;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
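
/* Illustrative use (not from this file): a caller wanting a word-sized
   slot with the mode's natural alignment might write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   The returned MEM is addressed off virtual_stack_vars_rtx until the
   virtual registers are instantiated.  */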

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such slot.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
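
/* Illustrative use (not from this file): allocate a double-word scratch
   slot for the current statement and release it once the statement has
   been expanded:

     rtx tmp = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
     ... emit insns that use TMP ...
     free_temp_slots ();  */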
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
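
/* For instance (illustrative), assign_temp (integer_type_node, 0, 1, 0)
   yields an addressable stack slot for an `int', whereas passing 0 for
   MEMORY_REQUIRED would normally yield a pseudo register instead.  */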
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
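
/* For example (illustrative numbers): two free BLKmode slots whose
   (base_offset, full_size) pairs are (0, 16) and (16, 16) are adjacent,
   so the second is merged into the first, leaving one free slot with
   base_offset 0 and full_size 32.  */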
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
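
/* E.g., for a GNU statement expression such as ({ struct S s = f (); s; })
   (illustrative), the value of the construct may live in a temporary
   slot; passing its rtx to preserve_temp_slots keeps that slot alive
   past the free_temp_slots call that ends the enclosing statement.  */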

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better, since such a slot could be reused
   while generating the same RTL_EXPR, but that is complex and probably
   not worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
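
/* The usual pairing of these calls during statement expansion looks
   roughly like this (an illustrative sketch, not code from this file):

     push_temp_slots ();
     ... expand the statement, allocating temporaries ...
     preserve_temp_slots (result);
     pop_temp_slots ();

   preserve_temp_slots moves the result's slot up one level so that it
   survives the pop.  */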
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl)
                            || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   TYPE is the user-level data type of the object.
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   ORIGINAL_REGNO, if nonzero, is used in place of REGNO (REG) when
   looking for an existing parameter stack slot.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
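/* Fix up all references to VAR, a pseudo that has been put into a stack
   home, so that they are valid memory references.  PROMOTED_MODE and
   UNSIGNEDP describe the mode and signedness VAR had while promoted to
   a register.  The main insn chain, all pending sequences, and all
   waiting RTL_EXPRs are scanned.  */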
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries
   and X is some part of an insn.  Return the struct fixup_replacement
   whose OLD value is equal to X.  Allocate a new structure if no such
   entry exists.  */
1630
1631 static struct fixup_replacement *
1632 find_fixup_replacement (replacements, x)
1633 struct fixup_replacement **replacements;
1634 rtx x;
1635 {
1636 struct fixup_replacement *p;
1637
1638 /* See if we have already replaced this. */
1639 for (p = *replacements; p && p->old != x; p = p->next)
1640 ;
1641
1642 if (p == 0)
1643 {
1644 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1645 p->old = x;
1646 p->new = 0;
1647 p->next = *replacements;
1648 *replacements = p;
1649 }
1650
1651 return p;
1652 }
1653
1654 /* Scan the insn-chain starting with INSN for refs to VAR
1655 and fix them up. TOPLEVEL is nonzero if this chain is the
1656 main chain of insns for the current function. */
1657
1658 static void
1659 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1660 rtx var;
1661 enum machine_mode promoted_mode;
1662 int unsignedp;
1663 rtx insn;
1664 int toplevel;
1665 {
1666 rtx call_dest = 0;
1667
1668 while (insn)
1669 {
1670 rtx next = NEXT_INSN (insn);
1671 rtx set, prev, prev_set;
1672 rtx note;
1673
1674 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1675 {
1676 /* If this is a CLOBBER of VAR, delete it.
1677
1678 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1679 and REG_RETVAL notes too. */
1680 if (GET_CODE (PATTERN (insn)) == CLOBBER
1681 && (XEXP (PATTERN (insn), 0) == var
1682 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1683 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1684 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1685 {
1686 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1687 /* The REG_LIBCALL note will go away since we are going to
1688 turn INSN into a NOTE, so just delete the
1689 corresponding REG_RETVAL note. */
1690 remove_note (XEXP (note, 0),
1691 find_reg_note (XEXP (note, 0), REG_RETVAL,
1692 NULL_RTX));
1693
1694 /* In unoptimized compilation, we shouldn't call delete_insn
1695 except in jump.c when emitting warnings. */
1696 PUT_CODE (insn, NOTE);
1697 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1698 NOTE_SOURCE_FILE (insn) = 0;
1699 }
1700
1701 /* The insn to load VAR from a home in the arglist
1702 is now a no-op. When we see it, just delete it.
1703 Similarly if this is storing VAR from a register from which
1704 it was loaded in the previous insn. This will occur
1705 when an ADDRESSOF was made for an arglist slot. */
1706 else if (toplevel
1707 && (set = single_set (insn)) != 0
1708 && SET_DEST (set) == var
1709 /* If this represents the result of an insn group,
1710 don't delete the insn. */
1711 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1712 && (rtx_equal_p (SET_SRC (set), var)
1713 || (GET_CODE (SET_SRC (set)) == REG
1714 && (prev = prev_nonnote_insn (insn)) != 0
1715 && (prev_set = single_set (prev)) != 0
1716 && SET_DEST (prev_set) == SET_SRC (set)
1717 && rtx_equal_p (SET_SRC (prev_set), var))))
1718 {
1719 /* In unoptimized compilation, we shouldn't call delete_insn
1720 except in jump.c when emitting warnings. */
1721 PUT_CODE (insn, NOTE);
1722 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1723 NOTE_SOURCE_FILE (insn) = 0;
1724 if (insn == last_parm_insn)
1725 last_parm_insn = PREV_INSN (next);
1726 }
1727 else
1728 {
1729 struct fixup_replacement *replacements = 0;
1730 rtx next_insn = NEXT_INSN (insn);
1731
1732 if (SMALL_REGISTER_CLASSES)
1733 {
1734 /* If the insn that copies the results of a CALL_INSN
1735 into a pseudo now references VAR, we have to use an
1736 intermediate pseudo since we want the life of the
1737 return value register to be only a single insn.
1738
1739 If we don't use an intermediate pseudo, such things as address
1740 computations (needed to make the address of VAR valid, if it is
1741 not) could be placed between the CALL_INSN and INSN.
1742
1743 To make sure this doesn't happen, we record the destination
1744 of the CALL_INSN and see if the next insn uses both that
1745 and VAR. */
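/* A sketch of the hazard we guard against here (register numbers
   are made up):
       (call_insn ... (set (reg:SI 0) (call ...)))
       (insn ... (set (reg:SI 100) (plus:SI (reg:SI 0) (mem:SI ...))))
   If making VAR's memory address valid were to emit insns before the
   second insn, they would land between the call and the use of the
   return-value register, lengthening that hard register's life.  */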
1746
1747 if (call_dest != 0 && GET_CODE (insn) == INSN
1748 && reg_mentioned_p (var, PATTERN (insn))
1749 && reg_mentioned_p (call_dest, PATTERN (insn)))
1750 {
1751 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1752
1753 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1754
1755 PATTERN (insn) = replace_rtx (PATTERN (insn),
1756 call_dest, temp);
1757 }
1758
1759 if (GET_CODE (insn) == CALL_INSN
1760 && GET_CODE (PATTERN (insn)) == SET)
1761 call_dest = SET_DEST (PATTERN (insn));
1762 else if (GET_CODE (insn) == CALL_INSN
1763 && GET_CODE (PATTERN (insn)) == PARALLEL
1764 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1765 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1766 else
1767 call_dest = 0;
1768 }
1769
1770 /* See if we have to do anything to INSN now that VAR is in
1771 memory. If it needs to be loaded into a pseudo, use a single
1772 pseudo for the entire insn in case there is a MATCH_DUP
1773 between two operands. We pass a pointer to the head of
1774 a list of struct fixup_replacements. If fixup_var_refs_1
1775 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1776 it will record them in this list.
1777
1778 If it allocated a pseudo for any replacement, we copy into
1779 it here. */
1780
1781 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1782 &replacements);
1783
1784 /* If this is last_parm_insn, and any instructions were output
1785 after it to fix it up, then we must set last_parm_insn to
1786 the last such instruction emitted. */
1787 if (insn == last_parm_insn)
1788 last_parm_insn = PREV_INSN (next_insn);
1789
1790 while (replacements)
1791 {
1792 if (GET_CODE (replacements->new) == REG)
1793 {
1794 rtx insert_before;
1795 rtx seq;
1796
1797 /* OLD might be a (subreg (mem)). */
1798 if (GET_CODE (replacements->old) == SUBREG)
1799 replacements->old
1800 = fixup_memory_subreg (replacements->old, insn, 0);
1801 else
1802 replacements->old
1803 = fixup_stack_1 (replacements->old, insn);
1804
1805 insert_before = insn;
1806
1807 /* If we are changing the mode, do a conversion.
1808 This might be wasteful, but combine.c will
1809 eliminate much of the waste. */
1810
1811 if (GET_MODE (replacements->new)
1812 != GET_MODE (replacements->old))
1813 {
1814 start_sequence ();
1815 convert_move (replacements->new,
1816 replacements->old, unsignedp);
1817 seq = gen_sequence ();
1818 end_sequence ();
1819 }
1820 else
1821 seq = gen_move_insn (replacements->new,
1822 replacements->old);
1823
1824 emit_insn_before (seq, insert_before);
1825 }
1826
1827 replacements = replacements->next;
1828 }
1829 }
1830
1831 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1832 But don't touch other insns referred to by reg-notes;
1833 we will get them elsewhere. */
1834 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1835 if (GET_CODE (note) != INSN_LIST)
1836 XEXP (note, 0)
1837 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1838 }
1839 insn = next;
1840 }
1841 }
1842 \f
1843 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1844 See if the rtx expression at *LOC in INSN needs to be changed.
1845
1846 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1847 contain a list of original rtx's and replacements. If we find that we need
1848 to modify this insn by replacing a memory reference with a pseudo or by
1849 making a new MEM to implement a SUBREG, we consult that list to see if
1850 we have already chosen a replacement. If none has already been allocated,
1851 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1852 or the SUBREG, as appropriate, to the pseudo. */
1853
1854 static void
1855 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1856 register rtx var;
1857 enum machine_mode promoted_mode;
1858 register rtx *loc;
1859 rtx insn;
1860 struct fixup_replacement **replacements;
1861 {
1862 register int i;
1863 register rtx x = *loc;
1864 RTX_CODE code = GET_CODE (x);
1865 register char *fmt;
1866 register rtx tem, tem1;
1867 struct fixup_replacement *replacement;
1868
1869 switch (code)
1870 {
1871 case ADDRESSOF:
1872 if (XEXP (x, 0) == var)
1873 {
1874 /* Prevent sharing of rtl that might lose. */
1875 rtx sub = copy_rtx (XEXP (var, 0));
1876
1877 start_sequence ();
1878
1879 if (! validate_change (insn, loc, sub, 0))
1880 {
1881 rtx y = force_operand (sub, NULL_RTX);
1882
1883 if (! validate_change (insn, loc, y, 0))
1884 *loc = copy_to_reg (y);
1885 }
1886
1887 emit_insn_before (gen_sequence (), insn);
1888 end_sequence ();
1889 }
1890 return;
1891
1892 case MEM:
1893 if (var == x)
1894 {
1895 /* If we already have a replacement, use it. Otherwise,
1896 try to fix up this address in case it is invalid. */
1897
1898 replacement = find_fixup_replacement (replacements, var);
1899 if (replacement->new)
1900 {
1901 *loc = replacement->new;
1902 return;
1903 }
1904
1905 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1906
1907 /* Unless we are forcing memory to register or we changed the mode,
1908 we can leave things the way they are if the insn is valid. */
1909
1910 INSN_CODE (insn) = -1;
1911 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1912 && recog_memoized (insn) >= 0)
1913 return;
1914
1915 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1916 return;
1917 }
1918
1919 /* If X contains VAR, we need to unshare it here so that we update
1920 each occurrence separately. But all identical MEMs in one insn
1921 must be replaced with the same rtx because of the possibility of
1922 MATCH_DUPs. */
1923
1924 if (reg_mentioned_p (var, x))
1925 {
1926 replacement = find_fixup_replacement (replacements, x);
1927 if (replacement->new == 0)
1928 replacement->new = copy_most_rtx (x, var);
1929
1930 *loc = x = replacement->new;
1931 }
1932 break;
1933
1934 case REG:
1935 case CC0:
1936 case PC:
1937 case CONST_INT:
1938 case CONST:
1939 case SYMBOL_REF:
1940 case LABEL_REF:
1941 case CONST_DOUBLE:
1942 return;
1943
1944 case SIGN_EXTRACT:
1945 case ZERO_EXTRACT:
1946 /* Note that in some cases those types of expressions are altered
1947 by optimize_bit_field, and do not survive to get here. */
1948 if (XEXP (x, 0) == var
1949 || (GET_CODE (XEXP (x, 0)) == SUBREG
1950 && SUBREG_REG (XEXP (x, 0)) == var))
1951 {
1952 /* Get TEM as a valid MEM in the mode presently in the insn.
1953
1954 We don't worry about the possibility of MATCH_DUP here; it
1955 is highly unlikely and would be tricky to handle. */
1956
1957 tem = XEXP (x, 0);
1958 if (GET_CODE (tem) == SUBREG)
1959 {
1960 if (GET_MODE_BITSIZE (GET_MODE (tem))
1961 > GET_MODE_BITSIZE (GET_MODE (var)))
1962 {
1963 replacement = find_fixup_replacement (replacements, var);
1964 if (replacement->new == 0)
1965 replacement->new = gen_reg_rtx (GET_MODE (var));
1966 SUBREG_REG (tem) = replacement->new;
1967 }
1968 else
1969 tem = fixup_memory_subreg (tem, insn, 0);
1970 }
1971 else
1972 tem = fixup_stack_1 (tem, insn);
1973
1974 /* Unless we want to load from memory, get TEM into the proper mode
1975 for an extract from memory. This can only be done if the
1976 extract is at a constant position and length. */
1977
1978 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1979 && GET_CODE (XEXP (x, 2)) == CONST_INT
1980 && ! mode_dependent_address_p (XEXP (tem, 0))
1981 && ! MEM_VOLATILE_P (tem))
1982 {
1983 enum machine_mode wanted_mode = VOIDmode;
1984 enum machine_mode is_mode = GET_MODE (tem);
1985 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1986
1987 #ifdef HAVE_extzv
1988 if (GET_CODE (x) == ZERO_EXTRACT)
1989 {
1990 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1991 if (wanted_mode == VOIDmode)
1992 wanted_mode = word_mode;
1993 }
1994 #endif
1995 #ifdef HAVE_extv
1996 if (GET_CODE (x) == SIGN_EXTRACT)
1997 {
1998 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1999 if (wanted_mode == VOIDmode)
2000 wanted_mode = word_mode;
2001 }
2002 #endif
2003 /* If we have a narrower mode, we can do something. */
2004 if (wanted_mode != VOIDmode
2005 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2006 {
2007 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2008 rtx old_pos = XEXP (x, 2);
2009 rtx newmem;
2010
2011 /* If the bytes and bits are counted differently, we
2012 must adjust the offset. */
2013 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2014 offset = (GET_MODE_SIZE (is_mode)
2015 - GET_MODE_SIZE (wanted_mode) - offset);
2016
2017 pos %= GET_MODE_BITSIZE (wanted_mode);
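/* A worked example (assuming BYTES_BIG_ENDIAN == BITS_BIG_ENDIAN):
   an extract at bit position 17 from an SImode MEM with a QImode
   wanted_mode gives offset = 17 / 8 = 2 and pos = 17 % 8 = 1, i.e.
   bit 1 of the byte two bytes past the original address.  */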
2018
2019 newmem = gen_rtx_MEM (wanted_mode,
2020 plus_constant (XEXP (tem, 0), offset));
2021 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2022 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2023 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2024
2025 /* Make the change and see if the insn remains valid. */
2026 INSN_CODE (insn) = -1;
2027 XEXP (x, 0) = newmem;
2028 XEXP (x, 2) = GEN_INT (pos);
2029
2030 if (recog_memoized (insn) >= 0)
2031 return;
2032
2033 /* Otherwise, restore the old position. XEXP (x, 0) will be
2034 replaced below. */
2035 XEXP (x, 2) = old_pos;
2036 }
2037 }
2038
2039 /* If we get here, the bitfield extract insn can't accept a memory
2040 reference. Copy the input into a register. */
2041
2042 tem1 = gen_reg_rtx (GET_MODE (tem));
2043 emit_insn_before (gen_move_insn (tem1, tem), insn);
2044 XEXP (x, 0) = tem1;
2045 return;
2046 }
2047 break;
2048
2049 case SUBREG:
2050 if (SUBREG_REG (x) == var)
2051 {
2052 /* If this is a special SUBREG made because VAR was promoted
2053 from a wider mode, replace it with VAR and call ourself
2054 recursively, this time saying that the object previously
2055 had its current mode (by virtue of the SUBREG). */
2056
2057 if (SUBREG_PROMOTED_VAR_P (x))
2058 {
2059 *loc = var;
2060 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2061 return;
2062 }
2063
2064 /* If this SUBREG makes VAR wider, it has become a paradoxical
2065 SUBREG with VAR in memory, but these aren't allowed at this
2066 stage of the compilation. So load VAR into a pseudo and take
2067 a SUBREG of that pseudo. */
2068 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2069 {
2070 replacement = find_fixup_replacement (replacements, var);
2071 if (replacement->new == 0)
2072 replacement->new = gen_reg_rtx (GET_MODE (var));
2073 SUBREG_REG (x) = replacement->new;
2074 return;
2075 }
2076
2077 /* See if we have already found a replacement for this SUBREG.
2078 If so, use it. Otherwise, make a MEM and see if the insn
2079 is recognized. If not, or if we should force MEM into a register,
2080 make a pseudo for this SUBREG. */
2081 replacement = find_fixup_replacement (replacements, x);
2082 if (replacement->new)
2083 {
2084 *loc = replacement->new;
2085 return;
2086 }
2087
2088 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2089
2090 INSN_CODE (insn) = -1;
2091 if (! flag_force_mem && recog_memoized (insn) >= 0)
2092 return;
2093
2094 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2095 return;
2096 }
2097 break;
2098
2099 case SET:
2100 /* First do special simplification of bit-field references. */
2101 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2102 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2103 optimize_bit_field (x, insn, 0);
2104 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2105 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2106 optimize_bit_field (x, insn, NULL_PTR);
2107
2108 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2109 into a register and then store it back out. */
2110 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2111 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2112 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2113 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2114 > GET_MODE_SIZE (GET_MODE (var))))
2115 {
2116 replacement = find_fixup_replacement (replacements, var);
2117 if (replacement->new == 0)
2118 replacement->new = gen_reg_rtx (GET_MODE (var));
2119
2120 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2121 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2122 }
2123
2124 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2125 insn into a pseudo and store the low part of the pseudo into VAR. */
2126 if (GET_CODE (SET_DEST (x)) == SUBREG
2127 && SUBREG_REG (SET_DEST (x)) == var
2128 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2129 > GET_MODE_SIZE (GET_MODE (var))))
2130 {
2131 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2132 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2133 tem)),
2134 insn);
2135 break;
2136 }
2137
2138 {
2139 rtx dest = SET_DEST (x);
2140 rtx src = SET_SRC (x);
2141 #ifdef HAVE_insv
2142 rtx outerdest = dest;
2143 #endif
2144
2145 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2146 || GET_CODE (dest) == SIGN_EXTRACT
2147 || GET_CODE (dest) == ZERO_EXTRACT)
2148 dest = XEXP (dest, 0);
2149
2150 if (GET_CODE (src) == SUBREG)
2151 src = XEXP (src, 0);
2152
2153 /* If VAR does not appear at the top level of the SET
2154 just scan the lower levels of the tree. */
2155
2156 if (src != var && dest != var)
2157 break;
2158
2159 /* We will need to rerecognize this insn. */
2160 INSN_CODE (insn) = -1;
2161
2162 #ifdef HAVE_insv
2163 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2164 {
2165 /* Since this case will return, ensure we fix up all the
2166 operands here. */
2167 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2168 insn, replacements);
2169 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2170 insn, replacements);
2171 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2172 insn, replacements);
2173
2174 tem = XEXP (outerdest, 0);
2175
2176 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2177 that may appear inside a ZERO_EXTRACT.
2178 This was legitimate when the MEM was a REG. */
2179 if (GET_CODE (tem) == SUBREG
2180 && SUBREG_REG (tem) == var)
2181 tem = fixup_memory_subreg (tem, insn, 0);
2182 else
2183 tem = fixup_stack_1 (tem, insn);
2184
2185 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2186 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2187 && ! mode_dependent_address_p (XEXP (tem, 0))
2188 && ! MEM_VOLATILE_P (tem))
2189 {
2190 enum machine_mode wanted_mode;
2191 enum machine_mode is_mode = GET_MODE (tem);
2192 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2193
2194 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2195 if (wanted_mode == VOIDmode)
2196 wanted_mode = word_mode;
2197
2198 /* If we have a narrower mode, we can do something. */
2199 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2200 {
2201 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2202 rtx old_pos = XEXP (outerdest, 2);
2203 rtx newmem;
2204
2205 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2206 offset = (GET_MODE_SIZE (is_mode)
2207 - GET_MODE_SIZE (wanted_mode) - offset);
2208
2209 pos %= GET_MODE_BITSIZE (wanted_mode);
2210
2211 newmem = gen_rtx_MEM (wanted_mode,
2212 plus_constant (XEXP (tem, 0), offset));
2213 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2214 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2215 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2216
2217 /* Make the change and see if the insn remains valid. */
2218 INSN_CODE (insn) = -1;
2219 XEXP (outerdest, 0) = newmem;
2220 XEXP (outerdest, 2) = GEN_INT (pos);
2221
2222 if (recog_memoized (insn) >= 0)
2223 return;
2224
2225 /* Otherwise, restore the old position. XEXP (outerdest, 0)
2226 will be replaced below. */
2227 XEXP (outerdest, 2) = old_pos;
2228 }
2229 }
2230
2231 /* If we get here, the bit-field store doesn't allow memory
2232 or isn't located at a constant position. Load the value into
2233 a register, do the store, and put it back into memory. */
2234
2235 tem1 = gen_reg_rtx (GET_MODE (tem));
2236 emit_insn_before (gen_move_insn (tem1, tem), insn);
2237 emit_insn_after (gen_move_insn (tem, tem1), insn);
2238 XEXP (outerdest, 0) = tem1;
2239 return;
2240 }
2241 #endif
2242
2243 /* STRICT_LOW_PART is a no-op on memory references
2244 and it can cause combinations to be unrecognizable,
2245 so eliminate it. */
2246
2247 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2248 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2249
2250 /* A valid insn to copy VAR into or out of a register
2251 must be left alone, to avoid an infinite loop here.
2252 If the reference to VAR is by a subreg, fix that up,
2253 since SUBREG is not valid for a memref.
2254 Also fix up the address of the stack slot.
2255
2256 Note that we must not try to recognize the insn until
2257 after we know that we have valid addresses and no
2258 (subreg (mem ...) ...) constructs, since these interfere
2259 with determining the validity of the insn. */
2260
2261 if ((SET_SRC (x) == var
2262 || (GET_CODE (SET_SRC (x)) == SUBREG
2263 && SUBREG_REG (SET_SRC (x)) == var))
2264 && (GET_CODE (SET_DEST (x)) == REG
2265 || (GET_CODE (SET_DEST (x)) == SUBREG
2266 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2267 && GET_MODE (var) == promoted_mode
2268 && x == single_set (insn))
2269 {
2270 rtx pat;
2271
2272 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2273 if (replacement->new)
2274 SET_SRC (x) = replacement->new;
2275 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2276 SET_SRC (x) = replacement->new
2277 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2278 else
2279 SET_SRC (x) = replacement->new
2280 = fixup_stack_1 (SET_SRC (x), insn);
2281
2282 if (recog_memoized (insn) >= 0)
2283 return;
2284
2285 /* INSN is not valid, but we know that we want to
2286 copy SET_SRC (x) to SET_DEST (x) in some way. So
2287 we generate the move and see whether it requires more
2288 than one insn. If it does, we emit those insns and
2289 delete INSN. Otherwise, we can just replace the pattern
2290 of INSN; we have already verified above that INSN has
2291 no other function than to do X. */
2292
2293 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2294 if (GET_CODE (pat) == SEQUENCE)
2295 {
2296 emit_insn_after (pat, insn);
2297 PUT_CODE (insn, NOTE);
2298 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2299 NOTE_SOURCE_FILE (insn) = 0;
2300 }
2301 else
2302 PATTERN (insn) = pat;
2303
2304 return;
2305 }
2306
2307 if ((SET_DEST (x) == var
2308 || (GET_CODE (SET_DEST (x)) == SUBREG
2309 && SUBREG_REG (SET_DEST (x)) == var))
2310 && (GET_CODE (SET_SRC (x)) == REG
2311 || (GET_CODE (SET_SRC (x)) == SUBREG
2312 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2313 && GET_MODE (var) == promoted_mode
2314 && x == single_set (insn))
2315 {
2316 rtx pat;
2317
2318 if (GET_CODE (SET_DEST (x)) == SUBREG)
2319 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2320 else
2321 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2322
2323 if (recog_memoized (insn) >= 0)
2324 return;
2325
2326 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2327 if (GET_CODE (pat) == SEQUENCE)
2328 {
2329 emit_insn_after (pat, insn);
2330 PUT_CODE (insn, NOTE);
2331 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2332 NOTE_SOURCE_FILE (insn) = 0;
2333 }
2334 else
2335 PATTERN (insn) = pat;
2336
2337 return;
2338 }
2339
2340 /* Otherwise, storing into VAR must be handled specially
2341 by storing into a temporary and copying that into VAR
2342 with a new insn after this one. Note that this case
2343 will be used when storing into a promoted scalar since
2344 the insn will now have different modes on the input
2345 and output and hence will be invalid (except for the case
2346 of setting it to a constant, which does not need any
2347 change if it is valid). We generate extra code in that case,
2348 but combine.c will eliminate it. */
2349
2350 if (dest == var)
2351 {
2352 rtx temp;
2353 rtx fixeddest = SET_DEST (x);
2354
2355 /* A STRICT_LOW_PART around a MEM can be discarded. */
2356 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2357 fixeddest = XEXP (fixeddest, 0);
2358 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2359 if (GET_CODE (fixeddest) == SUBREG)
2360 {
2361 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2362 promoted_mode = GET_MODE (fixeddest);
2363 }
2364 else
2365 fixeddest = fixup_stack_1 (fixeddest, insn);
2366
2367 temp = gen_reg_rtx (promoted_mode);
2368
2369 emit_insn_after (gen_move_insn (fixeddest,
2370 gen_lowpart (GET_MODE (fixeddest),
2371 temp)),
2372 insn);
2373
2374 SET_DEST (x) = temp;
2375 }
2376 }
2377
2378 default:
2379 break;
2380 }
2381
2382 /* Nothing special about this RTX; fix its operands. */
2383
2384 fmt = GET_RTX_FORMAT (code);
2385 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2386 {
2387 if (fmt[i] == 'e')
2388 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2389 if (fmt[i] == 'E')
2390 {
2391 register int j;
2392 for (j = 0; j < XVECLEN (x, i); j++)
2393 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2394 insn, replacements);
2395 }
2396 }
2397 }
2398 \f
2399 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2400 return an rtx (MEM:m1 newaddr) which is equivalent.
2401 If any insns must be emitted to compute NEWADDR, put them before INSN.
2402
2403 UNCRITICAL nonzero means accept paradoxical subregs.
2404 This is used for subregs found inside REG_NOTES. */
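/* A sketch (assuming 4-byte words): (subreg:QI (mem:SI ADDR) 0)
   becomes (mem:QI ADDR) on a little-endian target, but
   (mem:QI (plus ADDR 3)) on a big-endian one, where the low-order
   byte sits at the higher address.  */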
2405
2406 static rtx
2407 fixup_memory_subreg (x, insn, uncritical)
2408 rtx x;
2409 rtx insn;
2410 int uncritical;
2411 {
2412 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2413 rtx addr = XEXP (SUBREG_REG (x), 0);
2414 enum machine_mode mode = GET_MODE (x);
2415 rtx result;
2416
2417 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2418 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2419 && ! uncritical)
2420 abort ();
2421
2422 if (BYTES_BIG_ENDIAN)
2423 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2424 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2425 addr = plus_constant (addr, offset);
2426 if (!flag_force_addr && memory_address_p (mode, addr))
2427 /* Shortcut if no insns need be emitted. */
2428 return change_address (SUBREG_REG (x), mode, addr);
2429 start_sequence ();
2430 result = change_address (SUBREG_REG (x), mode, addr);
2431 emit_insn_before (gen_sequence (), insn);
2432 end_sequence ();
2433 return result;
2434 }
2435
2436 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2437 Replace subexpressions of X in place.
2438 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2439 Otherwise return X, with its contents possibly altered.
2440
2441 If any insns must be emitted to compute NEWADDR, put them before INSN.
2442
2443 UNCRITICAL is as in fixup_memory_subreg. */
2444
2445 static rtx
2446 walk_fixup_memory_subreg (x, insn, uncritical)
2447 register rtx x;
2448 rtx insn;
2449 int uncritical;
2450 {
2451 register enum rtx_code code;
2452 register char *fmt;
2453 register int i;
2454
2455 if (x == 0)
2456 return 0;
2457
2458 code = GET_CODE (x);
2459
2460 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2461 return fixup_memory_subreg (x, insn, uncritical);
2462
2463 /* Nothing special about this RTX; fix its operands. */
2464
2465 fmt = GET_RTX_FORMAT (code);
2466 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2467 {
2468 if (fmt[i] == 'e')
2469 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2470 if (fmt[i] == 'E')
2471 {
2472 register int j;
2473 for (j = 0; j < XVECLEN (x, i); j++)
2474 XVECEXP (x, i, j)
2475 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2476 }
2477 }
2478 return x;
2479 }
2480 \f
2481 /* For each memory ref within X, if it refers to a stack slot
2482 with an out of range displacement, put the address in a temp register
2483 (emitting new insns before INSN to load these registers)
2484 and alter the memory ref to use that register.
2485 Replace each such MEM rtx with a copy, to avoid clobberage. */
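/* For instance (a sketch; the displacement limit is target-specific),
       (mem:SI (plus:SI (reg fp) (const_int 40000)))
   may become (mem:SI (reg TEMP)) after emitting
       (set (reg TEMP) (plus:SI (reg fp) (const_int 40000)))
   before INSN.  */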
2486
2487 static rtx
2488 fixup_stack_1 (x, insn)
2489 rtx x;
2490 rtx insn;
2491 {
2492 register int i;
2493 register RTX_CODE code = GET_CODE (x);
2494 register char *fmt;
2495
2496 if (code == MEM)
2497 {
2498 register rtx ad = XEXP (x, 0);
2499 /* If we have the address of a stack slot but it's not valid
2500 (displacement is too large), compute the sum in a register. */
2501 if (GET_CODE (ad) == PLUS
2502 && GET_CODE (XEXP (ad, 0)) == REG
2503 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2504 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2505 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2506 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2507 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2508 #endif
2509 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2510 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2511 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2512 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2513 {
2514 rtx temp, seq;
2515 if (memory_address_p (GET_MODE (x), ad))
2516 return x;
2517
2518 start_sequence ();
2519 temp = copy_to_reg (ad);
2520 seq = gen_sequence ();
2521 end_sequence ();
2522 emit_insn_before (seq, insn);
2523 return change_address (x, VOIDmode, temp);
2524 }
2525 return x;
2526 }
2527
2528 fmt = GET_RTX_FORMAT (code);
2529 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2530 {
2531 if (fmt[i] == 'e')
2532 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2533 if (fmt[i] == 'E')
2534 {
2535 register int j;
2536 for (j = 0; j < XVECLEN (x, i); j++)
2537 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2538 }
2539 }
2540 return x;
2541 }
2542 \f
2543 /* Optimization: a bit-field instruction whose field
2544 happens to be a byte or halfword in memory
2545 can be changed to a move instruction.
2546
2547 We call here when INSN is an insn to examine or store into a bit-field.
2548 BODY is the SET-rtx to be altered.
2549
2550 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2551 (Currently this is called only from function.c, and EQUIV_MEM
2552 is always 0.) */
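/* For example (a sketch, assuming little-endian bit numbering):
   a store to (zero_extract:SI (mem:SI A) (const_int 8) (const_int 8))
   touches one aligned byte, so the insn can become a plain QImode
   move to (mem:QI (plus A 1)).  */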
2553
2554 static void
2555 optimize_bit_field (body, insn, equiv_mem)
2556 rtx body;
2557 rtx insn;
2558 rtx *equiv_mem;
2559 {
2560 register rtx bitfield;
2561 int destflag;
2562 rtx seq = 0;
2563 enum machine_mode mode;
2564
2565 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2566 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2567 bitfield = SET_DEST (body), destflag = 1;
2568 else
2569 bitfield = SET_SRC (body), destflag = 0;
2570
2571 /* First check that the field being stored has constant size and position
2572 and is in fact a byte or halfword suitably aligned. */
2573
2574 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2575 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2576 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2577 != BLKmode)
2578 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2579 {
2580 register rtx memref = 0;
2581
2582 /* Now check that the containing word is memory, not a register,
2583 and that it is safe to change the machine mode. */
2584
2585 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2586 memref = XEXP (bitfield, 0);
2587 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2588 && equiv_mem != 0)
2589 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2590 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2591 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2592 memref = SUBREG_REG (XEXP (bitfield, 0));
2593 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2594 && equiv_mem != 0
2595 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2596 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2597
2598 if (memref
2599 && ! mode_dependent_address_p (XEXP (memref, 0))
2600 && ! MEM_VOLATILE_P (memref))
2601 {
2602 /* Now adjust the address, first for any subreg'ing
2603 that we are now getting rid of,
2604 and then for which byte of the word is wanted. */
2605
2606 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2607 rtx insns;
2608
2609 /* Adjust OFFSET to count bits from low-address byte. */
2610 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2611 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2612 - offset - INTVAL (XEXP (bitfield, 1)));
2613
2614 /* Adjust OFFSET to count bytes from low-address byte. */
2615 offset /= BITS_PER_UNIT;
2616 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2617 {
2618 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2619 if (BYTES_BIG_ENDIAN)
2620 offset -= (MIN (UNITS_PER_WORD,
2621 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2622 - MIN (UNITS_PER_WORD,
2623 GET_MODE_SIZE (GET_MODE (memref))));
2624 }
2625
2626 start_sequence ();
2627 memref = change_address (memref, mode,
2628 plus_constant (XEXP (memref, 0), offset));
2629 insns = get_insns ();
2630 end_sequence ();
2631 emit_insns_before (insns, insn);
2632
2633 /* Store this memory reference where
2634 we found the bit field reference. */
2635
2636 if (destflag)
2637 {
2638 validate_change (insn, &SET_DEST (body), memref, 1);
2639 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2640 {
2641 rtx src = SET_SRC (body);
2642 while (GET_CODE (src) == SUBREG
2643 && SUBREG_WORD (src) == 0)
2644 src = SUBREG_REG (src);
2645 if (GET_MODE (src) != GET_MODE (memref))
2646 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2647 validate_change (insn, &SET_SRC (body), src, 1);
2648 }
2649 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2650 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2651 /* This shouldn't happen because anything that didn't have
2652 one of these modes should have got converted explicitly
2653 and then referenced through a subreg.
2654 This is so because the original bit-field was
2655 handled by agg_mode and so its tree structure had
2656 the same mode that memref now has. */
2657 abort ();
2658 }
2659 else
2660 {
2661 rtx dest = SET_DEST (body);
2662
2663 while (GET_CODE (dest) == SUBREG
2664 && SUBREG_WORD (dest) == 0
2665 && (GET_MODE_CLASS (GET_MODE (dest))
2666 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2667 dest = SUBREG_REG (dest);
2668
2669 validate_change (insn, &SET_DEST (body), dest, 1);
2670
2671 if (GET_MODE (dest) == GET_MODE (memref))
2672 validate_change (insn, &SET_SRC (body), memref, 1);
2673 else
2674 {
2675 /* Convert the mem ref to the destination mode. */
2676 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2677
2678 start_sequence ();
2679 convert_move (newreg, memref,
2680 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2681 seq = get_insns ();
2682 end_sequence ();
2683
2684 validate_change (insn, &SET_SRC (body), newreg, 1);
2685 }
2686 }
2687
2688 /* See if we can convert this extraction or insertion into
2689 a simple move insn. We might not be able to do so if this
2690 was, for example, part of a PARALLEL.
2691
2692 If we succeed, write out any needed conversions. If we fail,
2693 it is hard to guess why we failed, so don't do anything
2694 special; just let the optimization be suppressed. */
2695
2696 if (apply_change_group () && seq)
2697 emit_insns_before (seq, insn);
2698 }
2699 }
2700 }
2701 \f
2702 /* These routines are responsible for converting virtual register references
2703 to the actual hard register references once RTL generation is complete.
2704
2705 The following five variables are used for communication between the
2706 routines. They contain the offsets of the virtual registers from their
2707 respective hard registers. */
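/* For example (offsets are target-specific): a reference to
   (plus virtual_stack_vars_rtx (const_int N)) is later rewritten
   as (plus frame_pointer_rtx (const_int (N + var_offset))).  */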
2708
2709 static int in_arg_offset;
2710 static int var_offset;
2711 static int dynamic_offset;
2712 static int out_arg_offset;
2713 static int cfa_offset;
2714
2715 /* On most machines, the stack pointer register is equivalent to the bottom
2716 of the stack. */
2717
2718 #ifndef STACK_POINTER_OFFSET
2719 #define STACK_POINTER_OFFSET 0
2720 #endif
2721
2722 /* If not defined, pick an appropriate default for the offset of dynamically
2723 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2724 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2725
2726 #ifndef STACK_DYNAMIC_OFFSET
2727
2728 #ifdef ACCUMULATE_OUTGOING_ARGS
2729 /* The bottom of the stack points to the actual arguments. If
2730 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2731 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2732 stack space for register parameters is not pushed by the caller, but
2733 rather is part of the fixed stack areas and hence not included in
2734 `current_function_outgoing_args_size'. Nevertheless, we must allow
2735 for it when allocating dynamic stack objects. */
2736
2737 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2738 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2739 (current_function_outgoing_args_size \
2740 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2741
2742 #else
2743 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2744 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2745 #endif
2746
2747 #else
2748 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2749 #endif
2750 #endif
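/* A worked example of the first default above: with
   REG_PARM_STACK_SPACE of 24, no OUTGOING_REG_PARM_STACK_SPACE,
   an outgoing-args size of 16 and a STACK_POINTER_OFFSET of 0,
   dynamic allocations start 16 + 24 + 0 = 40 bytes above the
   stack pointer.  */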
2751
2752 /* On a few machines, the CFA coincides with the arg pointer. */
2753
2754 #ifndef ARG_POINTER_CFA_OFFSET
2755 #define ARG_POINTER_CFA_OFFSET 0
2756 #endif
2757
2758
2759 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2760 its address taken. DECL is the decl for the object stored in the
2761 register, for later use if we do need to force REG into the stack.
2762 REG is overwritten by the MEM like in put_reg_into_stack. */
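/* For instance (a sketch; register numbers are arbitrary),
       (reg:SI 42)
   is rewritten in place as
       (mem:SI (addressof:Pmode (reg:SI 105) 42))
   where 105 is a fresh pseudo and 42 the original register number.  */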
2763
2764 rtx
2765 gen_mem_addressof (reg, decl)
2766 rtx reg;
2767 tree decl;
2768 {
2769 tree type = TREE_TYPE (decl);
2770 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2771 SET_ADDRESSOF_DECL (r, decl);
2772 /* If the original REG was a user-variable, then so is the REG whose
2773 address is being taken. */
2774 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2775
2776 XEXP (reg, 0) = r;
2777 PUT_CODE (reg, MEM);
2778 PUT_MODE (reg, DECL_MODE (decl));
2779 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2780 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2781 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2782
2783 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2784 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2785
2786 return reg;
2787 }
2788
2789 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2790
2791 void
2792 flush_addressof (decl)
2793 tree decl;
2794 {
2795 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2796 && DECL_RTL (decl) != 0
2797 && GET_CODE (DECL_RTL (decl)) == MEM
2798 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2799 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2800 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2801 }
2802
2803 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2804
2805 static void
2806 put_addressof_into_stack (r)
2807 rtx r;
2808 {
2809 tree decl = ADDRESSOF_DECL (r);
2810 rtx reg = XEXP (r, 0);
2811
2812 if (GET_CODE (reg) != REG)
2813 abort ();
2814
2815 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2816 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2817 ADDRESSOF_REGNO (r),
2818 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2819 }
2820
2821 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2822 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2823 the stack. STORE is nonzero when *LOC is the destination of a SET. */
2824
2825 static void
2826 purge_addressof_1 (loc, insn, force, store)
2827 rtx *loc;
2828 rtx insn;
2829 int force, store;
2830 {
2831 rtx x;
2832 RTX_CODE code;
2833 int i, j;
2834 char *fmt;
2835
2836 /* Re-start here to avoid recursion in common cases. */
2837 restart:
2838
2839 x = *loc;
2840 if (x == 0)
2841 return;
2842
2843 code = GET_CODE (x);
2844
2845 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2846 {
2847 rtx insns;
2848 /* We must create a copy of the rtx because it was created by
2849 overwriting a REG rtx which is always shared. */
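/* E.g. (a sketch): once the pseudo has been forced into a stack
   slot, (addressof:Pmode (mem:SI (plus fp -8)) ...) just denotes
   the address (plus fp -8), which we substitute for the ADDRESSOF,
   going through force_operand when the raw address is not a valid
   operand at this spot.  */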
2850 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2851
2852 if (validate_change (insn, loc, sub, 0))
2853 return;
2854
2855 start_sequence ();
2856 if (! validate_change (insn, loc,
2857 force_operand (sub, NULL_RTX),
2858 0))
2859 abort ();
2860
2861 insns = gen_sequence ();
2862 end_sequence ();
2863 emit_insns_before (insns, insn);
2864 return;
2865 }
2866 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2867 {
2868 rtx sub = XEXP (XEXP (x, 0), 0);
2869
2870 if (GET_CODE (sub) == MEM)
2871 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2872
2873 if (GET_CODE (sub) == REG
2874 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2875 {
2876 put_addressof_into_stack (XEXP (x, 0));
2877 return;
2878 }
2879 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2880 {
2881 int size_x, size_sub;
2882
2883 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2884 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2885
2886 /* Don't even consider working with paradoxical subregs,
2887 or the moral equivalent seen here. */
2888 if (size_x <= size_sub
2889 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2890 {
2891 /* Do a bitfield insertion to mirror what would happen
2892 in memory. */
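/* A sketch of the idea: a read through (mem:HI (addressof (reg:SI P)))
   acts like loading 16 bits of P's would-be stack slot, so we model
   it with extract_bit_field applied to P itself; the store case uses
   store_bit_field the same way.  */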
2893
2894 rtx val, seq;
2895
2896 if (store)
2897 {
2898 /* If we can't replace with a register, be afraid. */
2899
2900 start_sequence ();
2901 val = gen_reg_rtx (GET_MODE (x));
2902 if (! validate_change (insn, loc, val, 0))
2903 abort ();
2904 seq = gen_sequence ();
2905 end_sequence ();
2906 emit_insn_before (seq, insn);
2907
2908 start_sequence ();
2909 store_bit_field (sub, size_x, 0, GET_MODE (x),
2910 val, GET_MODE_SIZE (GET_MODE (sub)),
2911 GET_MODE_SIZE (GET_MODE (sub)));
2912
2913 seq = gen_sequence ();
2914 end_sequence ();
2915 emit_insn_after (seq, insn);
2916 }
2917 else
2918 {
2919 start_sequence ();
2920 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2921 GET_MODE (x), GET_MODE (x),
2922 GET_MODE_SIZE (GET_MODE (sub)),
2923 GET_MODE_SIZE (GET_MODE (sub)));
2924
2925 /* If we can't replace with a register, be afraid. */
2926 if (! validate_change (insn, loc, val, 0))
2927 abort ();
2928
2929 seq = gen_sequence ();
2930 end_sequence ();
2931 emit_insn_before (seq, insn);
2932 }
2933
2934 /* We replaced with a reg -- all done. */
2935 return;
2936 }
2937 }
2938 else if (validate_change (insn, loc, sub, 0))
2939 goto restart;
2940 /* else give up and put it into the stack */
2941 }
2942 else if (code == ADDRESSOF)
2943 {
2944 put_addressof_into_stack (x);
2945 return;
2946 }
2947 else if (code == SET)
2948 {
2949 purge_addressof_1 (&SET_DEST (x), insn, force, 1);
2950 purge_addressof_1 (&SET_SRC (x), insn, force, 0);
2951 return;
2952 }
2953 else if (code == CALL)
2954 {
2955 purge_addressof_1 (&XEXP (x, 0), insn, 1, 0);
2956 purge_addressof_1 (&XEXP (x, 1), insn, force, 0);
2957 return;
2958 }
2959
2960 /* Scan all subexpressions. */
2961 fmt = GET_RTX_FORMAT (code);
2962 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2963 {
2964 if (*fmt == 'e')
2965 purge_addressof_1 (&XEXP (x, i), insn, force, 0);
2966 else if (*fmt == 'E')
2967 for (j = 0; j < XVECLEN (x, i); j++)
2968 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
2969 }
2970 }
2971
2972 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2973 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2974 stack. */
2975
2976 void
2977 purge_addressof (insns)
2978 rtx insns;
2979 {
2980 rtx insn;
2981 for (insn = insns; insn; insn = NEXT_INSN (insn))
2982 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2983 || GET_CODE (insn) == CALL_INSN)
2984 {
2985 purge_addressof_1 (&PATTERN (insn), insn,
2986 asm_noperands (PATTERN (insn)) > 0, 0);
2987 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
2988 }
2989 }
2990 \f
2991 /* Pass through the INSNS of function FNDECL and convert virtual register
2992 references to hard register references. */
2993
2994 void
2995 instantiate_virtual_regs (fndecl, insns)
2996 tree fndecl;
2997 rtx insns;
2998 {
2999 rtx insn;
3000 int i;
3001
3002 /* Compute the offsets to use for this function. */
3003 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3004 var_offset = STARTING_FRAME_OFFSET;
3005 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3006 out_arg_offset = STACK_POINTER_OFFSET;
3007 cfa_offset = ARG_POINTER_CFA_OFFSET;
3008
3009 /* Scan all variables and parameters of this function. For each that is
3010 in memory, instantiate all virtual registers if the result is a valid
3011 address. If not, we do it later. That will handle most uses of virtual
3012 regs on many machines. */
3013 instantiate_decls (fndecl, 1);
3014
3015 /* Initialize recognition, indicating that volatile is OK. */
3016 init_recog ();
3017
3018 /* Scan through all the insns, instantiating every virtual register still
3019 present. */
3020 for (insn = insns; insn; insn = NEXT_INSN (insn))
3021 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3022 || GET_CODE (insn) == CALL_INSN)
3023 {
3024 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3025 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3026 }
3027
3028 /* Instantiate the stack slots for the parm registers, for later use in
3029 addressof elimination. */
3030 for (i = 0; i < max_parm_reg; ++i)
3031 if (parm_reg_stack_loc[i])
3032 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3033
3034 /* Now instantiate the remaining register equivalences for debugging info.
3035 These will not be valid addresses. */
3036 instantiate_decls (fndecl, 0);
3037
3038 /* Indicate that, from now on, assign_stack_local should use
3039 frame_pointer_rtx. */
3040 virtuals_instantiated = 1;
3041 }
3042
3043 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3044 all virtual registers in their DECL_RTL's.
3045
3046 If VALID_ONLY, do this only if the resulting address is still valid.
3047 Otherwise, always do it. */
3048
3049 static void
3050 instantiate_decls (fndecl, valid_only)
3051 tree fndecl;
3052 int valid_only;
3053 {
3054 tree decl;
3055
3056 if (DECL_SAVED_INSNS (fndecl))
3057 /* When compiling an inline function, the obstack used for
3058 rtl allocation is the maybepermanent_obstack. Calling
3059 `resume_temporary_allocation' switches us back to that
3060 obstack while we process this function's parameters. */
3061 resume_temporary_allocation ();
3062
3063 /* Process all parameters of the function. */
3064 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3065 {
3066 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3067
3068 instantiate_decl (DECL_RTL (decl), size, valid_only);
3069
3070 /* If the parameter was promoted, then the incoming RTL mode may be
3071 larger than the declared type size. We must use the larger of
3072 the two sizes. */
3073 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3074 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3075 }
3076
3077 /* Now process all variables defined in the function or its subblocks. */
3078 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3079
3080 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3081 {
3082 /* Save all rtl allocated for this function by raising the
3083 high-water mark on the maybepermanent_obstack. */
3084 preserve_data ();
3085 /* All further rtl allocation is now done in the current_obstack. */
3086 rtl_in_current_obstack ();
3087 }
3088 }
3089
3090 /* Subroutine of instantiate_decls: Process all decls in the given
3091 BLOCK node and all its subblocks. */
3092
3093 static void
3094 instantiate_decls_1 (let, valid_only)
3095 tree let;
3096 int valid_only;
3097 {
3098 tree t;
3099
3100 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3101 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3102 valid_only);
3103
3104 /* Process all subblocks. */
3105 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3106 instantiate_decls_1 (t, valid_only);
3107 }
3108
3109 /* Subroutine of the preceding procedures: Given RTL representing a
3110 decl and the size of the object, do any instantiation required.
3111
3112 If VALID_ONLY is non-zero, it means that the RTL should only be
3113 changed if the new address is valid. */
3114
3115 static void
3116 instantiate_decl (x, size, valid_only)
3117 rtx x;
3118 int size;
3119 int valid_only;
3120 {
3121 enum machine_mode mode;
3122 rtx addr;
3123
3124 /* If this is not a MEM, no need to do anything. Similarly if the
3125 address is a constant or a register that is not a virtual register. */
3126
3127 if (x == 0 || GET_CODE (x) != MEM)
3128 return;
3129
3130 addr = XEXP (x, 0);
3131 if (CONSTANT_P (addr)
3132 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3133 || (GET_CODE (addr) == REG
3134 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3135 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3136 return;
3137
3138 /* If we should only do this if the address is valid, copy the address.
3139 We need to do this so we can undo any changes that might make the
3140 address invalid. This copy is unfortunate, but probably can't be
3141 avoided. */
3142
3143 if (valid_only)
3144 addr = copy_rtx (addr);
3145
3146 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3147
3148 if (valid_only)
3149 {
3150 /* Now verify that the resulting address is valid for every integer or
3151 floating-point mode up to and including SIZE bytes long. We do this
3152 since the object might be accessed in any mode and frame addresses
3153 are shared. */
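/* For instance, for an 8-byte object this checks the address in
   each integer mode up through DImode and each float mode up
   through DFmode, assuming the usual mode sizes.  */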
3154
3155 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3156 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3157 mode = GET_MODE_WIDER_MODE (mode))
3158 if (! memory_address_p (mode, addr))
3159 return;
3160
3161 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3162 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3163 mode = GET_MODE_WIDER_MODE (mode))
3164 if (! memory_address_p (mode, addr))
3165 return;
3166 }
3167
3168 /* Put back the address now that we have updated it and we either know
3169 it is valid or we don't care whether it is valid. */
3170
3171 XEXP (x, 0) = addr;
3172 }
3173 \f
3174 /* Given a pointer to a piece of rtx and an optional pointer to the
3175 containing object, instantiate any virtual registers present in it.
3176
3177 If EXTRA_INSNS, we always do the replacement and generate
3178 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3179 is not valid.
3180
3181 Return 1 if we either had nothing to do or if we were able to do the
3182 needed replacement. Return 0 otherwise; we only return zero if
3183 EXTRA_INSNS is zero.
3184
3185 We first try some simple transformations to avoid the creation of extra
3186 pseudos. */
3187
3188 static int
3189 instantiate_virtual_regs_1 (loc, object, extra_insns)
3190 rtx *loc;
3191 rtx object;
3192 int extra_insns;
3193 {
3194 rtx x;
3195 RTX_CODE code;
3196 rtx new = 0;
3197 HOST_WIDE_INT offset;
3198 rtx temp;
3199 rtx seq;
3200 int i, j;
3201 char *fmt;
3202
3203 /* Re-start here to avoid recursion in common cases. */
3204 restart:
3205
3206 x = *loc;
3207 if (x == 0)
3208 return 1;
3209
3210 code = GET_CODE (x);
3211
3212 /* Check for some special cases. */
3213 switch (code)
3214 {
3215 case CONST_INT:
3216 case CONST_DOUBLE:
3217 case CONST:
3218 case SYMBOL_REF:
3219 case CODE_LABEL:
3220 case PC:
3221 case CC0:
3222 case ASM_INPUT:
3223 case ADDR_VEC:
3224 case ADDR_DIFF_VEC:
3225 case RETURN:
3226 return 1;
3227
3228 case SET:
3229 /* We are allowed to set the virtual registers. This means that
3230 the actual register should receive the source minus the
3231 appropriate offset. This is used, for example, in the handling
3232 of non-local gotos. */
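/* A sketch, with var_offset == 16:
       (set (reg virtual_stack_vars) (reg Y))
   becomes
       (set (reg frame_pointer) (plus (reg Y) (const_int -16)))
   preserving the invariant virtual_stack_vars == frame_pointer + 16.  */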
3233 if (SET_DEST (x) == virtual_incoming_args_rtx)
3234 new = arg_pointer_rtx, offset = - in_arg_offset;
3235 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3236 new = frame_pointer_rtx, offset = - var_offset;
3237 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3238 new = stack_pointer_rtx, offset = - dynamic_offset;
3239 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3240 new = stack_pointer_rtx, offset = - out_arg_offset;
3241 else if (SET_DEST (x) == virtual_cfa_rtx)
3242 new = arg_pointer_rtx, offset = - cfa_offset;
3243
3244 if (new)
3245 {
3246 /* The only valid sources here are PLUS or REG. Just do
3247 the simplest possible thing to handle them. */
3248 if (GET_CODE (SET_SRC (x)) != REG
3249 && GET_CODE (SET_SRC (x)) != PLUS)
3250 abort ();
3251
3252 start_sequence ();
3253 if (GET_CODE (SET_SRC (x)) != REG)
3254 temp = force_operand (SET_SRC (x), NULL_RTX);
3255 else
3256 temp = SET_SRC (x);
3257 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3258 seq = get_insns ();
3259 end_sequence ();
3260
3261 emit_insns_before (seq, object);
3262 SET_DEST (x) = new;
3263
3264 if (! validate_change (object, &SET_SRC (x), temp, 0)
3265 || ! extra_insns)
3266 abort ();
3267
3268 return 1;
3269 }
3270
3271 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3272 loc = &SET_SRC (x);
3273 goto restart;
3274
3275 case PLUS:
3276 /* Handle special case of virtual register plus constant. */
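/* E.g. (a sketch; the offset is target-specific): with
   in_arg_offset == 8,
       (plus virtual_incoming_args_rtx (const_int 4))
   is rewritten as (plus arg_pointer_rtx (const_int 12)).  */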
3277 if (CONSTANT_P (XEXP (x, 1)))
3278 {
3279 rtx old, new_offset;
3280
3281 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3282 if (GET_CODE (XEXP (x, 0)) == PLUS)
3283 {
3284 rtx inner = XEXP (XEXP (x, 0), 0);
3285
3286 if (inner == virtual_incoming_args_rtx)
3287 new = arg_pointer_rtx, offset = in_arg_offset;
3288 else if (inner == virtual_stack_vars_rtx)
3289 new = frame_pointer_rtx, offset = var_offset;
3290 else if (inner == virtual_stack_dynamic_rtx)
3291 new = stack_pointer_rtx, offset = dynamic_offset;
3292 else if (inner == virtual_outgoing_args_rtx)
3293 new = stack_pointer_rtx, offset = out_arg_offset;
3294 else if (inner == virtual_cfa_rtx)
3295 new = arg_pointer_rtx, offset = cfa_offset;
3296 else
3297 {
3298 loc = &XEXP (x, 0);
3299 goto restart;
3300 }
3301
3302 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3303 extra_insns);
3304 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3305 }
3306
3307 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3308 new = arg_pointer_rtx, offset = in_arg_offset;
3309 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3310 new = frame_pointer_rtx, offset = var_offset;
3311 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3312 new = stack_pointer_rtx, offset = dynamic_offset;
3313 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3314 new = stack_pointer_rtx, offset = out_arg_offset;
3315 else if (XEXP (x, 0) == virtual_cfa_rtx)
3316 new = arg_pointer_rtx, offset = cfa_offset;
3317 else
3318 {
3319 /* We know the second operand is a constant. Unless the
3320 first operand is a REG (which has already been checked),
3321 it needs to be checked. */
3322 if (GET_CODE (XEXP (x, 0)) != REG)
3323 {
3324 loc = &XEXP (x, 0);
3325 goto restart;
3326 }
3327 return 1;
3328 }
3329
3330 new_offset = plus_constant (XEXP (x, 1), offset);
3331
3332 /* If the new constant is zero, try to replace the sum with just
3333 the register. */
3334 if (new_offset == const0_rtx
3335 && validate_change (object, loc, new, 0))
3336 return 1;
3337
3338 /* Next try to replace the register and new offset.
3339 There are two changes to validate here and we can't assume that
3340 even when the old offset equals the new one, just changing the register
3341 will yield a valid insn. In the interests of a little efficiency,
3342 however, we only call validate_change once (we don't queue up the
3343 changes and then call apply_change_group). */
3344
3345 old = XEXP (x, 0);
3346 if (offset == 0
3347 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3348 : (XEXP (x, 0) = new,
3349 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3350 {
3351 if (! extra_insns)
3352 {
3353 XEXP (x, 0) = old;
3354 return 0;
3355 }
3356
3357 /* Otherwise copy the new constant into a register and replace
3358 the constant with that register. */
3359 temp = gen_reg_rtx (Pmode);
3360 XEXP (x, 0) = new;
3361 if (validate_change (object, &XEXP (x, 1), temp, 0))
3362 emit_insn_before (gen_move_insn (temp, new_offset), object);
3363 else
3364 {
3365 /* If that didn't work, replace this expression with a
3366 register containing the sum. */
3367
3368 XEXP (x, 0) = old;
3369 new = gen_rtx_PLUS (Pmode, new, new_offset);
3370
3371 start_sequence ();
3372 temp = force_operand (new, NULL_RTX);
3373 seq = get_insns ();
3374 end_sequence ();
3375
3376 emit_insns_before (seq, object);
3377 if (! validate_change (object, loc, temp, 0)
3378 && ! validate_replace_rtx (x, temp, object))
3379 abort ();
3380 }
3381 }
3382
3383 return 1;
3384 }
3385
3386 /* Fall through to generic two-operand expression case. */
3387 case EXPR_LIST:
3388 case CALL:
3389 case COMPARE:
3390 case MINUS:
3391 case MULT:
3392 case DIV: case UDIV:
3393 case MOD: case UMOD:
3394 case AND: case IOR: case XOR:
3395 case ROTATERT: case ROTATE:
3396 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3397 case NE: case EQ:
3398 case GE: case GT: case GEU: case GTU:
3399 case LE: case LT: case LEU: case LTU:
3400 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3401 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3402 loc = &XEXP (x, 0);
3403 goto restart;
3404
3405 case MEM:
3406 /* Most cases of MEM that convert to valid addresses have already been
3407 handled by our scan of decls. The only special handling we
3408 need here is to make a copy of the rtx to ensure it isn't being
3409 shared if we have to change it to a pseudo.
3410
3411 If the rtx is a simple reference to an address via a virtual register,
3412 it can potentially be shared. In such cases, first try to make it
3413 a valid address, which can also be shared. Otherwise, copy it and
3414 proceed normally.
3415
3416 First check for common cases that need no processing. These are
3417 usually due to instantiation already being done on a previous instance
3418 of a shared rtx. */
3419
3420 temp = XEXP (x, 0);
3421 if (CONSTANT_ADDRESS_P (temp)
3422 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3423 || temp == arg_pointer_rtx
3424 #endif
3425 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3426 || temp == hard_frame_pointer_rtx
3427 #endif
3428 || temp == frame_pointer_rtx)
3429 return 1;
3430
3431 if (GET_CODE (temp) == PLUS
3432 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3433 && (XEXP (temp, 0) == frame_pointer_rtx
3434 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3435 || XEXP (temp, 0) == hard_frame_pointer_rtx
3436 #endif
3437 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3438 || XEXP (temp, 0) == arg_pointer_rtx
3439 #endif
3440 ))
3441 return 1;
3442
3443 if (temp == virtual_stack_vars_rtx
3444 || temp == virtual_incoming_args_rtx
3445 || (GET_CODE (temp) == PLUS
3446 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3447 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3448 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3449 {
3450 /* This MEM may be shared. If the substitution can be done without
3451 the need to generate new pseudos, we want to do it in place
3452 so all copies of the shared rtx benefit. The call below will
3453 only make substitutions if the resulting address is still
3454 valid.
3455
3456 Note that we cannot pass X as the object in the recursive call
3457 since the insn being processed may not allow all valid
3458 addresses.  However, if we were not passed an object, we can
3459 only modify X without copying it if X will have a valid
3460 address.
3461
3462 ??? Also note that this can still lose if OBJECT is an insn that
3463 has fewer restrictions on an address than some other insn.
3464 In that case, we will modify the shared address.  This case
3465 doesn't seem very likely, though.  One place where this could
3466 happen is with a USE or CLOBBER reference, but we take care
3467 of that below.  */
3468
3469 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3470 object ? object : x, 0))
3471 return 1;
3472
3473 /* Otherwise make a copy and process that copy. We copy the entire
3474 RTL expression since it might be a PLUS which could also be
3475 shared. */
3476 *loc = x = copy_rtx (x);
3477 }
3478
3479 /* Fall through to generic unary operation case. */
3480 case SUBREG:
3481 case STRICT_LOW_PART:
3482 case NEG: case NOT:
3483 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3484 case SIGN_EXTEND: case ZERO_EXTEND:
3485 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3486 case FLOAT: case FIX:
3487 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3488 case ABS:
3489 case SQRT:
3490 case FFS:
3491 /* These cases either have just one operand or we know that we need not
3492 check the rest of the operands.  */
3493 loc = &XEXP (x, 0);
3494 goto restart;
3495
3496 case USE:
3497 case CLOBBER:
3498 /* If the operand is a MEM, see if the change yields a valid MEM.  If
3499 not, go ahead and make the invalid change anyway, but do it to a copy.
3500 For a REG, just make the recursive call, since there's no chance of a problem.  */
3501
3502 if ((GET_CODE (XEXP (x, 0)) == MEM
3503 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3504 0))
3505 || (GET_CODE (XEXP (x, 0)) == REG
3506 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3507 return 1;
3508
3509 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3510 loc = &XEXP (x, 0);
3511 goto restart;
3512
3513 case REG:
3514 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3515 in front of this insn and substitute the temporary. */
3516 if (x == virtual_incoming_args_rtx)
3517 new = arg_pointer_rtx, offset = in_arg_offset;
3518 else if (x == virtual_stack_vars_rtx)
3519 new = frame_pointer_rtx, offset = var_offset;
3520 else if (x == virtual_stack_dynamic_rtx)
3521 new = stack_pointer_rtx, offset = dynamic_offset;
3522 else if (x == virtual_outgoing_args_rtx)
3523 new = stack_pointer_rtx, offset = out_arg_offset;
3524 else if (x == virtual_cfa_rtx)
3525 new = arg_pointer_rtx, offset = cfa_offset;
3526
3527 if (new)
3528 {
3529 temp = plus_constant (new, offset);
3530 if (!validate_change (object, loc, temp, 0))
3531 {
3532 if (! extra_insns)
3533 return 0;
3534
3535 start_sequence ();
3536 temp = force_operand (temp, NULL_RTX);
3537 seq = get_insns ();
3538 end_sequence ();
3539
3540 emit_insns_before (seq, object);
3541 if (! validate_change (object, loc, temp, 0)
3542 && ! validate_replace_rtx (x, temp, object))
3543 abort ();
3544 }
3545 }
3546
3547 return 1;
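/* A sketch of the bare-REG case above, with made-up numbers: a lone
   (reg virtual-outgoing-args), where out_arg_offset == 32, becomes
   (plus (reg stack-pointer) (const_int 32)) when the insn accepts
   that form; otherwise, with EXTRA_INSNS nonzero, force_operand
   computes the sum into a new pseudo ahead of the insn and the
   pseudo replaces the virtual register.  */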
3548
3549 case ADDRESSOF:
3550 if (GET_CODE (XEXP (x, 0)) == REG)
3551 return 1;
3552
3553 else if (GET_CODE (XEXP (x, 0)) == MEM)
3554 {
3555 /* If we have a (addressof (mem ..)), do any instantiation inside
3556 since we know we'll be making the inside valid when we finally
3557 remove the ADDRESSOF. */
3558 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3559 return 1;
3560 }
3561 break;
3562
3563 default:
3564 break;
3565 }
3566
3567 /* Scan all subexpressions. */
3568 fmt = GET_RTX_FORMAT (code);
3569 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3570 if (*fmt == 'e')
3571 {
3572 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3573 return 0;
3574 }
3575 else if (*fmt == 'E')
3576 for (j = 0; j < XVECLEN (x, i); j++)
3577 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3578 extra_insns))
3579 return 0;
3580
3581 return 1;
3582 }
3583 \f
3584 /* Optimization: assuming this function does not receive nonlocal gotos,
3585 delete the handlers for such, as well as the insns to establish
3586 and disestablish them. */
3587
3588 static void
3589 delete_handlers ()
3590 {
3591 rtx insn;
3592 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3593 {
3594 /* Delete the handler by turning off the flag that would
3595 prevent jump_optimize from deleting it.
3596 Also permit deletion of the nonlocal labels themselves
3597 if nothing local refers to them. */
3598 if (GET_CODE (insn) == CODE_LABEL)
3599 {
3600 tree t, last_t;
3601
3602 LABEL_PRESERVE_P (insn) = 0;
3603
3604 /* Remove it from the nonlocal_label list, to avoid confusing
3605 flow. */
3606 for (t = nonlocal_labels, last_t = 0; t;
3607 last_t = t, t = TREE_CHAIN (t))
3608 if (DECL_RTL (TREE_VALUE (t)) == insn)
3609 break;
3610 if (t)
3611 {
3612 if (! last_t)
3613 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3614 else
3615 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3616 }
3617 }
3618 if (GET_CODE (insn) == INSN
3619 && ((nonlocal_goto_handler_slot != 0
3620 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3621 || (nonlocal_goto_stack_level != 0
3622 && reg_mentioned_p (nonlocal_goto_stack_level,
3623 PATTERN (insn)))))
3624 delete_insn (insn);
3625 }
3626 }
3627
3628 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3629 of the current function. */
3630
3631 rtx
3632 nonlocal_label_rtx_list ()
3633 {
3634 tree t;
3635 rtx x = 0;
3636
3637 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3638 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3639
3640 return x;
3641 }
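/* For example (hypothetically), if nonlocal_labels holds L1 followed by
   L2, the returned chain is

       (expr_list L2 (expr_list L1 (nil)))

   where L1 and L2 stand for the labels' CODE_LABEL rtxs; each label is
   consed onto the front, so the chain comes out in reverse order.  */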
3642 \f
3643 /* Output a USE for any register use in RTL.
3644 This is used with -noreg to mark the extent of the lifespan
3645 of any registers used in a user-visible variable's DECL_RTL.  */
3646
3647 void
3648 use_variable (rtl)
3649 rtx rtl;
3650 {
3651 if (GET_CODE (rtl) == REG)
3652 /* This is a register variable. */
3653 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3654 else if (GET_CODE (rtl) == MEM
3655 && GET_CODE (XEXP (rtl, 0)) == REG
3656 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3657 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3658 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3659 /* This is a variable-sized structure. */
3660 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3661 }
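/* For instance, a register variable living in pseudo (reg 42) (a
   hypothetical register number) gets a trailing (use (reg 42)) insn,
   which marks the register as live to the end of the chain; for a
   variable-sized structure, the USE covers the address register
   instead.  */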
3662
3663 /* Like use_variable except that it outputs the USEs after INSN
3664 instead of at the end of the insn-chain. */
3665
3666 void
3667 use_variable_after (rtl, insn)
3668 rtx rtl, insn;
3669 {
3670 if (GET_CODE (rtl) == REG)
3671 /* This is a register variable. */
3672 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3673 else if (GET_CODE (rtl) == MEM
3674 && GET_CODE (XEXP (rtl, 0)) == REG
3675 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3676 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3677 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3678 /* This is a variable-sized structure. */
3679 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3680 }
3681 \f
3682 int
3683 max_parm_reg_num ()
3684 {
3685 return max_parm_reg;
3686 }
3687
3688 /* Return the first insn following those generated by `assign_parms'. */
3689
3690 rtx
3691 get_first_nonparm_insn ()
3692 {
3693 if (last_parm_insn)
3694 return NEXT_INSN (last_parm_insn);
3695 return get_insns ();
3696 }
3697
3698 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3699 Crash if there is none. */
3700
3701 rtx
3702 get_first_block_beg ()
3703 {
3704 register rtx searcher;
3705 register rtx insn = get_first_nonparm_insn ();
3706
3707 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3708 if (GET_CODE (searcher) == NOTE
3709 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3710 return searcher;
3711
3712 abort (); /* Invalid call to this function. (See comments above.) */
3713 return NULL_RTX;
3714 }
3715
3716 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3717 This means a type for which function calls must pass an address to the
3718 function or get an address back from the function.
3719 EXP may be a type node or an expression (whose type is tested). */
3720
3721 int
3722 aggregate_value_p (exp)
3723 tree exp;
3724 {
3725 int i, regno, nregs;
3726 rtx reg;
3727 tree type;
3728 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3729 type = exp;
3730 else
3731 type = TREE_TYPE (exp);
3732
3733 if (RETURN_IN_MEMORY (type))
3734 return 1;
3735 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3736 and thus can't be returned in registers. */
3737 if (TREE_ADDRESSABLE (type))
3738 return 1;
3739 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3740 return 1;
3741 /* Make sure we have suitable call-clobbered regs to return
3742 the value in; if not, we must return it in memory. */
3743 reg = hard_function_value (type, 0);
3744
3745 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3746 it is OK. */
3747 if (GET_CODE (reg) != REG)
3748 return 0;
3749
3750 regno = REGNO (reg);
3751 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3752 for (i = 0; i < nregs; i++)
3753 if (! call_used_regs[regno + i])
3754 return 1;
3755 return 0;
3756 }
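/* As an illustration (behavior on typical targets, not a guarantee):
   for

       struct s { char buf[64]; } f (void);

   aggregate_value_p usually returns 1, since RETURN_IN_MEMORY normally
   accepts such a type, so callers must pass f an address in which to
   store the result.  For a plain int it normally returns 0, because
   hard_function_value yields a call-clobbered hard register.  */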
3757 \f
3758 /* Assign RTL expressions to the function's parameters.
3759 This may involve copying them into registers and using
3760 those registers as the RTL for them.
3761
3762 If SECOND_TIME is non-zero it means that this function is being
3763 called a second time. This is done by integrate.c when a function's
3764 compilation is deferred. We need to come back here in case the
3765 FUNCTION_ARG macro computes items needed for the rest of the compilation
3766 (such as changing which registers are fixed or caller-saved). But suppress
3767 writing any insns or setting DECL_RTL of anything in this case. */
3768
3769 void
3770 assign_parms (fndecl, second_time)
3771 tree fndecl;
3772 int second_time;
3773 {
3774 register tree parm;
3775 register rtx entry_parm = 0;
3776 register rtx stack_parm = 0;
3777 CUMULATIVE_ARGS args_so_far;
3778 enum machine_mode promoted_mode, passed_mode;
3779 enum machine_mode nominal_mode, promoted_nominal_mode;
3780 int unsignedp;
3781 /* Total space needed so far for args on the stack,
3782 given as a constant and a tree-expression. */
3783 struct args_size stack_args_size;
3784 tree fntype = TREE_TYPE (fndecl);
3785 tree fnargs = DECL_ARGUMENTS (fndecl);
3786 /* This is used for the arg pointer when referring to stack args. */
3787 rtx internal_arg_pointer;
3788 /* This is a dummy PARM_DECL that we used for the function result if
3789 the function returns a structure. */
3790 tree function_result_decl = 0;
3791 int varargs_setup = 0;
3792 rtx conversion_insns = 0;
3793
3794 /* Nonzero if the last arg is named `__builtin_va_alist',
3795 which is used on some machines for old-fashioned non-ANSI varargs.h;
3796 this should be stuck onto the stack as if it had arrived there. */
3797 int hide_last_arg
3798 = (current_function_varargs
3799 && fnargs
3800 && (parm = tree_last (fnargs)) != 0
3801 && DECL_NAME (parm)
3802 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3803 "__builtin_va_alist")));
3804
3805 /* Nonzero if function takes extra anonymous args.
3806 This means the last named arg must be on the stack
3807 right before the anonymous ones. */
3808 int stdarg
3809 = (TYPE_ARG_TYPES (fntype) != 0
3810 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3811 != void_type_node));
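/* For example, given

       int f (const char *fmt, ...);

   the TYPE_ARG_TYPES chain does not end in void_type_node, so STDARG is
   1; for int g (int) the chain ends in void_type_node and STDARG is 0.
   An unprototyped function has TYPE_ARG_TYPES of 0 and is likewise not
   treated as stdarg here.  */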
3812
3813 current_function_stdarg = stdarg;
3814
3815 /* If the reg that the virtual arg pointer will be translated into is
3816 not a fixed reg or is the stack pointer, make a copy of the virtual
3817 arg pointer, and address parms via the copy. The frame pointer is
3818 considered fixed even though it is not marked as such.
3819
3820 The second time through, simply use ap to avoid generating rtx. */
3821
3822 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3823 || ! (fixed_regs[ARG_POINTER_REGNUM]
3824 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3825 && ! second_time)
3826 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3827 else
3828 internal_arg_pointer = virtual_incoming_args_rtx;
3829 current_function_internal_arg_pointer = internal_arg_pointer;
3830
3831 stack_args_size.constant = 0;
3832 stack_args_size.var = 0;
3833
3834 /* If struct value address is treated as the first argument, make it so. */
3835 if (aggregate_value_p (DECL_RESULT (fndecl))
3836 && ! current_function_returns_pcc_struct
3837 && struct_value_incoming_rtx == 0)
3838 {
3839 tree type = build_pointer_type (TREE_TYPE (fntype));
3840
3841 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3842
3843 DECL_ARG_TYPE (function_result_decl) = type;
3844 TREE_CHAIN (function_result_decl) = fnargs;
3845 fnargs = function_result_decl;
3846 }
3847
3848 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3849 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3850 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3851
3852 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3853 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3854 #else
3855 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3856 #endif
3857
3858 /* We haven't yet found an argument that we must push and pretend the
3859 caller did. */
3860 current_function_pretend_args_size = 0;
3861
3862 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3863 {
3864 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3865 struct args_size stack_offset;
3866 struct args_size arg_size;
3867 int passed_pointer = 0;
3868 int did_conversion = 0;
3869 tree passed_type = DECL_ARG_TYPE (parm);
3870 tree nominal_type = TREE_TYPE (parm);
3871
3872 /* Set LAST_NAMED if this is last named arg before some
3873 anonymous args. */
3874 int last_named = ((TREE_CHAIN (parm) == 0
3875 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3876 && (stdarg || current_function_varargs));
3877 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3878 most machines, if this is a varargs/stdarg function, then we treat
3879 the last named arg as if it were anonymous too. */
3880 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3881
3882 if (TREE_TYPE (parm) == error_mark_node
3883 /* This can happen after weird syntax errors
3884 or if an enum type is defined among the parms. */
3885 || TREE_CODE (parm) != PARM_DECL
3886 || passed_type == NULL)
3887 {
3888 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3889 = gen_rtx_MEM (BLKmode, const0_rtx);
3890 TREE_USED (parm) = 1;
3891 continue;
3892 }
3893
3894 /* For a varargs.h function, save info about regs and stack space
3895 used by the individual args, not including the va_alist arg.  */
3896 if (hide_last_arg && last_named)
3897 current_function_args_info = args_so_far;
3898
3899 /* Find mode of arg as it is passed, and mode of arg
3900 as it should be during execution of this function. */
3901 passed_mode = TYPE_MODE (passed_type);
3902 nominal_mode = TYPE_MODE (nominal_type);
3903
3904 /* If the parm's mode is VOIDmode, its value doesn't matter;
3905 avoid the usual things like emit_move_insn that could crash.  */
3906 if (nominal_mode == VOIDmode)
3907 {
3908 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3909 continue;
3910 }
3911
3912 /* If the parm is to be passed as a transparent union, use the
3913 type of the first field for the tests below. We have already
3914 verified that the modes are the same. */
3915 if (DECL_TRANSPARENT_UNION (parm)
3916 || TYPE_TRANSPARENT_UNION (passed_type))
3917 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3918
3919 /* See if this arg was passed by invisible reference. It is if
3920 it is an object whose size depends on the contents of the
3921 object itself or if the machine requires these objects be passed
3922 that way. */
3923
3924 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3925 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3926 || TREE_ADDRESSABLE (passed_type)
3927 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3928 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3929 passed_type, named_arg)
3930 #endif
3931 )
3932 {
3933 passed_type = nominal_type = build_pointer_type (passed_type);
3934 passed_pointer = 1;
3935 passed_mode = nominal_mode = Pmode;
3936 }
3937
3938 promoted_mode = passed_mode;
3939
3940 #ifdef PROMOTE_FUNCTION_ARGS
3941 /* Compute the mode to which the arg is actually extended.  */
3942 unsignedp = TREE_UNSIGNED (passed_type);
3943 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3944 #endif
3945
3946 /* Let machine desc say which reg (if any) the parm arrives in.
3947 0 means it arrives on the stack. */
3948 #ifdef FUNCTION_INCOMING_ARG
3949 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3950 passed_type, named_arg);
3951 #else
3952 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3953 passed_type, named_arg);
3954 #endif
3955
3956 if (entry_parm == 0)
3957 promoted_mode = passed_mode;
3958
3959 #ifdef SETUP_INCOMING_VARARGS
3960 /* If this is the last named parameter, do any required setup for
3961 varargs or stdargs. We need to know about the case of this being an
3962 addressable type, in which case we skip the registers it
3963 would have arrived in.
3964
3965 For stdargs, LAST_NAMED will be set for two parameters, the one that
3966 is actually the last named, and the dummy parameter. We only
3967 want to do this action once.
3968
3969 Also, indicate when RTL generation is to be suppressed. */
3970 if (last_named && !varargs_setup)
3971 {
3972 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3973 current_function_pretend_args_size,
3974 second_time);
3975 varargs_setup = 1;
3976 }
3977 #endif
3978
3979 /* Determine parm's home in the stack,
3980 in case it arrives in the stack or we should pretend it did.
3981
3982 Compute the stack position and rtx where the argument arrives
3983 and its size.
3984
3985 There is one complexity here: If this was a parameter that would
3986 have been passed in registers, but wasn't, only because it is
3987 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3988 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3989 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3990 0 as it was the previous time. */
3991
3992 locate_and_pad_parm (promoted_mode, passed_type,
3993 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3994 1,
3995 #else
3996 #ifdef FUNCTION_INCOMING_ARG
3997 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3998 passed_type,
3999 (named_arg
4000 || varargs_setup)) != 0,
4001 #else
4002 FUNCTION_ARG (args_so_far, promoted_mode,
4003 passed_type,
4004 named_arg || varargs_setup) != 0,
4005 #endif
4006 #endif
4007 fndecl, &stack_args_size, &stack_offset, &arg_size);
4008
4009 if (! second_time)
4010 {
4011 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4012
4013 if (offset_rtx == const0_rtx)
4014 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4015 else
4016 stack_parm = gen_rtx_MEM (promoted_mode,
4017 gen_rtx_PLUS (Pmode,
4018 internal_arg_pointer,
4019 offset_rtx));
4020
4021 /* If this is a memory ref that contains aggregate components,
4022 mark it as such for cse and loop optimize. Likewise if it
4023 is readonly. */
4024 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4025 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4026 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4027 }
4028
4029 /* If this parameter was passed both in registers and in the stack,
4030 use the copy on the stack. */
4031 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4032 entry_parm = 0;
4033
4034 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4035 /* If this parm was passed part in regs and part in memory,
4036 pretend it arrived entirely in memory
4037 by pushing the register-part onto the stack.
4038
4039 In the special case of a DImode or DFmode that is split,
4040 we could put it together in a pseudoreg directly,
4041 but for now that's not worth bothering with. */
4042
4043 if (entry_parm)
4044 {
4045 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4046 passed_type, named_arg);
4047
4048 if (nregs > 0)
4049 {
4050 current_function_pretend_args_size
4051 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4052 / (PARM_BOUNDARY / BITS_PER_UNIT)
4053 * (PARM_BOUNDARY / BITS_PER_UNIT));
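/* A worked example of the rounding above, with made-up values:
   nregs == 3, UNITS_PER_WORD == 4, PARM_BOUNDARY == 64.  The
   register part occupies 12 bytes, which rounds up to the 8-byte
   parm boundary: (12 + 8 - 1) / 8 * 8 == 16.  */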
4054
4055 if (! second_time)
4056 {
4057 /* Handle calls that pass values in multiple non-contiguous
4058 locations. The Irix 6 ABI has examples of this. */
4059 if (GET_CODE (entry_parm) == PARALLEL)
4060 emit_group_store (validize_mem (stack_parm), entry_parm,
4061 int_size_in_bytes (TREE_TYPE (parm)),
4062 (TYPE_ALIGN (TREE_TYPE (parm))
4063 / BITS_PER_UNIT));
4064 else
4065 move_block_from_reg (REGNO (entry_parm),
4066 validize_mem (stack_parm), nregs,
4067 int_size_in_bytes (TREE_TYPE (parm)));
4068 }
4069 entry_parm = stack_parm;
4070 }
4071 }
4072 #endif
4073
4074 /* If we didn't decide this parm came in a register,
4075 by default it came on the stack. */
4076 if (entry_parm == 0)
4077 entry_parm = stack_parm;
4078
4079 /* Record permanently how this parm was passed. */
4080 if (! second_time)
4081 DECL_INCOMING_RTL (parm) = entry_parm;
4082
4083 /* If there is actually space on the stack for this parm,
4084 count it in stack_args_size; otherwise set stack_parm to 0
4085 to indicate there is no preallocated stack slot for the parm. */
4086
4087 if (entry_parm == stack_parm
4088 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4089 /* On some machines, even if a parm value arrives in a register
4090 there is still an (uninitialized) stack slot allocated for it.
4091
4092 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4093 whether this parameter already has a stack slot allocated,
4094 because an arg block exists only if current_function_args_size
4095 is larger than some threshold, and we haven't calculated that
4096 yet. So, for now, we just assume that stack slots never exist
4097 in this case. */
4098 || REG_PARM_STACK_SPACE (fndecl) > 0
4099 #endif
4100 )
4101 {
4102 stack_args_size.constant += arg_size.constant;
4103 if (arg_size.var)
4104 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4105 }
4106 else
4107 /* No stack slot was pushed for this parm. */
4108 stack_parm = 0;
4109
4110 /* Update info on where next arg arrives in registers. */
4111
4112 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4113 passed_type, named_arg);
4114
4115 /* If this is our second time through, we are done with this parm. */
4116 if (second_time)
4117 continue;
4118
4119 /* If we can't trust the parm stack slot to be aligned enough
4120 for its ultimate type, don't use that slot after entry.
4121 We'll make another stack slot, if we need one. */
4122 {
4123 int thisparm_boundary
4124 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4125
4126 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4127 stack_parm = 0;
4128 }
4129
4130 /* If parm was passed in memory, and we need to convert it on entry,
4131 don't store it back in that same slot. */
4132 if (entry_parm != 0
4133 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4134 stack_parm = 0;
4135
4136 #if 0
4137 /* Now adjust STACK_PARM to the mode and precise location
4138 where this parameter should live during execution,
4139 if we discover that it must live in the stack during execution.
4140 To make debuggers happier on big-endian machines, we store
4141 the value in the last bytes of the space available. */
4142
4143 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4144 && stack_parm != 0)
4145 {
4146 rtx offset_rtx;
4147
4148 if (BYTES_BIG_ENDIAN
4149 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4150 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4151 - GET_MODE_SIZE (nominal_mode));
4152
4153 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4154 if (offset_rtx == const0_rtx)
4155 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4156 else
4157 stack_parm = gen_rtx_MEM (nominal_mode,
4158 gen_rtx_PLUS (Pmode,
4159 internal_arg_pointer,
4160 offset_rtx));
4161
4162 /* If this is a memory ref that contains aggregate components,
4163 mark it as such for cse and loop optimize. */
4164 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4165 }
4166 #endif /* 0 */
4167
4168 #ifdef STACK_REGS
4169 /* We need this "use" info, because the gcc-register->stack-register
4170 converter in reg-stack.c needs to know which registers are active
4171 at the start of the function call. The actual parameter loading
4172 instructions are not always available then anymore, since they might
4173 have been optimised away. */
4174
4175 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4176 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4177 #endif
4178
4179 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4180 in the mode in which it arrives.
4181 STACK_PARM is an RTX for a stack slot where the parameter can live
4182 during the function (in case we want to put it there).
4183 STACK_PARM is 0 if no stack slot was pushed for it.
4184
4185 Now output code if necessary to convert ENTRY_PARM to
4186 the type in which this function declares it,
4187 and store that result in an appropriate place,
4188 which may be a pseudo reg, may be STACK_PARM,
4189 or may be a local stack slot if STACK_PARM is 0.
4190
4191 Set DECL_RTL to that place. */
4192
4193 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4194 {
4195 /* If a BLKmode arrives in registers, copy it to a stack slot.
4196 Handle calls that pass values in multiple non-contiguous
4197 locations. The Irix 6 ABI has examples of this. */
4198 if (GET_CODE (entry_parm) == REG
4199 || GET_CODE (entry_parm) == PARALLEL)
4200 {
4201 int size_stored
4202 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4203 UNITS_PER_WORD);
4204
4205 /* Note that we will be storing an integral number of words.
4206 So we have to be careful to ensure that we allocate an
4207 integral number of words. We do this below in the
4208 assign_stack_local if space was not allocated in the argument
4209 list. If it was, this will not work if PARM_BOUNDARY is not
4210 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4211 if it becomes a problem. */
4212
4213 if (stack_parm == 0)
4214 {
4215 stack_parm
4216 = assign_stack_local (GET_MODE (entry_parm),
4217 size_stored, 0);
4218
4219 /* If this is a memory ref that contains aggregate
4220 components, mark it as such for cse and loop optimize. */
4221 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4222 }
4223
4224 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4225 abort ();
4226
4227 if (TREE_READONLY (parm))
4228 RTX_UNCHANGING_P (stack_parm) = 1;
4229
4230 /* Handle calls that pass values in multiple non-contiguous
4231 locations. The Irix 6 ABI has examples of this. */
4232 if (GET_CODE (entry_parm) == PARALLEL)
4233 emit_group_store (validize_mem (stack_parm), entry_parm,
4234 int_size_in_bytes (TREE_TYPE (parm)),
4235 (TYPE_ALIGN (TREE_TYPE (parm))
4236 / BITS_PER_UNIT));
4237 else
4238 move_block_from_reg (REGNO (entry_parm),
4239 validize_mem (stack_parm),
4240 size_stored / UNITS_PER_WORD,
4241 int_size_in_bytes (TREE_TYPE (parm)));
4242 }
4243 DECL_RTL (parm) = stack_parm;
4244 }
4245 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4246 && ! DECL_INLINE (fndecl))
4247 /* layout_decl may set this. */
4248 || TREE_ADDRESSABLE (parm)
4249 || TREE_SIDE_EFFECTS (parm)
4250 /* If -ffloat-store specified, don't put explicit
4251 float variables into registers. */
4252 || (flag_float_store
4253 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4254 /* Always assign pseudo to structure return or item passed
4255 by invisible reference. */
4256 || passed_pointer || parm == function_result_decl)
4257 {
4258 /* Store the parm in a pseudoregister during the function, but we
4259 may need to do it in a wider mode. */
4260
4261 register rtx parmreg;
4262 int regno, regnoi = 0, regnor = 0;
4263
4264 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4265
4266 promoted_nominal_mode
4267 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4268
4269 parmreg = gen_reg_rtx (promoted_nominal_mode);
4270 mark_user_reg (parmreg);
4271
4272 /* If this was an item that we received a pointer to, set DECL_RTL
4273 appropriately. */
4274 if (passed_pointer)
4275 {
4276 DECL_RTL (parm)
4277 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4278 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4279 }
4280 else
4281 DECL_RTL (parm) = parmreg;
4282
4283 /* Copy the value into the register. */
4284 if (nominal_mode != passed_mode
4285 || promoted_nominal_mode != promoted_mode)
4286 {
4287 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4288 mode, by the caller. We now have to convert it to
4289 NOMINAL_MODE, if different. However, PARMREG may be in
4290 a different mode than NOMINAL_MODE if it is being stored
4291 promoted.
4292
4293 If ENTRY_PARM is a hard register, it might be in a register
4294 not valid for operating in its mode (e.g., an odd-numbered
4295 register for a DFmode). In that case, moves are the only
4296 thing valid, so we can't do a convert from there. This
4297 occurs when the calling sequence allows such misaligned
4298 usages.
4299
4300 In addition, the conversion may involve a call, which could
4301 clobber parameters which haven't been copied to pseudo
4302 registers yet. Therefore, we must first copy the parm to
4303 a pseudo reg here, and save the conversion until after all
4304 parameters have been moved. */
4305
4306 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4307
4308 emit_move_insn (tempreg, validize_mem (entry_parm));
4309
4310 push_to_sequence (conversion_insns);
4311 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4312
4313 expand_assignment (parm,
4314 make_tree (nominal_type, tempreg), 0, 0);
4315 conversion_insns = get_insns ();
4316 did_conversion = 1;
4317 end_sequence ();
4318 }
4319 else
4320 emit_move_insn (parmreg, validize_mem (entry_parm));
4321
4322 /* If we were passed a pointer but the actual value
4323 can safely live in a register, put it in one. */
4324 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4325 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4326 && ! DECL_INLINE (fndecl))
4327 /* layout_decl may set this. */
4328 || TREE_ADDRESSABLE (parm)
4329 || TREE_SIDE_EFFECTS (parm)
4330 /* If -ffloat-store specified, don't put explicit
4331 float variables into registers. */
4332 || (flag_float_store
4333 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4334 {
4335 /* We can't use nominal_mode, because it will have been set to
4336 Pmode above. We must use the actual mode of the parm. */
4337 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4338 mark_user_reg (parmreg);
4339 emit_move_insn (parmreg, DECL_RTL (parm));
4340 DECL_RTL (parm) = parmreg;
4341 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4342 now the parm. */
4343 stack_parm = 0;
4344 }
4345 #ifdef FUNCTION_ARG_CALLEE_COPIES
4346 /* If we are passed an arg by reference and it is our responsibility
4347 to make a copy, do it now.
4348 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4349 original argument, so we must recreate them in the call to
4350 FUNCTION_ARG_CALLEE_COPIES. */
4351 /* ??? Later add code to avoid the copy if the argument
4352 isn't modified.  */
4353
4354 else if (passed_pointer
4355 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4356 TYPE_MODE (DECL_ARG_TYPE (parm)),
4357 DECL_ARG_TYPE (parm),
4358 named_arg)
4359 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4360 {
4361 rtx copy;
4362 tree type = DECL_ARG_TYPE (parm);
4363
4364 /* This sequence may involve a library call perhaps clobbering
4365 registers that haven't been copied to pseudos yet. */
4366
4367 push_to_sequence (conversion_insns);
4368
4369 if (TYPE_SIZE (type) == 0
4370 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4371 /* This is a variable-sized object.  */
4372 copy = gen_rtx_MEM (BLKmode,
4373 allocate_dynamic_stack_space
4374 (expr_size (parm), NULL_RTX,
4375 TYPE_ALIGN (type)));
4376 else
4377 copy = assign_stack_temp (TYPE_MODE (type),
4378 int_size_in_bytes (type), 1);
4379 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4380 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4381
4382 store_expr (parm, copy, 0);
4383 emit_move_insn (parmreg, XEXP (copy, 0));
4384 if (current_function_check_memory_usage)
4385 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4386 XEXP (copy, 0), ptr_mode,
4387 GEN_INT (int_size_in_bytes (type)),
4388 TYPE_MODE (sizetype),
4389 GEN_INT (MEMORY_USE_RW),
4390 TYPE_MODE (integer_type_node));
4391 conversion_insns = get_insns ();
4392 did_conversion = 1;
4393 end_sequence ();
4394 }
4395 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4396
4397 /* In any case, record the parm's desired stack location
4398 in case we later discover it must live in the stack.
4399
4400 If it is a COMPLEX value, store the stack location for both
4401 halves. */
4402
4403 if (GET_CODE (parmreg) == CONCAT)
4404 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4405 else
4406 regno = REGNO (parmreg);
4407
4408 if (regno >= max_parm_reg)
4409 {
4410 rtx *new;
4411 int old_max_parm_reg = max_parm_reg;
4412
4413 /* It's slow to expand this one register at a time,
4414 but it's also rare and we need max_parm_reg to be
4415 precisely correct. */
4416 max_parm_reg = regno + 1;
4417 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4418 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4419 old_max_parm_reg * sizeof (rtx));
4420 bzero ((char *) (new + old_max_parm_reg),
4421 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4422 parm_reg_stack_loc = new;
4423 }
4424
4425 if (GET_CODE (parmreg) == CONCAT)
4426 {
4427 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4428
4429 regnor = REGNO (gen_realpart (submode, parmreg));
4430 regnoi = REGNO (gen_imagpart (submode, parmreg));
4431
4432 if (stack_parm != 0)
4433 {
4434 parm_reg_stack_loc[regnor]
4435 = gen_realpart (submode, stack_parm);
4436 parm_reg_stack_loc[regnoi]
4437 = gen_imagpart (submode, stack_parm);
4438 }
4439 else
4440 {
4441 parm_reg_stack_loc[regnor] = 0;
4442 parm_reg_stack_loc[regnoi] = 0;
4443 }
4444 }
4445 else
4446 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4447
4448 /* Mark the register as eliminable if we did no conversion
4449 and it was copied from memory at a fixed offset,
4450 and the arg pointer was not copied to a pseudo-reg.
4451 If the arg pointer is a pseudo reg or the offset formed
4452 an invalid address, such memory-equivalences
4453 as we make here would screw up life analysis for it. */
4454 if (nominal_mode == passed_mode
4455 && ! did_conversion
4456 && stack_parm != 0
4457 && GET_CODE (stack_parm) == MEM
4458 && stack_offset.var == 0
4459 && reg_mentioned_p (virtual_incoming_args_rtx,
4460 XEXP (stack_parm, 0)))
4461 {
4462 rtx linsn = get_last_insn ();
4463 rtx sinsn, set;
4464
4465 /* Mark complex types separately. */
4466 if (GET_CODE (parmreg) == CONCAT)
4467 /* Scan backwards for the set of the real and
4468 imaginary parts. */
4469 for (sinsn = linsn; sinsn != 0;
4470 sinsn = prev_nonnote_insn (sinsn))
4471 {
4472 set = single_set (sinsn);
4473 if (set != 0
4474 && SET_DEST (set) == regno_reg_rtx [regnoi])
4475 REG_NOTES (sinsn)
4476 = gen_rtx_EXPR_LIST (REG_EQUIV,
4477 parm_reg_stack_loc[regnoi],
4478 REG_NOTES (sinsn));
4479 else if (set != 0
4480 && SET_DEST (set) == regno_reg_rtx [regnor])
4481 REG_NOTES (sinsn)
4482 = gen_rtx_EXPR_LIST (REG_EQUIV,
4483 parm_reg_stack_loc[regnor],
4484 REG_NOTES (sinsn));
4485 }
4486 else if ((set = single_set (linsn)) != 0
4487 && SET_DEST (set) == parmreg)
4488 REG_NOTES (linsn)
4489 = gen_rtx_EXPR_LIST (REG_EQUIV,
4490 stack_parm, REG_NOTES (linsn));
4491 }
4492
4493 /* For a pointer data type, suggest a pointer register.  */
4494 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4495 mark_reg_pointer (parmreg,
4496 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4497 / BITS_PER_UNIT));
4498 }
4499 else
4500 {
4501 /* Value must be stored in the stack slot STACK_PARM
4502 during function execution. */
4503
4504 if (promoted_mode != nominal_mode)
4505 {
4506 /* Conversion is required. */
4507 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4508
4509 emit_move_insn (tempreg, validize_mem (entry_parm));
4510
4511 push_to_sequence (conversion_insns);
4512 entry_parm = convert_to_mode (nominal_mode, tempreg,
4513 TREE_UNSIGNED (TREE_TYPE (parm)));
4514 if (stack_parm)
4515 {
4516 /* ??? This may need a big-endian conversion on sparc64. */
4517 stack_parm = change_address (stack_parm, nominal_mode,
4518 NULL_RTX);
4519 }
4520 conversion_insns = get_insns ();
4521 did_conversion = 1;
4522 end_sequence ();
4523 }
4524
4525 if (entry_parm != stack_parm)
4526 {
4527 if (stack_parm == 0)
4528 {
4529 stack_parm
4530 = assign_stack_local (GET_MODE (entry_parm),
4531 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4532 /* If this is a memory ref that contains aggregate components,
4533 mark it as such for cse and loop optimize. */
4534 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4535 }
4536
4537 if (promoted_mode != nominal_mode)
4538 {
4539 push_to_sequence (conversion_insns);
4540 emit_move_insn (validize_mem (stack_parm),
4541 validize_mem (entry_parm));
4542 conversion_insns = get_insns ();
4543 end_sequence ();
4544 }
4545 else
4546 emit_move_insn (validize_mem (stack_parm),
4547 validize_mem (entry_parm));
4548 }
4549 if (current_function_check_memory_usage)
4550 {
4551 push_to_sequence (conversion_insns);
4552 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4553 XEXP (stack_parm, 0), ptr_mode,
4554 GEN_INT (GET_MODE_SIZE (GET_MODE
4555 (entry_parm))),
4556 TYPE_MODE (sizetype),
4557 GEN_INT (MEMORY_USE_RW),
4558 TYPE_MODE (integer_type_node));
4559
4560 conversion_insns = get_insns ();
4561 end_sequence ();
4562 }
4563 DECL_RTL (parm) = stack_parm;
4564 }
4565
4566 /* If this "parameter" was the place where we are receiving the
4567 function's incoming structure pointer, set up the result. */
4568 if (parm == function_result_decl)
4569 {
4570 tree result = DECL_RESULT (fndecl);
4571 tree restype = TREE_TYPE (result);
4572
4573 DECL_RTL (result)
4574 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4575
4576 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4577 }
4578
4579 if (TREE_THIS_VOLATILE (parm))
4580 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4581 if (TREE_READONLY (parm))
4582 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4583 }
4584
4585 /* Output all parameter conversion instructions (possibly including calls)
4586 now that all parameters have been copied out of hard registers. */
4587 emit_insns (conversion_insns);
4588
4589 last_parm_insn = get_last_insn ();
4590
4591 current_function_args_size = stack_args_size.constant;
4592
4593 /* Adjust function incoming argument size for alignment and
4594 minimum length. */
4595
4596 #ifdef REG_PARM_STACK_SPACE
4597 #ifndef MAYBE_REG_PARM_STACK_SPACE
4598 current_function_args_size = MAX (current_function_args_size,
4599 REG_PARM_STACK_SPACE (fndecl));
4600 #endif
4601 #endif
4602
4603 #ifdef STACK_BOUNDARY
4604 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4605
4606 current_function_args_size
4607 = ((current_function_args_size + STACK_BYTES - 1)
4608 / STACK_BYTES) * STACK_BYTES;
4609 #endif
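/* E.g. (illustrative values): with STACK_BOUNDARY == 64, STACK_BYTES
   is 8, and an args size of 20 bytes rounds up to
   (20 + 8 - 1) / 8 * 8 == 24.  */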
4610
4611 #ifdef ARGS_GROW_DOWNWARD
4612 current_function_arg_offset_rtx
4613 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4614 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4615 size_int (-stack_args_size.constant)),
4616 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4617 #else
4618 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4619 #endif
4620
4621 /* See how many bytes, if any, of its args a function should try to pop
4622 on return. */
4623
4624 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4625 current_function_args_size);
4626
4627 /* For a stdarg.h function, save info about
4628 regs and stack space used by the named args.  */
4629
4630 if (!hide_last_arg)
4631 current_function_args_info = args_so_far;
4632
4633 /* Set the rtx used for the function return value. Put this in its
4634 own variable so any optimizers that need this information don't have
4635 to include tree.h. Do this here so it gets done when an inlined
4636 function gets output. */
4637
4638 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4639 }
4640 \f
4641 /* Indicate whether REGNO is an incoming argument to the current function
4642 that was promoted to a wider mode. If so, return the RTX for the
4643 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4644 that REGNO is promoted from and whether the promotion was signed or
4645 unsigned. */
4646
4647 #ifdef PROMOTE_FUNCTION_ARGS
4648
4649 rtx
4650 promoted_input_arg (regno, pmode, punsignedp)
4651 int regno;
4652 enum machine_mode *pmode;
4653 int *punsignedp;
4654 {
4655 tree arg;
4656
4657 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4658 arg = TREE_CHAIN (arg))
4659 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4660 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4661 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4662 {
4663 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4664 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4665
4666 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4667 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4668 && mode != DECL_MODE (arg))
4669 {
4670 *pmode = DECL_MODE (arg);
4671 *punsignedp = unsignedp;
4672 return DECL_INCOMING_RTL (arg);
4673 }
4674 }
4675
4676 return 0;
4677 }
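/* Example (a hypothetical target that promotes args): a prototyped
   signed short parm arriving in (reg:SI 3) is matched here;
   promote_mode reports SImode, which equals the mode of the incoming
   rtx but differs from DECL_MODE, so *PMODE is set to HImode,
   *PUNSIGNEDP to 0, and the SImode register rtx is returned.  */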
4678
4679 #endif
4680 \f
4681 /* Compute the size and offset from the start of the stacked arguments for a
4682 parm passed in mode PASSED_MODE and with type TYPE.
4683
4684 INITIAL_OFFSET_PTR points to the current offset into the stacked
4685 arguments.
4686
4687 The starting offset and size for this parm are returned in *OFFSET_PTR
4688 and *ARG_SIZE_PTR, respectively.
4689
4690 IN_REGS is non-zero if the argument will be passed in registers. It will
4691 never be set if REG_PARM_STACK_SPACE is not defined.
4692
4693 FNDECL is the function in which the argument was defined.
4694
4695 There are two types of rounding that are done. The first, controlled by
4696 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4697 list to be aligned to the specific boundary (in bits). This rounding
4698 affects the initial and starting offsets, but not the argument size.
4699
4700 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4701 optionally rounds the size of the parm to PARM_BOUNDARY. The
4702 initial offset is not affected by this rounding, while the size always
4703 is and the starting offset may be. */
4704
4705 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4706 initial_offset_ptr is positive because locate_and_pad_parm's
4707 callers pass in the total size of args so far as
4708 initial_offset_ptr.  arg_size_ptr is always positive.  */
4709
4710 void
4711 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4712 initial_offset_ptr, offset_ptr, arg_size_ptr)
4713 enum machine_mode passed_mode;
4714 tree type;
4715 int in_regs;
4716 tree fndecl;
4717 struct args_size *initial_offset_ptr;
4718 struct args_size *offset_ptr;
4719 struct args_size *arg_size_ptr;
4720 {
4721 tree sizetree
4722 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4723 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4724 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4725
4726 #ifdef REG_PARM_STACK_SPACE
4727 /* If we have found a stack parm before we reach the end of the
4728 area reserved for registers, skip that area. */
4729 if (! in_regs)
4730 {
4731 int reg_parm_stack_space = 0;
4732
4733 #ifdef MAYBE_REG_PARM_STACK_SPACE
4734 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4735 #else
4736 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4737 #endif
4738 if (reg_parm_stack_space > 0)
4739 {
4740 if (initial_offset_ptr->var)
4741 {
4742 initial_offset_ptr->var
4743 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4744 size_int (reg_parm_stack_space));
4745 initial_offset_ptr->constant = 0;
4746 }
4747 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4748 initial_offset_ptr->constant = reg_parm_stack_space;
4749 }
4750 }
4751 #endif /* REG_PARM_STACK_SPACE */
4752
4753 arg_size_ptr->var = 0;
4754 arg_size_ptr->constant = 0;
4755
4756 #ifdef ARGS_GROW_DOWNWARD
4757 if (initial_offset_ptr->var)
4758 {
4759 offset_ptr->constant = 0;
4760 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4761 initial_offset_ptr->var);
4762 }
4763 else
4764 {
4765 offset_ptr->constant = - initial_offset_ptr->constant;
4766 offset_ptr->var = 0;
4767 }
4768 if (where_pad != none
4769 && (TREE_CODE (sizetree) != INTEGER_CST
4770 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4771 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4772 SUB_PARM_SIZE (*offset_ptr, sizetree);
4773 if (where_pad != downward)
4774 pad_to_arg_alignment (offset_ptr, boundary);
4775 if (initial_offset_ptr->var)
4776 {
4777 arg_size_ptr->var = size_binop (MINUS_EXPR,
4778 size_binop (MINUS_EXPR,
4779 integer_zero_node,
4780 initial_offset_ptr->var),
4781 offset_ptr->var);
4782 }
4783 else
4784 {
4785 arg_size_ptr->constant = (- initial_offset_ptr->constant
4786 - offset_ptr->constant);
4787 }
4788 #else /* !ARGS_GROW_DOWNWARD */
4789 pad_to_arg_alignment (initial_offset_ptr, boundary);
4790 *offset_ptr = *initial_offset_ptr;
4791
4792 #ifdef PUSH_ROUNDING
4793 if (passed_mode != BLKmode)
4794 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4795 #endif
4796
4797 /* Pad_below needs the pre-rounded size to know how much to pad below,
4798 so this must be done before rounding up.  */
4799 if (where_pad == downward
4800 /* However, BLKmode args passed in regs have their padding done elsewhere.
4801 The stack slot must be able to hold the entire register. */
4802 && !(in_regs && passed_mode == BLKmode))
4803 pad_below (offset_ptr, passed_mode, sizetree);
4804
4805 if (where_pad != none
4806 && (TREE_CODE (sizetree) != INTEGER_CST
4807 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4808 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4809
4810 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4811 #endif /* ARGS_GROW_DOWNWARD */
4812 }
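/* A sketch of the !ARGS_GROW_DOWNWARD path with made-up numbers,
   assuming no REG_PARM_STACK_SPACE skip, no PUSH_ROUNDING, and
   upward padding: BOUNDARY == 32 bits, *INITIAL_OFFSET_PTR at 6
   bytes, a 2-byte parm.  The initial offset is padded to 8, so
   *OFFSET_PTR becomes 8, and the 2-byte size is rounded up to
   PARM_BOUNDARY, leaving *ARG_SIZE_PTR at 4 bytes.  */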
4813
4814 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4815 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4816
4817 static void
4818 pad_to_arg_alignment (offset_ptr, boundary)
4819 struct args_size *offset_ptr;
4820 int boundary;
4821 {
4822 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4823
4824 if (boundary > BITS_PER_UNIT)
4825 {
4826 if (offset_ptr->var)
4827 {
4828 offset_ptr->var =
4829 #ifdef ARGS_GROW_DOWNWARD
4830 round_down
4831 #else
4832 round_up
4833 #endif
4834 (ARGS_SIZE_TREE (*offset_ptr),
4835 boundary / BITS_PER_UNIT);
4836 offset_ptr->constant = 0; /*?*/
4837 }
4838 else
4839 offset_ptr->constant =
4840 #ifdef ARGS_GROW_DOWNWARD
4841 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4842 #else
4843 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4844 #endif
4845 }
4846 }
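/* Illustration: with BOUNDARY == 64 bits (8 bytes) and a constant
   offset of 20, the offset becomes CEIL_ROUND (20, 8) == 24 when args
   grow upward; when ARGS_GROW_DOWNWARD, offsets are negative, and a
   -20 offset becomes FLOOR_ROUND (-20, 8) == -24.  */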
4847
4848 #ifndef ARGS_GROW_DOWNWARD
4849 static void
4850 pad_below (offset_ptr, passed_mode, sizetree)
4851 struct args_size *offset_ptr;
4852 enum machine_mode passed_mode;
4853 tree sizetree;
4854 {
4855 if (passed_mode != BLKmode)
4856 {
4857 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4858 offset_ptr->constant
4859 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4860 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4861 - GET_MODE_SIZE (passed_mode));
4862 }
4863 else
4864 {
4865 if (TREE_CODE (sizetree) != INTEGER_CST
4866 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4867 {
4868 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4869 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4870 /* Add it in. */
4871 ADD_PARM_SIZE (*offset_ptr, s2);
4872 SUB_PARM_SIZE (*offset_ptr, sizetree);
4873 }
4874 }
4875 }
4876 #endif
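/* Example of pad_below, with hypothetical values: for a 16-bit HImode
   parm and PARM_BOUNDARY == 32, the slot is 4 bytes but the value only
   2, so the offset is advanced by 4 - 2 == 2 bytes, placing the value
   in the high-address part of its slot.  */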
4877
4878 #ifdef ARGS_GROW_DOWNWARD
4879 static tree
4880 round_down (value, divisor)
4881 tree value;
4882 int divisor;
4883 {
4884 return size_binop (MULT_EXPR,
4885 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4886 size_int (divisor));
4887 }
4888 #endif
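/* For instance, round_down applied to a size of 10 with divisor 4
   yields (10 / 4) * 4 == 8; the result is built as a size_binop tree
   rather than computed directly, since VALUE may be a variable-sized
   expression.  */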
4889 \f
4890 /* Walk the tree of blocks describing the binding levels within a function
4891 and warn about uninitialized variables.
4892 This is done after calling flow_analysis and before global_alloc
4893 clobbers the pseudo-regs to hard regs. */
4894
4895 void
4896 uninitialized_vars_warning (block)
4897 tree block;
4898 {
4899 register tree decl, sub;
4900 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4901 {
4902 if (TREE_CODE (decl) == VAR_DECL
4903 /* These warnings are unreliable for aggregates
4904 because assigning the fields one by one can fail to convince
4905 flow.c that the entire aggregate was initialized.
4906 Unions are troublesome because members may be shorter. */
4907 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4908 && DECL_RTL (decl) != 0
4909 && GET_CODE (DECL_RTL (decl)) == REG
4910 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4911 warning_with_decl (decl,
4912 "`%s' might be used uninitialized in this function");
4913 if (TREE_CODE (decl) == VAR_DECL
4914 && DECL_RTL (decl) != 0
4915 && GET_CODE (DECL_RTL (decl)) == REG
4916 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4917 warning_with_decl (decl,
4918 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4919 }
4920 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4921 uninitialized_vars_warning (sub);
4922 }
4923
4924 /* Do the appropriate part of uninitialized_vars_warning
4925 but for arguments instead of local variables. */
4926
4927 void
4928 setjmp_args_warning ()
4929 {
4930 register tree decl;
4931 for (decl = DECL_ARGUMENTS (current_function_decl);
4932 decl; decl = TREE_CHAIN (decl))
4933 if (DECL_RTL (decl) != 0
4934 && GET_CODE (DECL_RTL (decl)) == REG
4935 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4936 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4937 }
4938
4939 /* If this function calls setjmp, put all vars into the stack
4940 unless they were declared `register'.  */
4941
4942 void
4943 setjmp_protect (block)
4944 tree block;
4945 {
4946 register tree decl, sub;
4947 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4948 if ((TREE_CODE (decl) == VAR_DECL
4949 || TREE_CODE (decl) == PARM_DECL)
4950 && DECL_RTL (decl) != 0
4951 && (GET_CODE (DECL_RTL (decl)) == REG
4952 || (GET_CODE (DECL_RTL (decl)) == MEM
4953 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4954 /* If this variable came from an inline function, it must be
4955 that its life doesn't overlap the setjmp. If there was a
4956 setjmp in the function, it would already be in memory. We
4957 must exclude such variables because their DECL_RTL might be
4958 set to strange things such as virtual_stack_vars_rtx. */
4959 && ! DECL_FROM_INLINE (decl)
4960 && (
4961 #ifdef NON_SAVING_SETJMP
4962 /* If longjmp doesn't restore the registers,
4963 don't put anything in them. */
4964 NON_SAVING_SETJMP
4965 ||
4966 #endif
4967 ! DECL_REGISTER (decl)))
4968 put_var_into_stack (decl);
4969 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4970 setjmp_protect (sub);
4971 }
4972 \f
4973 /* Like the previous function, but for args instead of local variables. */
4974
4975 void
4976 setjmp_protect_args ()
4977 {
4978 register tree decl;
4979 for (decl = DECL_ARGUMENTS (current_function_decl);
4980 decl; decl = TREE_CHAIN (decl))
4981 if ((TREE_CODE (decl) == VAR_DECL
4982 || TREE_CODE (decl) == PARM_DECL)
4983 && DECL_RTL (decl) != 0
4984 && (GET_CODE (DECL_RTL (decl)) == REG
4985 || (GET_CODE (DECL_RTL (decl)) == MEM
4986 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4987 && (
4988 /* If longjmp doesn't restore the registers,
4989 don't put anything in them. */
4990 #ifdef NON_SAVING_SETJMP
4991 NON_SAVING_SETJMP
4992 ||
4993 #endif
4994 ! DECL_REGISTER (decl)))
4995 put_var_into_stack (decl);
4996 }
4997 \f
4998 /* Return the context-pointer register corresponding to DECL,
4999 or 0 if it does not need one. */
5000
5001 rtx
5002 lookup_static_chain (decl)
5003 tree decl;
5004 {
5005 tree context = decl_function_context (decl);
5006 tree link;
5007
5008 if (context == 0
5009 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5010 return 0;
5011
5012 /* We treat inline_function_decl as an alias for the current function
5013 because that is the inline function whose vars, types, etc.
5014 are being merged into the current function.
5015 See expand_inline_function. */
5016 if (context == current_function_decl || context == inline_function_decl)
5017 return virtual_stack_vars_rtx;
5018
5019 for (link = context_display; link; link = TREE_CHAIN (link))
5020 if (TREE_PURPOSE (link) == context)
5021 return RTL_EXPR_RTL (TREE_VALUE (link));
5022
5023 abort ();
5024 }
5025 \f
5026 /* Convert a stack slot address ADDR for variable VAR
5027 (from a containing function)
5028 into an address valid in this function (using a static chain). */
5029
5030 rtx
5031 fix_lexical_addr (addr, var)
5032 rtx addr;
5033 tree var;
5034 {
5035 rtx basereg;
5036 HOST_WIDE_INT displacement;
5037 tree context = decl_function_context (var);
5038 struct function *fp;
5039 rtx base = 0;
5040
5041 /* If this is the present function, we need not do anything. */
5042 if (context == current_function_decl || context == inline_function_decl)
5043 return addr;
5044
5045 for (fp = outer_function_chain; fp; fp = fp->next)
5046 if (fp->decl == context)
5047 break;
5048
5049 if (fp == 0)
5050 abort ();
5051
5052 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5053 addr = XEXP (XEXP (addr, 0), 0);
5054
5055 /* Decode given address as base reg plus displacement. */
5056 if (GET_CODE (addr) == REG)
5057 basereg = addr, displacement = 0;
5058 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5059 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5060 else
5061 abort ();
5062
5063 /* We accept vars reached via the containing function's
5064 incoming arg pointer and via its stack variables pointer. */
5065 if (basereg == fp->internal_arg_pointer)
5066 {
5067 /* If reached via arg pointer, get the arg pointer value
5068 out of that function's stack frame.
5069
5070 There are two cases: If a separate ap is needed, allocate a
5071 slot in the outer function for it and dereference it that way.
5072 This is correct even if the real ap is actually a pseudo.
5073 Otherwise, just adjust the offset from the frame pointer to
5074 compensate. */
5075
5076 #ifdef NEED_SEPARATE_AP
5077 rtx addr;
5078
5079 if (fp->arg_pointer_save_area == 0)
5080 fp->arg_pointer_save_area
5081 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5082
5083 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5084 addr = memory_address (Pmode, addr);
5085
5086 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5087 #else
5088 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5089 base = lookup_static_chain (var);
5090 #endif
5091 }
5092
5093 else if (basereg == virtual_stack_vars_rtx)
5094 {
5095 /* This is the same code as lookup_static_chain, duplicated here to
5096 avoid an extra call to decl_function_context. */
5097 tree link;
5098
5099 for (link = context_display; link; link = TREE_CHAIN (link))
5100 if (TREE_PURPOSE (link) == context)
5101 {
5102 base = RTL_EXPR_RTL (TREE_VALUE (link));
5103 break;
5104 }
5105 }
5106
5107 if (base == 0)
5108 abort ();
5109
5110 /* Use same offset, relative to appropriate static chain or argument
5111 pointer. */
5112 return plus_constant (base, displacement);
5113 }
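/* Editor's sketch, not part of the original file: the decode-and-rebase
   above amounts to keeping the displacement while swapping the base.
   A toy stand-alone version with hypothetical names:

     // An address is either a bare register or register + constant.
     struct toy_addr { int basereg; long disp; };

     static struct toy_addr
     rebase (struct toy_addr a, int new_basereg)
     {
       a.basereg = new_basereg;   // same offset, outer frame's base
       return a;
     }

   fix_lexical_addr computes the new base by fetching the containing
   function's frame (or argument) pointer through the static chain.  */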
5114 \f
5115 /* Return the address of the trampoline for entering nested fn FUNCTION.
5116 If necessary, allocate a trampoline (in the stack frame)
5117 and emit rtl to initialize its contents (at entry to this function). */
5118
5119 rtx
5120 trampoline_address (function)
5121 tree function;
5122 {
5123 tree link;
5124 tree rtlexp;
5125 rtx tramp;
5126 struct function *fp;
5127 tree fn_context;
5128
5129 /* Find an existing trampoline and return it. */
5130 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5131 if (TREE_PURPOSE (link) == function)
5132 return
5133 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5134
5135 for (fp = outer_function_chain; fp; fp = fp->next)
5136 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5137 if (TREE_PURPOSE (link) == function)
5138 {
5139 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5140 function);
5141 return round_trampoline_addr (tramp);
5142 }
5143
5144 /* None exists; we must make one. */
5145
5146 /* Find the `struct function' for the function containing FUNCTION. */
5147 fp = 0;
5148 fn_context = decl_function_context (function);
5149 if (fn_context != current_function_decl
5150 && fn_context != inline_function_decl)
5151 for (fp = outer_function_chain; fp; fp = fp->next)
5152 if (fp->decl == fn_context)
5153 break;
5154
5155 /* Allocate run-time space for this trampoline
5156 (usually in the defining function's stack frame). */
5157 #ifdef ALLOCATE_TRAMPOLINE
5158 tramp = ALLOCATE_TRAMPOLINE (fp);
5159 #else
5160 /* If rounding is needed, allocate extra space
5161 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5162 #ifdef TRAMPOLINE_ALIGNMENT
5163 #define TRAMPOLINE_REAL_SIZE \
5164 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5165 #else
5166 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5167 #endif
5168 if (fp != 0)
5169 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5170 else
5171 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5172 #endif
5173
5174 /* Record the trampoline for reuse and note it for later initialization
5175 by expand_function_end. */
5176 if (fp != 0)
5177 {
5178 push_obstacks (fp->function_maybepermanent_obstack,
5179 fp->function_maybepermanent_obstack);
5180 rtlexp = make_node (RTL_EXPR);
5181 RTL_EXPR_RTL (rtlexp) = tramp;
5182 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5183 pop_obstacks ();
5184 }
5185 else
5186 {
5187 /* Make the RTL_EXPR node temporary, not momentary, so that the
5188 trampoline_list doesn't become garbage. */
5189 int momentary = suspend_momentary ();
5190 rtlexp = make_node (RTL_EXPR);
5191 resume_momentary (momentary);
5192
5193 RTL_EXPR_RTL (rtlexp) = tramp;
5194 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5195 }
5196
5197 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5198 return round_trampoline_addr (tramp);
5199 }
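/* Editor's note, not part of the original file: the TRAMPOLINE_REAL_SIZE
   padding works because rounding up can advance the start by at most
   align - 1 bytes (align = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT), so
   reserving TRAMPOLINE_SIZE + align - 1 bytes always leaves
   TRAMPOLINE_SIZE usable bytes at the rounded address.  For example,
   with TRAMPOLINE_SIZE = 24 and align = 8, a slot starting at 0x1003
   rounds to 0x1008 and 24 bytes still fit, using 5 of the 7 pad bytes.  */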
5200
5201 /* Given a trampoline address,
5202 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
5203
5204 static rtx
5205 round_trampoline_addr (tramp)
5206 rtx tramp;
5207 {
5208 #ifdef TRAMPOLINE_ALIGNMENT
5209 /* Round address up to desired boundary. */
5210 rtx temp = gen_reg_rtx (Pmode);
5211 temp = expand_binop (Pmode, add_optab, tramp,
5212 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5213 temp, 0, OPTAB_LIB_WIDEN);
5214 tramp = expand_binop (Pmode, and_optab, temp,
5215 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5216 temp, 0, OPTAB_LIB_WIDEN);
5217 #endif
5218 return tramp;
5219 }
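/* Editor's sketch, not part of the original file: the two expand_binop
   calls compute the classic power-of-two align-up at run time.  A
   stand-alone C equivalent with hypothetical names:

     static unsigned long
     align_up (unsigned long addr, unsigned long align)
     {
       // align must be a power of two; add align-1, then clear the
       // low bits.  Note -align == ~(align - 1) in two's complement,
       // which is why the mask above is written as a negation.
       return (addr + align - 1) & ~(align - 1);
     }

   e.g. align_up (0x1003, 16) == 0x1010, and an already-aligned
   address is unchanged: align_up (0x1010, 16) == 0x1010.  */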
5220 \f
5221 /* The functions identify_blocks and reorder_blocks provide a way to
5222 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5223 duplicate portions of the RTL code. Call identify_blocks before
5224 changing the RTL, and call reorder_blocks after. */
5225
5226 /* Put all this function's BLOCK nodes, including those that are chained
5227 onto the first block, into a vector, and return it.
5228 Also store in each NOTE for the beginning or end of a block
5229 the index of that block in the vector.
5230 The arguments are BLOCK, the chain of top-level blocks of the function,
5231 and INSNS, the insn chain of the function. */
5232
5233 tree *
5234 identify_blocks (block, insns)
5235 tree block;
5236 rtx insns;
5237 {
5238 int n_blocks;
5239 tree *block_vector;
5240 int *block_stack;
5241 int depth = 0;
5242 int next_block_number = 1;
5243 int current_block_number = 1;
5244 rtx insn;
5245
5246 if (block == 0)
5247 return 0;
5248
5249 n_blocks = all_blocks (block, 0);
5250 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5251 block_stack = (int *) alloca (n_blocks * sizeof (int));
5252
5253 all_blocks (block, block_vector);
5254
5255 for (insn = insns; insn; insn = NEXT_INSN (insn))
5256 if (GET_CODE (insn) == NOTE)
5257 {
5258 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5259 {
5260 block_stack[depth++] = current_block_number;
5261 current_block_number = next_block_number;
5262 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5263 }
5264 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5265 {
5266 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5267 current_block_number = block_stack[--depth];
5268 }
5269 }
5270
5271 if (n_blocks != next_block_number)
5272 abort ();
5273
5274 return block_vector;
5275 }
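/* Editor's sketch, not part of the original file: the numbering loop
   above matches each BLOCK_END note to its most recent open BLOCK_BEG
   with an explicit stack, depth-first.  A toy stand-alone version with
   hypothetical names, where marks[] holds 'B' (begin) or 'E' (end):

     static void
     number_blocks (const char *marks, int *num)
     {
       int stack[64], depth = 0, next = 1, cur = 1;   // toy fixed-size stack
       int i;
       for (i = 0; marks[i]; i++)
         if (marks[i] == 'B')
           { stack[depth++] = cur; cur = next; num[i] = next++; }
         else
           { num[i] = cur; cur = stack[--depth]; }
     }

   so "BBEE" yields begin numbers 1, 2 and matching end numbers 2, 1.  */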
5276
5277 /* Given BLOCK_VECTOR, which was returned by identify_blocks,
5278 and a revised instruction chain, rebuild the tree structure
5279 of BLOCK nodes to correspond to the new order of RTL.
5280 The new block tree is inserted below BLOCK.
5281 Returns the current top-level block. */
5282
5283 tree
5284 reorder_blocks (block_vector, block, insns)
5285 tree *block_vector;
5286 tree block;
5287 rtx insns;
5288 {
5289 tree current_block = block;
5290 rtx insn;
5291
5292 if (block_vector == 0)
5293 return block;
5294
5295 /* Prune the old tree away, so that it doesn't get in the way. */
5296 BLOCK_SUBBLOCKS (current_block) = 0;
5297 BLOCK_CHAIN (current_block) = 0;
5298
5299 for (insn = insns; insn; insn = NEXT_INSN (insn))
5300 if (GET_CODE (insn) == NOTE)
5301 {
5302 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5303 {
5304 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5305 /* If we have seen this block before, copy it. */
5306 if (TREE_ASM_WRITTEN (block))
5307 block = copy_node (block);
5308 BLOCK_SUBBLOCKS (block) = 0;
5309 TREE_ASM_WRITTEN (block) = 1;
5310 BLOCK_SUPERCONTEXT (block) = current_block;
5311 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5312 BLOCK_SUBBLOCKS (current_block) = block;
5313 current_block = block;
5314 NOTE_SOURCE_FILE (insn) = 0;
5315 }
5316 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5317 {
5318 BLOCK_SUBBLOCKS (current_block)
5319 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5320 current_block = BLOCK_SUPERCONTEXT (current_block);
5321 NOTE_SOURCE_FILE (insn) = 0;
5322 }
5323 }
5324
5325 BLOCK_SUBBLOCKS (current_block)
5326 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5327 return current_block;
5328 }
5329
5330 /* Reverse the order of elements in the chain T of blocks,
5331 and return the new head of the chain (old last element). */
5332
5333 static tree
5334 blocks_nreverse (t)
5335 tree t;
5336 {
5337 register tree prev = 0, decl, next;
5338 for (decl = t; decl; decl = next)
5339 {
5340 next = BLOCK_CHAIN (decl);
5341 BLOCK_CHAIN (decl) = prev;
5342 prev = decl;
5343 }
5344 return prev;
5345 }
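/* Editor's sketch, not part of the original file: blocks_nreverse is
   the textbook in-place reversal of a singly linked chain.  The same
   loop over a toy node type:

     struct node { struct node *next; };

     static struct node *
     nreverse (struct node *t)
     {
       struct node *prev = 0, *next;
       for (; t; t = next)
         {
           next = t->next;   // save the rest of the chain
           t->next = prev;   // point this node backwards
           prev = t;
         }
       return prev;          // old tail is the new head
     }
*/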
5346
5347 /* Count the blocks in the chain starting with BLOCK, together with all
5348 their subblocks, and store them into the vector VECTOR. Also clear
5349 TREE_ASM_WRITTEN in all blocks. */
5350
5351 static int
5352 all_blocks (block, vector)
5353 tree block;
5354 tree *vector;
5355 {
5356 int n_blocks = 0;
5357
5358 while (block)
5359 {
5360 TREE_ASM_WRITTEN (block) = 0;
5361
5362 /* Record this block. */
5363 if (vector)
5364 vector[n_blocks] = block;
5365
5366 ++n_blocks;
5367
5368 /* Record the subblocks, and their subblocks... */
5369 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5370 vector ? vector + n_blocks : 0);
5371 block = BLOCK_CHAIN (block);
5372 }
5373
5374 return n_blocks;
5375 }
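/* Editor's sketch, not part of the original file: all_blocks is a
   pre-order walk that can either count or fill, so one pass sizes the
   vector that a second pass fills in (see identify_blocks above).
   Toy stand-alone equivalent with hypothetical names:

     struct blk { struct blk *subblocks, *chain; };

     static int
     flatten (struct blk *b, struct blk **vec)
     {
       int n = 0;
       for (; b; b = b->chain)
         {
           if (vec)
             vec[n] = b;                          // record this block...
           ++n;
           n += flatten (b->subblocks,
                         vec ? vec + n : 0);      // ...then its subblocks
         }
       return n;
     }
*/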
5376 \f
5377 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5378 and initialize static variables for generating RTL for the statements
5379 of the function. */
5380
5381 void
5382 init_function_start (subr, filename, line)
5383 tree subr;
5384 char *filename;
5385 int line;
5386 {
5387 init_stmt_for_function ();
5388
5389 cse_not_expected = ! optimize;
5390
5391 /* Caller save not needed yet. */
5392 caller_save_needed = 0;
5393
5394 /* No stack slots have been made yet. */
5395 stack_slot_list = 0;
5396
5397 /* There is no stack slot for handling nonlocal gotos. */
5398 nonlocal_goto_handler_slot = 0;
5399 nonlocal_goto_stack_level = 0;
5400
5401 /* No labels have been declared for nonlocal use. */
5402 nonlocal_labels = 0;
5403
5404 /* No function calls so far in this function. */
5405 function_call_count = 0;
5406
5407 /* No parm regs have been allocated.
5408 (This is important for output_inline_function.) */
5409 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5410
5411 /* Initialize the RTL mechanism. */
5412 init_emit ();
5413
5414 /* Initialize the queue of pending postincrements and postdecrements,
5415 and some other info in expr.c. */
5416 init_expr ();
5417
5418 /* We haven't done register allocation yet. */
5419 reg_renumber = 0;
5420
5421 init_const_rtx_hash_table ();
5422
5423 current_function_name = (*decl_printable_name) (subr, 2);
5424
5425 /* Nonzero if this is a nested function that uses a static chain. */
5426
5427 current_function_needs_context
5428 = (decl_function_context (current_function_decl) != 0
5429 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5430
5431 /* Set if a call to setjmp is seen. */
5432 current_function_calls_setjmp = 0;
5433
5434 /* Set if a call to longjmp is seen. */
5435 current_function_calls_longjmp = 0;
5436
5437 current_function_calls_alloca = 0;
5438 current_function_has_nonlocal_label = 0;
5439 current_function_has_nonlocal_goto = 0;
5440 current_function_contains_functions = 0;
5441 current_function_sp_is_unchanging = 0;
5442 current_function_is_thunk = 0;
5443
5444 current_function_returns_pcc_struct = 0;
5445 current_function_returns_struct = 0;
5446 current_function_epilogue_delay_list = 0;
5447 current_function_uses_const_pool = 0;
5448 current_function_uses_pic_offset_table = 0;
5449 current_function_cannot_inline = 0;
5450
5451 /* We have not yet needed to make a label to jump to for tail-recursion. */
5452 tail_recursion_label = 0;
5453
5454 /* We haven't had a need to make a save area for ap yet. */
5455
5456 arg_pointer_save_area = 0;
5457
5458 /* No stack slots allocated yet. */
5459 frame_offset = 0;
5460
5461 /* No SAVE_EXPRs in this function yet. */
5462 save_expr_regs = 0;
5463
5464 /* No RTL_EXPRs in this function yet. */
5465 rtl_expr_chain = 0;
5466
5467 /* Set up to allocate temporaries. */
5468 init_temp_slots ();
5469
5470 /* Within function body, compute a type's size as soon as it is laid out. */
5471 immediate_size_expand++;
5472
5473 /* We haven't made any trampolines for this function yet. */
5474 trampoline_list = 0;
5475
5476 init_pending_stack_adjust ();
5477 inhibit_defer_pop = 0;
5478
5479 current_function_outgoing_args_size = 0;
5480
5481 /* Prevent ever trying to delete the first instruction of a function.
5482 Also tell final how to output a linenum before the function prologue.
5483 Note linenums could be missing, e.g. when compiling a Java .class file. */
5484 if (line > 0)
5485 emit_line_note (filename, line);
5486
5487 /* Make sure first insn is a note even if we don't want linenums.
5488 This makes sure the first insn will never be deleted.
5489 Also, final expects a note to appear there. */
5490 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5491
5492 /* Set flags used by final.c. */
5493 if (aggregate_value_p (DECL_RESULT (subr)))
5494 {
5495 #ifdef PCC_STATIC_STRUCT_RETURN
5496 current_function_returns_pcc_struct = 1;
5497 #endif
5498 current_function_returns_struct = 1;
5499 }
5500
5501 /* Warn if this value is an aggregate type,
5502 regardless of which calling convention we are using for it. */
5503 if (warn_aggregate_return
5504 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5505 warning ("function returns an aggregate");
5506
5507 current_function_returns_pointer
5508 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5509
5510 /* Indicate that we need to distinguish between the return value of the
5511 present function and the return value of a function being called. */
5512 rtx_equal_function_value_matters = 1;
5513
5514 /* Indicate that we have not instantiated virtual registers yet. */
5515 virtuals_instantiated = 0;
5516
5517 /* Indicate we have no need of a frame pointer yet. */
5518 frame_pointer_needed = 0;
5519
5520 /* By default assume not varargs or stdarg. */
5521 current_function_varargs = 0;
5522 current_function_stdarg = 0;
5523 }
5524
5525 /* Indicate that the current function uses extra args
5526 not explicitly mentioned in the argument list in any fashion. */
5527
5528 void
5529 mark_varargs ()
5530 {
5531 current_function_varargs = 1;
5532 }
5533
5534 /* Expand a call to __main at the beginning of a possible main function. */
5535
5536 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5537 #undef HAS_INIT_SECTION
5538 #define HAS_INIT_SECTION
5539 #endif
5540
5541 void
5542 expand_main_function ()
5543 {
5544 #if !defined (HAS_INIT_SECTION)
5545 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5546 VOIDmode, 0);
5547 #endif /* not HAS_INIT_SECTION */
5548 }
5549 \f
5550 extern struct obstack permanent_obstack;
5551
5552 /* Start the RTL for a new function, and set variables used for
5553 emitting RTL.
5554 SUBR is the FUNCTION_DECL node.
5555 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5556 the function's parameters, which must be run at any return statement. */
5557
5558 void
5559 expand_function_start (subr, parms_have_cleanups)
5560 tree subr;
5561 int parms_have_cleanups;
5562 {
5563 register int i;
5564 tree tem;
5565 rtx last_ptr = NULL_RTX;
5566
5567 /* Make sure volatile mem refs aren't considered
5568 valid operands of arithmetic insns. */
5569 init_recog_no_volatile ();
5570
5571 /* Set this before generating any memory accesses. */
5572 current_function_check_memory_usage
5573 = (flag_check_memory_usage
5574 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5575
5576 current_function_instrument_entry_exit
5577 = (flag_instrument_function_entry_exit
5578 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5579
5580 /* If function gets a static chain arg, store it in the stack frame.
5581 Do this first, so it gets the first stack slot offset. */
5582 if (current_function_needs_context)
5583 {
5584 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5585
5586 /* Delay copying static chain if it is not a register to avoid
5587 conflicts with regs used for parameters. */
5588 if (! SMALL_REGISTER_CLASSES
5589 || GET_CODE (static_chain_incoming_rtx) == REG)
5590 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5591 }
5592
5593 /* If the parameters of this function need cleaning up, get a label
5594 for the beginning of the code which executes those cleanups. This must
5595 be done before doing anything with return_label. */
5596 if (parms_have_cleanups)
5597 cleanup_label = gen_label_rtx ();
5598 else
5599 cleanup_label = 0;
5600
5601 /* Make the label for return statements to jump to, if this machine
5602 does not have a one-instruction return and uses an epilogue,
5603 or if it returns a structure, or if it has parm cleanups. */
5604 #ifdef HAVE_return
5605 if (cleanup_label == 0 && HAVE_return
5606 && ! current_function_instrument_entry_exit
5607 && ! current_function_returns_pcc_struct
5608 && ! (current_function_returns_struct && ! optimize))
5609 return_label = 0;
5610 else
5611 return_label = gen_label_rtx ();
5612 #else
5613 return_label = gen_label_rtx ();
5614 #endif
5615
5616 /* Initialize rtx used to return the value. */
5617 /* Do this before assign_parms so that we copy the struct value address
5618 before any library calls that assign parms might generate. */
5619
5620 /* Decide whether to return the value in memory or in a register. */
5621 if (aggregate_value_p (DECL_RESULT (subr)))
5622 {
5623 /* Returning something that won't go in a register. */
5624 register rtx value_address = 0;
5625
5626 #ifdef PCC_STATIC_STRUCT_RETURN
5627 if (current_function_returns_pcc_struct)
5628 {
5629 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5630 value_address = assemble_static_space (size);
5631 }
5632 else
5633 #endif
5634 {
5635 /* Expect to be passed the address of a place to store the value.
5636 If it is passed as an argument, assign_parms will take care of
5637 it. */
5638 if (struct_value_incoming_rtx)
5639 {
5640 value_address = gen_reg_rtx (Pmode);
5641 emit_move_insn (value_address, struct_value_incoming_rtx);
5642 }
5643 }
5644 if (value_address)
5645 {
5646 DECL_RTL (DECL_RESULT (subr))
5647 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5648 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5649 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5650 }
5651 }
5652 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5653 /* If return mode is void, this decl rtl should not be used. */
5654 DECL_RTL (DECL_RESULT (subr)) = 0;
5655 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5656 {
5657 /* If function will end with cleanup code for parms,
5658 compute the return value into a pseudo reg,
5659 which we will copy into the true return register
5660 after the cleanups are done. */
5661
5662 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5663
5664 #ifdef PROMOTE_FUNCTION_RETURN
5665 tree type = TREE_TYPE (DECL_RESULT (subr));
5666 int unsignedp = TREE_UNSIGNED (type);
5667
5668 mode = promote_mode (type, mode, &unsignedp, 1);
5669 #endif
5670
5671 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5672 }
5673 else
5674 /* Scalar, returned in a register. */
5675 {
5676 #ifdef FUNCTION_OUTGOING_VALUE
5677 DECL_RTL (DECL_RESULT (subr))
5678 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5679 #else
5680 DECL_RTL (DECL_RESULT (subr))
5681 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5682 #endif
5683
5684 /* Mark this reg as the function's return value. */
5685 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5686 {
5687 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5688 /* Needed because we may need to move this to memory
5689 in case it's a named return value whose address is taken. */
5690 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5691 }
5692 }
5693
5694 /* Initialize rtx for parameters and local variables.
5695 In some cases this requires emitting insns. */
5696
5697 assign_parms (subr, 0);
5698
5699 /* Copy the static chain now if it wasn't a register. The delay is to
5700 avoid conflicts with the parameter passing registers. */
5701
5702 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5703 if (GET_CODE (static_chain_incoming_rtx) != REG)
5704 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5705
5706 /* The following was moved from init_function_start.
5707 The move is supposed to make sdb output more accurate. */
5708 /* Indicate the beginning of the function body,
5709 as opposed to parm setup. */
5710 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5711
5712 /* If doing stupid allocation, mark parms as born here. */
5713
5714 if (GET_CODE (get_last_insn ()) != NOTE)
5715 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5716 parm_birth_insn = get_last_insn ();
5717
5718 if (obey_regdecls)
5719 {
5720 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5721 use_variable (regno_reg_rtx[i]);
5722
5723 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5724 use_variable (current_function_internal_arg_pointer);
5725 }
5726
5727 context_display = 0;
5728 if (current_function_needs_context)
5729 {
5730 /* Fetch static chain values for containing functions. */
5731 tem = decl_function_context (current_function_decl);
5732 /* If not doing stupid register allocation, copy the static chain
5733 pointer into a pseudo. If we have small register classes, copy
5734 the value from memory if static_chain_incoming_rtx is a REG. If
5735 we do stupid register allocation, we use the stack address
5736 generated above. */
5737 if (tem && ! obey_regdecls)
5738 {
5739 /* If the static chain originally came in a register, put it back
5740 there, then move it out in the next insn. The reason for
5741 this peculiar code is to satisfy function integration. */
5742 if (SMALL_REGISTER_CLASSES
5743 && GET_CODE (static_chain_incoming_rtx) == REG)
5744 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5745 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5746 }
5747
5748 while (tem)
5749 {
5750 tree rtlexp = make_node (RTL_EXPR);
5751
5752 RTL_EXPR_RTL (rtlexp) = last_ptr;
5753 context_display = tree_cons (tem, rtlexp, context_display);
5754 tem = decl_function_context (tem);
5755 if (tem == 0)
5756 break;
5757 /* Chain through stack frames, assuming the pointer to the next lexical
5758 frame is found at the place we always store it. */
5759 #ifdef FRAME_GROWS_DOWNWARD
5760 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5761 #endif
5762 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5763 memory_address (Pmode, last_ptr)));
5764
5765 /* If we are not optimizing, ensure that we know that this
5766 piece of context is live over the entire function. */
5767 if (! optimize)
5768 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5769 save_expr_regs);
5770 }
5771 }
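/* Editor's sketch, not part of the original file: the loop above is a
   classic static-link walk: each frame stores its enclosing frame's
   pointer at a known slot, so outer contexts are reached by repeated
   loads.  Toy stand-alone version, hypothetical names, assuming the
   link is stored at offset 0:

     static void *
     nth_enclosing_frame (void *frame, int n)
     {
       while (n-- > 0)
         frame = *(void **) frame;   // follow one static link
       return frame;
     }
*/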
5772
5773 if (current_function_instrument_entry_exit)
5774 {
5775 rtx fun = DECL_RTL (current_function_decl);
5776 if (GET_CODE (fun) == MEM)
5777 fun = XEXP (fun, 0);
5778 else
5779 abort ();
5780 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
5781 fun, Pmode,
5782 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5783 0,
5784 hard_frame_pointer_rtx),
5785 Pmode);
5786 }
5787
5788 /* The tail-recursion label, if we end up needing one, goes after the
5789 display initializations. Ensure we have a NOTE here, since some
5790 things (like trampolines) get placed before this point. */
5791 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5792
5793 /* Now evaluate the sizes of any types declared among the arguments. */
5794 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5795 {
5796 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5797 EXPAND_MEMORY_USE_BAD);
5798 /* Flush the queue in case this parameter declaration has
5799 side-effects. */
5800 emit_queue ();
5801 }
5802
5803 /* Make sure there is a line number after the function entry setup code. */
5804 force_next_line_note ();
5805 }
5806 \f
5807 /* Generate RTL for the end of the current function.
5808 FILENAME and LINE are the current position in the source file.
5809
5810 It is up to language-specific callers to do cleanups for parameters--
5811 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5812
5813 void
5814 expand_function_end (filename, line, end_bindings)
5815 char *filename;
5816 int line;
5817 int end_bindings;
5818 {
5819 register int i;
5820 tree link;
5821
5822 #ifdef TRAMPOLINE_TEMPLATE
5823 static rtx initial_trampoline;
5824 #endif
5825
5826 #ifdef NON_SAVING_SETJMP
5827 /* Don't put any variables in registers if we call setjmp
5828 on a machine that fails to restore the registers. */
5829 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5830 {
5831 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5832 setjmp_protect (DECL_INITIAL (current_function_decl));
5833
5834 setjmp_protect_args ();
5835 }
5836 #endif
5837
5838 /* Save the argument pointer if a save area was made for it. */
5839 if (arg_pointer_save_area)
5840 {
5841 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5842 emit_insn_before (x, tail_recursion_reentry);
5843 }
5844
5845 /* Initialize any trampolines required by this function. */
5846 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5847 {
5848 tree function = TREE_PURPOSE (link);
5849 rtx context = lookup_static_chain (function);
5850 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5851 #ifdef TRAMPOLINE_TEMPLATE
5852 rtx blktramp;
5853 #endif
5854 rtx seq;
5855
5856 #ifdef TRAMPOLINE_TEMPLATE
5857 /* First make sure this compilation has a template for
5858 initializing trampolines. */
5859 if (initial_trampoline == 0)
5860 {
5861 end_temporary_allocation ();
5862 initial_trampoline
5863 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5864 resume_temporary_allocation ();
5865 }
5866 #endif
5867
5868 /* Generate insns to initialize the trampoline. */
5869 start_sequence ();
5870 tramp = round_trampoline_addr (XEXP (tramp, 0));
5871 #ifdef TRAMPOLINE_TEMPLATE
5872 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5873 emit_block_move (blktramp, initial_trampoline,
5874 GEN_INT (TRAMPOLINE_SIZE),
5875 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5876 #endif
5877 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5878 seq = get_insns ();
5879 end_sequence ();
5880
5881 /* Put those insns at entry to the containing function (this one). */
5882 emit_insns_before (seq, tail_recursion_reentry);
5883 }
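/* Editor's sketch, not part of the original file: the target macro
   INITIALIZE_TRAMPOLINE varies per machine, but the net effect of the
   loop above, in pseudo-C with hypothetical names, is

     memcpy (tramp, template_code, TRAMPOLINE_SIZE);  // copy the stub
     store_word (tramp, fn_addr_slot, fn_address);    // patch callee
     store_word (tramp, chain_slot, static_chain);    // patch context

   so a call through the trampoline enters the nested function with
   its containing frame's static chain in place.  */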
5884
5885 /* If we are doing stack checking and this function makes calls,
5886 do a stack probe at the start of the function to ensure we have enough
5887 space for another stack frame. */
5888 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5889 {
5890 rtx insn, seq;
5891
5892 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5893 if (GET_CODE (insn) == CALL_INSN)
5894 {
5895 start_sequence ();
5896 probe_stack_range (STACK_CHECK_PROTECT,
5897 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5898 seq = get_insns ();
5899 end_sequence ();
5900 emit_insns_before (seq, tail_recursion_reentry);
5901 break;
5902 }
5903 }
5904
5905 /* Warn about unused parms if extra warnings were specified. */
5906 if (warn_unused && extra_warnings)
5907 {
5908 tree decl;
5909
5910 for (decl = DECL_ARGUMENTS (current_function_decl);
5911 decl; decl = TREE_CHAIN (decl))
5912 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5913 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5914 warning_with_decl (decl, "unused parameter `%s'");
5915 }
5916
5917 /* Delete handlers for nonlocal gotos if nothing uses them. */
5918 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5919 delete_handlers ();
5920
5921 /* End any sequences that failed to be closed due to syntax errors. */
5922 while (in_sequence_p ())
5923 end_sequence ();
5924
5925 /* Outside function body, can't compute type's actual size
5926 until next function's body starts. */
5927 immediate_size_expand--;
5928
5929 /* If doing stupid register allocation,
5930 mark register parms as dying here. */
5931
5932 if (obey_regdecls)
5933 {
5934 rtx tem;
5935 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5936 use_variable (regno_reg_rtx[i]);
5937
5938 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5939
5940 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5941 {
5942 use_variable (XEXP (tem, 0));
5943 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5944 }
5945
5946 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5947 use_variable (current_function_internal_arg_pointer);
5948 }
5949
5950 clear_pending_stack_adjust ();
5951 do_pending_stack_adjust ();
5952
5953 /* Mark the end of the function body.
5954 If control reaches this insn, the function can drop through
5955 without returning a value. */
5956 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5957
5958 /* Must mark the last line number note in the function, so that the test
5959 coverage code can avoid counting the last line twice. This just tells
5960 the code to ignore the immediately following line note, since there
5961 already exists a copy of this note somewhere above. This line number
5962 note is still needed for debugging though, so we can't delete it. */
5963 if (flag_test_coverage)
5964 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
5965
5966 /* Output a linenumber for the end of the function.
5967 SDB depends on this. */
5968 emit_line_note_force (filename, line);
5969
5970 /* Output the label for the actual return from the function,
5971 if one is expected. This happens either because a function epilogue
5972 is used instead of a return instruction, or because a return was done
5973 with a goto in order to run local cleanups, or because of pcc-style
5974 structure returning. */
5975
5976 if (return_label)
5977 emit_label (return_label);
5978
5979 /* C++ uses this. */
5980 if (end_bindings)
5981 expand_end_bindings (0, 0, 0);
5982
5983 /* Now handle any leftover exception regions that may have been
5984 created for the parameters. */
5985 {
5986 rtx last = get_last_insn ();
5987 rtx label;
5988
5989 expand_leftover_cleanups ();
5990
5991 /* If the above emitted any code, make sure we jump around it. */
5992 if (last != get_last_insn ())
5993 {
5994 label = gen_label_rtx ();
5995 last = emit_jump_insn_after (gen_jump (label), last);
5996 last = emit_barrier_after (last);
5997 emit_label (label);
5998 }
5999 }
6000
6001 if (current_function_instrument_entry_exit)
6002 {
6003 rtx fun = DECL_RTL (current_function_decl);
6004 if (GET_CODE (fun) == MEM)
6005 fun = XEXP (fun, 0);
6006 else
6007 abort ();
6008 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6009 fun, Pmode,
6010 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6011 0,
6012 hard_frame_pointer_rtx),
6013 Pmode);
6014 }
6015
6016 /* If we had calls to alloca, and this machine needs
6017 an accurate stack pointer to exit the function,
6018 insert some code to save and restore the stack pointer. */
6019 #ifdef EXIT_IGNORE_STACK
6020 if (! EXIT_IGNORE_STACK)
6021 #endif
6022 if (current_function_calls_alloca)
6023 {
6024 rtx tem = 0;
6025
6026 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6027 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6028 }
6029
6030 /* If scalar return value was computed in a pseudo-reg,
6031 copy that to the hard return register. */
6032 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6033 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6034 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6035 >= FIRST_PSEUDO_REGISTER))
6036 {
6037 rtx real_decl_result;
6038
6039 #ifdef FUNCTION_OUTGOING_VALUE
6040 real_decl_result
6041 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6042 current_function_decl);
6043 #else
6044 real_decl_result
6045 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6046 current_function_decl);
6047 #endif
6048 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6049 /* If this is a BLKmode structure being returned in registers, then use
6050 the mode computed in expand_return. */
6051 if (GET_MODE (real_decl_result) == BLKmode)
6052 PUT_MODE (real_decl_result,
6053 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6054 emit_move_insn (real_decl_result,
6055 DECL_RTL (DECL_RESULT (current_function_decl)));
6056 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6057
6058 /* The delay slot scheduler assumes that current_function_return_rtx
6059 holds the hard register containing the return value, not a temporary
6060 pseudo. */
6061 current_function_return_rtx = real_decl_result;
6062 }
6063
6064 /* If returning a structure, arrange to return the address of the value
6065 in a place where debuggers expect to find it.
6066
6067 If returning a structure PCC style,
6068 the caller also depends on this value.
6069 And current_function_returns_pcc_struct is not necessarily set. */
6070 if (current_function_returns_struct
6071 || current_function_returns_pcc_struct)
6072 {
6073 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6074 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6075 #ifdef FUNCTION_OUTGOING_VALUE
6076 rtx outgoing
6077 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6078 current_function_decl);
6079 #else
6080 rtx outgoing
6081 = FUNCTION_VALUE (build_pointer_type (type),
6082 current_function_decl);
6083 #endif
6084
6085 /* Mark this as a function return value so integrate will delete the
6086 assignment and USE below when inlining this function. */
6087 REG_FUNCTION_VALUE_P (outgoing) = 1;
6088
6089 emit_move_insn (outgoing, value_address);
6090 use_variable (outgoing);
6091 }
6092
6093 /* If this is an implementation of __throw, do what's necessary to
6094 communicate between __builtin_eh_return and the epilogue. */
6095 expand_eh_return ();
6096
6097 /* Output a return insn if we are using one.
6098 Otherwise, let the rtl chain end here, to drop through
6099 into the epilogue. */
6100
6101 #ifdef HAVE_return
6102 if (HAVE_return)
6103 {
6104 emit_jump_insn (gen_return ());
6105 emit_barrier ();
6106 }
6107 #endif
6108
6109 /* Fix up any gotos that jumped out to the outermost
6110 binding level of the function.
6111 Must follow emitting RETURN_LABEL. */
6112
6113 /* If you have any cleanups to do at this point,
6114 and they need to create temporary variables,
6115 then you will lose. */
6116 expand_fixups (get_insns ());
6117 }
6118 \f
6119 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
6120
6121 static int *prologue;
6122 static int *epilogue;
6123
6124 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6125 or a single insn). */
6126
6127 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6128 static int *
6129 record_insns (insns)
6130 rtx insns;
6131 {
6132 int *vec;
6133
6134 if (GET_CODE (insns) == SEQUENCE)
6135 {
6136 int len = XVECLEN (insns, 0);
6137 vec = (int *) oballoc ((len + 1) * sizeof (int));
6138 vec[len] = 0;
6139 while (--len >= 0)
6140 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6141 }
6142 else
6143 {
6144 vec = (int *) oballoc (2 * sizeof (int));
6145 vec[0] = INSN_UID (insns);
6146 vec[1] = 0;
6147 }
6148 return vec;
6149 }
6150
6151 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6152
6153 static int
6154 contains (insn, vec)
6155 rtx insn;
6156 int *vec;
6157 {
6158 register int i, j;
6159
6160 if (GET_CODE (insn) == INSN
6161 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6162 {
6163 int count = 0;
6164 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6165 for (j = 0; vec[j]; j++)
6166 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6167 count++;
6168 return count;
6169 }
6170 else
6171 {
6172 for (j = 0; vec[j]; j++)
6173 if (INSN_UID (insn) == vec[j])
6174 return 1;
6175 }
6176 return 0;
6177 }
6178 #endif /* HAVE_prologue || HAVE_epilogue */
6179
6180 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6181 this into place with notes indicating where the prologue ends and where
6182 the epilogue begins. Update the basic block information when possible. */
6183
6184 void
6185 thread_prologue_and_epilogue_insns (f)
6186 rtx f;
6187 {
6188 #ifdef HAVE_prologue
6189 if (HAVE_prologue)
6190 {
6191 rtx head, seq;
6192
6193 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6194 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6195 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6196 seq = gen_prologue ();
6197 head = emit_insn_after (seq, f);
6198
6199 /* Include the new prologue insns in the first block. Ignore them
6200 if they form a basic block unto themselves. */
6201 if (basic_block_head && n_basic_blocks
6202 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
6203 basic_block_head[0] = NEXT_INSN (f);
6204
6205 /* Retain a map of the prologue insns. */
6206 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6207 }
6208 else
6209 #endif
6210 prologue = 0;
6211
6212 #ifdef HAVE_epilogue
6213 if (HAVE_epilogue)
6214 {
6215 rtx insn = get_last_insn ();
6216 rtx prev = prev_nonnote_insn (insn);
6217
6218 /* If we end with a BARRIER, we don't need an epilogue. */
6219 if (! (prev && GET_CODE (prev) == BARRIER))
6220 {
6221 rtx tail, seq, tem;
6222 rtx first_use = 0;
6223 rtx last_use = 0;
6224
6225 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6226 epilogue insns, the USE insns at the end of a function,
6227 the jump insn that returns, and then a BARRIER. */
6228
6229 /* Move the USE insns at the end of a function onto a list. */
6230 while (prev
6231 && GET_CODE (prev) == INSN
6232 && GET_CODE (PATTERN (prev)) == USE)
6233 {
6234 tem = prev;
6235 prev = prev_nonnote_insn (prev);
6236
6237 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6238 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6239 if (first_use)
6240 {
6241 NEXT_INSN (tem) = first_use;
6242 PREV_INSN (first_use) = tem;
6243 }
6244 first_use = tem;
6245 if (!last_use)
6246 last_use = tem;
6247 }
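/* Editor's sketch, not part of the original file: the loop above
   unlinks each USE insn from the doubly linked insn chain and pushes
   it onto a private list.  The same splice over a toy node type
   (assumes both neighbors exist, as they do here):

     struct insn { struct insn *prev, *next; };

     static struct insn *
     unlink_and_push (struct insn *tem, struct insn *head)
     {
       tem->prev->next = tem->next;   // unlink from the main chain
       tem->next->prev = tem->prev;
       tem->next = head;              // push onto the USE list
       if (head)
         head->prev = tem;
       return tem;                    // new head of the USE list
     }
*/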
6248
6249 emit_barrier_after (insn);
6250
6251 seq = gen_epilogue ();
6252 tail = emit_jump_insn_after (seq, insn);
6253
6254 /* Insert the USE insns immediately before the return insn, which
6255 must be the first instruction before the final barrier. */
6256 if (first_use)
6257 {
6258 tem = prev_nonnote_insn (get_last_insn ());
6259 NEXT_INSN (PREV_INSN (tem)) = first_use;
6260 PREV_INSN (first_use) = PREV_INSN (tem);
6261 PREV_INSN (tem) = last_use;
6262 NEXT_INSN (last_use) = tem;
6263 }
6264
6265 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6266
6267 /* Include the new epilogue insns in the last block. Ignore
6268 them if they form a basic block unto themselves. */
6269 if (basic_block_end && n_basic_blocks
6270 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
6271 basic_block_end[n_basic_blocks - 1] = tail;
6272
6273 /* Retain a map of the epilogue insns. */
6274 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6275 return;
6276 }
6277 }
6278 #endif
6279 epilogue = 0;
6280 }
6281
6282 /* Reposition the prologue-end and epilogue-begin notes after instruction
6283 scheduling and delayed branch scheduling. */
6284
6285 void
6286 reposition_prologue_and_epilogue_notes (f)
6287 rtx f;
6288 {
6289 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6290 /* Reposition the prologue and epilogue notes. */
6291 if (n_basic_blocks)
6292 {
6293 rtx next, prev;
6294 int len;
6295
6296 if (prologue)
6297 {
6298 register rtx insn, note = 0;
6299
6300 /* Scan from the beginning until we reach the last prologue insn.
6301 We apparently can't depend on basic_block_{head,end} after
6302 reorg has run. */
6303 for (len = 0; prologue[len]; len++)
6304 ;
6305 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6306 {
6307 if (GET_CODE (insn) == NOTE)
6308 {
6309 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6310 note = insn;
6311 }
6312 else if ((len -= contains (insn, prologue)) == 0)
6313 {
6314 /* Find the prologue-end note if we haven't already, and
6315 move it to just after the last prologue insn. */
6316 if (note == 0)
6317 {
6318 for (note = insn; (note = NEXT_INSN (note));)
6319 if (GET_CODE (note) == NOTE
6320 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6321 break;
6322 }
6323
6324 next = NEXT_INSN (note);
6325 prev = PREV_INSN (note);
6326 if (prev)
6327 NEXT_INSN (prev) = next;
6328 if (next)
6329 PREV_INSN (next) = prev;
6330
6331 /* Whether or not we can depend on basic_block_head,
6332 attempt to keep it up-to-date. */
6333 if (basic_block_head[0] == note)
6334 basic_block_head[0] = next;
6335
6336 add_insn_after (note, insn);
6337 }
6338 }
6339 }
6340
6341 if (epilogue)
6342 {
6343 register rtx insn, note = 0;
6344
6345 /* Scan from the end until we reach the first epilogue insn.
6346 We apparently can't depend on basic_block_{head,end} after
6347 reorg has run. */
6348 for (len = 0; epilogue[len]; len++)
6349 ;
6350 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6351 {
6352 if (GET_CODE (insn) == NOTE)
6353 {
6354 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6355 note = insn;
6356 }
6357 else if ((len -= contains (insn, epilogue)) == 0)
6358 {
6359 /* Find the epilogue-begin note if we haven't already, and
6360 move it to just before the first epilogue insn. */
6361 if (note == 0)
6362 {
6363 for (note = insn; (note = PREV_INSN (note));)
6364 if (GET_CODE (note) == NOTE
6365 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6366 break;
6367 }
6368 next = NEXT_INSN (note);
6369 prev = PREV_INSN (note);
6370 if (prev)
6371 NEXT_INSN (prev) = next;
6372 if (next)
6373 PREV_INSN (next) = prev;
6374
6375 /* Whether or not we can depend on basic_block_head,
6376 attempt to keep it up-to-date. */
6377 if (n_basic_blocks
6378 && basic_block_head[n_basic_blocks-1] == insn)
6379 basic_block_head[n_basic_blocks-1] = note;
6380
6381 add_insn_before (note, insn);
6382 }
6383 }
6384 }
6385 }
6386 #endif /* HAVE_prologue or HAVE_epilogue */
6387 }