Fine-grained control of -fcheck-memory-usage with new no_check_memory_usage attribute.
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases, use the macro NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value to the lowest integer less than it that is a multiple of
   the required alignment.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
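
/* Worked example (illustrative only, not part of the original source):
   with ALIGN == 8, FLOOR_ROUND (-3, 8) == (-3 & ~7) == -8 and
   CEIL_ROUND (13, 8) == ((13 + 7) & ~7) == 16.  Two's complement
   masking gives the right answer even for negative VALUEs, where
   plain division would round toward zero instead.  */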

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if this function has a computed goto.

   It is computed during find_basic_blocks or during stupid life
   analysis.  */

int current_function_has_computed_jump;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* Nonzero if instrumentation calls for function entry and exit should be
   generated.  */
int current_function_instrument_entry_exit;

/* Nonzero if memory access checking should be enabled in the current
   function.  */
int current_function_check_memory_usage;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is
   requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
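
/* A sketch of the usual statement-level protocol, using only entry
   points defined below (illustrative; the real driver lives in the
   statement-expansion code, not here):

       push_temp_slots ();
       temp = assign_stack_temp (mode, size, 0);
       ... emit RTL that stores into TEMP ...
       preserve_temp_slots (result);  <- only if RESULT may live in TEMP
       free_temp_slots ();
       pop_temp_slots ();
*/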

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
                                            int, struct function *));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;
  p->instrument_entry_exit = current_function_instrument_entry_exit;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;
  current_function_instrument_entry_exit = p->instrument_entry_exit;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
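
/* Example calls (illustrative only): assign_stack_local (SImode, 4, 0)
   aligns the slot as SImode requires, while assign_stack_local
   (BLKmode, n, -1) uses BIGGEST_ALIGNMENT and rounds N up to a
   multiple of it, per the ALIGN conventions documented above.  */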

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
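
/* Illustrative uses of KEEP: assign_stack_temp (DImode, 8, 0) returns a
   slot freed by the next free_temp_slots; KEEP == 1 makes the slot
   survive that call (block-scope automatics); KEEP == 2 places it at
   target_temp_slot_level (TARGET_EXPRs); KEEP == 3 places it at
   var_temp_slot_level, treating it as a block variable (SAVE_EXPRs).  */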
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless we are at
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
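
/* Worked example (illustrative only): two free BLKmode slots with
   (base_offset, full_size) of (16, 8) and (24, 8) satisfy
   16 + 8 == 24, so they merge into one slot of full_size 16 that a
   later assign_stack_temp can hand out as a single larger temporary.  */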
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
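
/* Source-level illustration (a hypothetical example): in
       x = ({ struct S s = f (); s; });
   the value of the ({...}) grouping may sit in a temporary allocated
   inside the braces, so that slot must be moved up a level rather than
   freed along with the other statement temporaries.  */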

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */
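
/* Source-level illustration (a hypothetical example): given
       int i = 0;  int *p = &i;
   if I was first expanded into a pseudo register, the later &I forces
   it into the stack; DECL_RTL becomes a MEM and every insn already
   emitted is rewritten by the fixup machinery below.  */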
1376
1377 void
1378 put_var_into_stack (decl)
1379 tree decl;
1380 {
1381 register rtx reg;
1382 enum machine_mode promoted_mode, decl_mode;
1383 struct function *function = 0;
1384 tree context;
1385 int can_use_addressof;
1386
1387 context = decl_function_context (decl);
1388
1389 /* Get the current rtl used for this object and its original mode. */
1390 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1391
1392 /* No need to do anything if decl has no rtx yet
1393 since in that case caller is setting TREE_ADDRESSABLE
1394 and a stack slot will be assigned when the rtl is made. */
1395 if (reg == 0)
1396 return;
1397
1398 /* Get the declared mode for this object. */
1399 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1400 : DECL_MODE (decl));
1401 /* Get the mode it's actually stored in. */
1402 promoted_mode = GET_MODE (reg);
1403
1404 /* If this variable comes from an outer function,
1405 find that function's saved context. */
1406 if (context != current_function_decl && context != inline_function_decl)
1407 for (function = outer_function_chain; function; function = function->next)
1408 if (function->decl == context)
1409 break;
1410
1411 /* If this is a variable-size object with a pseudo to address it,
1412 put that pseudo into the stack, if the var is nonlocal. */
1413 if (DECL_NONLOCAL (decl)
1414 && GET_CODE (reg) == MEM
1415 && GET_CODE (XEXP (reg, 0)) == REG
1416 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1417 {
1418 reg = XEXP (reg, 0);
1419 decl_mode = promoted_mode = GET_MODE (reg);
1420 }
1421
1422 can_use_addressof
1423 = (function == 0
1424 && optimize > 0
1425 /* FIXME make it work for promoted modes too */
1426 && decl_mode == promoted_mode
1427 #ifdef NON_SAVING_SETJMP
1428 && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
1429 #endif
1430 );
1431
1432 /* If we can't use ADDRESSOF, make sure we see through one we already
1433 generated. */
1434 if (! can_use_addressof && GET_CODE (reg) == MEM
1435 && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
1436 reg = XEXP (XEXP (reg, 0), 0);
1437
1438 /* Now we should have a value that resides in one or more pseudo regs. */
1439
1440 if (GET_CODE (reg) == REG)
1441 {
1442 /* If this variable lives in the current function and we don't need
1443 to put things in the stack for the sake of setjmp, try to keep it
1444 in a register until we know we actually need the address. */
1445 if (can_use_addressof)
1446 gen_mem_addressof (reg, decl);
1447 else
1448 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1449 promoted_mode, decl_mode,
1450 TREE_SIDE_EFFECTS (decl), 0,
1451 TREE_USED (decl)
1452 || DECL_INITIAL (decl) != 0);
1453 }
1454 else if (GET_CODE (reg) == CONCAT)
1455 {
1456 /* A CONCAT contains two pseudos; put them both in the stack.
1457 We do it so they end up consecutive. */
1458 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1459 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1460 #ifdef FRAME_GROWS_DOWNWARD
1461 /* Since part 0 should have a lower address, do it second. */
1462 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1463 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1464 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1465 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1466 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1467 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1468 #else
1469 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1470 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1471 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1472 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1473 part_mode, TREE_SIDE_EFFECTS (decl), 0,
1474 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
1475 #endif
1476
1477 /* Change the CONCAT into a combined MEM for both parts. */
1478 PUT_CODE (reg, MEM);
1479 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1480 MEM_ALIAS_SET (reg) = get_alias_set (decl);
1481
1482 /* The two parts are in memory order already.
1483 Use the lower parts address as ours. */
1484 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1485 /* Prevent sharing of rtl that might lose. */
1486 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1487 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1488 }
1489 else
1490 return;
1491
1492 if (current_function_check_memory_usage)
1493 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
1494 XEXP (reg, 0), ptr_mode,
1495 GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
1496 TYPE_MODE (sizetype),
1497 GEN_INT (MEMORY_USE_RW),
1498 TYPE_MODE (integer_type_node));
1499 }
1500
1501 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1502 into the stack frame of FUNCTION (0 means the current function).
1503 DECL_MODE is the machine mode of the user-level data type.
1504 PROMOTED_MODE is the machine mode of the register.
1505 VOLATILE_P is nonzero if this is for a "volatile" decl.
1506 USED_P is nonzero if this reg might have already been used in an insn. */
1507
1508 static void
1509 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
1510 original_regno, used_p)
1511 struct function *function;
1512 rtx reg;
1513 tree type;
1514 enum machine_mode promoted_mode, decl_mode;
1515 int volatile_p;
1516 int original_regno;
1517 int used_p;
1518 {
1519 rtx new = 0;
1520 int regno = original_regno;
1521
1522 if (regno == 0)
1523 regno = REGNO (reg);
1524
1525 if (function)
1526 {
1527 if (regno < function->max_parm_reg)
1528 new = function->parm_reg_stack_loc[regno];
1529 if (new == 0)
1530 new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
1531 0, function);
1532 }
1533 else
1534 {
1535 if (regno < max_parm_reg)
1536 new = parm_reg_stack_loc[regno];
1537 if (new == 0)
1538 new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
1539 }
1540
1541 PUT_MODE (reg, decl_mode);
1542 XEXP (reg, 0) = XEXP (new, 0);
1543 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1544 MEM_VOLATILE_P (reg) = volatile_p;
1545 PUT_CODE (reg, MEM);
1546
1547 /* If this is a memory ref that contains aggregate components,
1548 mark it as such for cse and loop optimize. If we are reusing a
1549 previously generated stack slot, then we need to copy the bit in
1550 case it was set for other reasons. For instance, it is set for
1551 __builtin_va_alist. */
1552 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);
1553 MEM_ALIAS_SET (reg) = get_alias_set (type);
1554
1555 /* Now make sure that all refs to the variable, previously made
1556 when it was a register, are fixed up to be valid again. */
1557
1558 if (used_p && function != 0)
1559 {
1560 struct var_refs_queue *temp;
1561
1562 /* Variable is inherited; fix it up when we get back to its function. */
1563 push_obstacks (function->function_obstack,
1564 function->function_maybepermanent_obstack);
1565
1566 /* See comment in restore_tree_status in tree.c for why this needs to be
1567 on saveable obstack. */
1568 temp
1569 = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
1570 temp->modified = reg;
1571 temp->promoted_mode = promoted_mode;
1572 temp->unsignedp = TREE_UNSIGNED (type);
1573 temp->next = function->fixup_var_refs_queue;
1574 function->fixup_var_refs_queue = temp;
1575 pop_obstacks ();
1576 }
1577 else if (used_p)
1578 /* Variable is local; fix it up now. */
1579 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
1580 }
1581 \f
1582 static void
1583 fixup_var_refs (var, promoted_mode, unsignedp)
1584 rtx var;
1585 enum machine_mode promoted_mode;
1586 int unsignedp;
1587 {
1588 tree pending;
1589 rtx first_insn = get_insns ();
1590 struct sequence_stack *stack = sequence_stack;
1591 tree rtl_exps = rtl_expr_chain;
1592
1593 /* Must scan all insns for stack-refs that exceed the limit. */
1594 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
1595
1596 /* Scan all pending sequences too. */
1597 for (; stack; stack = stack->next)
1598 {
1599 push_to_sequence (stack->first);
1600 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1601 stack->first, stack->next != 0);
1602 /* Update remembered end of sequence
1603 in case we added an insn at the end. */
1604 stack->last = get_last_insn ();
1605 end_sequence ();
1606 }
1607
1608 /* Scan all waiting RTL_EXPRs too. */
1609 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1610 {
1611 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1612 if (seq != const0_rtx && seq != 0)
1613 {
1614 push_to_sequence (seq);
1615 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
1616 end_sequence ();
1617 }
1618 }
1619 }
1620 \f
1621 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1622 some part of an insn. Return a struct fixup_replacement whose OLD
1623 value is equal to X. Allocate a new structure if no such entry exists. */
1624
1625 static struct fixup_replacement *
1626 find_fixup_replacement (replacements, x)
1627 struct fixup_replacement **replacements;
1628 rtx x;
1629 {
1630 struct fixup_replacement *p;
1631
1632 /* See if we have already replaced this. */
1633 for (p = *replacements; p && p->old != x; p = p->next)
1634 ;
1635
1636 if (p == 0)
1637 {
1638 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1639 p->old = x;
1640 p->new = 0;
1641 p->next = *replacements;
1642 *replacements = p;
1643 }
1644
1645 return p;
1646 }
1647
1648 /* Scan the insn-chain starting with INSN for refs to VAR
1649 and fix them up. TOPLEVEL is nonzero if this chain is the
1650 main chain of insns for the current function. */
1651
1652 static void
1653 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1654 rtx var;
1655 enum machine_mode promoted_mode;
1656 int unsignedp;
1657 rtx insn;
1658 int toplevel;
1659 {
1660 rtx call_dest = 0;
1661
1662 while (insn)
1663 {
1664 rtx next = NEXT_INSN (insn);
1665 rtx set, prev, prev_set;
1666 rtx note;
1667
1668 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1669 {
1670 /* If this is a CLOBBER of VAR, delete it.
1671
1672 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1673 and REG_RETVAL notes too. */
1674 if (GET_CODE (PATTERN (insn)) == CLOBBER
1675 && (XEXP (PATTERN (insn), 0) == var
1676 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1677 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1678 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1679 {
1680 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1681 /* The REG_LIBCALL note will go away since we are going to
1682 turn INSN into a NOTE, so just delete the
1683 corresponding REG_RETVAL note. */
1684 remove_note (XEXP (note, 0),
1685 find_reg_note (XEXP (note, 0), REG_RETVAL,
1686 NULL_RTX));
1687
1688 /* In unoptimized compilation, we shouldn't call delete_insn
1689 except in jump.c when emitting warnings. */
1690 PUT_CODE (insn, NOTE);
1691 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1692 NOTE_SOURCE_FILE (insn) = 0;
1693 }
1694
1695 /* The insn to load VAR from a home in the arglist
1696 is now a no-op. When we see it, just delete it.
1697 Similarly if this is storing VAR from a register from which
1698 it was loaded in the previous insn. This will occur
1699 when an ADDRESSOF was made for an arglist slot. */
1700 else if (toplevel
1701 && (set = single_set (insn)) != 0
1702 && SET_DEST (set) == var
1703 /* If this represents the result of an insn group,
1704 don't delete the insn. */
1705 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1706 && (rtx_equal_p (SET_SRC (set), var)
1707 || (GET_CODE (SET_SRC (set)) == REG
1708 && (prev = prev_nonnote_insn (insn)) != 0
1709 && (prev_set = single_set (prev)) != 0
1710 && SET_DEST (prev_set) == SET_SRC (set)
1711 && rtx_equal_p (SET_SRC (prev_set), var))))
1712 {
1713 /* In unoptimized compilation, we shouldn't call delete_insn
1714 except in jump.c when emitting warnings. */
1715 PUT_CODE (insn, NOTE);
1716 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1717 NOTE_SOURCE_FILE (insn) = 0;
1718 if (insn == last_parm_insn)
1719 last_parm_insn = PREV_INSN (next);
1720 }
1721 else
1722 {
1723 struct fixup_replacement *replacements = 0;
1724 rtx next_insn = NEXT_INSN (insn);
1725
1726 if (SMALL_REGISTER_CLASSES)
1727 {
1728 /* If the insn that copies the results of a CALL_INSN
1729 into a pseudo now references VAR, we have to use an
1730 intermediate pseudo since we want the life of the
1731 return value register to be only a single insn.
1732
1733 If we don't use an intermediate pseudo, such things as
1734 address computations (needed to make the address of VAR
1735 valid if it is not already) can be placed between the CALL_INSN and INSN.
1736
1737 To make sure this doesn't happen, we record the destination
1738 of the CALL_INSN and see if the next insn uses both that
1739 and VAR. */
1740
1741 if (call_dest != 0 && GET_CODE (insn) == INSN
1742 && reg_mentioned_p (var, PATTERN (insn))
1743 && reg_mentioned_p (call_dest, PATTERN (insn)))
1744 {
1745 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1746
1747 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1748
1749 PATTERN (insn) = replace_rtx (PATTERN (insn),
1750 call_dest, temp);
1751 }
1752
1753 if (GET_CODE (insn) == CALL_INSN
1754 && GET_CODE (PATTERN (insn)) == SET)
1755 call_dest = SET_DEST (PATTERN (insn));
1756 else if (GET_CODE (insn) == CALL_INSN
1757 && GET_CODE (PATTERN (insn)) == PARALLEL
1758 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1759 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1760 else
1761 call_dest = 0;
1762 }
1763
1764 /* See if we have to do anything to INSN now that VAR is in
1765 memory. If it needs to be loaded into a pseudo, use a single
1766 pseudo for the entire insn in case there is a MATCH_DUP
1767 between two operands. We pass a pointer to the head of
1768 a list of struct fixup_replacements. If fixup_var_refs_1
1769 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1770 it will record them in this list.
1771
1772 If it allocated a pseudo for any replacement, we copy into
1773 it here. */
1774
1775 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1776 &replacements);
1777
1778 /* If this is last_parm_insn, and any instructions were output
1779 after it to fix it up, then we must set last_parm_insn to
1780 the last such instruction emitted. */
1781 if (insn == last_parm_insn)
1782 last_parm_insn = PREV_INSN (next_insn);
1783
1784 while (replacements)
1785 {
1786 if (GET_CODE (replacements->new) == REG)
1787 {
1788 rtx insert_before;
1789 rtx seq;
1790
1791 /* OLD might be a (subreg (mem)). */
1792 if (GET_CODE (replacements->old) == SUBREG)
1793 replacements->old
1794 = fixup_memory_subreg (replacements->old, insn, 0);
1795 else
1796 replacements->old
1797 = fixup_stack_1 (replacements->old, insn);
1798
1799 insert_before = insn;
1800
1801 /* If we are changing the mode, do a conversion.
1802 This might be wasteful, but combine.c will
1803 eliminate much of the waste. */
1804
1805 if (GET_MODE (replacements->new)
1806 != GET_MODE (replacements->old))
1807 {
1808 start_sequence ();
1809 convert_move (replacements->new,
1810 replacements->old, unsignedp);
1811 seq = gen_sequence ();
1812 end_sequence ();
1813 }
1814 else
1815 seq = gen_move_insn (replacements->new,
1816 replacements->old);
1817
1818 emit_insn_before (seq, insert_before);
1819 }
1820
1821 replacements = replacements->next;
1822 }
1823 }
1824
1825 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1826 But don't touch other insns referred to by reg-notes;
1827 we will get them elsewhere. */
1828 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1829 if (GET_CODE (note) != INSN_LIST)
1830 XEXP (note, 0)
1831 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1832 }
1833 insn = next;
1834 }
1835 }
1836 \f
1837 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1838 See if the rtx expression at *LOC in INSN needs to be changed.
1839
1840 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1841 contain a list of original rtx's and replacements. If we find that we need
1842 to modify this insn by replacing a memory reference with a pseudo or by
1843 making a new MEM to implement a SUBREG, we consult that list to see if
1844 we have already chosen a replacement. If none has already been allocated,
1845 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1846 or the SUBREG, as appropriate, to the pseudo. */
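/* An illustrative case: if INSN is (set (reg R) (plus VAR VAR)) and
   the target's add insn cannot take two memory operands, both
   occurrences of VAR are replaced by a single new pseudo P recorded
   on REPLACEMENTS, and fixup_var_refs_insns then emits a move of VAR
   into P just before INSN.  */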
1847
1848 static void
1849 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1850 register rtx var;
1851 enum machine_mode promoted_mode;
1852 register rtx *loc;
1853 rtx insn;
1854 struct fixup_replacement **replacements;
1855 {
1856 register int i;
1857 register rtx x = *loc;
1858 RTX_CODE code = GET_CODE (x);
1859 register char *fmt;
1860 register rtx tem, tem1;
1861 struct fixup_replacement *replacement;
1862
1863 switch (code)
1864 {
1865 case ADDRESSOF:
1866 if (XEXP (x, 0) == var)
1867 {
1868 /* Prevent sharing of rtl that might lose. */
1869 rtx sub = copy_rtx (XEXP (var, 0));
1870
1871 start_sequence ();
1872
1873 if (! validate_change (insn, loc, sub, 0))
1874 {
1875 rtx y = force_operand (sub, NULL_RTX);
1876
1877 if (! validate_change (insn, loc, y, 0))
1878 *loc = copy_to_reg (y);
1879 }
1880
1881 emit_insn_before (gen_sequence (), insn);
1882 end_sequence ();
1883 }
1884 return;
1885
1886 case MEM:
1887 if (var == x)
1888 {
1889 /* If we already have a replacement, use it. Otherwise,
1890 try to fix up this address in case it is invalid. */
1891
1892 replacement = find_fixup_replacement (replacements, var);
1893 if (replacement->new)
1894 {
1895 *loc = replacement->new;
1896 return;
1897 }
1898
1899 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1900
1901 /* Unless we are forcing memory to register or we changed the mode,
1902 we can leave things the way they are if the insn is valid. */
1903
1904 INSN_CODE (insn) = -1;
1905 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1906 && recog_memoized (insn) >= 0)
1907 return;
1908
1909 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1910 return;
1911 }
1912
1913 /* If X contains VAR, we need to unshare it here so that we update
1914 each occurrence separately. But all identical MEMs in one insn
1915 must be replaced with the same rtx because of the possibility of
1916 MATCH_DUPs. */
1917
1918 if (reg_mentioned_p (var, x))
1919 {
1920 replacement = find_fixup_replacement (replacements, x);
1921 if (replacement->new == 0)
1922 replacement->new = copy_most_rtx (x, var);
1923
1924 *loc = x = replacement->new;
1925 }
1926 break;
1927
1928 case REG:
1929 case CC0:
1930 case PC:
1931 case CONST_INT:
1932 case CONST:
1933 case SYMBOL_REF:
1934 case LABEL_REF:
1935 case CONST_DOUBLE:
1936 return;
1937
1938 case SIGN_EXTRACT:
1939 case ZERO_EXTRACT:
1940 /* Note that in some cases those types of expressions are altered
1941 by optimize_bit_field, and do not survive to get here. */
1942 if (XEXP (x, 0) == var
1943 || (GET_CODE (XEXP (x, 0)) == SUBREG
1944 && SUBREG_REG (XEXP (x, 0)) == var))
1945 {
1946 /* Get TEM as a valid MEM in the mode presently in the insn.
1947
1948 We don't worry about the possibility of MATCH_DUP here; it
1949 is highly unlikely and would be tricky to handle. */
1950
1951 tem = XEXP (x, 0);
1952 if (GET_CODE (tem) == SUBREG)
1953 {
1954 if (GET_MODE_BITSIZE (GET_MODE (tem))
1955 > GET_MODE_BITSIZE (GET_MODE (var)))
1956 {
1957 replacement = find_fixup_replacement (replacements, var);
1958 if (replacement->new == 0)
1959 replacement->new = gen_reg_rtx (GET_MODE (var));
1960 SUBREG_REG (tem) = replacement->new;
1961 }
1962 else
1963 tem = fixup_memory_subreg (tem, insn, 0);
1964 }
1965 else
1966 tem = fixup_stack_1 (tem, insn);
1967
1968 /* Unless we want to load from memory, get TEM into the proper mode
1969 for an extract from memory. This can only be done if the
1970 extract is at a constant position and length. */
1971
1972 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1973 && GET_CODE (XEXP (x, 2)) == CONST_INT
1974 && ! mode_dependent_address_p (XEXP (tem, 0))
1975 && ! MEM_VOLATILE_P (tem))
1976 {
1977 enum machine_mode wanted_mode = VOIDmode;
1978 enum machine_mode is_mode = GET_MODE (tem);
1979 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1980
1981 #ifdef HAVE_extzv
1982 if (GET_CODE (x) == ZERO_EXTRACT)
1983 {
1984 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1985 if (wanted_mode == VOIDmode)
1986 wanted_mode = word_mode;
1987 }
1988 #endif
1989 #ifdef HAVE_extv
1990 if (GET_CODE (x) == SIGN_EXTRACT)
1991 {
1992 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1993 if (wanted_mode == VOIDmode)
1994 wanted_mode = word_mode;
1995 }
1996 #endif
1997 /* If we have a narrower mode, we can do something. */
1998 if (wanted_mode != VOIDmode
1999 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2000 {
2001 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2002 rtx old_pos = XEXP (x, 2);
2003 rtx newmem;
2004
2005 /* If the bytes and bits are counted differently, we
2006 must adjust the offset. */
2007 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2008 offset = (GET_MODE_SIZE (is_mode)
2009 - GET_MODE_SIZE (wanted_mode) - offset);
2010
2011 pos %= GET_MODE_BITSIZE (wanted_mode);
2012
2013 newmem = gen_rtx_MEM (wanted_mode,
2014 plus_constant (XEXP (tem, 0), offset));
2015 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2016 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2017 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2018
2019 /* Make the change and see if the insn remains valid. */
2020 INSN_CODE (insn) = -1;
2021 XEXP (x, 0) = newmem;
2022 XEXP (x, 2) = GEN_INT (pos);
2023
2024 if (recog_memoized (insn) >= 0)
2025 return;
2026
2027 /* Otherwise, restore old position. XEXP (x, 0) will be
2028 restored later. */
2029 XEXP (x, 2) = old_pos;
2030 }
2031 }
2032
2033 /* If we get here, the bitfield extract insn can't accept a memory
2034 reference. Copy the input into a register. */
2035
2036 tem1 = gen_reg_rtx (GET_MODE (tem));
2037 emit_insn_before (gen_move_insn (tem1, tem), insn);
2038 XEXP (x, 0) = tem1;
2039 return;
2040 }
2041 break;
2042
2043 case SUBREG:
2044 if (SUBREG_REG (x) == var)
2045 {
2046 /* If this is a special SUBREG made because VAR was promoted
2047 from a wider mode, replace it with VAR and call ourself
2048 recursively, this time saying that the object previously
2049 had its current mode (by virtue of the SUBREG). */
2050
2051 if (SUBREG_PROMOTED_VAR_P (x))
2052 {
2053 *loc = var;
2054 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2055 return;
2056 }
2057
2058 /* If this SUBREG makes VAR wider, it has become a paradoxical
2059 SUBREG with VAR in memory, but these aren't allowed at this
2060 stage of the compilation. So load VAR into a pseudo and take
2061 a SUBREG of that pseudo. */
2062 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2063 {
2064 replacement = find_fixup_replacement (replacements, var);
2065 if (replacement->new == 0)
2066 replacement->new = gen_reg_rtx (GET_MODE (var));
2067 SUBREG_REG (x) = replacement->new;
2068 return;
2069 }
2070
2071 /* See if we have already found a replacement for this SUBREG.
2072 If so, use it. Otherwise, make a MEM and see if the insn
2073 is recognized. If not, or if we should force MEM into a register,
2074 make a pseudo for this SUBREG. */
2075 replacement = find_fixup_replacement (replacements, x);
2076 if (replacement->new)
2077 {
2078 *loc = replacement->new;
2079 return;
2080 }
2081
2082 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2083
2084 INSN_CODE (insn) = -1;
2085 if (! flag_force_mem && recog_memoized (insn) >= 0)
2086 return;
2087
2088 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2089 return;
2090 }
2091 break;
2092
2093 case SET:
2094 /* First do special simplification of bit-field references. */
2095 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2096 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2097 optimize_bit_field (x, insn, 0);
2098 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2099 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2100 optimize_bit_field (x, insn, NULL_PTR);
2101
2102 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2103 into a register and then store it back out. */
2104 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2105 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2106 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2107 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2108 > GET_MODE_SIZE (GET_MODE (var))))
2109 {
2110 replacement = find_fixup_replacement (replacements, var);
2111 if (replacement->new == 0)
2112 replacement->new = gen_reg_rtx (GET_MODE (var));
2113
2114 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2115 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2116 }
2117
2118 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2119 insn into a pseudo and store the low part of the pseudo into VAR. */
2120 if (GET_CODE (SET_DEST (x)) == SUBREG
2121 && SUBREG_REG (SET_DEST (x)) == var
2122 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2123 > GET_MODE_SIZE (GET_MODE (var))))
2124 {
2125 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2126 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2127 tem)),
2128 insn);
2129 break;
2130 }
2131
2132 {
2133 rtx dest = SET_DEST (x);
2134 rtx src = SET_SRC (x);
2135 #ifdef HAVE_insv
2136 rtx outerdest = dest;
2137 #endif
2138
2139 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2140 || GET_CODE (dest) == SIGN_EXTRACT
2141 || GET_CODE (dest) == ZERO_EXTRACT)
2142 dest = XEXP (dest, 0);
2143
2144 if (GET_CODE (src) == SUBREG)
2145 src = XEXP (src, 0);
2146
2147 /* If VAR does not appear at the top level of the SET
2148 just scan the lower levels of the tree. */
2149
2150 if (src != var && dest != var)
2151 break;
2152
2153 /* We will need to rerecognize this insn. */
2154 INSN_CODE (insn) = -1;
2155
2156 #ifdef HAVE_insv
2157 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2158 {
2159 /* Since this case will return, ensure we fixup all the
2160 operands here. */
2161 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2162 insn, replacements);
2163 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2164 insn, replacements);
2165 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2166 insn, replacements);
2167
2168 tem = XEXP (outerdest, 0);
2169
2170 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2171 that may appear inside a ZERO_EXTRACT.
2172 This was legitimate when the MEM was a REG. */
2173 if (GET_CODE (tem) == SUBREG
2174 && SUBREG_REG (tem) == var)
2175 tem = fixup_memory_subreg (tem, insn, 0);
2176 else
2177 tem = fixup_stack_1 (tem, insn);
2178
2179 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2180 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2181 && ! mode_dependent_address_p (XEXP (tem, 0))
2182 && ! MEM_VOLATILE_P (tem))
2183 {
2184 enum machine_mode wanted_mode;
2185 enum machine_mode is_mode = GET_MODE (tem);
2186 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2187
2188 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2189 if (wanted_mode == VOIDmode)
2190 wanted_mode = word_mode;
2191
2192 /* If we have a narrower mode, we can do something. */
2193 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2194 {
2195 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2196 rtx old_pos = XEXP (outerdest, 2);
2197 rtx newmem;
2198
2199 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2200 offset = (GET_MODE_SIZE (is_mode)
2201 - GET_MODE_SIZE (wanted_mode) - offset);
2202
2203 pos %= GET_MODE_BITSIZE (wanted_mode);
2204
2205 newmem = gen_rtx_MEM (wanted_mode,
2206 plus_constant (XEXP (tem, 0), offset));
2207 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2208 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2209 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2210
2211 /* Make the change and see if the insn remains valid. */
2212 INSN_CODE (insn) = -1;
2213 XEXP (outerdest, 0) = newmem;
2214 XEXP (outerdest, 2) = GEN_INT (pos);
2215
2216 if (recog_memoized (insn) >= 0)
2217 return;
2218
2219 /* Otherwise, restore old position. XEXP (outerdest, 0) will be
2220 replaced later. */
2221 XEXP (outerdest, 2) = old_pos;
2222 }
2223 }
2224
2225 /* If we get here, the bit-field store doesn't allow memory
2226 or isn't located at a constant position. Load the value into
2227 a register, do the store, and put it back into memory. */
2228
2229 tem1 = gen_reg_rtx (GET_MODE (tem));
2230 emit_insn_before (gen_move_insn (tem1, tem), insn);
2231 emit_insn_after (gen_move_insn (tem, tem1), insn);
2232 XEXP (outerdest, 0) = tem1;
2233 return;
2234 }
2235 #endif
2236
2237 /* STRICT_LOW_PART is a no-op on memory references
2238 and it can cause combinations to be unrecognizable,
2239 so eliminate it. */
2240
2241 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2242 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2243
2244 /* A valid insn to copy VAR into or out of a register
2245 must be left alone, to avoid an infinite loop here.
2246 If the reference to VAR is by a subreg, fix that up,
2247 since SUBREG is not valid for a memref.
2248 Also fix up the address of the stack slot.
2249
2250 Note that we must not try to recognize the insn until
2251 after we know that we have valid addresses and no
2252 (subreg (mem ...) ...) constructs, since these interfere
2253 with determining the validity of the insn. */
2254
2255 if ((SET_SRC (x) == var
2256 || (GET_CODE (SET_SRC (x)) == SUBREG
2257 && SUBREG_REG (SET_SRC (x)) == var))
2258 && (GET_CODE (SET_DEST (x)) == REG
2259 || (GET_CODE (SET_DEST (x)) == SUBREG
2260 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2261 && GET_MODE (var) == promoted_mode
2262 && x == single_set (insn))
2263 {
2264 rtx pat;
2265
2266 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2267 if (replacement->new)
2268 SET_SRC (x) = replacement->new;
2269 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2270 SET_SRC (x) = replacement->new
2271 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2272 else
2273 SET_SRC (x) = replacement->new
2274 = fixup_stack_1 (SET_SRC (x), insn);
2275
2276 if (recog_memoized (insn) >= 0)
2277 return;
2278
2279 /* INSN is not valid, but we know that we want to
2280 copy SET_SRC (x) to SET_DEST (x) in some way. So
2281 we generate the move and see whether it requires more
2282 than one insn. If it does, we emit those insns and
2283 delete INSN. Otherwise, we can just replace the pattern
2284 of INSN; we have already verified above that INSN has
2285 no other function than to do X. */
2286
2287 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2288 if (GET_CODE (pat) == SEQUENCE)
2289 {
2290 emit_insn_after (pat, insn);
2291 PUT_CODE (insn, NOTE);
2292 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2293 NOTE_SOURCE_FILE (insn) = 0;
2294 }
2295 else
2296 PATTERN (insn) = pat;
2297
2298 return;
2299 }
2300
2301 if ((SET_DEST (x) == var
2302 || (GET_CODE (SET_DEST (x)) == SUBREG
2303 && SUBREG_REG (SET_DEST (x)) == var))
2304 && (GET_CODE (SET_SRC (x)) == REG
2305 || (GET_CODE (SET_SRC (x)) == SUBREG
2306 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2307 && GET_MODE (var) == promoted_mode
2308 && x == single_set (insn))
2309 {
2310 rtx pat;
2311
2312 if (GET_CODE (SET_DEST (x)) == SUBREG)
2313 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2314 else
2315 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2316
2317 if (recog_memoized (insn) >= 0)
2318 return;
2319
2320 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2321 if (GET_CODE (pat) == SEQUENCE)
2322 {
2323 emit_insn_after (pat, insn);
2324 PUT_CODE (insn, NOTE);
2325 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2326 NOTE_SOURCE_FILE (insn) = 0;
2327 }
2328 else
2329 PATTERN (insn) = pat;
2330
2331 return;
2332 }
2333
2334 /* Otherwise, storing into VAR must be handled specially
2335 by storing into a temporary and copying that into VAR
2336 with a new insn after this one. Note that this case
2337 will be used when storing into a promoted scalar since
2338 the insn will now have different modes on the input
2339 and output and hence will be invalid (except for the case
2340 of setting it to a constant, which does not need any
2341 change if it is valid). We generate extra code in that case,
2342 but combine.c will eliminate it. */
2343
2344 if (dest == var)
2345 {
2346 rtx temp;
2347 rtx fixeddest = SET_DEST (x);
2348
2349 /* A STRICT_LOW_PART around a MEM can be discarded. */
2350 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2351 fixeddest = XEXP (fixeddest, 0);
2352 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2353 if (GET_CODE (fixeddest) == SUBREG)
2354 {
2355 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2356 promoted_mode = GET_MODE (fixeddest);
2357 }
2358 else
2359 fixeddest = fixup_stack_1 (fixeddest, insn);
2360
2361 temp = gen_reg_rtx (promoted_mode);
2362
2363 emit_insn_after (gen_move_insn (fixeddest,
2364 gen_lowpart (GET_MODE (fixeddest),
2365 temp)),
2366 insn);
2367
2368 SET_DEST (x) = temp;
2369 }
2370 }
2371
2372 default:
2373 break;
2374 }
2375
2376 /* Nothing special about this RTX; fix its operands. */
2377
2378 fmt = GET_RTX_FORMAT (code);
2379 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2380 {
2381 if (fmt[i] == 'e')
2382 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2383 if (fmt[i] == 'E')
2384 {
2385 register int j;
2386 for (j = 0; j < XVECLEN (x, i); j++)
2387 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2388 insn, replacements);
2389 }
2390 }
2391 }
2392 \f
2393 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2394 return an rtx (MEM:m1 newaddr) which is equivalent.
2395 If any insns must be emitted to compute NEWADDR, put them before INSN.
2396
2397 UNCRITICAL nonzero means accept paradoxical subregs.
2398 This is used for subregs found inside REG_NOTES. */
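/* For example (illustrative; assumes a 32-bit little-endian target):
   (SUBREG:SI (MEM:DI addr) 1) becomes (MEM:SI (PLUS addr (const_int 4))),
   since word 1 of the DImode reference starts 4 bytes into it.  */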
2399
2400 static rtx
2401 fixup_memory_subreg (x, insn, uncritical)
2402 rtx x;
2403 rtx insn;
2404 int uncritical;
2405 {
2406 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2407 rtx addr = XEXP (SUBREG_REG (x), 0);
2408 enum machine_mode mode = GET_MODE (x);
2409 rtx result;
2410
2411 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2412 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2413 && ! uncritical)
2414 abort ();
2415
2416 if (BYTES_BIG_ENDIAN)
2417 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2418 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2419 addr = plus_constant (addr, offset);
2420 if (!flag_force_addr && memory_address_p (mode, addr))
2421 /* Shortcut if no insns need be emitted. */
2422 return change_address (SUBREG_REG (x), mode, addr);
2423 start_sequence ();
2424 result = change_address (SUBREG_REG (x), mode, addr);
2425 emit_insn_before (gen_sequence (), insn);
2426 end_sequence ();
2427 return result;
2428 }
2429
2430 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2431 Replace subexpressions of X in place.
2432 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2433 Otherwise return X, with its contents possibly altered.
2434
2435 If any insns must be emitted to compute NEWADDR, put them before INSN.
2436
2437 UNCRITICAL is as in fixup_memory_subreg. */
2438
2439 static rtx
2440 walk_fixup_memory_subreg (x, insn, uncritical)
2441 register rtx x;
2442 rtx insn;
2443 int uncritical;
2444 {
2445 register enum rtx_code code;
2446 register char *fmt;
2447 register int i;
2448
2449 if (x == 0)
2450 return 0;
2451
2452 code = GET_CODE (x);
2453
2454 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2455 return fixup_memory_subreg (x, insn, uncritical);
2456
2457 /* Nothing special about this RTX; fix its operands. */
2458
2459 fmt = GET_RTX_FORMAT (code);
2460 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2461 {
2462 if (fmt[i] == 'e')
2463 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2464 if (fmt[i] == 'E')
2465 {
2466 register int j;
2467 for (j = 0; j < XVECLEN (x, i); j++)
2468 XVECEXP (x, i, j)
2469 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2470 }
2471 }
2472 return x;
2473 }
2474 \f
2475 /* For each memory ref within X, if it refers to a stack slot
2476 with an out of range displacement, put the address in a temp register
2477 (emitting new insns before INSN to load these registers)
2478 and alter the memory ref to use that register.
2479 Replace each such MEM rtx with a copy, to avoid clobberage. */
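/* For instance (illustrative only): if the target cannot encode
   (mem:SI (plus (reg fp) (const_int 40000))) as a valid address, the
   sum is computed into a new pseudo before INSN and the reference
   becomes (mem:SI (reg temp)).  */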
2480
2481 static rtx
2482 fixup_stack_1 (x, insn)
2483 rtx x;
2484 rtx insn;
2485 {
2486 register int i;
2487 register RTX_CODE code = GET_CODE (x);
2488 register char *fmt;
2489
2490 if (code == MEM)
2491 {
2492 register rtx ad = XEXP (x, 0);
2493 /* If we have address of a stack slot but it's not valid
2494 (displacement is too large), compute the sum in a register. */
2495 if (GET_CODE (ad) == PLUS
2496 && GET_CODE (XEXP (ad, 0)) == REG
2497 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2498 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2499 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2500 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2501 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2502 #endif
2503 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2504 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2505 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2506 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2507 {
2508 rtx temp, seq;
2509 if (memory_address_p (GET_MODE (x), ad))
2510 return x;
2511
2512 start_sequence ();
2513 temp = copy_to_reg (ad);
2514 seq = gen_sequence ();
2515 end_sequence ();
2516 emit_insn_before (seq, insn);
2517 return change_address (x, VOIDmode, temp);
2518 }
2519 return x;
2520 }
2521
2522 fmt = GET_RTX_FORMAT (code);
2523 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2524 {
2525 if (fmt[i] == 'e')
2526 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2527 if (fmt[i] == 'E')
2528 {
2529 register int j;
2530 for (j = 0; j < XVECLEN (x, i); j++)
2531 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2532 }
2533 }
2534 return x;
2535 }
2536 \f
2537 /* Optimization: a bit-field instruction whose field
2538 happens to be a byte or halfword in memory
2539 can be changed to a move instruction.
2540
2541 We call here when INSN is an insn to examine or store into a bit-field.
2542 BODY is the SET-rtx to be altered.
2543
2544 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2545 (Currently this is called only from function.c, and EQUIV_MEM
2546 is always 0.) */
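/* A worked example (illustrative; assumes BYTES_BIG_ENDIAN and
   BITS_BIG_ENDIAN are both 0): extracting 8 bits at bit position 8
   from a word in memory yields QImode, and the byte offset is
   8 / BITS_PER_UNIT == 1, so the extraction can become a plain QImode
   move from the byte at address + 1.  */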
2547
2548 static void
2549 optimize_bit_field (body, insn, equiv_mem)
2550 rtx body;
2551 rtx insn;
2552 rtx *equiv_mem;
2553 {
2554 register rtx bitfield;
2555 int destflag;
2556 rtx seq = 0;
2557 enum machine_mode mode;
2558
2559 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2560 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2561 bitfield = SET_DEST (body), destflag = 1;
2562 else
2563 bitfield = SET_SRC (body), destflag = 0;
2564
2565 /* First check that the field being stored has constant size and position
2566 and is in fact a byte or halfword suitably aligned. */
2567
2568 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2569 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2570 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2571 != BLKmode)
2572 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2573 {
2574 register rtx memref = 0;
2575
2576 /* Now check that the containing word is memory, not a register,
2577 and that it is safe to change the machine mode. */
2578
2579 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2580 memref = XEXP (bitfield, 0);
2581 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2582 && equiv_mem != 0)
2583 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2584 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2585 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2586 memref = SUBREG_REG (XEXP (bitfield, 0));
2587 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2588 && equiv_mem != 0
2589 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2590 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2591
2592 if (memref
2593 && ! mode_dependent_address_p (XEXP (memref, 0))
2594 && ! MEM_VOLATILE_P (memref))
2595 {
2596 /* Now adjust the address, first for any subreg'ing
2597 that we are now getting rid of,
2598 and then for which byte of the word is wanted. */
2599
2600 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2601 rtx insns;
2602
2603 /* Adjust OFFSET to count bits from low-address byte. */
2604 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2605 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2606 - offset - INTVAL (XEXP (bitfield, 1)));
2607
2608 /* Adjust OFFSET to count bytes from low-address byte. */
2609 offset /= BITS_PER_UNIT;
2610 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2611 {
2612 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2613 if (BYTES_BIG_ENDIAN)
2614 offset -= (MIN (UNITS_PER_WORD,
2615 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2616 - MIN (UNITS_PER_WORD,
2617 GET_MODE_SIZE (GET_MODE (memref))));
2618 }
2619
2620 start_sequence ();
2621 memref = change_address (memref, mode,
2622 plus_constant (XEXP (memref, 0), offset));
2623 insns = get_insns ();
2624 end_sequence ();
2625 emit_insns_before (insns, insn);
2626
2627 /* Store this memory reference where
2628 we found the bit field reference. */
2629
2630 if (destflag)
2631 {
2632 validate_change (insn, &SET_DEST (body), memref, 1);
2633 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2634 {
2635 rtx src = SET_SRC (body);
2636 while (GET_CODE (src) == SUBREG
2637 && SUBREG_WORD (src) == 0)
2638 src = SUBREG_REG (src);
2639 if (GET_MODE (src) != GET_MODE (memref))
2640 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2641 validate_change (insn, &SET_SRC (body), src, 1);
2642 }
2643 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2644 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2645 /* This shouldn't happen because anything that didn't have
2646 one of these modes should have been converted explicitly
2647 and then referenced through a subreg.
2648 This is so because the original bit-field was
2649 handled by agg_mode and so its tree structure had
2650 the same mode that memref now has. */
2651 abort ();
2652 }
2653 else
2654 {
2655 rtx dest = SET_DEST (body);
2656
2657 while (GET_CODE (dest) == SUBREG
2658 && SUBREG_WORD (dest) == 0
2659 && (GET_MODE_CLASS (GET_MODE (dest))
2660 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2661 dest = SUBREG_REG (dest);
2662
2663 validate_change (insn, &SET_DEST (body), dest, 1);
2664
2665 if (GET_MODE (dest) == GET_MODE (memref))
2666 validate_change (insn, &SET_SRC (body), memref, 1);
2667 else
2668 {
2669 /* Convert the mem ref to the destination mode. */
2670 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2671
2672 start_sequence ();
2673 convert_move (newreg, memref,
2674 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2675 seq = get_insns ();
2676 end_sequence ();
2677
2678 validate_change (insn, &SET_SRC (body), newreg, 1);
2679 }
2680 }
2681
2682 /* See if we can convert this extraction or insertion into
2683 a simple move insn. We might not be able to do so if this
2684 was, for example, part of a PARALLEL.
2685
2686 If we succeed, write out any needed conversions. If we fail,
2687 it is hard to guess why we failed, so don't do anything
2688 special; just let the optimization be suppressed. */
2689
2690 if (apply_change_group () && seq)
2691 emit_insns_before (seq, insn);
2692 }
2693 }
2694 }
2695 \f
2696 /* These routines are responsible for converting virtual register references
2697 to the actual hard register references once RTL generation is complete.
2698
2699 The following five variables are used for communication between the
2700 routines. They contain the offsets of the virtual registers from their
2701 respective hard registers. */
2702
2703 static int in_arg_offset;
2704 static int var_offset;
2705 static int dynamic_offset;
2706 static int out_arg_offset;
2707 static int cfa_offset;
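
/* For example (illustrative; the actual offsets are target-dependent):
   with var_offset == -16, a reference such as
   (plus:SI (reg virtual-stack-vars) (const_int 8)) is rewritten as
   (plus:SI (reg frame-pointer) (const_int -8)).  */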
2708
2709 /* In most machines, the stack pointer register is equivalent to the bottom
2710 of the stack. */
2711
2712 #ifndef STACK_POINTER_OFFSET
2713 #define STACK_POINTER_OFFSET 0
2714 #endif
2715
2716 /* If not defined, pick an appropriate default for the offset of dynamically
2717 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2718 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2719
2720 #ifndef STACK_DYNAMIC_OFFSET
2721
2722 #ifdef ACCUMULATE_OUTGOING_ARGS
2723 /* The bottom of the stack points to the actual arguments. If
2724 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2725 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2726 stack space for register parameters is not pushed by the caller, but
2727 rather part of the fixed stack areas and hence not included in
2728 `current_function_outgoing_args_size'. Nevertheless, we must allow
2729 for it when allocating stack dynamic objects. */
2730
2731 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2732 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2733 (current_function_outgoing_args_size \
2734 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2735
2736 #else
2737 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2738 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2739 #endif
2740
2741 #else
2742 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2743 #endif
2744 #endif
2745
2746 /* On a few machines, the CFA coincides with the arg pointer. */
2747
2748 #ifndef ARG_POINTER_CFA_OFFSET
2749 #define ARG_POINTER_CFA_OFFSET 0
2750 #endif
2751
2752
2753 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2754 its address taken. DECL is the decl for the object stored in the
2755 register, for later use if we do need to force REG into the stack.
2756 REG is overwritten by the MEM like in put_reg_into_stack. */
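/* Illustrative before/after: a pseudo (reg:SI 42) holding DECL is
   rewritten in place into (mem:SI (addressof (reg:SI 43) 42)), where
   the ADDRESSOF has Pmode, (reg:SI 43) is a fresh pseudo, and 42 is
   remembered as ADDRESSOF_REGNO for a later put_addressof_into_stack,
   should the address really be needed.  */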
2757
2758 rtx
2759 gen_mem_addressof (reg, decl)
2760 rtx reg;
2761 tree decl;
2762 {
2763 tree type = TREE_TYPE (decl);
2764 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2765 SET_ADDRESSOF_DECL (r, decl);
2766 /* If the original REG was a user-variable, then so is the REG whose
2767 address is being taken. */
2768 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2769
2770 XEXP (reg, 0) = r;
2771 PUT_CODE (reg, MEM);
2772 PUT_MODE (reg, DECL_MODE (decl));
2773 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2774 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2775 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2776
2777 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2778 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2779
2780 return reg;
2781 }
2782
2783 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2784
2785 void
2786 flush_addressof (decl)
2787 tree decl;
2788 {
2789 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2790 && DECL_RTL (decl) != 0
2791 && GET_CODE (DECL_RTL (decl)) == MEM
2792 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2793 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2794 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2795 }
2796
2797 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2798
2799 static void
2800 put_addressof_into_stack (r)
2801 rtx r;
2802 {
2803 tree decl = ADDRESSOF_DECL (r);
2804 rtx reg = XEXP (r, 0);
2805
2806 if (GET_CODE (reg) != REG)
2807 abort ();
2808
2809 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2810 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2811 ADDRESSOF_REGNO (r),
2812 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2813 }
2814
2815 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2816 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2817 the stack. */
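/* Two illustrative rewrites (sketches, not an exhaustive list):
   (mem:SI (addressof (reg:SI 100))) collapses to (reg:SI 100) when the
   insn accepts the register, so it never hits the stack; if the modes
   disagree, as in (mem:QI (addressof (reg:SI 100))), the access is
   mirrored with a bit-field extract or insert on (reg:SI 100) instead.  */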
2818
2819 static void
2820 purge_addressof_1 (loc, insn, force, store)
2821 rtx *loc;
2822 rtx insn;
2823 int force, store;
2824 {
2825 rtx x;
2826 RTX_CODE code;
2827 int i, j;
2828 char *fmt;
2829
2830 /* Re-start here to avoid recursion in common cases. */
2831 restart:
2832
2833 x = *loc;
2834 if (x == 0)
2835 return;
2836
2837 code = GET_CODE (x);
2838
2839 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2840 {
2841 rtx insns;
2842 /* We must create a copy of the rtx because it was created by
2843 overwriting a REG rtx which is always shared. */
2844 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2845
2846 if (validate_change (insn, loc, sub, 0))
2847 return;
2848
2849 start_sequence ();
2850 if (! validate_change (insn, loc,
2851 force_operand (sub, NULL_RTX),
2852 0))
2853 abort ();
2854
2855 insns = gen_sequence ();
2856 end_sequence ();
2857 emit_insns_before (insns, insn);
2858 return;
2859 }
2860 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2861 {
2862 rtx sub = XEXP (XEXP (x, 0), 0);
2863
2864 if (GET_CODE (sub) == MEM)
2865 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2866
2867 if (GET_CODE (sub) == REG
2868 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2869 {
2870 put_addressof_into_stack (XEXP (x, 0));
2871 return;
2872 }
2873 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2874 {
2875 int size_x, size_sub;
2876
2877 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2878 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2879
2880 /* Don't even consider working with paradoxical subregs,
2881 or the moral equivalent seen here. */
2882 if (size_x < size_sub
2883 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2884 {
2885 /* Do a bitfield insertion to mirror what would happen
2886 in memory. */
2887
2888 rtx val, seq;
2889
2890 if (store)
2891 {
2892 /* If we can't replace with a register, be afraid. */
2893
2894 start_sequence ();
2895 val = gen_reg_rtx (GET_MODE (x));
2896 if (! validate_change (insn, loc, val, 0))
2897 abort ();
2898 seq = gen_sequence ();
2899 end_sequence ();
2900 emit_insn_before (seq, insn);
2901
2902 start_sequence ();
2903 store_bit_field (sub, size_x, 0, GET_MODE (x),
2904 val, GET_MODE_SIZE (GET_MODE (sub)),
2905 GET_MODE_SIZE (GET_MODE (sub)));
2906
2907 seq = gen_sequence ();
2908 end_sequence ();
2909 emit_insn_after (seq, insn);
2910 }
2911 else
2912 {
2913 start_sequence ();
2914 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2915 GET_MODE (x), GET_MODE (x),
2916 GET_MODE_SIZE (GET_MODE (sub)),
2917 GET_MODE_SIZE (GET_MODE (sub)));
2918
2919 /* If we can't replace with a register, be afraid. */
2920 if (! validate_change (insn, loc, val, 0))
2921 abort ();
2922
2923 seq = gen_sequence ();
2924 end_sequence ();
2925 emit_insn_before (seq, insn);
2926 }
2927
2928 /* We replaced with a reg -- all done. */
2929 return;
2930 }
2931 }
2932 else if (validate_change (insn, loc, sub, 0))
2933 goto restart;
2934 /* Else give up and put it into the stack. */
2935 }
2936 else if (code == ADDRESSOF)
2937 {
2938 put_addressof_into_stack (x);
2939 return;
2940 }
2941 else if (code == SET)
2942 {
2943 purge_addressof_1 (&SET_DEST (x), insn, force, 1);
2944 purge_addressof_1 (&SET_SRC (x), insn, force, 0);
2945 return;
2946 }
2947
2948 /* Scan all subexpressions. */
2949 fmt = GET_RTX_FORMAT (code);
2950 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2951 {
2952 if (*fmt == 'e')
2953 purge_addressof_1 (&XEXP (x, i), insn, force, 0);
2954 else if (*fmt == 'E')
2955 for (j = 0; j < XVECLEN (x, i); j++)
2956 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
2957 }
2958 }
2959
2960 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2961 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2962 stack. */
2963
2964 void
2965 purge_addressof (insns)
2966 rtx insns;
2967 {
2968 rtx insn;
2969 for (insn = insns; insn; insn = NEXT_INSN (insn))
2970 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2971 || GET_CODE (insn) == CALL_INSN)
2972 {
2973 purge_addressof_1 (&PATTERN (insn), insn,
2974 asm_noperands (PATTERN (insn)) > 0, 0);
2975 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
2976 }
2977 }
2978 \f
2979 /* Pass through the INSNS of function FNDECL and convert virtual register
2980 references to hard register references. */
2981
2982 void
2983 instantiate_virtual_regs (fndecl, insns)
2984 tree fndecl;
2985 rtx insns;
2986 {
2987 rtx insn;
2988 int i;
2989
2990 /* Compute the offsets to use for this function. */
2991 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2992 var_offset = STARTING_FRAME_OFFSET;
2993 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2994 out_arg_offset = STACK_POINTER_OFFSET;
2995 cfa_offset = ARG_POINTER_CFA_OFFSET;
2996
2997 /* Scan all variables and parameters of this function. For each that is
2998 in memory, instantiate all virtual registers if the result is a valid
2999 address. If not, we do it later. That will handle most uses of virtual
3000 regs on many machines. */
3001 instantiate_decls (fndecl, 1);
3002
3003 /* Initialize recognition, indicating that volatile is OK. */
3004 init_recog ();
3005
3006 /* Scan through all the insns, instantiating every virtual register still
3007 present. */
3008 for (insn = insns; insn; insn = NEXT_INSN (insn))
3009 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3010 || GET_CODE (insn) == CALL_INSN)
3011 {
3012 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3013 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3014 }
3015
3016 /* Instantiate the stack slots for the parm registers, for later use in
3017 addressof elimination. */
3018 for (i = 0; i < max_parm_reg; ++i)
3019 if (parm_reg_stack_loc[i])
3020 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3021
3022 /* Now instantiate the remaining register equivalences for debugging info.
3023 These will not be valid addresses. */
3024 instantiate_decls (fndecl, 0);
3025
3026 /* Indicate that, from now on, assign_stack_local should use
3027 frame_pointer_rtx. */
3028 virtuals_instantiated = 1;
3029 }
3030
3031 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3032 all virtual registers in their DECL_RTL's.
3033
3034 If VALID_ONLY, do this only if the resulting address is still valid.
3035 Otherwise, always do it. */
3036
3037 static void
3038 instantiate_decls (fndecl, valid_only)
3039 tree fndecl;
3040 int valid_only;
3041 {
3042 tree decl;
3043
3044 if (DECL_SAVED_INSNS (fndecl))
3045 /* When compiling an inline function, the obstack used for
3046 rtl allocation is the maybepermanent_obstack. Calling
3047 `resume_temporary_allocation' switches us back to that
3048 obstack while we process this function's parameters. */
3049 resume_temporary_allocation ();
3050
3051 /* Process all parameters of the function. */
3052 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3053 {
3054 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3055
3056 instantiate_decl (DECL_RTL (decl), size, valid_only);
3057
3058 /* If the parameter was promoted, then the incoming RTL mode may be
3059 larger than the declared type size. We must use the larger of
3060 the two sizes. */
3061 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3062 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3063 }
3064
3065 /* Now process all variables defined in the function or its subblocks. */
3066 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3067
3068 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3069 {
3070 /* Save all rtl allocated for this function by raising the
3071 high-water mark on the maybepermanent_obstack. */
3072 preserve_data ();
3073 /* All further rtl allocation is now done in the current_obstack. */
3074 rtl_in_current_obstack ();
3075 }
3076 }
3077
3078 /* Subroutine of instantiate_decls: Process all decls in the given
3079 BLOCK node and all its subblocks. */
3080
3081 static void
3082 instantiate_decls_1 (let, valid_only)
3083 tree let;
3084 int valid_only;
3085 {
3086 tree t;
3087
3088 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3089 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3090 valid_only);
3091
3092 /* Process all subblocks. */
3093 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3094 instantiate_decls_1 (t, valid_only);
3095 }
3096
3097 /* Subroutine of the preceding procedures: Given RTL representing a
3098 decl and the size of the object, do any instantiation required.
3099
3100 If VALID_ONLY is non-zero, it means that the RTL should only be
3101 changed if the new address is valid. */
3102
3103 static void
3104 instantiate_decl (x, size, valid_only)
3105 rtx x;
3106 int size;
3107 int valid_only;
3108 {
3109 enum machine_mode mode;
3110 rtx addr;
3111
3112 /* If this is not a MEM, no need to do anything. Similarly if the
3113 address is a constant or a register that is not a virtual register. */
3114
3115 if (x == 0 || GET_CODE (x) != MEM)
3116 return;
3117
3118 addr = XEXP (x, 0);
3119 if (CONSTANT_P (addr)
3120 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3121 || (GET_CODE (addr) == REG
3122 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3123 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3124 return;
3125
3126 /* If we should only do this if the address is valid, copy the address.
3127 We need to do this so we can undo any changes that might make the
3128 address invalid. This copy is unfortunate, but probably can't be
3129 avoided. */
3130
3131 if (valid_only)
3132 addr = copy_rtx (addr);
3133
3134 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3135
3136 if (valid_only)
3137 {
3138 /* Now verify that the resulting address is valid for every integer or
3139 floating-point mode up to and including SIZE bytes long. We do this
3140 since the object might be accessed in any mode and frame addresses
3141 are shared. */
3142
3143 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3144 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3145 mode = GET_MODE_WIDER_MODE (mode))
3146 if (! memory_address_p (mode, addr))
3147 return;
3148
3149 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3150 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3151 mode = GET_MODE_WIDER_MODE (mode))
3152 if (! memory_address_p (mode, addr))
3153 return;
3154 }
3155
3156 /* Put back the address now that we have updated it and we either know
3157 it is valid or we don't care whether it is valid. */
3158
3159 XEXP (x, 0) = addr;
3160 }
3161 \f
3162 /* Given a pointer to a piece of rtx and an optional pointer to the
3163 containing object, instantiate any virtual registers present in it.
3164
3165 If EXTRA_INSNS, we always do the replacement and generate
3166 any extra insns before OBJECT. If it is zero, we do nothing if the
3167 replacement is not valid.
3168
3169 Return 1 if we either had nothing to do or if we were able to do the
3170 needed replacement. Return 0 otherwise; we only return zero if
3171 EXTRA_INSNS is zero.
3172
3173 We first try some simple transformations to avoid the creation of extra
3174 pseudos. */
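/* A simple illustrative case: for
   (set (reg R) (plus (reg virtual-incoming-args) (const_int 4)))
   the PLUS case below substitutes the hard register and folds the
   offset, yielding
   (set (reg R) (plus (reg arg-pointer) (const_int 4 + in_arg_offset))),
   or just the bare register if the folded offset is zero.  */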
3175
3176 static int
3177 instantiate_virtual_regs_1 (loc, object, extra_insns)
3178 rtx *loc;
3179 rtx object;
3180 int extra_insns;
3181 {
3182 rtx x;
3183 RTX_CODE code;
3184 rtx new = 0;
3185 HOST_WIDE_INT offset;
3186 rtx temp;
3187 rtx seq;
3188 int i, j;
3189 char *fmt;
3190
3191 /* Re-start here to avoid recursion in common cases. */
3192 restart:
3193
3194 x = *loc;
3195 if (x == 0)
3196 return 1;
3197
3198 code = GET_CODE (x);
3199
3200 /* Check for some special cases. */
3201 switch (code)
3202 {
3203 case CONST_INT:
3204 case CONST_DOUBLE:
3205 case CONST:
3206 case SYMBOL_REF:
3207 case CODE_LABEL:
3208 case PC:
3209 case CC0:
3210 case ASM_INPUT:
3211 case ADDR_VEC:
3212 case ADDR_DIFF_VEC:
3213 case RETURN:
3214 return 1;
3215
3216 case SET:
3217 /* We are allowed to set the virtual registers. This means that
3218 the actual register should receive the source minus the
3219 appropriate offset. This is used, for example, in the handling
3220 of non-local gotos. */
3221 if (SET_DEST (x) == virtual_incoming_args_rtx)
3222 new = arg_pointer_rtx, offset = - in_arg_offset;
3223 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3224 new = frame_pointer_rtx, offset = - var_offset;
3225 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3226 new = stack_pointer_rtx, offset = - dynamic_offset;
3227 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3228 new = stack_pointer_rtx, offset = - out_arg_offset;
3229 else if (SET_DEST (x) == virtual_cfa_rtx)
3230 new = arg_pointer_rtx, offset = - cfa_offset;
3231
3232 if (new)
3233 {
3234 /* The only valid sources here are PLUS or REG. Just do
3235 the simplest possible thing to handle them. */
3236 if (GET_CODE (SET_SRC (x)) != REG
3237 && GET_CODE (SET_SRC (x)) != PLUS)
3238 abort ();
3239
3240 start_sequence ();
3241 if (GET_CODE (SET_SRC (x)) != REG)
3242 temp = force_operand (SET_SRC (x), NULL_RTX);
3243 else
3244 temp = SET_SRC (x);
3245 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3246 seq = get_insns ();
3247 end_sequence ();
3248
3249 emit_insns_before (seq, object);
3250 SET_DEST (x) = new;
3251
3252 if (! validate_change (object, &SET_SRC (x), temp, 0)
3253 || ! extra_insns)
3254 abort ();
3255
3256 return 1;
3257 }
3258
3259 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3260 loc = &SET_SRC (x);
3261 goto restart;
3262
3263 case PLUS:
3264 /* Handle special case of virtual register plus constant. */
3265 if (CONSTANT_P (XEXP (x, 1)))
3266 {
3267 rtx old, new_offset;
3268
3269 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3270 if (GET_CODE (XEXP (x, 0)) == PLUS)
3271 {
3272 rtx inner = XEXP (XEXP (x, 0), 0);
3273
3274 if (inner == virtual_incoming_args_rtx)
3275 new = arg_pointer_rtx, offset = in_arg_offset;
3276 else if (inner == virtual_stack_vars_rtx)
3277 new = frame_pointer_rtx, offset = var_offset;
3278 else if (inner == virtual_stack_dynamic_rtx)
3279 new = stack_pointer_rtx, offset = dynamic_offset;
3280 else if (inner == virtual_outgoing_args_rtx)
3281 new = stack_pointer_rtx, offset = out_arg_offset;
3282 else if (inner == virtual_cfa_rtx)
3283 new = arg_pointer_rtx, offset = cfa_offset;
3284 else
3285 {
3286 loc = &XEXP (x, 0);
3287 goto restart;
3288 }
3289
3290 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3291 extra_insns);
3292 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3293 }
3294
3295 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3296 new = arg_pointer_rtx, offset = in_arg_offset;
3297 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3298 new = frame_pointer_rtx, offset = var_offset;
3299 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3300 new = stack_pointer_rtx, offset = dynamic_offset;
3301 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3302 new = stack_pointer_rtx, offset = out_arg_offset;
3303 else if (XEXP (x, 0) == virtual_cfa_rtx)
3304 new = arg_pointer_rtx, offset = cfa_offset;
3305 else
3306 {
3307 /* We know the second operand is a constant. Unless the
3308 first operand is a REG (which has already been checked),
3309 it needs to be checked. */
3310 if (GET_CODE (XEXP (x, 0)) != REG)
3311 {
3312 loc = &XEXP (x, 0);
3313 goto restart;
3314 }
3315 return 1;
3316 }
3317
3318 new_offset = plus_constant (XEXP (x, 1), offset);
3319
3320 /* If the new constant is zero, try to replace the sum with just
3321 the register. */
3322 if (new_offset == const0_rtx
3323 && validate_change (object, loc, new, 0))
3324 return 1;
3325
3326 /* Next try to replace the register and new offset.
3327 There are two changes to validate here and we can't assume that,
3328 when the old offset equals the new one, just changing the register
3329 will yield a valid insn. In the interests of a little efficiency,
3330 however, we only call validate_change once (we don't queue up the
3331 changes and then call apply_change_group). */
3332
3333 old = XEXP (x, 0);
3334 if (offset == 0
3335 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3336 : (XEXP (x, 0) = new,
3337 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3338 {
3339 if (! extra_insns)
3340 {
3341 XEXP (x, 0) = old;
3342 return 0;
3343 }
3344
3345 /* Otherwise copy the new constant into a register and replace
3346 the constant with that register. */
3347 temp = gen_reg_rtx (Pmode);
3348 XEXP (x, 0) = new;
3349 if (validate_change (object, &XEXP (x, 1), temp, 0))
3350 emit_insn_before (gen_move_insn (temp, new_offset), object);
3351 else
3352 {
3353 /* If that didn't work, replace this expression with a
3354 register containing the sum. */
3355
3356 XEXP (x, 0) = old;
3357 new = gen_rtx_PLUS (Pmode, new, new_offset);
3358
3359 start_sequence ();
3360 temp = force_operand (new, NULL_RTX);
3361 seq = get_insns ();
3362 end_sequence ();
3363
3364 emit_insns_before (seq, object);
3365 if (! validate_change (object, loc, temp, 0)
3366 && ! validate_replace_rtx (x, temp, object))
3367 abort ();
3368 }
3369 }
3370
3371 return 1;
3372 }
3373
3374 /* Fall through to generic two-operand expression case. */
3375 case EXPR_LIST:
3376 case CALL:
3377 case COMPARE:
3378 case MINUS:
3379 case MULT:
3380 case DIV: case UDIV:
3381 case MOD: case UMOD:
3382 case AND: case IOR: case XOR:
3383 case ROTATERT: case ROTATE:
3384 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3385 case NE: case EQ:
3386 case GE: case GT: case GEU: case GTU:
3387 case LE: case LT: case LEU: case LTU:
3388 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3389 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3390 loc = &XEXP (x, 0);
3391 goto restart;
3392
3393 case MEM:
3394 /* Most cases of MEM that convert to valid addresses have already been
3395 handled by our scan of decls. The only special handling we
3396 need here is to make a copy of the rtx to ensure it isn't being
3397 shared if we have to change it to a pseudo.
3398
3399 If the rtx is a simple reference to an address via a virtual register,
3400 it can potentially be shared. In such cases, first try to make it
3401 a valid address, which can also be shared. Otherwise, copy it and
3402 proceed normally.
3403
3404 First check for common cases that need no processing. These are
3405 usually due to instantiation already being done on a previous instance
3406 of a shared rtx. */
3407
3408 temp = XEXP (x, 0);
3409 if (CONSTANT_ADDRESS_P (temp)
3410 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3411 || temp == arg_pointer_rtx
3412 #endif
3413 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3414 || temp == hard_frame_pointer_rtx
3415 #endif
3416 || temp == frame_pointer_rtx)
3417 return 1;
3418
3419 if (GET_CODE (temp) == PLUS
3420 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3421 && (XEXP (temp, 0) == frame_pointer_rtx
3422 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3423 || XEXP (temp, 0) == hard_frame_pointer_rtx
3424 #endif
3425 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3426 || XEXP (temp, 0) == arg_pointer_rtx
3427 #endif
3428 ))
3429 return 1;
3430
3431 if (temp == virtual_stack_vars_rtx
3432 || temp == virtual_incoming_args_rtx
3433 || (GET_CODE (temp) == PLUS
3434 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3435 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3436 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3437 {
3438 /* This MEM may be shared. If the substitution can be done without
3439 the need to generate new pseudos, we want to do it in place
3440 so all copies of the shared rtx benefit. The call below will
3441 only make substitutions if the resulting address is still
3442 valid.
3443
3444 Note that we cannot pass X as the object in the recursive call
3445 since the insn being processed may not allow all valid
3446 addresses. However, if we were not passed an object, we can
3447 only modify X without copying it if X will have a valid
3448 address.
3449
3450 ??? Also note that this can still lose if OBJECT is an insn that
3451 has fewer restrictions on an address than some other insn.
3452 In that case, we will modify the shared address. This case
3453 doesn't seem very likely, though. One case where this could
3454 happen is in the case of a USE or CLOBBER reference, but we
3455 take care of that below. */
3456
3457 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3458 object ? object : x, 0))
3459 return 1;
3460
3461 /* Otherwise make a copy and process that copy. We copy the entire
3462 RTL expression since it might be a PLUS which could also be
3463 shared. */
3464 *loc = x = copy_rtx (x);
3465 }
3466
3467 /* Fall through to generic unary operation case. */
3468 case SUBREG:
3469 case STRICT_LOW_PART:
3470 case NEG: case NOT:
3471 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3472 case SIGN_EXTEND: case ZERO_EXTEND:
3473 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3474 case FLOAT: case FIX:
3475 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3476 case ABS:
3477 case SQRT:
3478 case FFS:
3479 /* These cases either have just one operand or we know that we need not
3480 check the rest of the operands. */
3481 loc = &XEXP (x, 0);
3482 goto restart;
3483
3484 case USE:
3485 case CLOBBER:
3486 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3487 go ahead and make the invalid change, but apply it to a copy. For a REG,
3488 just make the recursive call, since there's no chance of a problem. */
3489
3490 if ((GET_CODE (XEXP (x, 0)) == MEM
3491 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3492 0))
3493 || (GET_CODE (XEXP (x, 0)) == REG
3494 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3495 return 1;
3496
3497 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3498 loc = &XEXP (x, 0);
3499 goto restart;
3500
3501 case REG:
3502 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3503 in front of this insn and substitute the temporary. */
3504 if (x == virtual_incoming_args_rtx)
3505 new = arg_pointer_rtx, offset = in_arg_offset;
3506 else if (x == virtual_stack_vars_rtx)
3507 new = frame_pointer_rtx, offset = var_offset;
3508 else if (x == virtual_stack_dynamic_rtx)
3509 new = stack_pointer_rtx, offset = dynamic_offset;
3510 else if (x == virtual_outgoing_args_rtx)
3511 new = stack_pointer_rtx, offset = out_arg_offset;
3512 else if (x == virtual_cfa_rtx)
3513 new = arg_pointer_rtx, offset = cfa_offset;
3514
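/* At this point, if X was one of the virtual registers, NEW holds the
   hard register it is being replaced by and OFFSET the displacement our
   caller computed from the final frame layout.  As an illustration, a
   reference to virtual_stack_vars_rtx typically becomes
   (plus frame_pointer_rtx (const_int var_offset)).  */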
3515 if (new)
3516 {
3517 temp = plus_constant (new, offset);
3518 if (!validate_change (object, loc, temp, 0))
3519 {
3520 if (! extra_insns)
3521 return 0;
3522
3523 start_sequence ();
3524 temp = force_operand (temp, NULL_RTX);
3525 seq = get_insns ();
3526 end_sequence ();
3527
3528 emit_insns_before (seq, object);
3529 if (! validate_change (object, loc, temp, 0)
3530 && ! validate_replace_rtx (x, temp, object))
3531 abort ();
3532 }
3533 }
3534
3535 return 1;
3536
3537 case ADDRESSOF:
3538 if (GET_CODE (XEXP (x, 0)) == REG)
3539 return 1;
3540
3541 else if (GET_CODE (XEXP (x, 0)) == MEM)
3542 {
3543 /* If we have a (addressof (mem ..)), do any instantiation inside
3544 since we know we'll be making the inside valid when we finally
3545 remove the ADDRESSOF. */
3546 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3547 return 1;
3548 }
3549 break;
3550
3551 default:
3552 break;
3553 }
3554
3555 /* Scan all subexpressions. */
3556 fmt = GET_RTX_FORMAT (code);
3557 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3558 if (*fmt == 'e')
3559 {
3560 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3561 return 0;
3562 }
3563 else if (*fmt == 'E')
3564 for (j = 0; j < XVECLEN (x, i); j++)
3565 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3566 extra_insns))
3567 return 0;
3568
3569 return 1;
3570 }
3571 \f
3572 /* Optimization: assuming this function does not receive nonlocal gotos,
3573 delete the handlers for such, as well as the insns to establish
3574 and disestablish them. */
3575
3576 static void
3577 delete_handlers ()
3578 {
3579 rtx insn;
3580 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3581 {
3582 /* Delete the handler by turning off the flag that would
3583 prevent jump_optimize from deleting it.
3584 Also permit deletion of the nonlocal labels themselves
3585 if nothing local refers to them. */
3586 if (GET_CODE (insn) == CODE_LABEL)
3587 {
3588 tree t, last_t;
3589
3590 LABEL_PRESERVE_P (insn) = 0;
3591
3592 /* Remove it from the nonlocal_label list, to avoid confusing
3593 flow. */
3594 for (t = nonlocal_labels, last_t = 0; t;
3595 last_t = t, t = TREE_CHAIN (t))
3596 if (DECL_RTL (TREE_VALUE (t)) == insn)
3597 break;
3598 if (t)
3599 {
3600 if (! last_t)
3601 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3602 else
3603 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3604 }
3605 }
3606 if (GET_CODE (insn) == INSN
3607 && ((nonlocal_goto_handler_slot != 0
3608 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3609 || (nonlocal_goto_stack_level != 0
3610 && reg_mentioned_p (nonlocal_goto_stack_level,
3611 PATTERN (insn)))))
3612 delete_insn (insn);
3613 }
3614 }
3615
3616 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3617 of the current function. */
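/* The result is a chain of the form
   (expr_list (code_label ...) (expr_list (code_label ...) (nil))),
   one node per nonlocal label. */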
3618
3619 rtx
3620 nonlocal_label_rtx_list ()
3621 {
3622 tree t;
3623 rtx x = 0;
3624
3625 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3626 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3627
3628 return x;
3629 }
3630 \f
3631 /* Output a USE for any register use in RTL.
3632 This is used with -noreg to mark the extent of the lifespan
3633 of any registers used in a user-visible variable's DECL_RTL. */
3634
3635 void
3636 use_variable (rtl)
3637 rtx rtl;
3638 {
3639 if (GET_CODE (rtl) == REG)
3640 /* This is a register variable. */
3641 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3642 else if (GET_CODE (rtl) == MEM
3643 && GET_CODE (XEXP (rtl, 0)) == REG
3644 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3645 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3646 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3647 /* This is a variable-sized structure. */
3648 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3649 }
3650
3651 /* Like use_variable except that it outputs the USEs after INSN
3652 instead of at the end of the insn-chain. */
3653
3654 void
3655 use_variable_after (rtl, insn)
3656 rtx rtl, insn;
3657 {
3658 if (GET_CODE (rtl) == REG)
3659 /* This is a register variable. */
3660 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3661 else if (GET_CODE (rtl) == MEM
3662 && GET_CODE (XEXP (rtl, 0)) == REG
3663 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3664 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3665 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3666 /* This is a variable-sized structure. */
3667 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3668 }
3669 \f
3670 int
3671 max_parm_reg_num ()
3672 {
3673 return max_parm_reg;
3674 }
3675
3676 /* Return the first insn following those generated by `assign_parms'. */
3677
3678 rtx
3679 get_first_nonparm_insn ()
3680 {
3681 if (last_parm_insn)
3682 return NEXT_INSN (last_parm_insn);
3683 return get_insns ();
3684 }
3685
3686 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3687 Crash if there is none. */
3688
3689 rtx
3690 get_first_block_beg ()
3691 {
3692 register rtx searcher;
3693 register rtx insn = get_first_nonparm_insn ();
3694
3695 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3696 if (GET_CODE (searcher) == NOTE
3697 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3698 return searcher;
3699
3700 abort (); /* Invalid call to this function. (See comments above.) */
3701 return NULL_RTX;
3702 }
3703
3704 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3705 This means a type for which function calls must pass an address to the
3706 function or get an address back from the function.
3707 EXP may be a type node or an expression (whose type is tested). */
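/* For example, with -fpcc-struct-return in effect every structure or
   union value is returned in memory, so any aggregate type yields 1;
   without it, a small struct that fits in call-clobbered return
   registers will, on most targets, yield 0. */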
3708
3709 int
3710 aggregate_value_p (exp)
3711 tree exp;
3712 {
3713 int i, regno, nregs;
3714 rtx reg;
3715 tree type;
3716 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3717 type = exp;
3718 else
3719 type = TREE_TYPE (exp);
3720
3721 if (RETURN_IN_MEMORY (type))
3722 return 1;
3723 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3724 and thus can't be returned in registers. */
3725 if (TREE_ADDRESSABLE (type))
3726 return 1;
3727 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3728 return 1;
3729 /* Make sure we have suitable call-clobbered regs to return
3730 the value in; if not, we must return it in memory. */
3731 reg = hard_function_value (type, 0);
3732
3733 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3734 it is OK. */
3735 if (GET_CODE (reg) != REG)
3736 return 0;
3737
3738 regno = REGNO (reg);
3739 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3740 for (i = 0; i < nregs; i++)
3741 if (! call_used_regs[regno + i])
3742 return 1;
3743 return 0;
3744 }
3745 \f
3746 /* Assign RTL expressions to the function's parameters.
3747 This may involve copying them into registers and using
3748 those registers as the RTL for them.
3749
3750 If SECOND_TIME is non-zero it means that this function is being
3751 called a second time. This is done by integrate.c when a function's
3752 compilation is deferred. We need to come back here in case the
3753 FUNCTION_ARG macro computes items needed for the rest of the compilation
3754 (such as changing which registers are fixed or caller-saved). But suppress
3755 writing any insns or setting DECL_RTL of anything in this case. */
3756
3757 void
3758 assign_parms (fndecl, second_time)
3759 tree fndecl;
3760 int second_time;
3761 {
3762 register tree parm;
3763 register rtx entry_parm = 0;
3764 register rtx stack_parm = 0;
3765 CUMULATIVE_ARGS args_so_far;
3766 enum machine_mode promoted_mode, passed_mode;
3767 enum machine_mode nominal_mode, promoted_nominal_mode;
3768 int unsignedp;
3769 /* Total space needed so far for args on the stack,
3770 given as a constant and a tree-expression. */
3771 struct args_size stack_args_size;
3772 tree fntype = TREE_TYPE (fndecl);
3773 tree fnargs = DECL_ARGUMENTS (fndecl);
3774 /* This is used for the arg pointer when referring to stack args. */
3775 rtx internal_arg_pointer;
3776 /* This is a dummy PARM_DECL that we used for the function result if
3777 the function returns a structure. */
3778 tree function_result_decl = 0;
3779 int varargs_setup = 0;
3780 rtx conversion_insns = 0;
3781
3782 /* Nonzero if the last arg is named `__builtin_va_alist',
3783 which is used on some machines for old-fashioned non-ANSI varargs.h;
3784 this should be stuck onto the stack as if it had arrived there. */
3785 int hide_last_arg
3786 = (current_function_varargs
3787 && fnargs
3788 && (parm = tree_last (fnargs)) != 0
3789 && DECL_NAME (parm)
3790 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3791 "__builtin_va_alist")));
3792
3793 /* Nonzero if function takes extra anonymous args.
3794 This means the last named arg must be on the stack
3795 right before the anonymous ones. */
3796 int stdarg
3797 = (TYPE_ARG_TYPES (fntype) != 0
3798 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3799 != void_type_node));
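/* For example, `int f (int a, ...)' has an argument-type list that does
   not end in void_type_node, so STDARG is 1; a prototyped fixed-argument
   function ends its list with void_type_node and yields 0. */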
3800
3801 current_function_stdarg = stdarg;
3802
3803 /* If the reg that the virtual arg pointer will be translated into is
3804 not a fixed reg or is the stack pointer, make a copy of the virtual
3805 arg pointer, and address parms via the copy. The frame pointer is
3806 considered fixed even though it is not marked as such.
3807
3808 The second time through, simply use ap to avoid generating rtx. */
3809
3810 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3811 || ! (fixed_regs[ARG_POINTER_REGNUM]
3812 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3813 && ! second_time)
3814 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3815 else
3816 internal_arg_pointer = virtual_incoming_args_rtx;
3817 current_function_internal_arg_pointer = internal_arg_pointer;
3818
3819 stack_args_size.constant = 0;
3820 stack_args_size.var = 0;
3821
3822 /* If struct value address is treated as the first argument, make it so. */
3823 if (aggregate_value_p (DECL_RESULT (fndecl))
3824 && ! current_function_returns_pcc_struct
3825 && struct_value_incoming_rtx == 0)
3826 {
3827 tree type = build_pointer_type (TREE_TYPE (fntype));
3828
3829 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3830
3831 DECL_ARG_TYPE (function_result_decl) = type;
3832 TREE_CHAIN (function_result_decl) = fnargs;
3833 fnargs = function_result_decl;
3834 }
3835
3836 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3837 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3838 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3839
3840 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3841 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3842 #else
3843 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3844 #endif
3845
3846 /* We haven't yet found an argument that we must push and pretend the
3847 caller did. */
3848 current_function_pretend_args_size = 0;
3849
3850 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3851 {
3852 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3853 struct args_size stack_offset;
3854 struct args_size arg_size;
3855 int passed_pointer = 0;
3856 int did_conversion = 0;
3857 tree passed_type = DECL_ARG_TYPE (parm);
3858 tree nominal_type = TREE_TYPE (parm);
3859
3860 /* Set LAST_NAMED if this is last named arg before some
3861 anonymous args. */
3862 int last_named = ((TREE_CHAIN (parm) == 0
3863 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3864 && (stdarg || current_function_varargs));
3865 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3866 most machines, if this is a varargs/stdarg function, then we treat
3867 the last named arg as if it were anonymous too. */
3868 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3869
3870 if (TREE_TYPE (parm) == error_mark_node
3871 /* This can happen after weird syntax errors
3872 or if an enum type is defined among the parms. */
3873 || TREE_CODE (parm) != PARM_DECL
3874 || passed_type == NULL)
3875 {
3876 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3877 = gen_rtx_MEM (BLKmode, const0_rtx);
3878 TREE_USED (parm) = 1;
3879 continue;
3880 }
3881
3882 /* For varargs.h functions, save info about regs and stack space
3883 used by the individual args, not including the va_alist arg. */
3884 if (hide_last_arg && last_named)
3885 current_function_args_info = args_so_far;
3886
3887 /* Find mode of arg as it is passed, and mode of arg
3888 as it should be during execution of this function. */
3889 passed_mode = TYPE_MODE (passed_type);
3890 nominal_mode = TYPE_MODE (nominal_type);
3891
3892 /* If the parm's mode is VOID, its value doesn't matter,
3893 so avoid the usual things like emit_move_insn that could crash. */
3894 if (nominal_mode == VOIDmode)
3895 {
3896 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3897 continue;
3898 }
3899
3900 /* If the parm is to be passed as a transparent union, use the
3901 type of the first field for the tests below. We have already
3902 verified that the modes are the same. */
3903 if (DECL_TRANSPARENT_UNION (parm)
3904 || TYPE_TRANSPARENT_UNION (passed_type))
3905 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3906
3907 /* See if this arg was passed by invisible reference. It is if
3908 it is an object whose size depends on the contents of the
3909 object itself or if the machine requires these objects be passed
3910 that way. */
3911
3912 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3913 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3914 || TREE_ADDRESSABLE (passed_type)
3915 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3916 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3917 passed_type, named_arg)
3918 #endif
3919 )
3920 {
3921 passed_type = nominal_type = build_pointer_type (passed_type);
3922 passed_pointer = 1;
3923 passed_mode = nominal_mode = Pmode;
3924 }
3925
3926 promoted_mode = passed_mode;
3927
3928 #ifdef PROMOTE_FUNCTION_ARGS
3929 /* Compute the mode to which the arg is actually extended. */
3930 unsignedp = TREE_UNSIGNED (passed_type);
3931 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3932 #endif
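/* For example, on a target defining PROMOTE_FUNCTION_ARGS a `short'
   argument typically arrives sign- or zero-extended to a full word, so
   PROMOTED_MODE becomes the word mode while PASSED_MODE stays HImode. */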
3933
3934 /* Let machine desc say which reg (if any) the parm arrives in.
3935 0 means it arrives on the stack. */
3936 #ifdef FUNCTION_INCOMING_ARG
3937 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3938 passed_type, named_arg);
3939 #else
3940 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3941 passed_type, named_arg);
3942 #endif
3943
3944 if (entry_parm == 0)
3945 promoted_mode = passed_mode;
3946
3947 #ifdef SETUP_INCOMING_VARARGS
3948 /* If this is the last named parameter, do any required setup for
3949 varargs or stdargs. We need to know about the case of this being an
3950 addressable type, in which case we skip the registers it
3951 would have arrived in.
3952
3953 For stdargs, LAST_NAMED will be set for two parameters, the one that
3954 is actually the last named, and the dummy parameter. We only
3955 want to do this action once.
3956
3957 Also, indicate when RTL generation is to be suppressed. */
3958 if (last_named && !varargs_setup)
3959 {
3960 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3961 current_function_pretend_args_size,
3962 second_time);
3963 varargs_setup = 1;
3964 }
3965 #endif
3966
3967 /* Determine parm's home in the stack,
3968 in case it arrives in the stack or we should pretend it did.
3969
3970 Compute the stack position and rtx where the argument arrives
3971 and its size.
3972
3973 There is one complexity here: If this was a parameter that would
3974 have been passed in registers, but wasn't only because it is
3975 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3976 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3977 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3978 0 as it was the previous time. */
3979
3980 locate_and_pad_parm (promoted_mode, passed_type,
3981 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3982 1,
3983 #else
3984 #ifdef FUNCTION_INCOMING_ARG
3985 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3986 passed_type,
3987 (named_arg
3988 || varargs_setup)) != 0,
3989 #else
3990 FUNCTION_ARG (args_so_far, promoted_mode,
3991 passed_type,
3992 named_arg || varargs_setup) != 0,
3993 #endif
3994 #endif
3995 fndecl, &stack_args_size, &stack_offset, &arg_size);
3996
3997 if (! second_time)
3998 {
3999 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4000
4001 if (offset_rtx == const0_rtx)
4002 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4003 else
4004 stack_parm = gen_rtx_MEM (promoted_mode,
4005 gen_rtx_PLUS (Pmode,
4006 internal_arg_pointer,
4007 offset_rtx));
4008
4009 /* If this is a memory ref that contains aggregate components,
4010 mark it as such for cse and loop optimize. Likewise if it
4011 is readonly. */
4012 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4013 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4014 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4015 }
4016
4017 /* If this parameter was passed both in registers and in the stack,
4018 use the copy on the stack. */
4019 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4020 entry_parm = 0;
4021
4022 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4023 /* If this parm was passed part in regs and part in memory,
4024 pretend it arrived entirely in memory
4025 by pushing the register-part onto the stack.
4026
4027 In the special case of a DImode or DFmode that is split,
4028 we could put it together in a pseudoreg directly,
4029 but for now that's not worth bothering with. */
4030
4031 if (entry_parm)
4032 {
4033 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4034 passed_type, named_arg);
4035
4036 if (nregs > 0)
4037 {
4038 current_function_pretend_args_size
4039 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4040 / (PARM_BOUNDARY / BITS_PER_UNIT)
4041 * (PARM_BOUNDARY / BITS_PER_UNIT));
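/* The computation above rounds the register-passed portion up to a
   multiple of PARM_BOUNDARY, expressed in bytes. As an illustration,
   with 4-byte words, NREGS == 3, and a 64-bit PARM_BOUNDARY, the 12
   bytes of register data are rounded up to 16. */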
4042
4043 if (! second_time)
4044 {
4045 /* Handle calls that pass values in multiple non-contiguous
4046 locations. The Irix 6 ABI has examples of this. */
4047 if (GET_CODE (entry_parm) == PARALLEL)
4048 emit_group_store (validize_mem (stack_parm), entry_parm,
4049 int_size_in_bytes (TREE_TYPE (parm)),
4050 (TYPE_ALIGN (TREE_TYPE (parm))
4051 / BITS_PER_UNIT));
4052 else
4053 move_block_from_reg (REGNO (entry_parm),
4054 validize_mem (stack_parm), nregs,
4055 int_size_in_bytes (TREE_TYPE (parm)));
4056 }
4057 entry_parm = stack_parm;
4058 }
4059 }
4060 #endif
4061
4062 /* If we didn't decide this parm came in a register,
4063 by default it came on the stack. */
4064 if (entry_parm == 0)
4065 entry_parm = stack_parm;
4066
4067 /* Record permanently how this parm was passed. */
4068 if (! second_time)
4069 DECL_INCOMING_RTL (parm) = entry_parm;
4070
4071 /* If there is actually space on the stack for this parm,
4072 count it in stack_args_size; otherwise set stack_parm to 0
4073 to indicate there is no preallocated stack slot for the parm. */
4074
4075 if (entry_parm == stack_parm
4076 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4077 /* On some machines, even if a parm value arrives in a register
4078 there is still an (uninitialized) stack slot allocated for it.
4079
4080 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4081 whether this parameter already has a stack slot allocated,
4082 because an arg block exists only if current_function_args_size
4083 is larger than some threshold, and we haven't calculated that
4084 yet. So, for now, we just assume that stack slots never exist
4085 in this case. */
4086 || REG_PARM_STACK_SPACE (fndecl) > 0
4087 #endif
4088 )
4089 {
4090 stack_args_size.constant += arg_size.constant;
4091 if (arg_size.var)
4092 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4093 }
4094 else
4095 /* No stack slot was pushed for this parm. */
4096 stack_parm = 0;
4097
4098 /* Update info on where next arg arrives in registers. */
4099
4100 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4101 passed_type, named_arg);
4102
4103 /* If this is our second time through, we are done with this parm. */
4104 if (second_time)
4105 continue;
4106
4107 /* If we can't trust the parm stack slot to be aligned enough
4108 for its ultimate type, don't use that slot after entry.
4109 We'll make another stack slot, if we need one. */
4110 {
4111 int thisparm_boundary
4112 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4113
4114 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4115 stack_parm = 0;
4116 }
4117
4118 /* If parm was passed in memory, and we need to convert it on entry,
4119 don't store it back in that same slot. */
4120 if (entry_parm != 0
4121 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4122 stack_parm = 0;
4123
4124 #if 0
4125 /* Now adjust STACK_PARM to the mode and precise location
4126 where this parameter should live during execution,
4127 if we discover that it must live in the stack during execution.
4128 To make debuggers happier on big-endian machines, we store
4129 the value in the last bytes of the space available. */
4130
4131 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4132 && stack_parm != 0)
4133 {
4134 rtx offset_rtx;
4135
4136 if (BYTES_BIG_ENDIAN
4137 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4138 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4139 - GET_MODE_SIZE (nominal_mode));
4140
4141 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4142 if (offset_rtx == const0_rtx)
4143 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4144 else
4145 stack_parm = gen_rtx_MEM (nominal_mode,
4146 gen_rtx_PLUS (Pmode,
4147 internal_arg_pointer,
4148 offset_rtx));
4149
4150 /* If this is a memory ref that contains aggregate components,
4151 mark it as such for cse and loop optimize. */
4152 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4153 }
4154 #endif /* 0 */
4155
4156 #ifdef STACK_REGS
4157 /* We need this "use" info, because the gcc-register->stack-register
4158 converter in reg-stack.c needs to know which registers are active
4159 at the start of the function. The actual parameter loading
4160 instructions are not always available by that point, since they might
4161 have been optimized away. */
4162
4163 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4164 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4165 #endif
4166
4167 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4168 in the mode in which it arrives.
4169 STACK_PARM is an RTX for a stack slot where the parameter can live
4170 during the function (in case we want to put it there).
4171 STACK_PARM is 0 if no stack slot was pushed for it.
4172
4173 Now output code if necessary to convert ENTRY_PARM to
4174 the type in which this function declares it,
4175 and store that result in an appropriate place,
4176 which may be a pseudo reg, may be STACK_PARM,
4177 or may be a local stack slot if STACK_PARM is 0.
4178
4179 Set DECL_RTL to that place. */
4180
4181 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4182 {
4183 /* If a BLKmode arrives in registers, copy it to a stack slot.
4184 Handle calls that pass values in multiple non-contiguous
4185 locations. The Irix 6 ABI has examples of this. */
4186 if (GET_CODE (entry_parm) == REG
4187 || GET_CODE (entry_parm) == PARALLEL)
4188 {
4189 int size_stored
4190 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4191 UNITS_PER_WORD);
4192
4193 /* Note that we will be storing an integral number of words.
4194 So we have to be careful to ensure that we allocate an
4195 integral number of words. We do this below in the
4196 assign_stack_local if space was not allocated in the argument
4197 list. If it was, this will not work if PARM_BOUNDARY is not
4198 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4199 if it becomes a problem. */
4200
4201 if (stack_parm == 0)
4202 {
4203 stack_parm
4204 = assign_stack_local (GET_MODE (entry_parm),
4205 size_stored, 0);
4206
4207 /* If this is a memory ref that contains aggregate
4208 components, mark it as such for cse and loop optimize. */
4209 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4210 }
4211
4212 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4213 abort ();
4214
4215 if (TREE_READONLY (parm))
4216 RTX_UNCHANGING_P (stack_parm) = 1;
4217
4218 /* Handle calls that pass values in multiple non-contiguous
4219 locations. The Irix 6 ABI has examples of this. */
4220 if (GET_CODE (entry_parm) == PARALLEL)
4221 emit_group_store (validize_mem (stack_parm), entry_parm,
4222 int_size_in_bytes (TREE_TYPE (parm)),
4223 (TYPE_ALIGN (TREE_TYPE (parm))
4224 / BITS_PER_UNIT));
4225 else
4226 move_block_from_reg (REGNO (entry_parm),
4227 validize_mem (stack_parm),
4228 size_stored / UNITS_PER_WORD,
4229 int_size_in_bytes (TREE_TYPE (parm)));
4230 }
4231 DECL_RTL (parm) = stack_parm;
4232 }
4233 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4234 && ! DECL_INLINE (fndecl))
4235 /* layout_decl may set this. */
4236 || TREE_ADDRESSABLE (parm)
4237 || TREE_SIDE_EFFECTS (parm)
4238 /* If -ffloat-store specified, don't put explicit
4239 float variables into registers. */
4240 || (flag_float_store
4241 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4242 /* Always assign pseudo to structure return or item passed
4243 by invisible reference. */
4244 || passed_pointer || parm == function_result_decl)
4245 {
4246 /* Store the parm in a pseudoregister during the function, but we
4247 may need to do it in a wider mode. */
4248
4249 register rtx parmreg;
4250 int regno, regnoi = 0, regnor = 0;
4251
4252 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4253
4254 promoted_nominal_mode
4255 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4256
4257 parmreg = gen_reg_rtx (promoted_nominal_mode);
4258 mark_user_reg (parmreg);
4259
4260 /* If this was an item that we received a pointer to, set DECL_RTL
4261 appropriately. */
4262 if (passed_pointer)
4263 {
4264 DECL_RTL (parm)
4265 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4266 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4267 }
4268 else
4269 DECL_RTL (parm) = parmreg;
4270
4271 /* Copy the value into the register. */
4272 if (nominal_mode != passed_mode
4273 || promoted_nominal_mode != promoted_mode)
4274 {
4275 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4276 mode, by the caller. We now have to convert it to
4277 NOMINAL_MODE, if different. However, PARMREG may be in
4278 a different mode than NOMINAL_MODE if it is being stored
4279 promoted.
4280
4281 If ENTRY_PARM is a hard register, it might be in a register
4282 not valid for operating in its mode (e.g., an odd-numbered
4283 register for a DFmode). In that case, moves are the only
4284 thing valid, so we can't do a convert from there. This
4285 occurs when the calling sequence allows such misaligned
4286 usage.
4287
4288 In addition, the conversion may involve a call, which could
4289 clobber parameters which haven't been copied to pseudo
4290 registers yet. Therefore, we must first copy the parm to
4291 a pseudo reg here, and save the conversion until after all
4292 parameters have been moved. */
4293
4294 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4295
4296 emit_move_insn (tempreg, validize_mem (entry_parm));
4297
4298 push_to_sequence (conversion_insns);
4299 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4300
4301 expand_assignment (parm,
4302 make_tree (nominal_type, tempreg), 0, 0);
4303 conversion_insns = get_insns ();
4304 did_conversion = 1;
4305 end_sequence ();
4306 }
4307 else
4308 emit_move_insn (parmreg, validize_mem (entry_parm));
4309
4310 /* If we were passed a pointer but the actual value
4311 can safely live in a register, put it in one. */
4312 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4313 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4314 && ! DECL_INLINE (fndecl))
4315 /* layout_decl may set this. */
4316 || TREE_ADDRESSABLE (parm)
4317 || TREE_SIDE_EFFECTS (parm)
4318 /* If -ffloat-store specified, don't put explicit
4319 float variables into registers. */
4320 || (flag_float_store
4321 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4322 {
4323 /* We can't use nominal_mode, because it will have been set to
4324 Pmode above. We must use the actual mode of the parm. */
4325 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4326 mark_user_reg (parmreg);
4327 emit_move_insn (parmreg, DECL_RTL (parm));
4328 DECL_RTL (parm) = parmreg;
4329 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4330 now the parm. */
4331 stack_parm = 0;
4332 }
4333 #ifdef FUNCTION_ARG_CALLEE_COPIES
4334 /* If we are passed an arg by reference and it is our responsibility
4335 to make a copy, do it now.
4336 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4337 original argument, so we must recreate them in the call to
4338 FUNCTION_ARG_CALLEE_COPIES. */
4339 /* ??? Later add code to skip the copy when the argument isn't
4340 modified. */
4341
4342 else if (passed_pointer
4343 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4344 TYPE_MODE (DECL_ARG_TYPE (parm)),
4345 DECL_ARG_TYPE (parm),
4346 named_arg)
4347 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4348 {
4349 rtx copy;
4350 tree type = DECL_ARG_TYPE (parm);
4351
4352 /* This sequence may involve a library call perhaps clobbering
4353 registers that haven't been copied to pseudos yet. */
4354
4355 push_to_sequence (conversion_insns);
4356
4357 if (TYPE_SIZE (type) == 0
4358 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4359 /* This is a variable sized object. */
4360 copy = gen_rtx_MEM (BLKmode,
4361 allocate_dynamic_stack_space
4362 (expr_size (parm), NULL_RTX,
4363 TYPE_ALIGN (type)));
4364 else
4365 copy = assign_stack_temp (TYPE_MODE (type),
4366 int_size_in_bytes (type), 1);
4367 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4368 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4369
4370 store_expr (parm, copy, 0);
4371 emit_move_insn (parmreg, XEXP (copy, 0));
4372 if (current_function_check_memory_usage)
4373 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4374 XEXP (copy, 0), ptr_mode,
4375 GEN_INT (int_size_in_bytes (type)),
4376 TYPE_MODE (sizetype),
4377 GEN_INT (MEMORY_USE_RW),
4378 TYPE_MODE (integer_type_node));
4379 conversion_insns = get_insns ();
4380 did_conversion = 1;
4381 end_sequence ();
4382 }
4383 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4384
4385 /* In any case, record the parm's desired stack location
4386 in case we later discover it must live in the stack.
4387
4388 If it is a COMPLEX value, store the stack location for both
4389 halves. */
4390
4391 if (GET_CODE (parmreg) == CONCAT)
4392 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4393 else
4394 regno = REGNO (parmreg);
4395
4396 if (regno >= max_parm_reg)
4397 {
4398 rtx *new;
4399 int old_max_parm_reg = max_parm_reg;
4400
4401 /* It's slow to expand this one register at a time,
4402 but it's also rare and we need max_parm_reg to be
4403 precisely correct. */
4404 max_parm_reg = regno + 1;
4405 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4406 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4407 old_max_parm_reg * sizeof (rtx));
4408 bzero ((char *) (new + old_max_parm_reg),
4409 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4410 parm_reg_stack_loc = new;
4411 }
4412
4413 if (GET_CODE (parmreg) == CONCAT)
4414 {
4415 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4416
4417 regnor = REGNO (gen_realpart (submode, parmreg));
4418 regnoi = REGNO (gen_imagpart (submode, parmreg));
4419
4420 if (stack_parm != 0)
4421 {
4422 parm_reg_stack_loc[regnor]
4423 = gen_realpart (submode, stack_parm);
4424 parm_reg_stack_loc[regnoi]
4425 = gen_imagpart (submode, stack_parm);
4426 }
4427 else
4428 {
4429 parm_reg_stack_loc[regnor] = 0;
4430 parm_reg_stack_loc[regnoi] = 0;
4431 }
4432 }
4433 else
4434 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4435
4436 /* Mark the register as eliminable if we did no conversion
4437 and it was copied from memory at a fixed offset,
4438 and the arg pointer was not copied to a pseudo-reg.
4439 If the arg pointer is a pseudo reg or the offset formed
4440 an invalid address, such memory-equivalences
4441 as we make here would screw up life analysis for it. */
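/* (A REG_EQUIV note records that the pseudo is equivalent to its
   incoming stack slot, so if the pseudo does not get a hard register,
   reload can use the slot directly instead of allocating a new one.) */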
4442 if (nominal_mode == passed_mode
4443 && ! did_conversion
4444 && stack_parm != 0
4445 && GET_CODE (stack_parm) == MEM
4446 && stack_offset.var == 0
4447 && reg_mentioned_p (virtual_incoming_args_rtx,
4448 XEXP (stack_parm, 0)))
4449 {
4450 rtx linsn = get_last_insn ();
4451 rtx sinsn, set;
4452
4453 /* Mark complex types separately. */
4454 if (GET_CODE (parmreg) == CONCAT)
4455 /* Scan backwards for the set of the real and
4456 imaginary parts. */
4457 for (sinsn = linsn; sinsn != 0;
4458 sinsn = prev_nonnote_insn (sinsn))
4459 {
4460 set = single_set (sinsn);
4461 if (set != 0
4462 && SET_DEST (set) == regno_reg_rtx [regnoi])
4463 REG_NOTES (sinsn)
4464 = gen_rtx_EXPR_LIST (REG_EQUIV,
4465 parm_reg_stack_loc[regnoi],
4466 REG_NOTES (sinsn));
4467 else if (set != 0
4468 && SET_DEST (set) == regno_reg_rtx [regnor])
4469 REG_NOTES (sinsn)
4470 = gen_rtx_EXPR_LIST (REG_EQUIV,
4471 parm_reg_stack_loc[regnor],
4472 REG_NOTES (sinsn));
4473 }
4474 else if ((set = single_set (linsn)) != 0
4475 && SET_DEST (set) == parmreg)
4476 REG_NOTES (linsn)
4477 = gen_rtx_EXPR_LIST (REG_EQUIV,
4478 stack_parm, REG_NOTES (linsn));
4479 }
4480
4481 /* For pointer data type, suggest pointer register. */
4482 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4483 mark_reg_pointer (parmreg,
4484 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4485 / BITS_PER_UNIT));
4486 }
4487 else
4488 {
4489 /* Value must be stored in the stack slot STACK_PARM
4490 during function execution. */
4491
4492 if (promoted_mode != nominal_mode)
4493 {
4494 /* Conversion is required. */
4495 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4496
4497 emit_move_insn (tempreg, validize_mem (entry_parm));
4498
4499 push_to_sequence (conversion_insns);
4500 entry_parm = convert_to_mode (nominal_mode, tempreg,
4501 TREE_UNSIGNED (TREE_TYPE (parm)));
4502 if (stack_parm)
4503 {
4504 /* ??? This may need a big-endian conversion on sparc64. */
4505 stack_parm = change_address (stack_parm, nominal_mode,
4506 NULL_RTX);
4507 }
4508 conversion_insns = get_insns ();
4509 did_conversion = 1;
4510 end_sequence ();
4511 }
4512
4513 if (entry_parm != stack_parm)
4514 {
4515 if (stack_parm == 0)
4516 {
4517 stack_parm
4518 = assign_stack_local (GET_MODE (entry_parm),
4519 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4520 /* If this is a memory ref that contains aggregate components,
4521 mark it as such for cse and loop optimize. */
4522 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4523 }
4524
4525 if (promoted_mode != nominal_mode)
4526 {
4527 push_to_sequence (conversion_insns);
4528 emit_move_insn (validize_mem (stack_parm),
4529 validize_mem (entry_parm));
4530 conversion_insns = get_insns ();
4531 end_sequence ();
4532 }
4533 else
4534 emit_move_insn (validize_mem (stack_parm),
4535 validize_mem (entry_parm));
4536 }
4537 if (current_function_check_memory_usage)
4538 {
4539 push_to_sequence (conversion_insns);
4540 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4541 XEXP (stack_parm, 0), ptr_mode,
4542 GEN_INT (GET_MODE_SIZE (GET_MODE
4543 (entry_parm))),
4544 TYPE_MODE (sizetype),
4545 GEN_INT (MEMORY_USE_RW),
4546 TYPE_MODE (integer_type_node));
4547
4548 conversion_insns = get_insns ();
4549 end_sequence ();
4550 }
4551 DECL_RTL (parm) = stack_parm;
4552 }
4553
4554 /* If this "parameter" was the place where we are receiving the
4555 function's incoming structure pointer, set up the result. */
4556 if (parm == function_result_decl)
4557 {
4558 tree result = DECL_RESULT (fndecl);
4559 tree restype = TREE_TYPE (result);
4560
4561 DECL_RTL (result)
4562 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4563
4564 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4565 }
4566
4567 if (TREE_THIS_VOLATILE (parm))
4568 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4569 if (TREE_READONLY (parm))
4570 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4571 }
4572
4573 /* Output all parameter conversion instructions (possibly including calls)
4574 now that all parameters have been copied out of hard registers. */
4575 emit_insns (conversion_insns);
4576
4577 last_parm_insn = get_last_insn ();
4578
4579 current_function_args_size = stack_args_size.constant;
4580
4581 /* Adjust function incoming argument size for alignment and
4582 minimum length. */
4583
4584 #ifdef REG_PARM_STACK_SPACE
4585 #ifndef MAYBE_REG_PARM_STACK_SPACE
4586 current_function_args_size = MAX (current_function_args_size,
4587 REG_PARM_STACK_SPACE (fndecl));
4588 #endif
4589 #endif
4590
4591 #ifdef STACK_BOUNDARY
4592 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4593
4594 current_function_args_size
4595 = ((current_function_args_size + STACK_BYTES - 1)
4596 / STACK_BYTES) * STACK_BYTES;
4597 #endif
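/* This rounds the args size up to the stack boundary. For instance,
   with a 64-bit STACK_BOUNDARY (STACK_BYTES == 8), an args size of 20
   bytes becomes 24. */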
4598
4599 #ifdef ARGS_GROW_DOWNWARD
4600 current_function_arg_offset_rtx
4601 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4602 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4603 size_int (-stack_args_size.constant)),
4604 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4605 #else
4606 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4607 #endif
4608
4609 /* See how many bytes, if any, of its args a function should try to pop
4610 on return. */
4611
4612 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4613 current_function_args_size);
4614
4615 /* For stdarg.h functions, save info about
4616 regs and stack space used by the named args. */
4617
4618 if (!hide_last_arg)
4619 current_function_args_info = args_so_far;
4620
4621 /* Set the rtx used for the function return value. Put this in its
4622 own variable so any optimizers that need this information don't have
4623 to include tree.h. Do this here so it gets done when an inlined
4624 function gets output. */
4625
4626 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4627 }
4628 \f
4629 /* Indicate whether REGNO is an incoming argument to the current function
4630 that was promoted to a wider mode. If so, return the RTX for the
4631 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4632 that REGNO is promoted from and whether the promotion was signed or
4633 unsigned. */
4634
4635 #ifdef PROMOTE_FUNCTION_ARGS
4636
4637 rtx
4638 promoted_input_arg (regno, pmode, punsignedp)
4639 int regno;
4640 enum machine_mode *pmode;
4641 int *punsignedp;
4642 {
4643 tree arg;
4644
4645 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4646 arg = TREE_CHAIN (arg))
4647 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4648 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4649 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4650 {
4651 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4652 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4653
4654 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4655 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4656 && mode != DECL_MODE (arg))
4657 {
4658 *pmode = DECL_MODE (arg);
4659 *punsignedp = unsignedp;
4660 return DECL_INCOMING_RTL (arg);
4661 }
4662 }
4663
4664 return 0;
4665 }
4666
4667 #endif
4668 \f
4669 /* Compute the size and offset from the start of the stacked arguments for a
4670 parm passed in mode PASSED_MODE and with type TYPE.
4671
4672 INITIAL_OFFSET_PTR points to the current offset into the stacked
4673 arguments.
4674
4675 The starting offset and size for this parm are returned in *OFFSET_PTR
4676 and *ARG_SIZE_PTR, respectively.
4677
4678 IN_REGS is non-zero if the argument will be passed in registers. It will
4679 never be set if REG_PARM_STACK_SPACE is not defined.
4680
4681 FNDECL is the function in which the argument was defined.
4682
4683 There are two types of rounding that are done. The first, controlled by
4684 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4685 list to be aligned to the specific boundary (in bits). This rounding
4686 affects the initial and starting offsets, but not the argument size.
4687
4688 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4689 optionally rounds the size of the parm to PARM_BOUNDARY. The
4690 initial offset is not affected by this rounding, while the size always
4691 is and the starting offset may be. */
4692
4693 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4694 initial_offset_ptr is positive because locate_and_pad_parm's
4695 callers pass in the total size of args so far as
4696 initial_offset_ptr. arg_size_ptr is always positive. */
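/* As a concrete illustration of the two roundings: on a target whose
   FUNCTION_ARG_BOUNDARY is 64 bits, a parm following 4 bytes of
   arguments has its starting offset padded from 4 to 8; with a 32-bit
   PARM_BOUNDARY, a 1-byte parm's size is then rounded up to a full
   4-byte slot. */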
4697
4698 void
4699 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4700 initial_offset_ptr, offset_ptr, arg_size_ptr)
4701 enum machine_mode passed_mode;
4702 tree type;
4703 int in_regs;
4704 tree fndecl;
4705 struct args_size *initial_offset_ptr;
4706 struct args_size *offset_ptr;
4707 struct args_size *arg_size_ptr;
4708 {
4709 tree sizetree
4710 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4711 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4712 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4713
4714 #ifdef REG_PARM_STACK_SPACE
4715 /* If we have found a stack parm before we reach the end of the
4716 area reserved for registers, skip that area. */
4717 if (! in_regs)
4718 {
4719 int reg_parm_stack_space = 0;
4720
4721 #ifdef MAYBE_REG_PARM_STACK_SPACE
4722 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4723 #else
4724 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4725 #endif
4726 if (reg_parm_stack_space > 0)
4727 {
4728 if (initial_offset_ptr->var)
4729 {
4730 initial_offset_ptr->var
4731 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4732 size_int (reg_parm_stack_space));
4733 initial_offset_ptr->constant = 0;
4734 }
4735 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4736 initial_offset_ptr->constant = reg_parm_stack_space;
4737 }
4738 }
4739 #endif /* REG_PARM_STACK_SPACE */
4740
4741 arg_size_ptr->var = 0;
4742 arg_size_ptr->constant = 0;
4743
4744 #ifdef ARGS_GROW_DOWNWARD
4745 if (initial_offset_ptr->var)
4746 {
4747 offset_ptr->constant = 0;
4748 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4749 initial_offset_ptr->var);
4750 }
4751 else
4752 {
4753 offset_ptr->constant = - initial_offset_ptr->constant;
4754 offset_ptr->var = 0;
4755 }
4756 if (where_pad != none
4757 && (TREE_CODE (sizetree) != INTEGER_CST
4758 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4759 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4760 SUB_PARM_SIZE (*offset_ptr, sizetree);
4761 if (where_pad != downward)
4762 pad_to_arg_alignment (offset_ptr, boundary);
4763 if (initial_offset_ptr->var)
4764 {
4765 arg_size_ptr->var = size_binop (MINUS_EXPR,
4766 size_binop (MINUS_EXPR,
4767 integer_zero_node,
4768 initial_offset_ptr->var),
4769 offset_ptr->var);
4770 }
4771 else
4772 {
4773 arg_size_ptr->constant = (- initial_offset_ptr->constant
4774 - offset_ptr->constant);
4775 }
4776 #else /* !ARGS_GROW_DOWNWARD */
4777 pad_to_arg_alignment (initial_offset_ptr, boundary);
4778 *offset_ptr = *initial_offset_ptr;
4779
4780 #ifdef PUSH_ROUNDING
4781 if (passed_mode != BLKmode)
4782 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4783 #endif
4784
4785 /* Pad_below needs the pre-rounded size to know how much to pad below,
4786 so this must be done before rounding up. */
4787 if (where_pad == downward
4788 /* However, BLKmode args passed in regs have their padding done elsewhere.
4789 The stack slot must be able to hold the entire register. */
4790 && !(in_regs && passed_mode == BLKmode))
4791 pad_below (offset_ptr, passed_mode, sizetree);
4792
4793 if (where_pad != none
4794 && (TREE_CODE (sizetree) != INTEGER_CST
4795 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4796 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4797
4798 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4799 #endif /* ARGS_GROW_DOWNWARD */
4800 }
4801
4802 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4803 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
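/* For example, rounding an offset of 3 to a 32-bit (4-byte) boundary
   yields 4 when args grow upward (CEIL_ROUND) and 0 when they grow
   downward (FLOOR_ROUND). */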
4804
4805 static void
4806 pad_to_arg_alignment (offset_ptr, boundary)
4807 struct args_size *offset_ptr;
4808 int boundary;
4809 {
4810 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4811
4812 if (boundary > BITS_PER_UNIT)
4813 {
4814 if (offset_ptr->var)
4815 {
4816 offset_ptr->var =
4817 #ifdef ARGS_GROW_DOWNWARD
4818 round_down
4819 #else
4820 round_up
4821 #endif
4822 (ARGS_SIZE_TREE (*offset_ptr),
4823 boundary / BITS_PER_UNIT);
4824 offset_ptr->constant = 0; /*?*/
4825 }
4826 else
4827 offset_ptr->constant =
4828 #ifdef ARGS_GROW_DOWNWARD
4829 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4830 #else
4831 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4832 #endif
4833 }
4834 }
4835
4836 #ifndef ARGS_GROW_DOWNWARD
4837 static void
4838 pad_below (offset_ptr, passed_mode, sizetree)
4839 struct args_size *offset_ptr;
4840 enum machine_mode passed_mode;
4841 tree sizetree;
4842 {
4843 if (passed_mode != BLKmode)
4844 {
4845 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4846 offset_ptr->constant
4847 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4848 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4849 - GET_MODE_SIZE (passed_mode));
4850 }
4851 else
4852 {
4853 if (TREE_CODE (sizetree) != INTEGER_CST
4854 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4855 {
4856 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4857 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4858 /* Add it in. */
4859 ADD_PARM_SIZE (*offset_ptr, s2);
4860 SUB_PARM_SIZE (*offset_ptr, sizetree);
4861 }
4862 }
4863 }
4864 #endif
4865
4866 #ifdef ARGS_GROW_DOWNWARD
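/* Round VALUE down to the next lower multiple of DIVISOR, using tree
   arithmetic so that variable sizes are handled; e.g. a constant value
   of 37 with a divisor of 16 yields 32. */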
4867 static tree
4868 round_down (value, divisor)
4869 tree value;
4870 int divisor;
4871 {
4872 return size_binop (MULT_EXPR,
4873 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4874 size_int (divisor));
4875 }
4876 #endif
4877 \f
4878 /* Walk the tree of blocks describing the binding levels within a function
4879 and warn about uninitialized variables.
4880 This is done after calling flow_analysis and before global_alloc
4881 clobbers the pseudo-regs to hard regs. */
4882
4883 void
4884 uninitialized_vars_warning (block)
4885 tree block;
4886 {
4887 register tree decl, sub;
4888 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4889 {
4890 if (TREE_CODE (decl) == VAR_DECL
4891 /* These warnings are unreliable for aggregates
4892 because assigning the fields one by one can fail to convince
4893 flow.c that the entire aggregate was initialized.
4894 Unions are troublesome because members may be shorter. */
4895 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4896 && DECL_RTL (decl) != 0
4897 && GET_CODE (DECL_RTL (decl)) == REG
4898 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4899 warning_with_decl (decl,
4900 "`%s' might be used uninitialized in this function");
4901 if (TREE_CODE (decl) == VAR_DECL
4902 && DECL_RTL (decl) != 0
4903 && GET_CODE (DECL_RTL (decl)) == REG
4904 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4905 warning_with_decl (decl,
4906 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4907 }
4908 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4909 uninitialized_vars_warning (sub);
4910 }
4911
4912 /* Do the appropriate part of uninitialized_vars_warning
4913 but for arguments instead of local variables. */
4914
4915 void
4916 setjmp_args_warning ()
4917 {
4918 register tree decl;
4919 for (decl = DECL_ARGUMENTS (current_function_decl);
4920 decl; decl = TREE_CHAIN (decl))
4921 if (DECL_RTL (decl) != 0
4922 && GET_CODE (DECL_RTL (decl)) == REG
4923 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4924 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4925 }
4926
4927 /* If this function calls setjmp, put all vars into the stack
4928 unless they were declared `register'. */
4929
4930 void
4931 setjmp_protect (block)
4932 tree block;
4933 {
4934 register tree decl, sub;
4935 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4936 if ((TREE_CODE (decl) == VAR_DECL
4937 || TREE_CODE (decl) == PARM_DECL)
4938 && DECL_RTL (decl) != 0
4939 && (GET_CODE (DECL_RTL (decl)) == REG
4940 || (GET_CODE (DECL_RTL (decl)) == MEM
4941 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4942 /* If this variable came from an inline function, it must be
4943 that its life doesn't overlap the setjmp. If there was a
4944 setjmp in the function, it would already be in memory. We
4945 must exclude such variables because their DECL_RTL might be
4946 set to strange things such as virtual_stack_vars_rtx. */
4947 && ! DECL_FROM_INLINE (decl)
4948 && (
4949 #ifdef NON_SAVING_SETJMP
4950 /* If longjmp doesn't restore the registers,
4951 don't put anything in them. */
4952 NON_SAVING_SETJMP
4953 ||
4954 #endif
4955 ! DECL_REGISTER (decl)))
4956 put_var_into_stack (decl);
4957 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4958 setjmp_protect (sub);
4959 }
4960 \f
4961 /* Like the previous function, but for args instead of local variables. */
4962
4963 void
4964 setjmp_protect_args ()
4965 {
4966 register tree decl;
4967 for (decl = DECL_ARGUMENTS (current_function_decl);
4968 decl; decl = TREE_CHAIN (decl))
4969 if ((TREE_CODE (decl) == VAR_DECL
4970 || TREE_CODE (decl) == PARM_DECL)
4971 && DECL_RTL (decl) != 0
4972 && (GET_CODE (DECL_RTL (decl)) == REG
4973 || (GET_CODE (DECL_RTL (decl)) == MEM
4974 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4975 && (
4976 /* If longjmp doesn't restore the registers,
4977 don't put anything in them. */
4978 #ifdef NON_SAVING_SETJMP
4979 NON_SAVING_SETJMP
4980 ||
4981 #endif
4982 ! DECL_REGISTER (decl)))
4983 put_var_into_stack (decl);
4984 }
4985 \f
4986 /* Return the context-pointer register corresponding to DECL,
4987 or 0 if it does not need one. */
4988
4989 rtx
4990 lookup_static_chain (decl)
4991 tree decl;
4992 {
4993 tree context = decl_function_context (decl);
4994 tree link;
4995
4996 if (context == 0
4997 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4998 return 0;
4999
5000 /* We treat inline_function_decl as an alias for the current function
5001 because that is the inline function whose vars, types, etc.
5002 are being merged into the current function.
5003 See expand_inline_function. */
5004 if (context == current_function_decl || context == inline_function_decl)
5005 return virtual_stack_vars_rtx;
5006
5007 for (link = context_display; link; link = TREE_CHAIN (link))
5008 if (TREE_PURPOSE (link) == context)
5009 return RTL_EXPR_RTL (TREE_VALUE (link));
5010
5011 abort ();
5012 }
5013 \f
5014 /* Convert a stack slot address ADDR for variable VAR
5015 (from a containing function)
5016 into an address valid in this function (using a static chain). */
5017
5018 rtx
5019 fix_lexical_addr (addr, var)
5020 rtx addr;
5021 tree var;
5022 {
5023 rtx basereg;
5024 HOST_WIDE_INT displacement;
5025 tree context = decl_function_context (var);
5026 struct function *fp;
5027 rtx base = 0;
5028
5029 /* If this is the present function, we need not do anything. */
5030 if (context == current_function_decl || context == inline_function_decl)
5031 return addr;
5032
5033 for (fp = outer_function_chain; fp; fp = fp->next)
5034 if (fp->decl == context)
5035 break;
5036
5037 if (fp == 0)
5038 abort ();
5039
5040 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5041 addr = XEXP (XEXP (addr, 0), 0);
5042
5043 /* Decode given address as base reg plus displacement. */
5044 if (GET_CODE (addr) == REG)
5045 basereg = addr, displacement = 0;
5046 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5047 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5048 else
5049 abort ();
5050
5051 /* We accept vars reached via the containing function's
5052 incoming arg pointer and via its stack variables pointer. */
5053 if (basereg == fp->internal_arg_pointer)
5054 {
5055 /* If reached via arg pointer, get the arg pointer value
5056 out of that function's stack frame.
5057
5058 There are two cases: If a separate ap is needed, allocate a
5059 slot in the outer function for it and dereference it that way.
5060 This is correct even if the real ap is actually a pseudo.
5061 Otherwise, just adjust the offset from the frame pointer to
5062 compensate. */
5063
5064 #ifdef NEED_SEPARATE_AP
5065 rtx addr;
5066
5067 if (fp->arg_pointer_save_area == 0)
5068 fp->arg_pointer_save_area
5069 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5070
5071 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5072 addr = memory_address (Pmode, addr);
5073
5074 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5075 #else
5076 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5077 base = lookup_static_chain (var);
5078 #endif
5079 }
5080
5081 else if (basereg == virtual_stack_vars_rtx)
5082 {
5083 /* This is the same code as lookup_static_chain, duplicated here to
5084 avoid an extra call to decl_function_context. */
5085 tree link;
5086
5087 for (link = context_display; link; link = TREE_CHAIN (link))
5088 if (TREE_PURPOSE (link) == context)
5089 {
5090 base = RTL_EXPR_RTL (TREE_VALUE (link));
5091 break;
5092 }
5093 }
5094
5095 if (base == 0)
5096 abort ();
5097
5098 /* Use same offset, relative to appropriate static chain or argument
5099 pointer. */
5100 return plus_constant (base, displacement);
5101 }
5102 \f
5103 /* Return the address of the trampoline for entering nested fn FUNCTION.
5104 If necessary, allocate a trampoline (in the stack frame)
5105 and emit rtl to initialize its contents (at entry to this function). */
5106
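/* For illustration (assumed GNU C source; `use' is hypothetical): given

	void f (void)
	{
	  void g (void) { ... }
	  use (&g);
	}

   taking `&g' yields the rounded address of a trampoline allocated in
   f's stack frame; when called, the trampoline loads the static chain
   and jumps to the real code for `g'.  */
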
5107 rtx
5108 trampoline_address (function)
5109 tree function;
5110 {
5111 tree link;
5112 tree rtlexp;
5113 rtx tramp;
5114 struct function *fp;
5115 tree fn_context;
5116
5117 /* Find an existing trampoline and return it. */
5118 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5119 if (TREE_PURPOSE (link) == function)
5120 return
5121 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5122
5123 for (fp = outer_function_chain; fp; fp = fp->next)
5124 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5125 if (TREE_PURPOSE (link) == function)
5126 {
5127 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5128 function);
5129 return round_trampoline_addr (tramp);
5130 }
5131
5132 /* None exists; we must make one. */
5133
5134 /* Find the `struct function' for the function containing FUNCTION. */
5135 fp = 0;
5136 fn_context = decl_function_context (function);
5137 if (fn_context != current_function_decl
5138 && fn_context != inline_function_decl)
5139 for (fp = outer_function_chain; fp; fp = fp->next)
5140 if (fp->decl == fn_context)
5141 break;
5142
5143 /* Allocate run-time space for this trampoline
5144 (usually in the defining function's stack frame). */
5145 #ifdef ALLOCATE_TRAMPOLINE
5146 tramp = ALLOCATE_TRAMPOLINE (fp);
5147 #else
5148 /* If rounding is needed, allocate extra space
5149 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5150 #ifdef TRAMPOLINE_ALIGNMENT
5151 #define TRAMPOLINE_REAL_SIZE \
5152 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5153 #else
5154 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5155 #endif
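/* Worked example (assumed values): with TRAMPOLINE_SIZE == 26,
   TRAMPOLINE_ALIGNMENT == 64 and BITS_PER_UNIT == 8, the real size is
   26 + 8 - 1 == 33 bytes, so a full 26 bytes remain after rounding the
   slot's address up to an 8-byte boundary.  */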
5156 if (fp != 0)
5157 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5158 else
5159 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5160 #endif
5161
5162 /* Record the trampoline for reuse and note it for later initialization
5163 by expand_function_end. */
5164 if (fp != 0)
5165 {
5166 push_obstacks (fp->function_maybepermanent_obstack,
5167 fp->function_maybepermanent_obstack);
5168 rtlexp = make_node (RTL_EXPR);
5169 RTL_EXPR_RTL (rtlexp) = tramp;
5170 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5171 pop_obstacks ();
5172 }
5173 else
5174 {
5175 /* Make the RTL_EXPR node temporary, not momentary, so that the
5176 trampoline_list doesn't become garbage. */
5177 int momentary = suspend_momentary ();
5178 rtlexp = make_node (RTL_EXPR);
5179 resume_momentary (momentary);
5180
5181 RTL_EXPR_RTL (rtlexp) = tramp;
5182 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5183 }
5184
5185 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5186 return round_trampoline_addr (tramp);
5187 }
5188
5189 /* Given a trampoline address,
5190 round it up to a multiple of TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT bytes. */
5191
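/* Worked example (assumed values): with TRAMPOLINE_ALIGNMENT == 32 and
   BITS_PER_UNIT == 8, the computation below is (TRAMP + 3) & -4, so
   0x1001 rounds up to 0x1004 while 0x1004 is left unchanged.  */
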
5192 static rtx
5193 round_trampoline_addr (tramp)
5194 rtx tramp;
5195 {
5196 #ifdef TRAMPOLINE_ALIGNMENT
5197 /* Round address up to desired boundary. */
5198 rtx temp = gen_reg_rtx (Pmode);
5199 temp = expand_binop (Pmode, add_optab, tramp,
5200 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5201 temp, 0, OPTAB_LIB_WIDEN);
5202 tramp = expand_binop (Pmode, and_optab, temp,
5203 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5204 temp, 0, OPTAB_LIB_WIDEN);
5205 #endif
5206 return tramp;
5207 }
5208 \f
5209 /* The functions identify_blocks and reorder_blocks provide a way to
5210 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5211 duplicate portions of the RTL code. Call identify_blocks before
5212 changing the RTL, and call reorder_blocks after. */
5213
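/* A sketch of the intended use (hypothetical caller; the vector is
   xmalloc'd, so the caller frees it):

	tree *vec = identify_blocks (block, get_insns ());
	... transform the insn chain, duplicating block notes ...
	block = reorder_blocks (vec, block, get_insns ());
	free (vec);
 */
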
5214 /* Put all this function's BLOCK nodes, including those that are chained
5215 onto the first block, into a vector, and return it.
5216 Also store in each NOTE for the beginning or end of a block
5217 the index of that block in the vector.
5218 The arguments are BLOCK, the chain of top-level blocks of the function,
5219 and INSNS, the insn chain of the function. */
5220
5221 tree *
5222 identify_blocks (block, insns)
5223 tree block;
5224 rtx insns;
5225 {
5226 int n_blocks;
5227 tree *block_vector;
5228 int *block_stack;
5229 int depth = 0;
5230 int next_block_number = 1;
5231 int current_block_number = 1;
5232 rtx insn;
5233
5234 if (block == 0)
5235 return 0;
5236
5237 n_blocks = all_blocks (block, 0);
5238 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5239 block_stack = (int *) alloca (n_blocks * sizeof (int));
5240
5241 all_blocks (block, block_vector);
5242
5243 for (insn = insns; insn; insn = NEXT_INSN (insn))
5244 if (GET_CODE (insn) == NOTE)
5245 {
5246 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5247 {
5248 block_stack[depth++] = current_block_number;
5249 current_block_number = next_block_number;
5250 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5251 }
5252 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5253 {
5254 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5255 current_block_number = block_stack[--depth];
5256 }
5257 }
5258
5259 if (n_blocks != next_block_number)
5260 abort ();
5261
5262 return block_vector;
5263 }
5264
5265 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5266 and a revised instruction chain, rebuild the tree structure
5267 of BLOCK nodes to correspond to the new order of RTL.
5268 The new block tree is inserted below BLOCK.
5269 Returns the current top-level block. */
5270
5271 tree
5272 reorder_blocks (block_vector, block, insns)
5273 tree *block_vector;
5274 tree block;
5275 rtx insns;
5276 {
5277 tree current_block = block;
5278 rtx insn;
5279
5280 if (block_vector == 0)
5281 return block;
5282
5283 /* Prune the old trees away, so that they don't get in the way. */
5284 BLOCK_SUBBLOCKS (current_block) = 0;
5285 BLOCK_CHAIN (current_block) = 0;
5286
5287 for (insn = insns; insn; insn = NEXT_INSN (insn))
5288 if (GET_CODE (insn) == NOTE)
5289 {
5290 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5291 {
5292 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5293 /* If we have seen this block before, copy it. */
5294 if (TREE_ASM_WRITTEN (block))
5295 block = copy_node (block);
5296 BLOCK_SUBBLOCKS (block) = 0;
5297 TREE_ASM_WRITTEN (block) = 1;
5298 BLOCK_SUPERCONTEXT (block) = current_block;
5299 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5300 BLOCK_SUBBLOCKS (current_block) = block;
5301 current_block = block;
5302 NOTE_SOURCE_FILE (insn) = 0;
5303 }
5304 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5305 {
5306 BLOCK_SUBBLOCKS (current_block)
5307 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5308 current_block = BLOCK_SUPERCONTEXT (current_block);
5309 NOTE_SOURCE_FILE (insn) = 0;
5310 }
5311 }
5312
5313 BLOCK_SUBBLOCKS (current_block)
5314 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5315 return current_block;
5316 }
5317
5318 /* Reverse the order of elements in the chain T of blocks,
5319 and return the new head of the chain (old last element). */
5320
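/* For example, a chain A -> B -> C comes back as C -> B -> A.  */
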
5321 static tree
5322 blocks_nreverse (t)
5323 tree t;
5324 {
5325 register tree prev = 0, decl, next;
5326 for (decl = t; decl; decl = next)
5327 {
5328 next = BLOCK_CHAIN (decl);
5329 BLOCK_CHAIN (decl) = prev;
5330 prev = decl;
5331 }
5332 return prev;
5333 }
5334
5335 /* Count the subblocks of the list starting with BLOCK, and list them
5336 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
5337 blocks. */
5338
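/* For example (assumed tree): a top-level block B0 whose subblocks are
   B1 (itself containing B2) and B3 fills VECTOR with B0, B1, B2, B3 in
   that order and returns 4.  */
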
5339 static int
5340 all_blocks (block, vector)
5341 tree block;
5342 tree *vector;
5343 {
5344 int n_blocks = 0;
5345
5346 while (block)
5347 {
5348 TREE_ASM_WRITTEN (block) = 0;
5349
5350 /* Record this block. */
5351 if (vector)
5352 vector[n_blocks] = block;
5353
5354 ++n_blocks;
5355
5356 /* Record the subblocks, and their subblocks... */
5357 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5358 vector ? vector + n_blocks : 0);
5359 block = BLOCK_CHAIN (block);
5360 }
5361
5362 return n_blocks;
5363 }
5364 \f
5365 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5366 and initialize static variables for generating RTL for the statements
5367 of the function. */
5368
5369 void
5370 init_function_start (subr, filename, line)
5371 tree subr;
5372 char *filename;
5373 int line;
5374 {
5375 init_stmt_for_function ();
5376
5377 cse_not_expected = ! optimize;
5378
5379 /* Caller save not needed yet. */
5380 caller_save_needed = 0;
5381
5382 /* No stack slots have been made yet. */
5383 stack_slot_list = 0;
5384
5385 /* There is no stack slot for handling nonlocal gotos. */
5386 nonlocal_goto_handler_slot = 0;
5387 nonlocal_goto_stack_level = 0;
5388
5389 /* No labels have been declared for nonlocal use. */
5390 nonlocal_labels = 0;
5391
5392 /* No function calls so far in this function. */
5393 function_call_count = 0;
5394
5395 /* No parm regs have been allocated.
5396 (This is important for output_inline_function.) */
5397 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5398
5399 /* Initialize the RTL mechanism. */
5400 init_emit ();
5401
5402 /* Initialize the queue of pending postincrements and postdecrements,
5403 and some other info in expr.c. */
5404 init_expr ();
5405
5406 /* We haven't done register allocation yet. */
5407 reg_renumber = 0;
5408
5409 init_const_rtx_hash_table ();
5410
5411 current_function_name = (*decl_printable_name) (subr, 2);
5412
5413 /* Nonzero if this is a nested function that uses a static chain. */
5414
5415 current_function_needs_context
5416 = (decl_function_context (current_function_decl) != 0
5417 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5418
5419 /* Set if a call to setjmp is seen. */
5420 current_function_calls_setjmp = 0;
5421
5422 /* Set if a call to longjmp is seen. */
5423 current_function_calls_longjmp = 0;
5424
5425 current_function_calls_alloca = 0;
5426 current_function_has_nonlocal_label = 0;
5427 current_function_has_nonlocal_goto = 0;
5428 current_function_contains_functions = 0;
5429 current_function_is_thunk = 0;
5430
5431 current_function_returns_pcc_struct = 0;
5432 current_function_returns_struct = 0;
5433 current_function_epilogue_delay_list = 0;
5434 current_function_uses_const_pool = 0;
5435 current_function_uses_pic_offset_table = 0;
5436 current_function_cannot_inline = 0;
5437
5438 /* We have not yet needed to make a label to jump to for tail-recursion. */
5439 tail_recursion_label = 0;
5440
5441 /* We haven't had a need to make a save area for ap yet. */
5442
5443 arg_pointer_save_area = 0;
5444
5445 /* No stack slots allocated yet. */
5446 frame_offset = 0;
5447
5448 /* No SAVE_EXPRs in this function yet. */
5449 save_expr_regs = 0;
5450
5451 /* No RTL_EXPRs in this function yet. */
5452 rtl_expr_chain = 0;
5453
5454 /* Set up to allocate temporaries. */
5455 init_temp_slots ();
5456
5457 /* Within function body, compute a type's size as soon as it is laid out. */
5458 immediate_size_expand++;
5459
5460 /* We haven't made any trampolines for this function yet. */
5461 trampoline_list = 0;
5462
5463 init_pending_stack_adjust ();
5464 inhibit_defer_pop = 0;
5465
5466 current_function_outgoing_args_size = 0;
5467
5468 /* Prevent ever trying to delete the first instruction of a function.
5469 Also tell final how to output a linenum before the function prologue.
5470 Note linenums could be missing, e.g. when compiling a Java .class file. */
5471 if (line > 0)
5472 emit_line_note (filename, line);
5473
5474 /* Make sure first insn is a note even if we don't want linenums.
5475 This makes sure the first insn will never be deleted.
5476 Also, final expects a note to appear there. */
5477 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5478
5479 /* Set flags used by final.c. */
5480 if (aggregate_value_p (DECL_RESULT (subr)))
5481 {
5482 #ifdef PCC_STATIC_STRUCT_RETURN
5483 current_function_returns_pcc_struct = 1;
5484 #endif
5485 current_function_returns_struct = 1;
5486 }
5487
5488 /* Warn if this value is an aggregate type,
5489 regardless of which calling convention we are using for it. */
5490 if (warn_aggregate_return
5491 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5492 warning ("function returns an aggregate");
5493
5494 current_function_returns_pointer
5495 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5496
5497 /* Indicate that we need to distinguish between the return value of the
5498 present function and the return value of a function being called. */
5499 rtx_equal_function_value_matters = 1;
5500
5501 /* Indicate that we have not instantiated virtual registers yet. */
5502 virtuals_instantiated = 0;
5503
5504 /* Indicate we have no need of a frame pointer yet. */
5505 frame_pointer_needed = 0;
5506
5507 /* By default assume not varargs or stdarg. */
5508 current_function_varargs = 0;
5509 current_function_stdarg = 0;
5510 }
5511
5512 /* Indicate that the current function uses extra args
5513 not explicitly mentioned in the argument list in any fashion. */
5514
5515 void
5516 mark_varargs ()
5517 {
5518 current_function_varargs = 1;
5519 }
5520
5521 /* Expand a call to __main at the beginning of a possible main function. */
5522
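/* Concretely (a sketch): on targets with no init section, the libcall
   emitted below amounts to inserting `__main ();' at the top of main,
   which the runtime uses to run global constructors.  */
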
5523 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5524 #undef HAS_INIT_SECTION
5525 #define HAS_INIT_SECTION
5526 #endif
5527
5528 void
5529 expand_main_function ()
5530 {
5531 #if !defined (HAS_INIT_SECTION)
5532 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5533 VOIDmode, 0);
5534 #endif /* not HAS_INIT_SECTION */
5535 }
5536 \f
5537 extern struct obstack permanent_obstack;
5538
5539 /* Start the RTL for a new function, and set variables used for
5540 emitting RTL.
5541 SUBR is the FUNCTION_DECL node.
5542 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5543 the function's parameters, which must be run at any return statement. */
5544
5545 void
5546 expand_function_start (subr, parms_have_cleanups)
5547 tree subr;
5548 int parms_have_cleanups;
5549 {
5550 register int i;
5551 tree tem;
5552 rtx last_ptr = NULL_RTX;
5553
5554 /* Make sure volatile mem refs aren't considered
5555 valid operands of arithmetic insns. */
5556 init_recog_no_volatile ();
5557
5558 /* Set this before generating any memory accesses. */
5559 current_function_check_memory_usage
5560 = (flag_check_memory_usage
5561 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5562
5563 current_function_instrument_entry_exit
5564 = (flag_instrument_function_entry_exit
5565 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5566
5567 /* If function gets a static chain arg, store it in the stack frame.
5568 Do this first, so it gets the first stack slot offset. */
5569 if (current_function_needs_context)
5570 {
5571 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5572
5573 /* Delay copying static chain if it is not a register to avoid
5574 conflicts with regs used for parameters. */
5575 if (! SMALL_REGISTER_CLASSES
5576 || GET_CODE (static_chain_incoming_rtx) == REG)
5577 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5578 }
5579
5580 /* If the parameters of this function need cleaning up, get a label
5581 for the beginning of the code which executes those cleanups. This must
5582 be done before doing anything with return_label. */
5583 if (parms_have_cleanups)
5584 cleanup_label = gen_label_rtx ();
5585 else
5586 cleanup_label = 0;
5587
5588 /* Make the label for return statements to jump to, if this machine
5589 does not have a one-instruction return and uses an epilogue,
5590 or if it returns a structure, or if it has parm cleanups. */
5591 #ifdef HAVE_return
5592 if (cleanup_label == 0 && HAVE_return
5593 && ! current_function_instrument_entry_exit
5594 && ! current_function_returns_pcc_struct
5595 && ! (current_function_returns_struct && ! optimize))
5596 return_label = 0;
5597 else
5598 return_label = gen_label_rtx ();
5599 #else
5600 return_label = gen_label_rtx ();
5601 #endif
5602
5603 /* Initialize rtx used to return the value. */
5604 /* Do this before assign_parms so that we copy the struct value address
5605 before any library calls that assign parms might generate. */
5606
5607 /* Decide whether to return the value in memory or in a register. */
5608 if (aggregate_value_p (DECL_RESULT (subr)))
5609 {
5610 /* Returning something that won't go in a register. */
5611 register rtx value_address = 0;
5612
5613 #ifdef PCC_STATIC_STRUCT_RETURN
5614 if (current_function_returns_pcc_struct)
5615 {
5616 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5617 value_address = assemble_static_space (size);
5618 }
5619 else
5620 #endif
5621 {
5622 /* Expect to be passed the address of a place to store the value.
5623 If it is passed as an argument, assign_parms will take care of
5624 it. */
5625 if (struct_value_incoming_rtx)
5626 {
5627 value_address = gen_reg_rtx (Pmode);
5628 emit_move_insn (value_address, struct_value_incoming_rtx);
5629 }
5630 }
5631 if (value_address)
5632 {
5633 DECL_RTL (DECL_RESULT (subr))
5634 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5635 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5636 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5637 }
5638 }
5639 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5640 /* If return mode is void, this decl rtl should not be used. */
5641 DECL_RTL (DECL_RESULT (subr)) = 0;
5642 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5643 {
5644 /* If function will end with cleanup code for parms,
5645 compute the return value into a pseudo reg,
5646 which we will copy into the true return register
5647 after the cleanups are done. */
5648
5649 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5650
5651 #ifdef PROMOTE_FUNCTION_RETURN
5652 tree type = TREE_TYPE (DECL_RESULT (subr));
5653 int unsignedp = TREE_UNSIGNED (type);
5654
5655 mode = promote_mode (type, mode, &unsignedp, 1);
5656 #endif
5657
5658 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5659 }
5660 else
5661 /* Scalar, returned in a register. */
5662 {
5663 #ifdef FUNCTION_OUTGOING_VALUE
5664 DECL_RTL (DECL_RESULT (subr))
5665 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5666 #else
5667 DECL_RTL (DECL_RESULT (subr))
5668 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5669 #endif
5670
5671 /* Mark this reg as the function's return value. */
5672 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5673 {
5674 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5675 /* Needed because we may need to move this to memory
5676 in case it's a named return value whose address is taken. */
5677 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5678 }
5679 }
5680
5681 /* Initialize rtx for parameters and local variables.
5682 In some cases this requires emitting insns. */
5683
5684 assign_parms (subr, 0);
5685
5686 /* Copy the static chain now if it wasn't a register. The delay is to
5687 avoid conflicts with the parameter passing registers. */
5688
5689 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5690 if (GET_CODE (static_chain_incoming_rtx) != REG)
5691 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5692
5693 /* The following was moved from init_function_start.
5694 The move is supposed to make sdb output more accurate. */
5695 /* Indicate the beginning of the function body,
5696 as opposed to parm setup. */
5697 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5698
5699 /* If doing stupid allocation, mark parms as born here. */
5700
5701 if (GET_CODE (get_last_insn ()) != NOTE)
5702 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5703 parm_birth_insn = get_last_insn ();
5704
5705 if (obey_regdecls)
5706 {
5707 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5708 use_variable (regno_reg_rtx[i]);
5709
5710 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5711 use_variable (current_function_internal_arg_pointer);
5712 }
5713
5714 context_display = 0;
5715 if (current_function_needs_context)
5716 {
5717 /* Fetch static chain values for containing functions. */
5718 tem = decl_function_context (current_function_decl);
5719 /* If not doing stupid register allocation, copy the static chain
5720 pointer into a pseudo. If we have small register classes, copy
5721 the value from memory if static_chain_incoming_rtx is a REG. If
5722 we do stupid register allocation, we use the stack address
5723 generated above. */
5724 if (tem && ! obey_regdecls)
5725 {
5726 /* If the static chain originally came in a register, put it back
5727 there, then move it out in the next insn. The reason for
5728 this peculiar code is to satisfy function integration. */
5729 if (SMALL_REGISTER_CLASSES
5730 && GET_CODE (static_chain_incoming_rtx) == REG)
5731 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5732 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5733 }
5734
5735 while (tem)
5736 {
5737 tree rtlexp = make_node (RTL_EXPR);
5738
5739 RTL_EXPR_RTL (rtlexp) = last_ptr;
5740 context_display = tree_cons (tem, rtlexp, context_display);
5741 tem = decl_function_context (tem);
5742 if (tem == 0)
5743 break;
5744 /* Chain thru stack frames, assuming pointer to next lexical frame
5745 is found at the place we always store it. */
5746 #ifdef FRAME_GROWS_DOWNWARD
5747 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5748 #endif
5749 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5750 memory_address (Pmode, last_ptr)));
5751
5752 /* If we are not optimizing, ensure that we know that this
5753 piece of context is live over the entire function. */
5754 if (! optimize)
5755 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5756 save_expr_regs);
5757 }
5758 }
5759
5760 if (current_function_instrument_entry_exit)
5761 {
5762 rtx fun = DECL_RTL (current_function_decl);
5763 if (GET_CODE (fun) == MEM)
5764 fun = XEXP (fun, 0);
5765 else
5766 abort ();
5767 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
5768 fun, Pmode,
5769 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5770 0,
5771 hard_frame_pointer_rtx),
5772 Pmode);
5773 }
5774
5775 /* After the display initializations is where the tail-recursion label
5776 should go, if we end up needing one. Ensure we have a NOTE here
5777 since some things (like trampolines) get placed before this. */
5778 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5779
5780 /* Now evaluate the sizes of any types declared among the arguments. */
5781 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5782 {
5783 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5784 EXPAND_MEMORY_USE_BAD);
5785 /* Flush the queue in case this parameter declaration has
5786 side-effects. */
5787 emit_queue ();
5788 }
5789
5790 /* Make sure there is a line number after the function entry setup code. */
5791 force_next_line_note ();
5792 }
5793 \f
5794 /* Generate RTL for the end of the current function.
5795 FILENAME and LINE are the current position in the source file.
5796
5797 It is up to language-specific callers to do cleanups for parameters;
5798 alternatively, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5799
5800 void
5801 expand_function_end (filename, line, end_bindings)
5802 char *filename;
5803 int line;
5804 int end_bindings;
5805 {
5806 register int i;
5807 tree link;
5808
5809 #ifdef TRAMPOLINE_TEMPLATE
5810 static rtx initial_trampoline;
5811 #endif
5812
5813 #ifdef NON_SAVING_SETJMP
5814 /* Don't put any variables in registers if we call setjmp
5815 on a machine that fails to restore the registers. */
5816 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5817 {
5818 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5819 setjmp_protect (DECL_INITIAL (current_function_decl));
5820
5821 setjmp_protect_args ();
5822 }
5823 #endif
5824
5825 /* Save the argument pointer if a save area was made for it. */
5826 if (arg_pointer_save_area)
5827 {
5828 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5829 emit_insn_before (x, tail_recursion_reentry);
5830 }
5831
5832 /* Initialize any trampolines required by this function. */
5833 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5834 {
5835 tree function = TREE_PURPOSE (link);
5836 rtx context = lookup_static_chain (function);
5837 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5838 #ifdef TRAMPOLINE_TEMPLATE
5839 rtx blktramp;
5840 #endif
5841 rtx seq;
5842
5843 #ifdef TRAMPOLINE_TEMPLATE
5844 /* First make sure this compilation has a template for
5845 initializing trampolines. */
5846 if (initial_trampoline == 0)
5847 {
5848 end_temporary_allocation ();
5849 initial_trampoline
5850 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5851 resume_temporary_allocation ();
5852 }
5853 #endif
5854
5855 /* Generate insns to initialize the trampoline. */
5856 start_sequence ();
5857 tramp = round_trampoline_addr (XEXP (tramp, 0));
5858 #ifdef TRAMPOLINE_TEMPLATE
5859 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5860 emit_block_move (blktramp, initial_trampoline,
5861 GEN_INT (TRAMPOLINE_SIZE),
5862 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5863 #endif
5864 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5865 seq = get_insns ();
5866 end_sequence ();
5867
5868 /* Put those insns at entry to the containing function (this one). */
5869 emit_insns_before (seq, tail_recursion_reentry);
5870 }
5871
5872 /* If we are doing stack checking and this function makes calls,
5873 do a stack probe at the start of the function to ensure we have enough
5874 space for another stack frame. */
5875 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5876 {
5877 rtx insn, seq;
5878
5879 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5880 if (GET_CODE (insn) == CALL_INSN)
5881 {
5882 start_sequence ();
5883 probe_stack_range (STACK_CHECK_PROTECT,
5884 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5885 seq = get_insns ();
5886 end_sequence ();
5887 emit_insns_before (seq, tail_recursion_reentry);
5888 break;
5889 }
5890 }
5891
5892 /* Warn about unused parms if extra warnings were specified. */
5893 if (warn_unused && extra_warnings)
5894 {
5895 tree decl;
5896
5897 for (decl = DECL_ARGUMENTS (current_function_decl);
5898 decl; decl = TREE_CHAIN (decl))
5899 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5900 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5901 warning_with_decl (decl, "unused parameter `%s'");
5902 }
5903
5904 /* Delete handlers for nonlocal gotos if nothing uses them. */
5905 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5906 delete_handlers ();
5907
5908 /* End any sequences that failed to be closed due to syntax errors. */
5909 while (in_sequence_p ())
5910 end_sequence ();
5911
5912 /* Outside a function body, we can't compute a type's actual size
5913 until the next function's body starts. */
5914 immediate_size_expand--;
5915
5916 /* If doing stupid register allocation,
5917 mark register parms as dying here. */
5918
5919 if (obey_regdecls)
5920 {
5921 rtx tem;
5922 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5923 use_variable (regno_reg_rtx[i]);
5924
5925 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5926
5927 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5928 {
5929 use_variable (XEXP (tem, 0));
5930 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5931 }
5932
5933 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5934 use_variable (current_function_internal_arg_pointer);
5935 }
5936
5937 clear_pending_stack_adjust ();
5938 do_pending_stack_adjust ();
5939
5940 /* Mark the end of the function body.
5941 If control reaches this insn, the function can drop through
5942 without returning a value. */
5943 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5944
5945 /* Must mark the last line number note in the function, so that the test
5946 coverage code can avoid counting the last line twice. This just tells
5947 the code to ignore the immediately following line note, since there
5948 already exists a copy of this note somewhere above. This line number
5949 note is still needed for debugging though, so we can't delete it. */
5950 if (flag_test_coverage)
5951 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
5952
5953 /* Output a linenumber for the end of the function.
5954 SDB depends on this. */
5955 emit_line_note_force (filename, line);
5956
5957 /* Output the label for the actual return from the function,
5958 if one is expected. This happens either because a function epilogue
5959 is used instead of a return instruction, or because a return was done
5960 with a goto in order to run local cleanups, or because of pcc-style
5961 structure returning. */
5962
5963 if (return_label)
5964 emit_label (return_label);
5965
5966 /* C++ uses this. */
5967 if (end_bindings)
5968 expand_end_bindings (0, 0, 0);
5969
5970 /* Now handle any leftover exception regions that may have been
5971 created for the parameters. */
5972 {
5973 rtx last = get_last_insn ();
5974 rtx label;
5975
5976 expand_leftover_cleanups ();
5977
5978 /* If the above emitted any code, make sure we jump around it. */
5979 if (last != get_last_insn ())
5980 {
5981 label = gen_label_rtx ();
5982 last = emit_jump_insn_after (gen_jump (label), last);
5983 last = emit_barrier_after (last);
5984 emit_label (label);
5985 }
5986 }
5987
5988 if (current_function_instrument_entry_exit)
5989 {
5990 rtx fun = DECL_RTL (current_function_decl);
5991 if (GET_CODE (fun) == MEM)
5992 fun = XEXP (fun, 0);
5993 else
5994 abort ();
5995 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
5996 fun, Pmode,
5997 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5998 0,
5999 hard_frame_pointer_rtx),
6000 Pmode);
6001 }
6002
6003 /* If we had calls to alloca, and this machine needs
6004 an accurate stack pointer to exit the function,
6005 insert some code to save and restore the stack pointer. */
6006 #ifdef EXIT_IGNORE_STACK
6007 if (! EXIT_IGNORE_STACK)
6008 #endif
6009 if (current_function_calls_alloca)
6010 {
6011 rtx tem = 0;
6012
6013 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6014 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6015 }
6016
6017 /* If scalar return value was computed in a pseudo-reg,
6018 copy that to the hard return register. */
6019 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6020 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6021 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6022 >= FIRST_PSEUDO_REGISTER))
6023 {
6024 rtx real_decl_result;
6025
6026 #ifdef FUNCTION_OUTGOING_VALUE
6027 real_decl_result
6028 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6029 current_function_decl);
6030 #else
6031 real_decl_result
6032 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6033 current_function_decl);
6034 #endif
6035 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6036 /* If this is a BLKmode structure being returned in registers, then use
6037 the mode computed in expand_return. */
6038 if (GET_MODE (real_decl_result) == BLKmode)
6039 PUT_MODE (real_decl_result,
6040 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6041 emit_move_insn (real_decl_result,
6042 DECL_RTL (DECL_RESULT (current_function_decl)));
6043 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6044
6045 /* The delay slot scheduler assumes that current_function_return_rtx
6046 holds the hard register containing the return value, not a temporary
6047 pseudo. */
6048 current_function_return_rtx = real_decl_result;
6049 }
6050
6051 /* If returning a structure, arrange to return the address of the value
6052 in a place where debuggers expect to find it.
6053
6054 If returning a structure PCC style,
6055 the caller also depends on this value.
6056 And current_function_returns_pcc_struct is not necessarily set. */
6057 if (current_function_returns_struct
6058 || current_function_returns_pcc_struct)
6059 {
6060 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6061 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6062 #ifdef FUNCTION_OUTGOING_VALUE
6063 rtx outgoing
6064 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6065 current_function_decl);
6066 #else
6067 rtx outgoing
6068 = FUNCTION_VALUE (build_pointer_type (type),
6069 current_function_decl);
6070 #endif
6071
6072 /* Mark this as a function return value so integrate will delete the
6073 assignment and USE below when inlining this function. */
6074 REG_FUNCTION_VALUE_P (outgoing) = 1;
6075
6076 emit_move_insn (outgoing, value_address);
6077 use_variable (outgoing);
6078 }
6079
6080 /* If this is an implementation of __throw, do what's necessary to
6081 communicate between __builtin_eh_return and the epilogue. */
6082 expand_eh_return ();
6083
6084 /* Output a return insn if we are using one.
6085 Otherwise, let the rtl chain end here, to drop through
6086 into the epilogue. */
6087
6088 #ifdef HAVE_return
6089 if (HAVE_return)
6090 {
6091 emit_jump_insn (gen_return ());
6092 emit_barrier ();
6093 }
6094 #endif
6095
6096 /* Fix up any gotos that jumped out to the outermost
6097 binding level of the function.
6098 Must follow emitting RETURN_LABEL. */
6099
6100 /* If you have any cleanups to do at this point,
6101 and they need to create temporary variables,
6102 then you will lose. */
6103 expand_fixups (get_insns ());
6104 }
6105 \f
6106 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
6107
6108 static int *prologue;
6109 static int *epilogue;
6110
6111 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6112 or a single insn). */
6113
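/* For example (assumed UIDs): a prologue SEQUENCE of three insns with
   UIDs 11, 12 and 13 is recorded as the vector {11, 12, 13, 0}, the
   trailing zero marking the end.  */
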
6114 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6115 static int *
6116 record_insns (insns)
6117 rtx insns;
6118 {
6119 int *vec;
6120
6121 if (GET_CODE (insns) == SEQUENCE)
6122 {
6123 int len = XVECLEN (insns, 0);
6124 vec = (int *) oballoc ((len + 1) * sizeof (int));
6125 vec[len] = 0;
6126 while (--len >= 0)
6127 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6128 }
6129 else
6130 {
6131 vec = (int *) oballoc (2 * sizeof (int));
6132 vec[0] = INSN_UID (insns);
6133 vec[1] = 0;
6134 }
6135 return vec;
6136 }
6137
6138 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6139
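/* For an ordinary insn the result is 0 or 1; for a delay-slot SEQUENCE
   it is the number of elements whose UIDs appear in VEC.  */
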
6140 static int
6141 contains (insn, vec)
6142 rtx insn;
6143 int *vec;
6144 {
6145 register int i, j;
6146
6147 if (GET_CODE (insn) == INSN
6148 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6149 {
6150 int count = 0;
6151 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6152 for (j = 0; vec[j]; j++)
6153 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6154 count++;
6155 return count;
6156 }
6157 else
6158 {
6159 for (j = 0; vec[j]; j++)
6160 if (INSN_UID (insn) == vec[j])
6161 return 1;
6162 }
6163 return 0;
6164 }
6165 #endif /* HAVE_prologue || HAVE_epilogue */
6166
6167 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6168 this into place with notes indicating where the prologue ends and where
6169 the epilogue begins. Update the basic block information when possible. */
6170
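/* Resulting insn layout, sketched from the code below:

	NOTE_INSN_DELETED		(first insn, never deleted)
	prologue insns
	NOTE_INSN_PROLOGUE_END
	... function body ...
	NOTE_INSN_EPILOGUE_BEG
	epilogue insns
	USE insns at the end of the function
	jump insn that returns
	BARRIER
 */
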
6171 void
6172 thread_prologue_and_epilogue_insns (f)
6173 rtx f;
6174 {
6175 #ifdef HAVE_prologue
6176 if (HAVE_prologue)
6177 {
6178 rtx head, seq;
6179
6180 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6181 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6182 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6183 seq = gen_prologue ();
6184 head = emit_insn_after (seq, f);
6185
6186 /* Include the new prologue insns in the first block. Ignore them
6187 if they form a basic block unto themselves. */
6188 if (basic_block_head && n_basic_blocks
6189 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
6190 basic_block_head[0] = NEXT_INSN (f);
6191
6192 /* Retain a map of the prologue insns. */
6193 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6194 }
6195 else
6196 #endif
6197 prologue = 0;
6198
6199 #ifdef HAVE_epilogue
6200 if (HAVE_epilogue)
6201 {
6202 rtx insn = get_last_insn ();
6203 rtx prev = prev_nonnote_insn (insn);
6204
6205 /* If we end with a BARRIER, we don't need an epilogue. */
6206 if (! (prev && GET_CODE (prev) == BARRIER))
6207 {
6208 rtx tail, seq, tem;
6209 rtx first_use = 0;
6210 rtx last_use = 0;
6211
6212 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6213 epilogue insns, the USE insns at the end of a function,
6214 the jump insn that returns, and then a BARRIER. */
6215
6216 /* Move the USE insns at the end of a function onto a list. */
6217 while (prev
6218 && GET_CODE (prev) == INSN
6219 && GET_CODE (PATTERN (prev)) == USE)
6220 {
6221 tem = prev;
6222 prev = prev_nonnote_insn (prev);
6223
6224 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6225 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6226 if (first_use)
6227 {
6228 NEXT_INSN (tem) = first_use;
6229 PREV_INSN (first_use) = tem;
6230 }
6231 first_use = tem;
6232 if (!last_use)
6233 last_use = tem;
6234 }
6235
6236 emit_barrier_after (insn);
6237
6238 seq = gen_epilogue ();
6239 tail = emit_jump_insn_after (seq, insn);
6240
6241 /* Insert the USE insns immediately before the return insn, which
6242 must be the first instruction before the final barrier. */
6243 if (first_use)
6244 {
6245 tem = prev_nonnote_insn (get_last_insn ());
6246 NEXT_INSN (PREV_INSN (tem)) = first_use;
6247 PREV_INSN (first_use) = PREV_INSN (tem);
6248 PREV_INSN (tem) = last_use;
6249 NEXT_INSN (last_use) = tem;
6250 }
6251
6252 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6253
6254 /* Include the new epilogue insns in the last block. Ignore
6255 them if they form a basic block unto themselves. */
6256 if (basic_block_end && n_basic_blocks
6257 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
6258 basic_block_end[n_basic_blocks - 1] = tail;
6259
6260 /* Retain a map of the epilogue insns. */
6261 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6262 return;
6263 }
6264 }
6265 #endif
6266 epilogue = 0;
6267 }
6268
6269 /* Reposition the prologue-end and epilogue-begin notes after instruction
6270 scheduling and delayed branch scheduling. */
6271
6272 void
6273 reposition_prologue_and_epilogue_notes (f)
6274 rtx f;
6275 {
6276 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6277 /* Reposition the prologue and epilogue notes. */
6278 if (n_basic_blocks)
6279 {
6280 rtx next, prev;
6281 int len;
6282
6283 if (prologue)
6284 {
6285 register rtx insn, note = 0;
6286
6287 /* Scan from the beginning until we reach the last prologue insn.
6288 We apparently can't depend on basic_block_{head,end} after
6289 reorg has run. */
6290 for (len = 0; prologue[len]; len++)
6291 ;
6292 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6293 {
6294 if (GET_CODE (insn) == NOTE)
6295 {
6296 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6297 note = insn;
6298 }
6299 else if ((len -= contains (insn, prologue)) == 0)
6300 {
6301 /* Find the prologue-end note if we haven't already, and
6302 move it to just after the last prologue insn. */
6303 if (note == 0)
6304 {
6305 for (note = insn; (note = NEXT_INSN (note));)
6306 if (GET_CODE (note) == NOTE
6307 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6308 break;
6309 }
6310
6311 next = NEXT_INSN (note);
6312 prev = PREV_INSN (note);
6313 if (prev)
6314 NEXT_INSN (prev) = next;
6315 if (next)
6316 PREV_INSN (next) = prev;
6317
6318 /* Whether or not we can depend on basic_block_head,
6319 attempt to keep it up-to-date. */
6320 if (basic_block_head[0] == note)
6321 basic_block_head[0] = next;
6322
6323 add_insn_after (note, insn);
6324 }
6325 }
6326 }
6327
6328 if (epilogue)
6329 {
6330 register rtx insn, note = 0;
6331
6332 /* Scan from the end until we reach the first epilogue insn.
6333 We apparently can't depend on basic_block_{head,end} after
6334 reorg has run. */
6335 for (len = 0; epilogue[len]; len++)
6336 ;
6337 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6338 {
6339 if (GET_CODE (insn) == NOTE)
6340 {
6341 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6342 note = insn;
6343 }
6344 else if ((len -= contains (insn, epilogue)) == 0)
6345 {
6346 /* Find the epilogue-begin note if we haven't already, and
6347 move it to just before the first epilogue insn. */
6348 if (note == 0)
6349 {
6350 for (note = insn; (note = PREV_INSN (note));)
6351 if (GET_CODE (note) == NOTE
6352 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6353 break;
6354 }
6355 next = NEXT_INSN (note);
6356 prev = PREV_INSN (note);
6357 if (prev)
6358 NEXT_INSN (prev) = next;
6359 if (next)
6360 PREV_INSN (next) = prev;
6361
6362 /* Whether or not we can depend on basic_block_head,
6363 attempt to keep it up-to-date. */
6364 if (n_basic_blocks
6365 && basic_block_head[n_basic_blocks-1] == insn)
6366 basic_block_head[n_basic_blocks-1] = note;
6367
6368 add_insn_before (note, insn);
6369 }
6370 }
6371 }
6372 }
6373 #endif /* HAVE_prologue or HAVE_epilogue */
6374 }