/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
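
/* As a rough sketch (not the exact front-end code), a language front end
   drives this file something like the following, with FNDECL the
   FUNCTION_DECL being compiled; the argument details are hypothetical
   and vary by front end:

     expand_function_start (fndecl, 0);
       ... expand each statement of the body ...
     expand_function_end (input_filename, lineno, 0);

   See the callers in the language front ends for authoritative usage.  */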

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in
   such cases, use the macro NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
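
/* A worked example, assuming two's-complement arithmetic (which the
   bit-masking trick above relies on):

     FLOOR_ROUND (-13, 8) == -16    (-13 & ~7, rounding toward -infinity)
     CEIL_ROUND  ( 13, 8) ==  16    ((13 + 7) & ~7)

   Plain integer division would instead round -13/8 toward zero.  */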

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if this function has a computed goto.

   It is computed during find_basic_blocks or during stupid life
   analysis.  */

int current_function_has_computed_jump;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue).  This is only valid after
   life_analysis has run.  */

int current_function_sp_is_unchanging;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* Nonzero if instrumentation calls for function entry and exit should be
   generated.  */
int current_function_instrument_entry_exit;

/* Nonzero if memory access checking should be enabled in the current
   function.  */
int current_function_check_memory_usage;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if not
   optimizing.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is
   requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if not
   optimizing.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
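
/* A minimal sketch of the intended discipline, assuming a caller that
   wants a scratch slot for the duration of one statement (SLOT and
   RESULT are hypothetical variables, for illustration only):

     push_temp_slots ();
     slot = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
       ... emit insns that use SLOT ...
     preserve_temp_slots (result);    only if RESULT might live in SLOT
     free_temp_slots ();
     pop_temp_slots ();
 */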
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
                                            int, struct function *));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;
  p->instrument_entry_exit = current_function_instrument_entry_exit;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;
  current_function_instrument_entry_exit = p->instrument_entry_exit;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
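
/* Thus, compiling a nested function from a language front end looks
   roughly like this (a sketch, not the exact front-end code):

     push_function_context ();
       ... set up and expand the nested FUNCTION_DECL ...
     pop_function_context ();

   Every language-independent variable above is saved and restored
   around the nested compilation.  */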
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to STACK_BOUNDARY here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will
     use, use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
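
/* For example (illustrative only), a word-sized local slot aligned
   according to SImode:

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Passing an ALIGN of -1 instead would align the slot to
   BIGGEST_ALIGNMENT and round SIZE up to a multiple of it.  */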

/* Assign a stack slot in a containing function.
   The first three arguments are the same as in assign_stack_local.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will
     use, use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
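
/* For instance (illustrative only), a temporary whose lifetime should
   extend to the enclosing CLEANUP_POINT_EXPR would be allocated with
   KEEP == 2:

     t = assign_stack_temp (BLKmode, size, 2);

   As the code above shows, KEEP == 2 files the slot at
   target_temp_slot_level rather than the current temp_slot_level.  */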
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
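
/* E.g. (illustrative only), to force an addressable stack temporary for
   TYPE even when TYPE_MODE (type) is not BLKmode, pass MEMORY_REQUIRED
   of 1:

     mem = assign_temp (type, 0, 1, 0);

   whereas assign_temp (type, 0, 0, 0) may return a (possibly promoted)
   pseudo register instead.  */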
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless
     optimizing at a high level.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
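
/* A worked example: if a free BLKmode slot P occupies frame offsets
   [16, 32) and another free BLKmode slot Q occupies [32, 48), then
   P->base_offset + P->full_size == Q->base_offset, so the loop above
   merges Q into P, leaving one free 32-byte slot covering [16, 48).  */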
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X matched
   one of our slots, just mark that one.  Otherwise, we can't easily
   predict which it is, so upgrade all of them.  Kept slots need not
   be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
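
/* For example, given a statement expression such as

     x = ({ struct S s = f (); s; });

   the value of the ({...}) may sit in a temporary of the inner level;
   preserve_temp_slots pretends that temporary was allocated one level
   up, so freeing the inner level does not reclaim the result.  */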

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since such a temporary can
   be reused while generating the same RTL_EXPR, but this is complex
   and probably not worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl)
                            || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   TYPE is the user-level data type of the value.
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   ORIGINAL_REGNO, if nonzero, overrides REGNO (REG) when looking for
   a previously made parm stack slot.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its
         function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
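/* VAR is a MEM that used to be a pseudo register.  Walk the main insn
   chain, all pending sequences, and all unemitted RTL_EXPR sequences,
   fixing up references to VAR so they are valid memory references.
   PROMOTED_MODE and UNSIGNEDP describe how VAR was promoted while it
   was still a register.  */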
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries,
   and X is some part of an insn.  Return a struct fixup_replacement whose
   OLD value is equal to X.  Allocate a new structure if no such entry
   exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx set, prev, prev_set;
      rtx note;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && (XEXP (PATTERN (insn), 0) == var
                  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
                      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
                          || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.
             Similarly if this is storing VAR from a register from which
             it was loaded in the previous insn.  This will occur
             when an ADDRESSOF was made for an arglist slot.  */
          else if (toplevel
                   && (set = single_set (insn)) != 0
                   && SET_DEST (set) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && (rtx_equal_p (SET_SRC (set), var)
                       || (GET_CODE (SET_SRC (set)) == REG
                           && (prev = prev_nonnote_insn (insn)) != 0
                           && (prev_set = single_set (prev)) != 0
                           && SET_DEST (prev_set) == SET_SRC (set)
                           && rtx_equal_p (SET_SRC (prev_set), var))))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

              if (SMALL_REGISTER_CLASSES)
                {
1734 /* If the insn that copies the results of a CALL_INSN
1735 into a pseudo now references VAR, we have to use an
1736 intermediate pseudo since we want the life of the
1737 return value register to be only a single insn.
1738
1739 If we don't use an intermediate pseudo, such things as
1740 the address computations needed to make the address of VAR
1741 valid could be placed between the CALL_INSN and INSN.
1742
1743 To make sure this doesn't happen, we record the destination
1744 of the CALL_INSN and see if the next insn uses both that
1745 and VAR. */
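		  /* Illustrative sketch (names are hypothetical): if the
		     insn after a call is
		     (set (reg 100) (plus (reg RETVAL) (mem VAR-slot)))
		     and VAR's address needs fixup insns, those insns would
		     otherwise land between the CALL_INSN and this insn;
		     copying RETVAL into a fresh pseudo first keeps the
		     return-value register's life down to a single insn.  */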
1746
1747 if (call_dest != 0 && GET_CODE (insn) == INSN
1748 && reg_mentioned_p (var, PATTERN (insn))
1749 && reg_mentioned_p (call_dest, PATTERN (insn)))
1750 {
1751 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1752
1753 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1754
1755 PATTERN (insn) = replace_rtx (PATTERN (insn),
1756 call_dest, temp);
1757 }
1758
1759 if (GET_CODE (insn) == CALL_INSN
1760 && GET_CODE (PATTERN (insn)) == SET)
1761 call_dest = SET_DEST (PATTERN (insn));
1762 else if (GET_CODE (insn) == CALL_INSN
1763 && GET_CODE (PATTERN (insn)) == PARALLEL
1764 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1765 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1766 else
1767 call_dest = 0;
1768 }
1769
1770 /* See if we have to do anything to INSN now that VAR is in
1771 memory. If it needs to be loaded into a pseudo, use a single
1772 pseudo for the entire insn in case there is a MATCH_DUP
1773 between two operands. We pass a pointer to the head of
1774 a list of struct fixup_replacements. If fixup_var_refs_1
1775 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1776 it will record them in this list.
1777
1778 If it allocated a pseudo for any replacement, we copy into
1779 it here. */
1780
1781 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1782 &replacements);
1783
1784 /* If this is last_parm_insn, and any instructions were output
1785 after it to fix it up, then we must set last_parm_insn to
1786 the last such instruction emitted. */
1787 if (insn == last_parm_insn)
1788 last_parm_insn = PREV_INSN (next_insn);
1789
1790 while (replacements)
1791 {
1792 if (GET_CODE (replacements->new) == REG)
1793 {
1794 rtx insert_before;
1795 rtx seq;
1796
1797 /* OLD might be a (subreg (mem)). */
1798 if (GET_CODE (replacements->old) == SUBREG)
1799 replacements->old
1800 = fixup_memory_subreg (replacements->old, insn, 0);
1801 else
1802 replacements->old
1803 = fixup_stack_1 (replacements->old, insn);
1804
1805 insert_before = insn;
1806
1807 /* If we are changing the mode, do a conversion.
1808 This might be wasteful, but combine.c will
1809 eliminate much of the waste. */
1810
1811 if (GET_MODE (replacements->new)
1812 != GET_MODE (replacements->old))
1813 {
1814 start_sequence ();
1815 convert_move (replacements->new,
1816 replacements->old, unsignedp);
1817 seq = gen_sequence ();
1818 end_sequence ();
1819 }
1820 else
1821 seq = gen_move_insn (replacements->new,
1822 replacements->old);
1823
1824 emit_insn_before (seq, insert_before);
1825 }
1826
1827 replacements = replacements->next;
1828 }
1829 }
1830
1831 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1832 But don't touch other insns referred to by reg-notes;
1833 we will get them elsewhere. */
1834 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1835 if (GET_CODE (note) != INSN_LIST)
1836 XEXP (note, 0)
1837 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1838 }
1839 insn = next;
1840 }
1841 }
1842 \f
1843 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1844 See if the rtx expression at *LOC in INSN needs to be changed.
1845
1846 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1847 contain a list of original rtx's and replacements. If we find that we need
1848 to modify this insn by replacing a memory reference with a pseudo or by
1849 making a new MEM to implement a SUBREG, we consult that list to see if
1850 we have already chosen a replacement. If none has already been allocated,
1851 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1852 or the SUBREG, as appropriate, to the pseudo. */
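
/* Illustrative example: if INSN contains two identical MEM references
   to VAR where a register is required (say because of a MATCH_DUP),
   both occurrences are replaced by the same new pseudo recorded in
   REPLACEMENTS, and fixup_var_refs_insns then emits a single copy
   from VAR into that pseudo before INSN.  */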
1853
1854 static void
1855 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1856 register rtx var;
1857 enum machine_mode promoted_mode;
1858 register rtx *loc;
1859 rtx insn;
1860 struct fixup_replacement **replacements;
1861 {
1862 register int i;
1863 register rtx x = *loc;
1864 RTX_CODE code = GET_CODE (x);
1865 register char *fmt;
1866 register rtx tem, tem1;
1867 struct fixup_replacement *replacement;
1868
1869 switch (code)
1870 {
1871 case ADDRESSOF:
1872 if (XEXP (x, 0) == var)
1873 {
1874 /* Prevent sharing of rtl that might lose. */
1875 rtx sub = copy_rtx (XEXP (var, 0));
1876
1877 start_sequence ();
1878
1879 if (! validate_change (insn, loc, sub, 0))
1880 {
1881 rtx y = force_operand (sub, NULL_RTX);
1882
1883 if (! validate_change (insn, loc, y, 0))
1884 *loc = copy_to_reg (y);
1885 }
1886
1887 emit_insn_before (gen_sequence (), insn);
1888 end_sequence ();
1889 }
1890 return;
1891
1892 case MEM:
1893 if (var == x)
1894 {
1895 /* If we already have a replacement, use it. Otherwise,
1896 try to fix up this address in case it is invalid. */
1897
1898 replacement = find_fixup_replacement (replacements, var);
1899 if (replacement->new)
1900 {
1901 *loc = replacement->new;
1902 return;
1903 }
1904
1905 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1906
1907 /* Unless we are forcing memory to register or we changed the mode,
1908 we can leave things the way they are if the insn is valid. */
1909
1910 INSN_CODE (insn) = -1;
1911 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1912 && recog_memoized (insn) >= 0)
1913 return;
1914
1915 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1916 return;
1917 }
1918
1919 /* If X contains VAR, we need to unshare it here so that we update
1920 each occurrence separately. But all identical MEMs in one insn
1921 must be replaced with the same rtx because of the possibility of
1922 MATCH_DUPs. */
1923
1924 if (reg_mentioned_p (var, x))
1925 {
1926 replacement = find_fixup_replacement (replacements, x);
1927 if (replacement->new == 0)
1928 replacement->new = copy_most_rtx (x, var);
1929
1930 *loc = x = replacement->new;
1931 }
1932 break;
1933
1934 case REG:
1935 case CC0:
1936 case PC:
1937 case CONST_INT:
1938 case CONST:
1939 case SYMBOL_REF:
1940 case LABEL_REF:
1941 case CONST_DOUBLE:
1942 return;
1943
1944 case SIGN_EXTRACT:
1945 case ZERO_EXTRACT:
1946 /* Note that in some cases those types of expressions are altered
1947 by optimize_bit_field, and do not survive to get here. */
1948 if (XEXP (x, 0) == var
1949 || (GET_CODE (XEXP (x, 0)) == SUBREG
1950 && SUBREG_REG (XEXP (x, 0)) == var))
1951 {
1952 /* Get TEM as a valid MEM in the mode presently in the insn.
1953
1954 We don't worry about the possibility of MATCH_DUP here; it
1955 is highly unlikely and would be tricky to handle. */
1956
1957 tem = XEXP (x, 0);
1958 if (GET_CODE (tem) == SUBREG)
1959 {
1960 if (GET_MODE_BITSIZE (GET_MODE (tem))
1961 > GET_MODE_BITSIZE (GET_MODE (var)))
1962 {
1963 replacement = find_fixup_replacement (replacements, var);
1964 if (replacement->new == 0)
1965 replacement->new = gen_reg_rtx (GET_MODE (var));
1966 SUBREG_REG (tem) = replacement->new;
1967 }
1968 else
1969 tem = fixup_memory_subreg (tem, insn, 0);
1970 }
1971 else
1972 tem = fixup_stack_1 (tem, insn);
1973
1974 /* Unless we want to load from memory, get TEM into the proper mode
1975 for an extract from memory. This can only be done if the
1976 extract is at a constant position and length. */
1977
1978 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1979 && GET_CODE (XEXP (x, 2)) == CONST_INT
1980 && ! mode_dependent_address_p (XEXP (tem, 0))
1981 && ! MEM_VOLATILE_P (tem))
1982 {
1983 enum machine_mode wanted_mode = VOIDmode;
1984 enum machine_mode is_mode = GET_MODE (tem);
1985 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1986
1987 #ifdef HAVE_extzv
1988 if (GET_CODE (x) == ZERO_EXTRACT)
1989 {
1990 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1991 if (wanted_mode == VOIDmode)
1992 wanted_mode = word_mode;
1993 }
1994 #endif
1995 #ifdef HAVE_extv
1996 if (GET_CODE (x) == SIGN_EXTRACT)
1997 {
1998 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1999 if (wanted_mode == VOIDmode)
2000 wanted_mode = word_mode;
2001 }
2002 #endif
2003 /* If we have a narrower mode, we can do something. */
2004 if (wanted_mode != VOIDmode
2005 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2006 {
2007 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2008 rtx old_pos = XEXP (x, 2);
2009 rtx newmem;
2010
2011 /* If the bytes and bits are counted differently, we
2012 must adjust the offset. */
2013 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2014 offset = (GET_MODE_SIZE (is_mode)
2015 - GET_MODE_SIZE (wanted_mode) - offset);
2016
2017 pos %= GET_MODE_BITSIZE (wanted_mode);
2018
2019 newmem = gen_rtx_MEM (wanted_mode,
2020 plus_constant (XEXP (tem, 0), offset));
2021 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2022 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2023 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2024
2025 /* Make the change and see if the insn remains valid. */
2026 INSN_CODE (insn) = -1;
2027 XEXP (x, 0) = newmem;
2028 XEXP (x, 2) = GEN_INT (pos);
2029
2030 if (recog_memoized (insn) >= 0)
2031 return;
2032
2033 /* Otherwise, restore old position. XEXP (x, 0) will be
2034 restored later. */
2035 XEXP (x, 2) = old_pos;
2036 }
2037 }
2038
2039 /* If we get here, the bitfield extract insn can't accept a memory
2040 reference. Copy the input into a register. */
2041
2042 tem1 = gen_reg_rtx (GET_MODE (tem));
2043 emit_insn_before (gen_move_insn (tem1, tem), insn);
2044 XEXP (x, 0) = tem1;
2045 return;
2046 }
2047 break;
2048
2049 case SUBREG:
2050 if (SUBREG_REG (x) == var)
2051 {
2052 /* If this is a special SUBREG made because VAR was promoted
2053 from a wider mode, replace it with VAR and call ourself
2054 recursively, this time saying that the object previously
2055 had its current mode (by virtue of the SUBREG). */
2056
2057 if (SUBREG_PROMOTED_VAR_P (x))
2058 {
2059 *loc = var;
2060 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2061 return;
2062 }
2063
2064 /* If this SUBREG makes VAR wider, it has become a paradoxical
2065 SUBREG with VAR in memory, but these aren't allowed at this
2066 stage of the compilation. So load VAR into a pseudo and take
2067 a SUBREG of that pseudo. */
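	  /* E.g. (illustrative): (subreg:DI (mem:SI VAR-slot) 0) becomes
	     (subreg:DI (reg:SI NEW) 0); the copy of VAR into NEW is
	     emitted later by our caller.  */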
2068 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2069 {
2070 replacement = find_fixup_replacement (replacements, var);
2071 if (replacement->new == 0)
2072 replacement->new = gen_reg_rtx (GET_MODE (var));
2073 SUBREG_REG (x) = replacement->new;
2074 return;
2075 }
2076
2077 /* See if we have already found a replacement for this SUBREG.
2078 If so, use it. Otherwise, make a MEM and see if the insn
2079 is recognized. If not, or if we should force MEM into a register,
2080 make a pseudo for this SUBREG. */
2081 replacement = find_fixup_replacement (replacements, x);
2082 if (replacement->new)
2083 {
2084 *loc = replacement->new;
2085 return;
2086 }
2087
2088 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2089
2090 INSN_CODE (insn) = -1;
2091 if (! flag_force_mem && recog_memoized (insn) >= 0)
2092 return;
2093
2094 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2095 return;
2096 }
2097 break;
2098
2099 case SET:
2100 /* First do special simplification of bit-field references. */
2101 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2102 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2103 optimize_bit_field (x, insn, 0);
2104 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2105 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2106 optimize_bit_field (x, insn, NULL_PTR);
2107
2108 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2109 into a register and then store it back out. */
2110 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2111 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2112 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2113 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2114 > GET_MODE_SIZE (GET_MODE (var))))
2115 {
2116 replacement = find_fixup_replacement (replacements, var);
2117 if (replacement->new == 0)
2118 replacement->new = gen_reg_rtx (GET_MODE (var));
2119
2120 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2121 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2122 }
2123
2124 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2125 insn into a pseudo and store the low part of the pseudo into VAR. */
2126 if (GET_CODE (SET_DEST (x)) == SUBREG
2127 && SUBREG_REG (SET_DEST (x)) == var
2128 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2129 > GET_MODE_SIZE (GET_MODE (var))))
2130 {
2131 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2132 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2133 tem)),
2134 insn);
2135 break;
2136 }
2137
2138 {
2139 rtx dest = SET_DEST (x);
2140 rtx src = SET_SRC (x);
2141 #ifdef HAVE_insv
2142 rtx outerdest = dest;
2143 #endif
2144
2145 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2146 || GET_CODE (dest) == SIGN_EXTRACT
2147 || GET_CODE (dest) == ZERO_EXTRACT)
2148 dest = XEXP (dest, 0);
2149
2150 if (GET_CODE (src) == SUBREG)
2151 src = XEXP (src, 0);
2152
2153 /* If VAR does not appear at the top level of the SET
2154 just scan the lower levels of the tree. */
2155
2156 if (src != var && dest != var)
2157 break;
2158
2159 /* We will need to rerecognize this insn. */
2160 INSN_CODE (insn) = -1;
2161
2162 #ifdef HAVE_insv
2163 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2164 {
2165 /* Since this case will return, ensure we fixup all the
2166 operands here. */
2167 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2168 insn, replacements);
2169 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2170 insn, replacements);
2171 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2172 insn, replacements);
2173
2174 tem = XEXP (outerdest, 0);
2175
2176 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2177 that may appear inside a ZERO_EXTRACT.
2178 This was legitimate when the MEM was a REG. */
2179 if (GET_CODE (tem) == SUBREG
2180 && SUBREG_REG (tem) == var)
2181 tem = fixup_memory_subreg (tem, insn, 0);
2182 else
2183 tem = fixup_stack_1 (tem, insn);
2184
2185 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2186 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2187 && ! mode_dependent_address_p (XEXP (tem, 0))
2188 && ! MEM_VOLATILE_P (tem))
2189 {
2190 enum machine_mode wanted_mode;
2191 enum machine_mode is_mode = GET_MODE (tem);
2192 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2193
2194 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2195 if (wanted_mode == VOIDmode)
2196 wanted_mode = word_mode;
2197
2198 /* If we have a narrower mode, we can do something. */
2199 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2200 {
2201 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2202 rtx old_pos = XEXP (outerdest, 2);
2203 rtx newmem;
2204
2205 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2206 offset = (GET_MODE_SIZE (is_mode)
2207 - GET_MODE_SIZE (wanted_mode) - offset);
2208
2209 pos %= GET_MODE_BITSIZE (wanted_mode);
2210
2211 newmem = gen_rtx_MEM (wanted_mode,
2212 plus_constant (XEXP (tem, 0), offset));
2213 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2214 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2215 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2216
2217 /* Make the change and see if the insn remains valid. */
2218 INSN_CODE (insn) = -1;
2219 XEXP (outerdest, 0) = newmem;
2220 XEXP (outerdest, 2) = GEN_INT (pos);
2221
2222 if (recog_memoized (insn) >= 0)
2223 return;
2224
2225 /* Otherwise, restore old position. XEXP (outerdest, 0) will be
2226 restored later. */
2227 XEXP (outerdest, 2) = old_pos;
2228 }
2229 }
2230
2231 /* If we get here, the bit-field store doesn't allow memory
2232 or isn't located at a constant position. Load the value into
2233 a register, do the store, and put it back into memory. */
2234
2235 tem1 = gen_reg_rtx (GET_MODE (tem));
2236 emit_insn_before (gen_move_insn (tem1, tem), insn);
2237 emit_insn_after (gen_move_insn (tem, tem1), insn);
2238 XEXP (outerdest, 0) = tem1;
2239 return;
2240 }
2241 #endif
2242
2243 /* STRICT_LOW_PART is a no-op on memory references
2244 and it can cause combinations to be unrecognizable,
2245 so eliminate it. */
2246
2247 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2248 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2249
2250 /* A valid insn to copy VAR into or out of a register
2251 must be left alone, to avoid an infinite loop here.
2252 If the reference to VAR is by a subreg, fix that up,
2253 since SUBREG is not valid for a memref.
2254 Also fix up the address of the stack slot.
2255
2256 Note that we must not try to recognize the insn until
2257 after we know that we have valid addresses and no
2258 (subreg (mem ...) ...) constructs, since these interfere
2259 with determining the validity of the insn. */
2260
2261 if ((SET_SRC (x) == var
2262 || (GET_CODE (SET_SRC (x)) == SUBREG
2263 && SUBREG_REG (SET_SRC (x)) == var))
2264 && (GET_CODE (SET_DEST (x)) == REG
2265 || (GET_CODE (SET_DEST (x)) == SUBREG
2266 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2267 && GET_MODE (var) == promoted_mode
2268 && x == single_set (insn))
2269 {
2270 rtx pat;
2271
2272 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2273 if (replacement->new)
2274 SET_SRC (x) = replacement->new;
2275 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2276 SET_SRC (x) = replacement->new
2277 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2278 else
2279 SET_SRC (x) = replacement->new
2280 = fixup_stack_1 (SET_SRC (x), insn);
2281
2282 if (recog_memoized (insn) >= 0)
2283 return;
2284
2285 /* INSN is not valid, but we know that we want to
2286 copy SET_SRC (x) to SET_DEST (x) in some way. So
2287 we generate the move and see whether it requires more
2288 than one insn. If it does, we emit those insns and
2289 delete INSN. Otherwise, we can just replace the pattern
2290 of INSN; we have already verified above that INSN has
2291 no other function than to do X. */
2292
2293 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2294 if (GET_CODE (pat) == SEQUENCE)
2295 {
2296 emit_insn_after (pat, insn);
2297 PUT_CODE (insn, NOTE);
2298 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2299 NOTE_SOURCE_FILE (insn) = 0;
2300 }
2301 else
2302 PATTERN (insn) = pat;
2303
2304 return;
2305 }
2306
2307 if ((SET_DEST (x) == var
2308 || (GET_CODE (SET_DEST (x)) == SUBREG
2309 && SUBREG_REG (SET_DEST (x)) == var))
2310 && (GET_CODE (SET_SRC (x)) == REG
2311 || (GET_CODE (SET_SRC (x)) == SUBREG
2312 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2313 && GET_MODE (var) == promoted_mode
2314 && x == single_set (insn))
2315 {
2316 rtx pat;
2317
2318 if (GET_CODE (SET_DEST (x)) == SUBREG)
2319 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2320 else
2321 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2322
2323 if (recog_memoized (insn) >= 0)
2324 return;
2325
2326 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2327 if (GET_CODE (pat) == SEQUENCE)
2328 {
2329 emit_insn_after (pat, insn);
2330 PUT_CODE (insn, NOTE);
2331 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2332 NOTE_SOURCE_FILE (insn) = 0;
2333 }
2334 else
2335 PATTERN (insn) = pat;
2336
2337 return;
2338 }
2339
2340 /* Otherwise, storing into VAR must be handled specially
2341 by storing into a temporary and copying that into VAR
2342 with a new insn after this one. Note that this case
2343 will be used when storing into a promoted scalar since
2344 the insn will now have different modes on the input
2345 and output and hence will be invalid (except for the case
2346 of setting it to a constant, which does not need any
2347 change if it is valid). We generate extra code in that case,
2348 but combine.c will eliminate it. */
2349
2350 if (dest == var)
2351 {
2352 rtx temp;
2353 rtx fixeddest = SET_DEST (x);
2354
2355 /* STRICT_LOW_PART can be discarded around a MEM. */
2356 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2357 fixeddest = XEXP (fixeddest, 0);
2358 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2359 if (GET_CODE (fixeddest) == SUBREG)
2360 {
2361 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2362 promoted_mode = GET_MODE (fixeddest);
2363 }
2364 else
2365 fixeddest = fixup_stack_1 (fixeddest, insn);
2366
2367 temp = gen_reg_rtx (promoted_mode);
2368
2369 emit_insn_after (gen_move_insn (fixeddest,
2370 gen_lowpart (GET_MODE (fixeddest),
2371 temp)),
2372 insn);
2373
2374 SET_DEST (x) = temp;
2375 }
2376 }
2377
2378 default:
2379 break;
2380 }
2381
2382 /* Nothing special about this RTX; fix its operands. */
2383
2384 fmt = GET_RTX_FORMAT (code);
2385 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2386 {
2387 if (fmt[i] == 'e')
2388 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2389 if (fmt[i] == 'E')
2390 {
2391 register int j;
2392 for (j = 0; j < XVECLEN (x, i); j++)
2393 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2394 insn, replacements);
2395 }
2396 }
2397 }
2398 \f
2399 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2400 return an rtx (MEM:m1 newaddr) which is equivalent.
2401 If any insns must be emitted to compute NEWADDR, put them before INSN.
2402
2403 UNCRITICAL nonzero means accept paradoxical subregs.
2404 This is used for subregs found inside REG_NOTES. */
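
/* For example (illustrative; the exact offset depends on
   UNITS_PER_WORD and endianness): on a 32-bit little-endian target,
   (subreg:SI (mem:DI addr) 1) becomes
   (mem:SI (plus addr (const_int 4))).  */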
2405
2406 static rtx
2407 fixup_memory_subreg (x, insn, uncritical)
2408 rtx x;
2409 rtx insn;
2410 int uncritical;
2411 {
2412 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2413 rtx addr = XEXP (SUBREG_REG (x), 0);
2414 enum machine_mode mode = GET_MODE (x);
2415 rtx result;
2416
2417 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2418 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2419 && ! uncritical)
2420 abort ();
2421
2422 if (BYTES_BIG_ENDIAN)
2423 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2424 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2425 addr = plus_constant (addr, offset);
2426 if (!flag_force_addr && memory_address_p (mode, addr))
2427 /* Shortcut if no insns need be emitted. */
2428 return change_address (SUBREG_REG (x), mode, addr);
2429 start_sequence ();
2430 result = change_address (SUBREG_REG (x), mode, addr);
2431 emit_insn_before (gen_sequence (), insn);
2432 end_sequence ();
2433 return result;
2434 }
2435
2436 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2437 Replace subexpressions of X in place.
2438 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2439 Otherwise return X, with its contents possibly altered.
2440
2441 If any insns must be emitted to compute NEWADDR, put them before INSN.
2442
2443 UNCRITICAL is as in fixup_memory_subreg. */
2444
2445 static rtx
2446 walk_fixup_memory_subreg (x, insn, uncritical)
2447 register rtx x;
2448 rtx insn;
2449 int uncritical;
2450 {
2451 register enum rtx_code code;
2452 register char *fmt;
2453 register int i;
2454
2455 if (x == 0)
2456 return 0;
2457
2458 code = GET_CODE (x);
2459
2460 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2461 return fixup_memory_subreg (x, insn, uncritical);
2462
2463 /* Nothing special about this RTX; fix its operands. */
2464
2465 fmt = GET_RTX_FORMAT (code);
2466 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2467 {
2468 if (fmt[i] == 'e')
2469 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2470 if (fmt[i] == 'E')
2471 {
2472 register int j;
2473 for (j = 0; j < XVECLEN (x, i); j++)
2474 XVECEXP (x, i, j)
2475 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2476 }
2477 }
2478 return x;
2479 }
2480 \f
2481 /* For each memory ref within X, if it refers to a stack slot
2482 with an out of range displacement, put the address in a temp register
2483 (emitting new insns before INSN to load these registers)
2484 and alter the memory ref to use that register.
2485 Replace each such MEM rtx with a copy, to avoid clobberage. */
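
/* Sketch (illustrative): if (mem:SI (plus (reg fp) (const_int 4096)))
   is not a valid address on the target, we emit
   (set (reg NEW) (plus (reg fp) (const_int 4096))) before INSN and
   rewrite the reference as (mem:SI (reg NEW)).  */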
2486
2487 static rtx
2488 fixup_stack_1 (x, insn)
2489 rtx x;
2490 rtx insn;
2491 {
2492 register int i;
2493 register RTX_CODE code = GET_CODE (x);
2494 register char *fmt;
2495
2496 if (code == MEM)
2497 {
2498 register rtx ad = XEXP (x, 0);
2499 /* If we have address of a stack slot but it's not valid
2500 (displacement is too large), compute the sum in a register. */
2501 if (GET_CODE (ad) == PLUS
2502 && GET_CODE (XEXP (ad, 0)) == REG
2503 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2504 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2505 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2506 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2507 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2508 #endif
2509 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2510 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2511 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2512 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2513 {
2514 rtx temp, seq;
2515 if (memory_address_p (GET_MODE (x), ad))
2516 return x;
2517
2518 start_sequence ();
2519 temp = copy_to_reg (ad);
2520 seq = gen_sequence ();
2521 end_sequence ();
2522 emit_insn_before (seq, insn);
2523 return change_address (x, VOIDmode, temp);
2524 }
2525 return x;
2526 }
2527
2528 fmt = GET_RTX_FORMAT (code);
2529 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2530 {
2531 if (fmt[i] == 'e')
2532 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2533 if (fmt[i] == 'E')
2534 {
2535 register int j;
2536 for (j = 0; j < XVECLEN (x, i); j++)
2537 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2538 }
2539 }
2540 return x;
2541 }
2542 \f
2543 /* Optimization: a bit-field instruction whose field
2544 happens to be a byte or halfword in memory
2545 can be changed to a move instruction.
2546
2547 We call here when INSN is an insn to examine or store into a bit-field.
2548 BODY is the SET-rtx to be altered.
2549
2550 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2551 (Currently this is called only from function.c, and EQUIV_MEM
2552 is always 0.) */
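
/* For example (illustrative, little-endian): the reference
   (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
   names one whole byte, so it can be rewritten as a (possibly
   extending) move from (mem:QI (plus addr (const_int 1))).  */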
2553
2554 static void
2555 optimize_bit_field (body, insn, equiv_mem)
2556 rtx body;
2557 rtx insn;
2558 rtx *equiv_mem;
2559 {
2560 register rtx bitfield;
2561 int destflag;
2562 rtx seq = 0;
2563 enum machine_mode mode;
2564
2565 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2566 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2567 bitfield = SET_DEST (body), destflag = 1;
2568 else
2569 bitfield = SET_SRC (body), destflag = 0;
2570
2571 /* First check that the field being stored has constant size and position
2572 and is in fact a byte or halfword suitably aligned. */
2573
2574 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2575 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2576 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2577 != BLKmode)
2578 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2579 {
2580 register rtx memref = 0;
2581
2582 /* Now check that the containing word is memory, not a register,
2583 and that it is safe to change the machine mode. */
2584
2585 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2586 memref = XEXP (bitfield, 0);
2587 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2588 && equiv_mem != 0)
2589 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2590 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2591 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2592 memref = SUBREG_REG (XEXP (bitfield, 0));
2593 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2594 && equiv_mem != 0
2595 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2596 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2597
2598 if (memref
2599 && ! mode_dependent_address_p (XEXP (memref, 0))
2600 && ! MEM_VOLATILE_P (memref))
2601 {
2602 /* Now adjust the address, first for any subreg'ing
2603 that we are now getting rid of,
2604 and then for which byte of the word is wanted. */
2605
2606 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2607 rtx insns;
2608
2609 /* Adjust OFFSET to count bits from low-address byte. */
2610 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2611 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2612 - offset - INTVAL (XEXP (bitfield, 1)));
2613
2614 /* Adjust OFFSET to count bytes from low-address byte. */
2615 offset /= BITS_PER_UNIT;
2616 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2617 {
2618 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2619 if (BYTES_BIG_ENDIAN)
2620 offset -= (MIN (UNITS_PER_WORD,
2621 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2622 - MIN (UNITS_PER_WORD,
2623 GET_MODE_SIZE (GET_MODE (memref))));
2624 }
2625
2626 start_sequence ();
2627 memref = change_address (memref, mode,
2628 plus_constant (XEXP (memref, 0), offset));
2629 insns = get_insns ();
2630 end_sequence ();
2631 emit_insns_before (insns, insn);
2632
2633 /* Store this memory reference where
2634 we found the bit field reference. */
2635
2636 if (destflag)
2637 {
2638 validate_change (insn, &SET_DEST (body), memref, 1);
2639 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2640 {
2641 rtx src = SET_SRC (body);
2642 while (GET_CODE (src) == SUBREG
2643 && SUBREG_WORD (src) == 0)
2644 src = SUBREG_REG (src);
2645 if (GET_MODE (src) != GET_MODE (memref))
2646 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2647 validate_change (insn, &SET_SRC (body), src, 1);
2648 }
2649 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2650 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2651 /* This shouldn't happen because anything that didn't have
2652 one of these modes should have been converted explicitly
2653 and then referenced through a subreg.
2654 This is so because the original bit-field was
2655 handled by agg_mode and so its tree structure had
2656 the same mode that memref now has. */
2657 abort ();
2658 }
2659 else
2660 {
2661 rtx dest = SET_DEST (body);
2662
2663 while (GET_CODE (dest) == SUBREG
2664 && SUBREG_WORD (dest) == 0
2665 && (GET_MODE_CLASS (GET_MODE (dest))
2666 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2667 dest = SUBREG_REG (dest);
2668
2669 validate_change (insn, &SET_DEST (body), dest, 1);
2670
2671 if (GET_MODE (dest) == GET_MODE (memref))
2672 validate_change (insn, &SET_SRC (body), memref, 1);
2673 else
2674 {
2675 /* Convert the mem ref to the destination mode. */
2676 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2677
2678 start_sequence ();
2679 convert_move (newreg, memref,
2680 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2681 seq = get_insns ();
2682 end_sequence ();
2683
2684 validate_change (insn, &SET_SRC (body), newreg, 1);
2685 }
2686 }
2687
2688 /* See if we can convert this extraction or insertion into
2689 a simple move insn. We might not be able to do so if this
2690 was, for example, part of a PARALLEL.
2691
2692 If we succeed, write out any needed conversions. If we fail,
2693 it is hard to guess why we failed, so don't do anything
2694 special; just let the optimization be suppressed. */
2695
2696 if (apply_change_group () && seq)
2697 emit_insns_before (seq, insn);
2698 }
2699 }
2700 }
2701 \f
2702 /* These routines are responsible for converting virtual register references
2703 to the actual hard register references once RTL generation is complete.
2704
2705 The following four variables are used for communication between the
2706 routines. They contain the offsets of the virtual registers from their
2707 respective hard registers. */
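
/* For instance (illustrative): virtual_incoming_args_rtx is replaced
   by arg_pointer_rtx biased by in_arg_offset, and virtual_stack_vars_rtx
   by frame_pointer_rtx biased by var_offset; the offsets themselves are
   computed in instantiate_virtual_regs below.  */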
2708
2709 static int in_arg_offset;
2710 static int var_offset;
2711 static int dynamic_offset;
2712 static int out_arg_offset;
2713 static int cfa_offset;
2714
2715 /* In most machines, the stack pointer register is equivalent to the bottom
2716 of the stack. */
2717
2718 #ifndef STACK_POINTER_OFFSET
2719 #define STACK_POINTER_OFFSET 0
2720 #endif
2721
2722 /* If not defined, pick an appropriate default for the offset of dynamically
2723 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2724 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2725
2726 #ifndef STACK_DYNAMIC_OFFSET
2727
2728 #ifdef ACCUMULATE_OUTGOING_ARGS
2729 /* The bottom of the stack points to the actual arguments. If
2730 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2731 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2732 stack space for register parameters is not pushed by the caller, but
2733 rather part of the fixed stack areas and hence not included in
2734 `current_function_outgoing_args_size'. Nevertheless, we must allow
2735 for it when allocating stack dynamic objects. */
2736
2737 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2738 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2739 (current_function_outgoing_args_size \
2740 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2741
2742 #else
2743 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2744 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2745 #endif
2746
2747 #else
2748 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2749 #endif
2750 #endif
2751
2752 /* On a few machines, the CFA coincides with the arg pointer. */
2753
2754 #ifndef ARG_POINTER_CFA_OFFSET
2755 #define ARG_POINTER_CFA_OFFSET 0
2756 #endif
2757
2758
2759 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2760 its address taken. DECL is the decl for the object stored in the
2761 register, for later use if we do need to force REG into the stack.
2762 REG is overwritten by the MEM, as in put_reg_into_stack. */
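
/* The resulting shape (illustrative) is
   (mem:DECL_MODE (addressof:Pmode (reg:REG_MODE NEW) ORIG_REGNO)),
   built in place on top of REG.  */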
2763
2764 rtx
2765 gen_mem_addressof (reg, decl)
2766 rtx reg;
2767 tree decl;
2768 {
2769 tree type = TREE_TYPE (decl);
2770 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2771 SET_ADDRESSOF_DECL (r, decl);
2772 /* If the original REG was a user-variable, then so is the REG whose
2773 address is being taken. */
2774 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2775
2776 XEXP (reg, 0) = r;
2777 PUT_CODE (reg, MEM);
2778 PUT_MODE (reg, DECL_MODE (decl));
2779 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2780 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2781 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2782
2783 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2784 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2785
2786 return reg;
2787 }
2788
2789 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2790
2791 void
2792 flush_addressof (decl)
2793 tree decl;
2794 {
2795 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2796 && DECL_RTL (decl) != 0
2797 && GET_CODE (DECL_RTL (decl)) == MEM
2798 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2799 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2800 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2801 }
2802
2803 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2804
2805 static void
2806 put_addressof_into_stack (r)
2807 rtx r;
2808 {
2809 tree decl = ADDRESSOF_DECL (r);
2810 rtx reg = XEXP (r, 0);
2811
2812 if (GET_CODE (reg) != REG)
2813 abort ();
2814
2815 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2816 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2817 ADDRESSOF_REGNO (r),
2818 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2819 }
2820
2821 /* List of replacements made below in purge_addressof_1 when creating
2822 bitfield insertions. */
2823 static rtx purge_addressof_replacements;
2824
2825 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2826 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2827 the stack. */
2828
2829 static void
2830 purge_addressof_1 (loc, insn, force, store)
2831 rtx *loc;
2832 rtx insn;
2833 int force, store;
2834 {
2835 rtx x;
2836 RTX_CODE code;
2837 int i, j;
2838 char *fmt;
2839
2840 /* Re-start here to avoid recursion in common cases. */
2841 restart:
2842
2843 x = *loc;
2844 if (x == 0)
2845 return;
2846
2847 code = GET_CODE (x);
2848
2849 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2850 {
2851 rtx insns;
2852 /* We must create a copy of the rtx because it was created by
2853 overwriting a REG rtx which is always shared. */
2854 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2855
2856 if (validate_change (insn, loc, sub, 0))
2857 return;
2858
2859 start_sequence ();
2860 if (! validate_change (insn, loc,
2861 force_operand (sub, NULL_RTX),
2862 0))
2863 abort ();
2864
2865 insns = gen_sequence ();
2866 end_sequence ();
2867 emit_insn_before (insns, insn);
2868 return;
2869 }
2870 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2871 {
2872 rtx sub = XEXP (XEXP (x, 0), 0);
2873
2874 if (GET_CODE (sub) == MEM)
2875 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2876
2877 if (GET_CODE (sub) == REG
2878 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2879 {
2880 put_addressof_into_stack (XEXP (x, 0));
2881 return;
2882 }
2883 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2884 {
2885 int size_x, size_sub;
2886
2887 if (!insn)
2888 {
2889 /* When processing REG_NOTES look at the list of
2890 replacements done on the insn to find the register that X
2891 was replaced by. */
2892 rtx tem;
2893
2894 for (tem = purge_addressof_replacements; tem != NULL_RTX;
2895 tem = XEXP (XEXP (tem, 1), 1))
2896 if (rtx_equal_p (x, XEXP (tem, 0)))
2897 {
2898 *loc = XEXP (XEXP (tem, 1), 0);
2899 return;
2900 }
2901
2902 /* There should always be such a replacement. */
2903 abort ();
2904 }
2905
2906 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2907 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2908
2909 /* Don't even consider working with paradoxical subregs,
2910 or the moral equivalent seen here. */
2911 if (size_x <= size_sub
2912 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2913 {
2914 /* Do a bitfield insertion to mirror what would happen
2915 in memory. */
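		  /* Sketch (illustrative; P, VAL, and SRC are hypothetical):
		     for (set (mem:HI (addressof (reg:SI P))) SRC)
		     the MEM is replaced by a fresh HImode pseudo VAL,
		     and VAL is then inserted into the low bits of P
		     with store_bit_field, mimicking the store that
		     would have happened in memory.  */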
2916
2917 rtx val, seq;
2918
2919 if (store)
2920 {
2921 start_sequence ();
2922 val = gen_reg_rtx (GET_MODE (x));
2923 if (! validate_change (insn, loc, val, 0))
2924 {
2925 /* Discard the current sequence and put the
2926 ADDRESSOF on stack. */
2927 end_sequence ();
2928 goto give_up;
2929 }
2930 seq = gen_sequence ();
2931 end_sequence ();
2932 emit_insn_before (seq, insn);
2933
2934 start_sequence ();
2935 store_bit_field (sub, size_x, 0, GET_MODE (x),
2936 val, GET_MODE_SIZE (GET_MODE (sub)),
2937 GET_MODE_SIZE (GET_MODE (sub)));
2938
2939 seq = gen_sequence ();
2940 end_sequence ();
2941 emit_insn_after (seq, insn);
2942 }
2943 else
2944 {
2945 start_sequence ();
2946 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2947 GET_MODE (x), GET_MODE (x),
2948 GET_MODE_SIZE (GET_MODE (sub)),
2949 GET_MODE_SIZE (GET_MODE (sub)));
2950
2951 if (! validate_change (insn, loc, val, 0))
2952 {
2953 /* Discard the current sequence and put the
2954 ADDRESSOF on stack. */
2955 end_sequence ();
2956 goto give_up;
2957 }
2958
2959 seq = gen_sequence ();
2960 end_sequence ();
2961 emit_insn_before (seq, insn);
2962 }
2963
2964 /* Remember the replacement so that the same one can be done
2965 on the REG_NOTES. */
2966 purge_addressof_replacements
2967 = gen_rtx_EXPR_LIST (VOIDmode, x,
2968 gen_rtx_EXPR_LIST (VOIDmode, val,
2969 purge_addressof_replacements));
2970
2971 /* We replaced with a reg -- all done. */
2972 return;
2973 }
2974 }
2975 else if (validate_change (insn, loc, sub, 0))
2976 goto restart;
2977 give_up:;
2978 /* Else give up and put it into the stack. */
2979 }
2980 else if (code == ADDRESSOF)
2981 {
2982 put_addressof_into_stack (x);
2983 return;
2984 }
2985 else if (code == SET)
2986 {
2987 purge_addressof_1 (&SET_DEST (x), insn, force, 1);
2988 purge_addressof_1 (&SET_SRC (x), insn, force, 0);
2989 return;
2990 }
2991
2992 /* Scan all subexpressions. */
2993 fmt = GET_RTX_FORMAT (code);
2994 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2995 {
2996 if (*fmt == 'e')
2997 purge_addressof_1 (&XEXP (x, i), insn, force, 0);
2998 else if (*fmt == 'E')
2999 for (j = 0; j < XVECLEN (x, i); j++)
3000 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
3001 }
3002 }
3003
3004 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3005 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3006 stack. */
3007
3008 void
3009 purge_addressof (insns)
3010 rtx insns;
3011 {
3012 rtx insn;
3013 for (insn = insns; insn; insn = NEXT_INSN (insn))
3014 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3015 || GET_CODE (insn) == CALL_INSN)
3016 {
3017 purge_addressof_1 (&PATTERN (insn), insn,
3018 asm_noperands (PATTERN (insn)) > 0, 0);
3019 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
3020 purge_addressof_replacements = 0;
3021 }
3022 }
3023 \f
3024 /* Pass through the INSNS of function FNDECL and convert virtual register
3025 references to hard register references. */
3026
3027 void
3028 instantiate_virtual_regs (fndecl, insns)
3029 tree fndecl;
3030 rtx insns;
3031 {
3032 rtx insn;
3033 int i;
3034
3035 /* Compute the offsets to use for this function. */
3036 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3037 var_offset = STARTING_FRAME_OFFSET;
3038 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3039 out_arg_offset = STACK_POINTER_OFFSET;
3040 cfa_offset = ARG_POINTER_CFA_OFFSET;
3041
3042 /* Scan all variables and parameters of this function. For each that is
3043 in memory, instantiate all virtual registers if the result is a valid
3044 address. If not, we do it later. That will handle most uses of virtual
3045 regs on many machines. */
3046 instantiate_decls (fndecl, 1);
3047
3048 /* Initialize recognition, indicating that volatile is OK. */
3049 init_recog ();
3050
3051 /* Scan through all the insns, instantiating every virtual register still
3052 present. */
3053 for (insn = insns; insn; insn = NEXT_INSN (insn))
3054 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3055 || GET_CODE (insn) == CALL_INSN)
3056 {
3057 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3058 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3059 }
3060
3061 /* Instantiate the stack slots for the parm registers, for later use in
3062 addressof elimination. */
3063 for (i = 0; i < max_parm_reg; ++i)
3064 if (parm_reg_stack_loc[i])
3065 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3066
3067 /* Now instantiate the remaining register equivalences for debugging info.
3068 These will not be valid addresses. */
3069 instantiate_decls (fndecl, 0);
3070
3071 /* Indicate that, from now on, assign_stack_local should use
3072 frame_pointer_rtx. */
3073 virtuals_instantiated = 1;
3074 }
3075
3076 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3077 all virtual registers in their DECL_RTL's.
3078
3079 If VALID_ONLY, do this only if the resulting address is still valid.
3080 Otherwise, always do it. */
3081
3082 static void
3083 instantiate_decls (fndecl, valid_only)
3084 tree fndecl;
3085 int valid_only;
3086 {
3087 tree decl;
3088
3089 if (DECL_SAVED_INSNS (fndecl))
3090 /* When compiling an inline function, the obstack used for
3091 rtl allocation is the maybepermanent_obstack. Calling
3092 `resume_temporary_allocation' switches us back to that
3093 obstack while we process this function's parameters. */
3094 resume_temporary_allocation ();
3095
3096 /* Process all parameters of the function. */
3097 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3098 {
3099 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3100
3101 instantiate_decl (DECL_RTL (decl), size, valid_only);
3102
3103 /* If the parameter was promoted, then the incoming RTL mode may be
3104 larger than the declared type size. We must use the larger of
3105 the two sizes. */
3106 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3107 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3108 }
3109
3110 /* Now process all variables defined in the function or its subblocks. */
3111 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3112
3113 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3114 {
3115 /* Save all rtl allocated for this function by raising the
3116 high-water mark on the maybepermanent_obstack. */
3117 preserve_data ();
3118 /* All further rtl allocation is now done in the current_obstack. */
3119 rtl_in_current_obstack ();
3120 }
3121 }
3122
3123 /* Subroutine of instantiate_decls: Process all decls in the given
3124 BLOCK node and all its subblocks. */
3125
3126 static void
3127 instantiate_decls_1 (let, valid_only)
3128 tree let;
3129 int valid_only;
3130 {
3131 tree t;
3132
3133 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3134 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3135 valid_only);
3136
3137 /* Process all subblocks. */
3138 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3139 instantiate_decls_1 (t, valid_only);
3140 }
3141
3142 /* Subroutine of the preceding procedures: Given RTL representing a
3143 decl and the size of the object, do any instantiation required.
3144
3145 If VALID_ONLY is non-zero, it means that the RTL should only be
3146 changed if the new address is valid. */
3147
3148 static void
3149 instantiate_decl (x, size, valid_only)
3150 rtx x;
3151 int size;
3152 int valid_only;
3153 {
3154 enum machine_mode mode;
3155 rtx addr;
3156
3157 /* If this is not a MEM, no need to do anything. Similarly if the
3158 address is a constant or a register that is not a virtual register. */
3159
3160 if (x == 0 || GET_CODE (x) != MEM)
3161 return;
3162
3163 addr = XEXP (x, 0);
3164 if (CONSTANT_P (addr)
3165 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3166 || (GET_CODE (addr) == REG
3167 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3168 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3169 return;
3170
3171 /* If we should only do this if the address is valid, copy the address.
3172 We need to do this so we can undo any changes that might make the
3173 address invalid. This copy is unfortunate, but probably can't be
3174 avoided. */
3175
3176 if (valid_only)
3177 addr = copy_rtx (addr);
3178
3179 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3180
3181 if (valid_only)
3182 {
3183 /* Now verify that the resulting address is valid for every integer or
3184 floating-point mode up to and including SIZE bytes long. We do this
3185 since the object might be accessed in any mode and frame addresses
3186 are shared. */
3187
3188 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3189 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3190 mode = GET_MODE_WIDER_MODE (mode))
3191 if (! memory_address_p (mode, addr))
3192 return;
3193
3194 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3195 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3196 mode = GET_MODE_WIDER_MODE (mode))
3197 if (! memory_address_p (mode, addr))
3198 return;
3199 }
3200
3201 /* Put back the address now that we have updated it and we either know
3202 it is valid or we don't care whether it is valid. */
3203
3204 XEXP (x, 0) = addr;
3205 }
3206 \f
3207 /* Given a pointer to a piece of rtx and an optional pointer to the
3208 containing object, instantiate any virtual registers present in it.
3209
3210 If EXTRA_INSNS, we always do the replacement and generate
3211 any extra insns before OBJECT. If it is zero, we do nothing if replacement
3212 is not valid.
3213
3214 Return 1 if we either had nothing to do or if we were able to do the
3215 needed replacement. Return 0 otherwise; we only return zero if
3216 EXTRA_INSNS is zero.
3217
3218 We first try some simple transformations to avoid the creation of extra
3219 pseudos. */
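
/* Worked example (illustrative): with var_offset == 16,
   (plus:SI (reg virtual-stack-vars) (const_int 4)) is rewritten as
   (plus:SI (reg frame-pointer) (const_int 20)); if OBJECT does not
   accept that form and EXTRA_INSNS is set, the sum is computed into
   a new pseudo by insns emitted before OBJECT.  */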
3220
3221 static int
3222 instantiate_virtual_regs_1 (loc, object, extra_insns)
3223 rtx *loc;
3224 rtx object;
3225 int extra_insns;
3226 {
3227 rtx x;
3228 RTX_CODE code;
3229 rtx new = 0;
3230 HOST_WIDE_INT offset;
3231 rtx temp;
3232 rtx seq;
3233 int i, j;
3234 char *fmt;
3235
3236 /* Re-start here to avoid recursion in common cases. */
3237 restart:
3238
3239 x = *loc;
3240 if (x == 0)
3241 return 1;
3242
3243 code = GET_CODE (x);
3244
3245 /* Check for some special cases. */
3246 switch (code)
3247 {
3248 case CONST_INT:
3249 case CONST_DOUBLE:
3250 case CONST:
3251 case SYMBOL_REF:
3252 case CODE_LABEL:
3253 case PC:
3254 case CC0:
3255 case ASM_INPUT:
3256 case ADDR_VEC:
3257 case ADDR_DIFF_VEC:
3258 case RETURN:
3259 return 1;
3260
3261 case SET:
3262 /* We are allowed to set the virtual registers. This means that
3263 the actual register should receive the source minus the
3264 appropriate offset. This is used, for example, in the handling
3265 of non-local gotos. */
3266 if (SET_DEST (x) == virtual_incoming_args_rtx)
3267 new = arg_pointer_rtx, offset = - in_arg_offset;
3268 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3269 new = frame_pointer_rtx, offset = - var_offset;
3270 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3271 new = stack_pointer_rtx, offset = - dynamic_offset;
3272 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3273 new = stack_pointer_rtx, offset = - out_arg_offset;
3274 else if (SET_DEST (x) == virtual_cfa_rtx)
3275 new = arg_pointer_rtx, offset = - cfa_offset;
3276
3277 if (new)
3278 {
3279 /* The only valid sources here are PLUS or REG. Just do
3280 the simplest possible thing to handle them. */
3281 if (GET_CODE (SET_SRC (x)) != REG
3282 && GET_CODE (SET_SRC (x)) != PLUS)
3283 abort ();
3284
3285 start_sequence ();
3286 if (GET_CODE (SET_SRC (x)) != REG)
3287 temp = force_operand (SET_SRC (x), NULL_RTX);
3288 else
3289 temp = SET_SRC (x);
3290 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3291 seq = get_insns ();
3292 end_sequence ();
3293
3294 emit_insns_before (seq, object);
3295 SET_DEST (x) = new;
3296
3297 if (! validate_change (object, &SET_SRC (x), temp, 0)
3298 || ! extra_insns)
3299 abort ();
3300
3301 return 1;
3302 }
3303
3304 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3305 loc = &SET_SRC (x);
3306 goto restart;
3307
3308 case PLUS:
3309 /* Handle special case of virtual register plus constant. */
3310 if (CONSTANT_P (XEXP (x, 1)))
3311 {
3312 rtx old, new_offset;
3313
3314 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3315 if (GET_CODE (XEXP (x, 0)) == PLUS)
3316 {
3317 rtx inner = XEXP (XEXP (x, 0), 0);
3318
3319 if (inner == virtual_incoming_args_rtx)
3320 new = arg_pointer_rtx, offset = in_arg_offset;
3321 else if (inner == virtual_stack_vars_rtx)
3322 new = frame_pointer_rtx, offset = var_offset;
3323 else if (inner == virtual_stack_dynamic_rtx)
3324 new = stack_pointer_rtx, offset = dynamic_offset;
3325 else if (inner == virtual_outgoing_args_rtx)
3326 new = stack_pointer_rtx, offset = out_arg_offset;
3327 else if (inner == virtual_cfa_rtx)
3328 new = arg_pointer_rtx, offset = cfa_offset;
3329 else
3330 {
3331 loc = &XEXP (x, 0);
3332 goto restart;
3333 }
3334
3335 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3336 extra_insns);
3337 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3338 }
3339
3340 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3341 new = arg_pointer_rtx, offset = in_arg_offset;
3342 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3343 new = frame_pointer_rtx, offset = var_offset;
3344 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3345 new = stack_pointer_rtx, offset = dynamic_offset;
3346 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3347 new = stack_pointer_rtx, offset = out_arg_offset;
3348 else if (XEXP (x, 0) == virtual_cfa_rtx)
3349 new = arg_pointer_rtx, offset = cfa_offset;
3350 else
3351 {
3352 /* We know the second operand is a constant. Unless the
3353 first operand is a REG (which has already been checked),
3354 it needs to be checked. */
3355 if (GET_CODE (XEXP (x, 0)) != REG)
3356 {
3357 loc = &XEXP (x, 0);
3358 goto restart;
3359 }
3360 return 1;
3361 }
3362
3363 new_offset = plus_constant (XEXP (x, 1), offset);
3364
3365 /* If the new constant is zero, try to replace the sum with just
3366 the register. */
3367 if (new_offset == const0_rtx
3368 && validate_change (object, loc, new, 0))
3369 return 1;
3370
3371 /* Next try to replace the register and new offset.
3372 There are two changes to validate here and we can't assume that,
3373 when the old offset equals the new one, just changing the register
3374 will yield a valid insn. In the interests of a little efficiency,
3375 however, we only call validate_change once (we don't queue up the
3376 changes and then call apply_change_group). */
3377
3378 old = XEXP (x, 0);
3379 if (offset == 0
3380 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3381 : (XEXP (x, 0) = new,
3382 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3383 {
3384 if (! extra_insns)
3385 {
3386 XEXP (x, 0) = old;
3387 return 0;
3388 }
3389
3390 /* Otherwise copy the new constant into a register and replace
3391 constant with that register. */
3392 temp = gen_reg_rtx (Pmode);
3393 XEXP (x, 0) = new;
3394 if (validate_change (object, &XEXP (x, 1), temp, 0))
3395 emit_insn_before (gen_move_insn (temp, new_offset), object);
3396 else
3397 {
3398 /* If that didn't work, replace this expression with a
3399 register containing the sum. */
3400
3401 XEXP (x, 0) = old;
3402 new = gen_rtx_PLUS (Pmode, new, new_offset);
3403
3404 start_sequence ();
3405 temp = force_operand (new, NULL_RTX);
3406 seq = get_insns ();
3407 end_sequence ();
3408
3409 emit_insns_before (seq, object);
3410 if (! validate_change (object, loc, temp, 0)
3411 && ! validate_replace_rtx (x, temp, object))
3412 abort ();
3413 }
3414 }
3415
3416 return 1;
3417 }
3418
3419 /* Fall through to generic two-operand expression case. */
3420 case EXPR_LIST:
3421 case CALL:
3422 case COMPARE:
3423 case MINUS:
3424 case MULT:
3425 case DIV: case UDIV:
3426 case MOD: case UMOD:
3427 case AND: case IOR: case XOR:
3428 case ROTATERT: case ROTATE:
3429 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3430 case NE: case EQ:
3431 case GE: case GT: case GEU: case GTU:
3432 case LE: case LT: case LEU: case LTU:
3433 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3434 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3435 loc = &XEXP (x, 0);
3436 goto restart;
3437
3438 case MEM:
3439 /* Most cases of MEM that convert to valid addresses have already been
3440 handled by our scan of decls. The only special handling we
3441 need here is to make a copy of the rtx to ensure it isn't being
3442 shared if we have to change it to a pseudo.
3443
3444 If the rtx is a simple reference to an address via a virtual register,
3445 it can potentially be shared. In such cases, first try to make it
3446 a valid address, which can also be shared. Otherwise, copy it and
3447 proceed normally.
3448
3449 First check for common cases that need no processing. These are
3450 usually due to instantiation already being done on a previous instance
3451 of a shared rtx. */
3452
3453 temp = XEXP (x, 0);
3454 if (CONSTANT_ADDRESS_P (temp)
3455 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3456 || temp == arg_pointer_rtx
3457 #endif
3458 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3459 || temp == hard_frame_pointer_rtx
3460 #endif
3461 || temp == frame_pointer_rtx)
3462 return 1;
3463
3464 if (GET_CODE (temp) == PLUS
3465 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3466 && (XEXP (temp, 0) == frame_pointer_rtx
3467 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3468 || XEXP (temp, 0) == hard_frame_pointer_rtx
3469 #endif
3470 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3471 || XEXP (temp, 0) == arg_pointer_rtx
3472 #endif
3473 ))
3474 return 1;
3475
3476 if (temp == virtual_stack_vars_rtx
3477 || temp == virtual_incoming_args_rtx
3478 || (GET_CODE (temp) == PLUS
3479 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3480 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3481 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3482 {
3483 /* This MEM may be shared. If the substitution can be done without
3484 the need to generate new pseudos, we want to do it in place
3485 so all copies of the shared rtx benefit. The call below will
3486 only make substitutions if the resulting address is still
3487 valid.
3488
3489 Note that we cannot pass X as the object in the recursive call
3490 since the insn being processed may not allow all valid
3491 addresses. However, if we were not passed an object, we can
3492 only modify X without copying it if X will have a valid
3493 address.
3494
3495 ??? Also note that this can still lose if OBJECT is an insn that
3496 has fewer restrictions on an address than some other insn.
3497 In that case, we will modify the shared address. This case
3498 doesn't seem very likely, though. One case where this could
3499 happen is in the case of a USE or CLOBBER reference, but we
3500 take care of that below. */
3501
3502 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3503 object ? object : x, 0))
3504 return 1;
3505
3506 /* Otherwise make a copy and process that copy. We copy the entire
3507 RTL expression since it might be a PLUS which could also be
3508 shared. */
3509 *loc = x = copy_rtx (x);
3510 }
3511
3512 /* Fall through to generic unary operation case. */
3513 case SUBREG:
3514 case STRICT_LOW_PART:
3515 case NEG: case NOT:
3516 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3517 case SIGN_EXTEND: case ZERO_EXTEND:
3518 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3519 case FLOAT: case FIX:
3520 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3521 case ABS:
3522 case SQRT:
3523 case FFS:
3524 /* These cases either have just one operand, or we know that we need not
3525 check the rest of the operands. */
3526 loc = &XEXP (x, 0);
3527 goto restart;
3528
3529 case USE:
3530 case CLOBBER:
3531 /* If the operand is a MEM, see if the change results in a valid MEM. If
3532 not, go ahead and make the invalid change, but do it to a copy. For a REG,
3533 just make the recursive call, since there's no chance of a problem. */
3534
3535 if ((GET_CODE (XEXP (x, 0)) == MEM
3536 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3537 0))
3538 || (GET_CODE (XEXP (x, 0)) == REG
3539 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3540 return 1;
3541
3542 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3543 loc = &XEXP (x, 0);
3544 goto restart;
3545
3546 case REG:
3547 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3548 in front of this insn and substitute the temporary. */
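/* For example, if X is virtual_stack_vars_rtx and var_offset is 16, we
   first try to substitute (plus frame_pointer_rtx (const_int 16)) in
   place; only if the insn rejects that address do we compute the sum
   into a fresh pseudo emitted just before OBJECT.  */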
3549 if (x == virtual_incoming_args_rtx)
3550 new = arg_pointer_rtx, offset = in_arg_offset;
3551 else if (x == virtual_stack_vars_rtx)
3552 new = frame_pointer_rtx, offset = var_offset;
3553 else if (x == virtual_stack_dynamic_rtx)
3554 new = stack_pointer_rtx, offset = dynamic_offset;
3555 else if (x == virtual_outgoing_args_rtx)
3556 new = stack_pointer_rtx, offset = out_arg_offset;
3557 else if (x == virtual_cfa_rtx)
3558 new = arg_pointer_rtx, offset = cfa_offset;
3559
3560 if (new)
3561 {
3562 temp = plus_constant (new, offset);
3563 if (!validate_change (object, loc, temp, 0))
3564 {
3565 if (! extra_insns)
3566 return 0;
3567
3568 start_sequence ();
3569 temp = force_operand (temp, NULL_RTX);
3570 seq = get_insns ();
3571 end_sequence ();
3572
3573 emit_insns_before (seq, object);
3574 if (! validate_change (object, loc, temp, 0)
3575 && ! validate_replace_rtx (x, temp, object))
3576 abort ();
3577 }
3578 }
3579
3580 return 1;
3581
3582 case ADDRESSOF:
3583 if (GET_CODE (XEXP (x, 0)) == REG)
3584 return 1;
3585
3586 else if (GET_CODE (XEXP (x, 0)) == MEM)
3587 {
3588 /* If we have a (addressof (mem ..)), do any instantiation inside
3589 since we know we'll be making the inside valid when we finally
3590 remove the ADDRESSOF. */
3591 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3592 return 1;
3593 }
3594 break;
3595
3596 default:
3597 break;
3598 }
3599
3600 /* Scan all subexpressions. */
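/* In an RTL format string, `e' denotes an rtx operand and `E' a vector
   of rtx operands; operands with any other format code contain nothing
   that needs instantiation here.  */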
3601 fmt = GET_RTX_FORMAT (code);
3602 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3603 if (*fmt == 'e')
3604 {
3605 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3606 return 0;
3607 }
3608 else if (*fmt == 'E')
3609 for (j = 0; j < XVECLEN (x, i); j++)
3610 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3611 extra_insns))
3612 return 0;
3613
3614 return 1;
3615 }
3616 \f
3617 /* Optimization: assuming this function does not receive nonlocal gotos,
3618 delete the handlers for such, as well as the insns to establish
3619 and disestablish them. */
3620
3621 static void
3622 delete_handlers ()
3623 {
3624 rtx insn;
3625 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3626 {
3627 /* Delete the handler by turning off the flag that would
3628 prevent jump_optimize from deleting it.
3629 Also permit deletion of the nonlocal labels themselves
3630 if nothing local refers to them. */
3631 if (GET_CODE (insn) == CODE_LABEL)
3632 {
3633 tree t, last_t;
3634
3635 LABEL_PRESERVE_P (insn) = 0;
3636
3637 /* Remove it from the nonlocal_label list, to avoid confusing
3638 flow. */
3639 for (t = nonlocal_labels, last_t = 0; t;
3640 last_t = t, t = TREE_CHAIN (t))
3641 if (DECL_RTL (TREE_VALUE (t)) == insn)
3642 break;
3643 if (t)
3644 {
3645 if (! last_t)
3646 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3647 else
3648 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3649 }
3650 }
3651 if (GET_CODE (insn) == INSN
3652 && ((nonlocal_goto_handler_slot != 0
3653 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3654 || (nonlocal_goto_stack_level != 0
3655 && reg_mentioned_p (nonlocal_goto_stack_level,
3656 PATTERN (insn)))))
3657 delete_insn (insn);
3658 }
3659 }
3660
3661 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3662 of the current function. */
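/* Since gen_rtx_EXPR_LIST conses each label onto the front of X, the
   result lists the labels in the reverse order of the nonlocal_labels
   chain.  */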
3663
3664 rtx
3665 nonlocal_label_rtx_list ()
3666 {
3667 tree t;
3668 rtx x = 0;
3669
3670 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3671 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3672
3673 return x;
3674 }
3675 \f
3676 /* Output a USE for any register use in RTL.
3677 This is used with -noreg to mark the extent of the lifespan
3678 of any registers used in a user-visible variable's DECL_RTL. */
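/* For example, a register variable living in (reg:SI 42) gets a
   (use (reg:SI 42)) emitted at the end of the insn-chain.  */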
3679
3680 void
3681 use_variable (rtl)
3682 rtx rtl;
3683 {
3684 if (GET_CODE (rtl) == REG)
3685 /* This is a register variable. */
3686 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3687 else if (GET_CODE (rtl) == MEM
3688 && GET_CODE (XEXP (rtl, 0)) == REG
3689 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3690 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3691 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3692 /* This is a variable-sized structure. */
3693 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3694 }
3695
3696 /* Like use_variable except that it outputs the USEs after INSN
3697 instead of at the end of the insn-chain. */
3698
3699 void
3700 use_variable_after (rtl, insn)
3701 rtx rtl, insn;
3702 {
3703 if (GET_CODE (rtl) == REG)
3704 /* This is a register variable. */
3705 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3706 else if (GET_CODE (rtl) == MEM
3707 && GET_CODE (XEXP (rtl, 0)) == REG
3708 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3709 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3710 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3711 /* This is a variable-sized structure. */
3712 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3713 }
3714 \f
3715 int
3716 max_parm_reg_num ()
3717 {
3718 return max_parm_reg;
3719 }
3720
3721 /* Return the first insn following those generated by `assign_parms'. */
3722
3723 rtx
3724 get_first_nonparm_insn ()
3725 {
3726 if (last_parm_insn)
3727 return NEXT_INSN (last_parm_insn);
3728 return get_insns ();
3729 }
3730
3731 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3732 Crash if there is none. */
3733
3734 rtx
3735 get_first_block_beg ()
3736 {
3737 register rtx searcher;
3738 register rtx insn = get_first_nonparm_insn ();
3739
3740 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3741 if (GET_CODE (searcher) == NOTE
3742 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3743 return searcher;
3744
3745 abort (); /* Invalid call to this function. (See comments above.) */
3746 return NULL_RTX;
3747 }
3748
3749 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3750 This means a type for which function calls must pass an address to the
3751 function or get an address back from the function.
3752 EXP may be a type node or an expression (whose type is tested). */
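/* For example, with -fpcc-struct-return every structure or union value
   is returned in memory, even one small enough to fit in a register;
   likewise any value whose return registers are not all call-clobbered.  */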
3753
3754 int
3755 aggregate_value_p (exp)
3756 tree exp;
3757 {
3758 int i, regno, nregs;
3759 rtx reg;
3760 tree type;
3761 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3762 type = exp;
3763 else
3764 type = TREE_TYPE (exp);
3765
3766 if (RETURN_IN_MEMORY (type))
3767 return 1;
3768 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3769 and thus can't be returned in registers. */
3770 if (TREE_ADDRESSABLE (type))
3771 return 1;
3772 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3773 return 1;
3774 /* Make sure we have suitable call-clobbered regs to return
3775 the value in; if not, we must return it in memory. */
3776 reg = hard_function_value (type, 0);
3777
3778 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3779 it is OK. */
3780 if (GET_CODE (reg) != REG)
3781 return 0;
3782
3783 regno = REGNO (reg);
3784 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3785 for (i = 0; i < nregs; i++)
3786 if (! call_used_regs[regno + i])
3787 return 1;
3788 return 0;
3789 }
3790 \f
3791 /* Assign RTL expressions to the function's parameters.
3792 This may involve copying them into registers and using
3793 those registers as the RTL for them.
3794
3795 If SECOND_TIME is non-zero it means that this function is being
3796 called a second time. This is done by integrate.c when a function's
3797 compilation is deferred. We need to come back here in case the
3798 FUNCTION_ARG macro computes items needed for the rest of the compilation
3799 (such as changing which registers are fixed or caller-saved). But suppress
3800 writing any insns or setting DECL_RTL of anything in this case. */
3801
3802 void
3803 assign_parms (fndecl, second_time)
3804 tree fndecl;
3805 int second_time;
3806 {
3807 register tree parm;
3808 register rtx entry_parm = 0;
3809 register rtx stack_parm = 0;
3810 CUMULATIVE_ARGS args_so_far;
3811 enum machine_mode promoted_mode, passed_mode;
3812 enum machine_mode nominal_mode, promoted_nominal_mode;
3813 int unsignedp;
3814 /* Total space needed so far for args on the stack,
3815 given as a constant and a tree-expression. */
3816 struct args_size stack_args_size;
3817 tree fntype = TREE_TYPE (fndecl);
3818 tree fnargs = DECL_ARGUMENTS (fndecl);
3819 /* This is used for the arg pointer when referring to stack args. */
3820 rtx internal_arg_pointer;
3821 /* This is a dummy PARM_DECL that we use for the function result if
3822 the function returns a structure. */
3823 tree function_result_decl = 0;
3824 int varargs_setup = 0;
3825 rtx conversion_insns = 0;
3826
3827 /* Nonzero if the last arg is named `__builtin_va_alist',
3828 which is used on some machines for old-fashioned non-ANSI varargs.h;
3829 this should be stuck onto the stack as if it had arrived there. */
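/* With GCC's traditional varargs.h, such a function is written
   `int f (va_alist) va_dcl', and the macros expand the parameter list
   into a final argument named __builtin_va_alist.  */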
3830 int hide_last_arg
3831 = (current_function_varargs
3832 && fnargs
3833 && (parm = tree_last (fnargs)) != 0
3834 && DECL_NAME (parm)
3835 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3836 "__builtin_va_alist")));
3837
3838 /* Nonzero if function takes extra anonymous args.
3839 This means the last named arg must be on the stack
3840 right before the anonymous ones. */
3841 int stdarg
3842 = (TYPE_ARG_TYPES (fntype) != 0
3843 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3844 != void_type_node));
3845
3846 current_function_stdarg = stdarg;
3847
3848 /* If the reg that the virtual arg pointer will be translated into is
3849 not a fixed reg or is the stack pointer, make a copy of the virtual
3850 arg pointer, and address parms via the copy. The frame pointer is
3851 considered fixed even though it is not marked as such.
3852
3853 The second time through, simply use ap to avoid generating rtx. */
3854
3855 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3856 || ! (fixed_regs[ARG_POINTER_REGNUM]
3857 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3858 && ! second_time)
3859 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3860 else
3861 internal_arg_pointer = virtual_incoming_args_rtx;
3862 current_function_internal_arg_pointer = internal_arg_pointer;
3863
3864 stack_args_size.constant = 0;
3865 stack_args_size.var = 0;
3866
3867 /* If struct value address is treated as the first argument, make it so. */
3868 if (aggregate_value_p (DECL_RESULT (fndecl))
3869 && ! current_function_returns_pcc_struct
3870 && struct_value_incoming_rtx == 0)
3871 {
3872 tree type = build_pointer_type (TREE_TYPE (fntype));
3873
3874 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3875
3876 DECL_ARG_TYPE (function_result_decl) = type;
3877 TREE_CHAIN (function_result_decl) = fnargs;
3878 fnargs = function_result_decl;
3879 }
3880
3881 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3882 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3883 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3884
3885 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3886 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3887 #else
3888 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3889 #endif
3890
3891 /* We haven't yet found an argument that we must push and pretend the
3892 caller did. */
3893 current_function_pretend_args_size = 0;
3894
3895 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3896 {
3897 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3898 struct args_size stack_offset;
3899 struct args_size arg_size;
3900 int passed_pointer = 0;
3901 int did_conversion = 0;
3902 tree passed_type = DECL_ARG_TYPE (parm);
3903 tree nominal_type = TREE_TYPE (parm);
3904
3905 /* Set LAST_NAMED if this is last named arg before some
3906 anonymous args. */
3907 int last_named = ((TREE_CHAIN (parm) == 0
3908 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3909 && (stdarg || current_function_varargs));
3910 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3911 most machines, if this is a varargs/stdarg function, then we treat
3912 the last named arg as if it were anonymous too. */
3913 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3914
3915 if (TREE_TYPE (parm) == error_mark_node
3916 /* This can happen after weird syntax errors
3917 or if an enum type is defined among the parms. */
3918 || TREE_CODE (parm) != PARM_DECL
3919 || passed_type == NULL)
3920 {
3921 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3922 = gen_rtx_MEM (BLKmode, const0_rtx);
3923 TREE_USED (parm) = 1;
3924 continue;
3925 }
3926
3927 /* For varargs.h function, save info about regs and stack space
3928 used by the individual args, not including the va_alist arg. */
3929 if (hide_last_arg && last_named)
3930 current_function_args_info = args_so_far;
3931
3932 /* Find mode of arg as it is passed, and mode of arg
3933 as it should be during execution of this function. */
3934 passed_mode = TYPE_MODE (passed_type);
3935 nominal_mode = TYPE_MODE (nominal_type);
3936
3937 /* If the parm's mode is VOID, its value doesn't matter;
3938 avoid the usual things like emit_move_insn, which could crash. */
3939 if (nominal_mode == VOIDmode)
3940 {
3941 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3942 continue;
3943 }
3944
3945 /* If the parm is to be passed as a transparent union, use the
3946 type of the first field for the tests below. We have already
3947 verified that the modes are the same. */
3948 if (DECL_TRANSPARENT_UNION (parm)
3949 || TYPE_TRANSPARENT_UNION (passed_type))
3950 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3951
3952 /* See if this arg was passed by invisible reference. It is if
3953 it is an object whose size depends on the contents of the
3954 object itself or if the machine requires these objects be passed
3955 that way. */
3956
3957 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3958 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3959 || TREE_ADDRESSABLE (passed_type)
3960 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3961 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3962 passed_type, named_arg)
3963 #endif
3964 )
3965 {
3966 passed_type = nominal_type = build_pointer_type (passed_type);
3967 passed_pointer = 1;
3968 passed_mode = nominal_mode = Pmode;
3969 }
3970
3971 promoted_mode = passed_mode;
3972
3973 #ifdef PROMOTE_FUNCTION_ARGS
3974 /* Compute the mode to which the arg is actually extended. */
3975 unsignedp = TREE_UNSIGNED (passed_type);
3976 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
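/* For example, a `short' parm may arrive sign- or zero-extended in an
   SImode register; promote_mode reports that wider mode and updates
   UNSIGNEDP to say which kind of extension was used.  */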
3977 #endif
3978
3979 /* Let machine desc say which reg (if any) the parm arrives in.
3980 0 means it arrives on the stack. */
3981 #ifdef FUNCTION_INCOMING_ARG
3982 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3983 passed_type, named_arg);
3984 #else
3985 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3986 passed_type, named_arg);
3987 #endif
3988
3989 if (entry_parm == 0)
3990 promoted_mode = passed_mode;
3991
3992 #ifdef SETUP_INCOMING_VARARGS
3993 /* If this is the last named parameter, do any required setup for
3994 varargs or stdargs. We need to know about the case of this being an
3995 addressable type, in which case we skip the registers it
3996 would have arrived in.
3997
3998 For stdargs, LAST_NAMED will be set for two parameters, the one that
3999 is actually the last named, and the dummy parameter. We only
4000 want to do this action once.
4001
4002 Also, indicate when RTL generation is to be suppressed. */
4003 if (last_named && !varargs_setup)
4004 {
4005 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4006 current_function_pretend_args_size,
4007 second_time);
4008 varargs_setup = 1;
4009 }
4010 #endif
4011
4012 /* Determine parm's home in the stack,
4013 in case it arrives in the stack or we should pretend it did.
4014
4015 Compute the stack position and rtx where the argument arrives
4016 and its size.
4017
4018 There is one complexity here: If this was a parameter that would
4019 have been passed in registers, but wasn't only because it is
4020 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4021 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4022 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4023 0 as it was the previous time. */
4024
4025 locate_and_pad_parm (promoted_mode, passed_type,
4026 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4027 1,
4028 #else
4029 #ifdef FUNCTION_INCOMING_ARG
4030 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4031 passed_type,
4032 (named_arg
4033 || varargs_setup)) != 0,
4034 #else
4035 FUNCTION_ARG (args_so_far, promoted_mode,
4036 passed_type,
4037 named_arg || varargs_setup) != 0,
4038 #endif
4039 #endif
4040 fndecl, &stack_args_size, &stack_offset, &arg_size);
4041
4042 if (! second_time)
4043 {
4044 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4045
4046 if (offset_rtx == const0_rtx)
4047 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4048 else
4049 stack_parm = gen_rtx_MEM (promoted_mode,
4050 gen_rtx_PLUS (Pmode,
4051 internal_arg_pointer,
4052 offset_rtx));
4053
4054 /* If this is a memory ref that contains aggregate components,
4055 mark it as such for cse and loop optimize. Likewise if it
4056 is readonly. */
4057 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4058 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4059 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4060 }
4061
4062 /* If this parameter was passed both in registers and in the stack,
4063 use the copy on the stack. */
4064 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4065 entry_parm = 0;
4066
4067 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4068 /* If this parm was passed part in regs and part in memory,
4069 pretend it arrived entirely in memory
4070 by pushing the register-part onto the stack.
4071
4072 In the special case of a DImode or DFmode that is split,
4073 we could put it together in a pseudoreg directly,
4074 but for now that's not worth bothering with. */
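/* As a worked example: with nregs == 3, UNITS_PER_WORD == 4, and
   PARM_BOUNDARY == 64 bits, the computation below rounds 12 bytes of
   register data up to 16 bytes of pretended argument space.  */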
4075
4076 if (entry_parm)
4077 {
4078 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4079 passed_type, named_arg);
4080
4081 if (nregs > 0)
4082 {
4083 current_function_pretend_args_size
4084 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4085 / (PARM_BOUNDARY / BITS_PER_UNIT)
4086 * (PARM_BOUNDARY / BITS_PER_UNIT));
4087
4088 if (! second_time)
4089 {
4090 /* Handle calls that pass values in multiple non-contiguous
4091 locations. The Irix 6 ABI has examples of this. */
4092 if (GET_CODE (entry_parm) == PARALLEL)
4093 emit_group_store (validize_mem (stack_parm), entry_parm,
4094 int_size_in_bytes (TREE_TYPE (parm)),
4095 (TYPE_ALIGN (TREE_TYPE (parm))
4096 / BITS_PER_UNIT));
4097 else
4098 move_block_from_reg (REGNO (entry_parm),
4099 validize_mem (stack_parm), nregs,
4100 int_size_in_bytes (TREE_TYPE (parm)));
4101 }
4102 entry_parm = stack_parm;
4103 }
4104 }
4105 #endif
4106
4107 /* If we didn't decide this parm came in a register,
4108 by default it came on the stack. */
4109 if (entry_parm == 0)
4110 entry_parm = stack_parm;
4111
4112 /* Record permanently how this parm was passed. */
4113 if (! second_time)
4114 DECL_INCOMING_RTL (parm) = entry_parm;
4115
4116 /* If there is actually space on the stack for this parm,
4117 count it in stack_args_size; otherwise set stack_parm to 0
4118 to indicate there is no preallocated stack slot for the parm. */
4119
4120 if (entry_parm == stack_parm
4121 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4122 /* On some machines, even if a parm value arrives in a register
4123 there is still an (uninitialized) stack slot allocated for it.
4124
4125 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4126 whether this parameter already has a stack slot allocated,
4127 because an arg block exists only if current_function_args_size
4128 is larger than some threshold, and we haven't calculated that
4129 yet. So, for now, we just assume that stack slots never exist
4130 in this case. */
4131 || REG_PARM_STACK_SPACE (fndecl) > 0
4132 #endif
4133 )
4134 {
4135 stack_args_size.constant += arg_size.constant;
4136 if (arg_size.var)
4137 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4138 }
4139 else
4140 /* No stack slot was pushed for this parm. */
4141 stack_parm = 0;
4142
4143 /* Update info on where next arg arrives in registers. */
4144
4145 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4146 passed_type, named_arg);
4147
4148 /* If this is our second time through, we are done with this parm. */
4149 if (second_time)
4150 continue;
4151
4152 /* If we can't trust the parm stack slot to be aligned enough
4153 for its ultimate type, don't use that slot after entry.
4154 We'll make another stack slot, if we need one. */
4155 {
4156 int thisparm_boundary
4157 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4158
4159 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4160 stack_parm = 0;
4161 }
4162
4163 /* If parm was passed in memory, and we need to convert it on entry,
4164 don't store it back in that same slot. */
4165 if (entry_parm != 0
4166 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4167 stack_parm = 0;
4168
4169 #if 0
4170 /* Now adjust STACK_PARM to the mode and precise location
4171 where this parameter should live during execution,
4172 if we discover that it must live in the stack during execution.
4173 To make debuggers happier on big-endian machines, we store
4174 the value in the last bytes of the space available. */
4175
4176 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4177 && stack_parm != 0)
4178 {
4179 rtx offset_rtx;
4180
4181 if (BYTES_BIG_ENDIAN
4182 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4183 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4184 - GET_MODE_SIZE (nominal_mode));
4185
4186 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4187 if (offset_rtx == const0_rtx)
4188 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4189 else
4190 stack_parm = gen_rtx_MEM (nominal_mode,
4191 gen_rtx_PLUS (Pmode,
4192 internal_arg_pointer,
4193 offset_rtx));
4194
4195 /* If this is a memory ref that contains aggregate components,
4196 mark it as such for cse and loop optimize. */
4197 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4198 }
4199 #endif /* 0 */
4200
4201 #ifdef STACK_REGS
4202 /* We need this "use" info, because the gcc-register->stack-register
4203 converter in reg-stack.c needs to know which registers are active
4204 at the start of the function. The actual parameter loading
4205 instructions may no longer be available at that point, since they
4206 might have been optimized away. */
4207
4208 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4209 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4210 #endif
4211
4212 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4213 in the mode in which it arrives.
4214 STACK_PARM is an RTX for a stack slot where the parameter can live
4215 during the function (in case we want to put it there).
4216 STACK_PARM is 0 if no stack slot was pushed for it.
4217
4218 Now output code if necessary to convert ENTRY_PARM to
4219 the type in which this function declares it,
4220 and store that result in an appropriate place,
4221 which may be a pseudo reg, may be STACK_PARM,
4222 or may be a local stack slot if STACK_PARM is 0.
4223
4224 Set DECL_RTL to that place. */
4225
4226 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4227 {
4228 /* If a BLKmode arrives in registers, copy it to a stack slot.
4229 Handle calls that pass values in multiple non-contiguous
4230 locations. The Irix 6 ABI has examples of this. */
4231 if (GET_CODE (entry_parm) == REG
4232 || GET_CODE (entry_parm) == PARALLEL)
4233 {
4234 int size_stored
4235 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4236 UNITS_PER_WORD);
4237
4238 /* Note that we will be storing an integral number of words.
4239 So we have to be careful to ensure that we allocate an
4240 integral number of words. We do this below in the
4241 assign_stack_local if space was not allocated in the argument
4242 list. If it was, this will not work if PARM_BOUNDARY is not
4243 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4244 if it becomes a problem. */
4245
4246 if (stack_parm == 0)
4247 {
4248 stack_parm
4249 = assign_stack_local (GET_MODE (entry_parm),
4250 size_stored, 0);
4251
4252 /* If this is a memory ref that contains aggregate
4253 components, mark it as such for cse and loop optimize. */
4254 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4255 }
4256
4257 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4258 abort ();
4259
4260 if (TREE_READONLY (parm))
4261 RTX_UNCHANGING_P (stack_parm) = 1;
4262
4263 /* Handle calls that pass values in multiple non-contiguous
4264 locations. The Irix 6 ABI has examples of this. */
4265 if (GET_CODE (entry_parm) == PARALLEL)
4266 emit_group_store (validize_mem (stack_parm), entry_parm,
4267 int_size_in_bytes (TREE_TYPE (parm)),
4268 (TYPE_ALIGN (TREE_TYPE (parm))
4269 / BITS_PER_UNIT));
4270 else
4271 move_block_from_reg (REGNO (entry_parm),
4272 validize_mem (stack_parm),
4273 size_stored / UNITS_PER_WORD,
4274 int_size_in_bytes (TREE_TYPE (parm)));
4275 }
4276 DECL_RTL (parm) = stack_parm;
4277 }
4278 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4279 && ! DECL_INLINE (fndecl))
4280 /* layout_decl may set this. */
4281 || TREE_ADDRESSABLE (parm)
4282 || TREE_SIDE_EFFECTS (parm)
4283 /* If -ffloat-store specified, don't put explicit
4284 float variables into registers. */
4285 || (flag_float_store
4286 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4287 /* Always assign pseudo to structure return or item passed
4288 by invisible reference. */
4289 || passed_pointer || parm == function_result_decl)
4290 {
4291 /* Store the parm in a pseudoregister during the function, but we
4292 may need to do it in a wider mode. */
4293
4294 register rtx parmreg;
4295 int regno, regnoi = 0, regnor = 0;
4296
4297 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4298
4299 promoted_nominal_mode
4300 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4301
4302 parmreg = gen_reg_rtx (promoted_nominal_mode);
4303 mark_user_reg (parmreg);
4304
4305 /* If this was an item that we received a pointer to, set DECL_RTL
4306 appropriately. */
4307 if (passed_pointer)
4308 {
4309 DECL_RTL (parm)
4310 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4311 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4312 }
4313 else
4314 DECL_RTL (parm) = parmreg;
4315
4316 /* Copy the value into the register. */
4317 if (nominal_mode != passed_mode
4318 || promoted_nominal_mode != promoted_mode)
4319 {
4320 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4321 mode, by the caller. We now have to convert it to
4322 NOMINAL_MODE, if different. However, PARMREG may be in
4323 a different mode than NOMINAL_MODE if it is being stored
4324 promoted.
4325
4326 If ENTRY_PARM is a hard register, it might be in a register
4327 not valid for operating in its mode (e.g., an odd-numbered
4328 register for a DFmode). In that case, moves are the only
4329 valid operation, so we can't convert from there. This
4330 occurs when the calling sequence allows such misaligned
4331 usage.
4332
4333 In addition, the conversion may involve a call, which could
4334 clobber parameters which haven't been copied to pseudo
4335 registers yet. Therefore, we must first copy the parm to
4336 a pseudo reg here, and save the conversion until after all
4337 parameters have been moved. */
4338
4339 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4340
4341 emit_move_insn (tempreg, validize_mem (entry_parm));
4342
4343 push_to_sequence (conversion_insns);
4344 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4345
4346 expand_assignment (parm,
4347 make_tree (nominal_type, tempreg), 0, 0);
4348 conversion_insns = get_insns ();
4349 did_conversion = 1;
4350 end_sequence ();
4351 }
4352 else
4353 emit_move_insn (parmreg, validize_mem (entry_parm));
4354
4355 /* If we were passed a pointer but the actual value
4356 can safely live in a register, put it in one. */
4357 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4358 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4359 && ! DECL_INLINE (fndecl))
4360 /* layout_decl may set this. */
4361 || TREE_ADDRESSABLE (parm)
4362 || TREE_SIDE_EFFECTS (parm)
4363 /* If -ffloat-store specified, don't put explicit
4364 float variables into registers. */
4365 || (flag_float_store
4366 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4367 {
4368 /* We can't use nominal_mode, because it will have been set to
4369 Pmode above. We must use the actual mode of the parm. */
4370 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4371 mark_user_reg (parmreg);
4372 emit_move_insn (parmreg, DECL_RTL (parm));
4373 DECL_RTL (parm) = parmreg;
4374 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4375 now the parm. */
4376 stack_parm = 0;
4377 }
4378 #ifdef FUNCTION_ARG_CALLEE_COPIES
4379 /* If we are passed an arg by reference and it is our responsibility
4380 to make a copy, do it now.
4381 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4382 original argument, so we must recreate them in the call to
4383 FUNCTION_ARG_CALLEE_COPIES. */
4384 /* ??? Later add code to handle the case that if the argument isn't
4385 modified, don't do the copy. */
4386
4387 else if (passed_pointer
4388 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4389 TYPE_MODE (DECL_ARG_TYPE (parm)),
4390 DECL_ARG_TYPE (parm),
4391 named_arg)
4392 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4393 {
4394 rtx copy;
4395 tree type = DECL_ARG_TYPE (parm);
4396
4397 /* This sequence may involve a library call that could clobber
4398 registers that haven't been copied to pseudos yet. */
4399
4400 push_to_sequence (conversion_insns);
4401
4402 if (TYPE_SIZE (type) == 0
4403 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4404 /* This is a variable-sized object. */
4405 copy = gen_rtx_MEM (BLKmode,
4406 allocate_dynamic_stack_space
4407 (expr_size (parm), NULL_RTX,
4408 TYPE_ALIGN (type)));
4409 else
4410 copy = assign_stack_temp (TYPE_MODE (type),
4411 int_size_in_bytes (type), 1);
4412 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4413 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4414
4415 store_expr (parm, copy, 0);
4416 emit_move_insn (parmreg, XEXP (copy, 0));
4417 if (current_function_check_memory_usage)
4418 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4419 XEXP (copy, 0), ptr_mode,
4420 GEN_INT (int_size_in_bytes (type)),
4421 TYPE_MODE (sizetype),
4422 GEN_INT (MEMORY_USE_RW),
4423 TYPE_MODE (integer_type_node));
4424 conversion_insns = get_insns ();
4425 did_conversion = 1;
4426 end_sequence ();
4427 }
4428 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4429
4430 /* In any case, record the parm's desired stack location
4431 in case we later discover it must live in the stack.
4432
4433 If it is a COMPLEX value, store the stack location for both
4434 halves. */
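/* A complex parm, e.g. a DCmode value, is a CONCAT of two DFmode
   registers; we record separate stack locations for the real part
   (REGNOR) and the imaginary part (REGNOI).  */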
4435
4436 if (GET_CODE (parmreg) == CONCAT)
4437 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4438 else
4439 regno = REGNO (parmreg);
4440
4441 if (regno >= max_parm_reg)
4442 {
4443 rtx *new;
4444 int old_max_parm_reg = max_parm_reg;
4445
4446 /* It's slow to expand this one register at a time,
4447 but it's also rare and we need max_parm_reg to be
4448 precisely correct. */
4449 max_parm_reg = regno + 1;
4450 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4451 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4452 old_max_parm_reg * sizeof (rtx));
4453 bzero ((char *) (new + old_max_parm_reg),
4454 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4455 parm_reg_stack_loc = new;
4456 }
4457
4458 if (GET_CODE (parmreg) == CONCAT)
4459 {
4460 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4461
4462 regnor = REGNO (gen_realpart (submode, parmreg));
4463 regnoi = REGNO (gen_imagpart (submode, parmreg));
4464
4465 if (stack_parm != 0)
4466 {
4467 parm_reg_stack_loc[regnor]
4468 = gen_realpart (submode, stack_parm);
4469 parm_reg_stack_loc[regnoi]
4470 = gen_imagpart (submode, stack_parm);
4471 }
4472 else
4473 {
4474 parm_reg_stack_loc[regnor] = 0;
4475 parm_reg_stack_loc[regnoi] = 0;
4476 }
4477 }
4478 else
4479 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4480
4481 /* Mark the register as eliminable if we did no conversion
4482 and it was copied from memory at a fixed offset,
4483 and the arg pointer was not copied to a pseudo-reg.
4484 If the arg pointer is a pseudo reg or the offset formed
4485 an invalid address, such memory-equivalences
4486 as we make here would screw up life analysis for it. */
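/* A REG_EQUIV note tells reload that the register's value is always
   available in the given memory location, so the pseudo can be replaced
   by that memory if it fails to get a hard register.  */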
4487 if (nominal_mode == passed_mode
4488 && ! did_conversion
4489 && stack_parm != 0
4490 && GET_CODE (stack_parm) == MEM
4491 && stack_offset.var == 0
4492 && reg_mentioned_p (virtual_incoming_args_rtx,
4493 XEXP (stack_parm, 0)))
4494 {
4495 rtx linsn = get_last_insn ();
4496 rtx sinsn, set;
4497
4498 /* Mark complex types separately. */
4499 if (GET_CODE (parmreg) == CONCAT)
4500 /* Scan backwards for the set of the real and
4501 imaginary parts. */
4502 for (sinsn = linsn; sinsn != 0;
4503 sinsn = prev_nonnote_insn (sinsn))
4504 {
4505 set = single_set (sinsn);
4506 if (set != 0
4507 && SET_DEST (set) == regno_reg_rtx [regnoi])
4508 REG_NOTES (sinsn)
4509 = gen_rtx_EXPR_LIST (REG_EQUIV,
4510 parm_reg_stack_loc[regnoi],
4511 REG_NOTES (sinsn));
4512 else if (set != 0
4513 && SET_DEST (set) == regno_reg_rtx [regnor])
4514 REG_NOTES (sinsn)
4515 = gen_rtx_EXPR_LIST (REG_EQUIV,
4516 parm_reg_stack_loc[regnor],
4517 REG_NOTES (sinsn));
4518 }
4519 else if ((set = single_set (linsn)) != 0
4520 && SET_DEST (set) == parmreg)
4521 REG_NOTES (linsn)
4522 = gen_rtx_EXPR_LIST (REG_EQUIV,
4523 stack_parm, REG_NOTES (linsn));
4524 }
4525
4526 /* For pointer data type, suggest pointer register. */
4527 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4528 mark_reg_pointer (parmreg,
4529 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4530 / BITS_PER_UNIT));
4531 }
4532 else
4533 {
4534 /* Value must be stored in the stack slot STACK_PARM
4535 during function execution. */
4536
4537 if (promoted_mode != nominal_mode)
4538 {
4539 /* Conversion is required. */
4540 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4541
4542 emit_move_insn (tempreg, validize_mem (entry_parm));
4543
4544 push_to_sequence (conversion_insns);
4545 entry_parm = convert_to_mode (nominal_mode, tempreg,
4546 TREE_UNSIGNED (TREE_TYPE (parm)));
4547 if (stack_parm)
4548 {
4549 /* ??? This may need a big-endian conversion on sparc64. */
4550 stack_parm = change_address (stack_parm, nominal_mode,
4551 NULL_RTX);
4552 }
4553 conversion_insns = get_insns ();
4554 did_conversion = 1;
4555 end_sequence ();
4556 }
4557
4558 if (entry_parm != stack_parm)
4559 {
4560 if (stack_parm == 0)
4561 {
4562 stack_parm
4563 = assign_stack_local (GET_MODE (entry_parm),
4564 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4565 /* If this is a memory ref that contains aggregate components,
4566 mark it as such for cse and loop optimize. */
4567 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4568 }
4569
4570 if (promoted_mode != nominal_mode)
4571 {
4572 push_to_sequence (conversion_insns);
4573 emit_move_insn (validize_mem (stack_parm),
4574 validize_mem (entry_parm));
4575 conversion_insns = get_insns ();
4576 end_sequence ();
4577 }
4578 else
4579 emit_move_insn (validize_mem (stack_parm),
4580 validize_mem (entry_parm));
4581 }
4582 if (current_function_check_memory_usage)
4583 {
4584 push_to_sequence (conversion_insns);
4585 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4586 XEXP (stack_parm, 0), ptr_mode,
4587 GEN_INT (GET_MODE_SIZE (GET_MODE
4588 (entry_parm))),
4589 TYPE_MODE (sizetype),
4590 GEN_INT (MEMORY_USE_RW),
4591 TYPE_MODE (integer_type_node));
4592
4593 conversion_insns = get_insns ();
4594 end_sequence ();
4595 }
4596 DECL_RTL (parm) = stack_parm;
4597 }
4598
4599 /* If this "parameter" was the place where we are receiving the
4600 function's incoming structure pointer, set up the result. */
4601 if (parm == function_result_decl)
4602 {
4603 tree result = DECL_RESULT (fndecl);
4604 tree restype = TREE_TYPE (result);
4605
4606 DECL_RTL (result)
4607 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4608
4609 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4610 }
4611
4612 if (TREE_THIS_VOLATILE (parm))
4613 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4614 if (TREE_READONLY (parm))
4615 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4616 }
4617
4618 /* Output all parameter conversion instructions (possibly including calls)
4619 now that all parameters have been copied out of hard registers. */
4620 emit_insns (conversion_insns);
4621
4622 last_parm_insn = get_last_insn ();
4623
4624 current_function_args_size = stack_args_size.constant;
4625
4626 /* Adjust function incoming argument size for alignment and
4627 minimum length. */
4628
4629 #ifdef REG_PARM_STACK_SPACE
4630 #ifndef MAYBE_REG_PARM_STACK_SPACE
4631 current_function_args_size = MAX (current_function_args_size,
4632 REG_PARM_STACK_SPACE (fndecl));
4633 #endif
4634 #endif
4635
4636 #ifdef STACK_BOUNDARY
4637 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4638
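/* For example, with STACK_BOUNDARY == 64, STACK_BYTES is 8, so an args
   size of 20 bytes is rounded up to 24 here.  */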
4639 current_function_args_size
4640 = ((current_function_args_size + STACK_BYTES - 1)
4641 / STACK_BYTES) * STACK_BYTES;
4642 #endif
4643
4644 #ifdef ARGS_GROW_DOWNWARD
4645 current_function_arg_offset_rtx
4646 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4647 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4648 size_int (-stack_args_size.constant)),
4649 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4650 #else
4651 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4652 #endif
4653
4654 /* See how many bytes, if any, of its args a function should try to pop
4655 on return. */
4656
4657 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4658 current_function_args_size);
4659
4660 /* For stdarg.h function, save info about
4661 regs and stack space used by the named args. */
4662
4663 if (!hide_last_arg)
4664 current_function_args_info = args_so_far;
4665
4666 /* Set the rtx used for the function return value. Put this in its
4667 own variable so any optimizers that need this information don't have
4668 to include tree.h. Do this here so it gets done when an inlined
4669 function gets output. */
4670
4671 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4672 }
4673 \f
4674 /* Indicate whether REGNO is an incoming argument to the current function
4675 that was promoted to a wider mode. If so, return the RTX for the
4676 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4677 that REGNO is promoted from and whether the promotion was signed or
4678 unsigned. */
4679
4680 #ifdef PROMOTE_FUNCTION_ARGS
4681
4682 rtx
4683 promoted_input_arg (regno, pmode, punsignedp)
4684 int regno;
4685 enum machine_mode *pmode;
4686 int *punsignedp;
4687 {
4688 tree arg;
4689
4690 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4691 arg = TREE_CHAIN (arg))
4692 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4693 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4694 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4695 {
4696 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4697 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4698
4699 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4700 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4701 && mode != DECL_MODE (arg))
4702 {
4703 *pmode = DECL_MODE (arg);
4704 *punsignedp = unsignedp;
4705 return DECL_INCOMING_RTL (arg);
4706 }
4707 }
4708
4709 return 0;
4710 }
4711
4712 #endif
4713 \f
4714 /* Compute the size and offset from the start of the stacked arguments for a
4715 parm passed in mode PASSED_MODE and with type TYPE.
4716
4717 INITIAL_OFFSET_PTR points to the current offset into the stacked
4718 arguments.
4719
4720 The starting offset and size for this parm are returned in *OFFSET_PTR
4721 and *ARG_SIZE_PTR, respectively.
4722
4723 IN_REGS is non-zero if the argument will be passed in registers. It will
4724 never be set if REG_PARM_STACK_SPACE is not defined.
4725
4726 FNDECL is the function in which the argument was defined.
4727
4728 There are two types of rounding that are done. The first, controlled by
4729 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4730 list to be aligned to the specific boundary (in bits). This rounding
4731 affects the initial and starting offsets, but not the argument size.
4732
4733 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4734 optionally rounds the size of the parm to PARM_BOUNDARY. The
4735 initial offset is not affected by this rounding, while the size always
4736 is and the starting offset may be. */
4737
4738 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4739 initial_offset_ptr is positive because locate_and_pad_parm's
4740 callers pass in the total size of args so far as
4741 initial_offset_ptr. arg_size_ptr is always positive. */
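/* As an illustration: with a FUNCTION_ARG_BOUNDARY of 64 bits, an
   initial offset of 4 bytes is padded to 8 before the parm is placed;
   and with a PARM_BOUNDARY of 32 bits, the size of a 6-byte parm is
   rounded up to 8.  */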
4742
4743 void
4744 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4745 initial_offset_ptr, offset_ptr, arg_size_ptr)
4746 enum machine_mode passed_mode;
4747 tree type;
4748 int in_regs;
4749 tree fndecl;
4750 struct args_size *initial_offset_ptr;
4751 struct args_size *offset_ptr;
4752 struct args_size *arg_size_ptr;
4753 {
4754 tree sizetree
4755 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4756 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4757 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4758
4759 #ifdef REG_PARM_STACK_SPACE
4760 /* If we have found a stack parm before we reach the end of the
4761 area reserved for registers, skip that area. */
4762 if (! in_regs)
4763 {
4764 int reg_parm_stack_space = 0;
4765
4766 #ifdef MAYBE_REG_PARM_STACK_SPACE
4767 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4768 #else
4769 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4770 #endif
4771 if (reg_parm_stack_space > 0)
4772 {
4773 if (initial_offset_ptr->var)
4774 {
4775 initial_offset_ptr->var
4776 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4777 size_int (reg_parm_stack_space));
4778 initial_offset_ptr->constant = 0;
4779 }
4780 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4781 initial_offset_ptr->constant = reg_parm_stack_space;
4782 }
4783 }
4784 #endif /* REG_PARM_STACK_SPACE */
4785
4786 arg_size_ptr->var = 0;
4787 arg_size_ptr->constant = 0;
4788
4789 #ifdef ARGS_GROW_DOWNWARD
4790 if (initial_offset_ptr->var)
4791 {
4792 offset_ptr->constant = 0;
4793 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4794 initial_offset_ptr->var);
4795 }
4796 else
4797 {
4798 offset_ptr->constant = - initial_offset_ptr->constant;
4799 offset_ptr->var = 0;
4800 }
4801 if (where_pad != none
4802 && (TREE_CODE (sizetree) != INTEGER_CST
4803 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4804 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4805 SUB_PARM_SIZE (*offset_ptr, sizetree);
4806 if (where_pad != downward)
4807 pad_to_arg_alignment (offset_ptr, boundary);
4808 if (initial_offset_ptr->var)
4809 {
4810 arg_size_ptr->var = size_binop (MINUS_EXPR,
4811 size_binop (MINUS_EXPR,
4812 integer_zero_node,
4813 initial_offset_ptr->var),
4814 offset_ptr->var);
4815 }
4816 else
4817 {
4818 arg_size_ptr->constant = (- initial_offset_ptr->constant
4819 - offset_ptr->constant);
4820 }
4821 #else /* !ARGS_GROW_DOWNWARD */
4822 pad_to_arg_alignment (initial_offset_ptr, boundary);
4823 *offset_ptr = *initial_offset_ptr;
4824
4825 #ifdef PUSH_ROUNDING
4826 if (passed_mode != BLKmode)
4827 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4828 #endif
4829
4830 /* Pad_below needs the pre-rounded size to know how much to pad below,
4831 so this must be done before rounding up. */
4832 if (where_pad == downward
4833 /* However, BLKmode args passed in regs have their padding done elsewhere.
4834 The stack slot must be able to hold the entire register. */
4835 && !(in_regs && passed_mode == BLKmode))
4836 pad_below (offset_ptr, passed_mode, sizetree);
4837
4838 if (where_pad != none
4839 && (TREE_CODE (sizetree) != INTEGER_CST
4840 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4841 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4842
4843 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4844 #endif /* ARGS_GROW_DOWNWARD */
4845 }
4846
4847 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4848 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
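/* For example, with BOUNDARY == 64 a constant offset of 20 becomes
   CEIL_ROUND (20, 8) == 24 when args grow upward, and
   FLOOR_ROUND (20, 8) == 16 when they grow downward.  */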
4849
4850 static void
4851 pad_to_arg_alignment (offset_ptr, boundary)
4852 struct args_size *offset_ptr;
4853 int boundary;
4854 {
4855 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4856
4857 if (boundary > BITS_PER_UNIT)
4858 {
4859 if (offset_ptr->var)
4860 {
4861 offset_ptr->var =
4862 #ifdef ARGS_GROW_DOWNWARD
4863 round_down
4864 #else
4865 round_up
4866 #endif
4867 (ARGS_SIZE_TREE (*offset_ptr),
4868 boundary / BITS_PER_UNIT);
4869 offset_ptr->constant = 0; /*?*/
4870 }
4871 else
4872 offset_ptr->constant =
4873 #ifdef ARGS_GROW_DOWNWARD
4874 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4875 #else
4876 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4877 #endif
4878 }
4879 }
4880
4881 #ifndef ARGS_GROW_DOWNWARD
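/* Advance *OFFSET_PTR past the padding below a downward-padded parm.
   For example, an HImode parm (2 bytes) with PARM_BOUNDARY == 32
   occupies the upper half of a 4-byte slot, so the offset is advanced
   by 4 - 2 = 2 bytes.  */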
4882 static void
4883 pad_below (offset_ptr, passed_mode, sizetree)
4884 struct args_size *offset_ptr;
4885 enum machine_mode passed_mode;
4886 tree sizetree;
4887 {
4888 if (passed_mode != BLKmode)
4889 {
4890 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4891 offset_ptr->constant
4892 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4893 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4894 - GET_MODE_SIZE (passed_mode));
4895 }
4896 else
4897 {
4898 if (TREE_CODE (sizetree) != INTEGER_CST
4899 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4900 {
4901 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4902 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4903 /* Add it in. */
4904 ADD_PARM_SIZE (*offset_ptr, s2);
4905 SUB_PARM_SIZE (*offset_ptr, sizetree);
4906 }
4907 }
4908 }
4909 #endif
4910
4911 #ifdef ARGS_GROW_DOWNWARD
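/* Round VALUE down to a multiple of DIVISOR, as a tree expression;
   e.g. rounding 13 down to a multiple of 8 yields 8.  */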
4912 static tree
4913 round_down (value, divisor)
4914 tree value;
4915 int divisor;
4916 {
4917 return size_binop (MULT_EXPR,
4918 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4919 size_int (divisor));
4920 }
4921 #endif
4922 \f
4923 /* Walk the tree of blocks describing the binding levels within a function
4924 and warn about uninitialized variables.
4925 This is done after calling flow_analysis and before global_alloc
4926 clobbers the pseudo-regs to hard regs. */
4927
4928 void
4929 uninitialized_vars_warning (block)
4930 tree block;
4931 {
4932 register tree decl, sub;
4933 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4934 {
4935 if (TREE_CODE (decl) == VAR_DECL
4936 /* These warnings are unreliable for aggregates
4937 because assigning the fields one by one can fail to convince
4938 flow.c that the entire aggregate was initialized.
4939 Unions are troublesome because members may be shorter. */
4940 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4941 && DECL_RTL (decl) != 0
4942 && GET_CODE (DECL_RTL (decl)) == REG
4943 /* Global optimizations can make it difficult to determine if a
4944 particular variable has been initialized. However, a VAR_DECL
4945 with a nonzero DECL_INITIAL had an initializer, so do not
4946 claim it is potentially uninitialized.
4947
4948 We do not care about the actual value in DECL_INITIAL, so we do
4949 not worry that it may be a dangling pointer. */
4950 && DECL_INITIAL (decl) == NULL_TREE
4951 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4952 warning_with_decl (decl,
4953 "`%s' might be used uninitialized in this function");
4954 if (TREE_CODE (decl) == VAR_DECL
4955 && DECL_RTL (decl) != 0
4956 && GET_CODE (DECL_RTL (decl)) == REG
4957 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4958 warning_with_decl (decl,
4959 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4960 }
4961 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4962 uninitialized_vars_warning (sub);
4963 }
4964
4965 /* Do the appropriate part of uninitialized_vars_warning
4966 but for arguments instead of local variables. */
4967
4968 void
4969 setjmp_args_warning ()
4970 {
4971 register tree decl;
4972 for (decl = DECL_ARGUMENTS (current_function_decl);
4973 decl; decl = TREE_CHAIN (decl))
4974 if (DECL_RTL (decl) != 0
4975 && GET_CODE (DECL_RTL (decl)) == REG
4976 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4977 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4978 }
4979
4980 /* If this function calls setjmp, put all vars into the stack
4981 unless they were declared `register'. */
4982
4983 void
4984 setjmp_protect (block)
4985 tree block;
4986 {
4987 register tree decl, sub;
4988 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4989 if ((TREE_CODE (decl) == VAR_DECL
4990 || TREE_CODE (decl) == PARM_DECL)
4991 && DECL_RTL (decl) != 0
4992 && (GET_CODE (DECL_RTL (decl)) == REG
4993 || (GET_CODE (DECL_RTL (decl)) == MEM
4994 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4995 /* If this variable came from an inline function, it must be
4996 that its life doesn't overlap the setjmp. If there was a
4997 setjmp in the function, it would already be in memory. We
4998 must exclude such variables because their DECL_RTL might be
4999 set to strange things such as virtual_stack_vars_rtx. */
5000 && ! DECL_FROM_INLINE (decl)
5001 && (
5002 #ifdef NON_SAVING_SETJMP
5003 /* If longjmp doesn't restore the registers,
5004 don't put anything in them. */
5005 NON_SAVING_SETJMP
5006 ||
5007 #endif
5008 ! DECL_REGISTER (decl)))
5009 put_var_into_stack (decl);
5010 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5011 setjmp_protect (sub);
5012 }
5013 \f
5014 /* Like the previous function, but for args instead of local variables. */
5015
5016 void
5017 setjmp_protect_args ()
5018 {
5019 register tree decl;
5020 for (decl = DECL_ARGUMENTS (current_function_decl);
5021 decl; decl = TREE_CHAIN (decl))
5022 if ((TREE_CODE (decl) == VAR_DECL
5023 || TREE_CODE (decl) == PARM_DECL)
5024 && DECL_RTL (decl) != 0
5025 && (GET_CODE (DECL_RTL (decl)) == REG
5026 || (GET_CODE (DECL_RTL (decl)) == MEM
5027 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5028 && (
5029 /* If longjmp doesn't restore the registers,
5030 don't put anything in them. */
5031 #ifdef NON_SAVING_SETJMP
5032 NON_SAVING_SETJMP
5033 ||
5034 #endif
5035 ! DECL_REGISTER (decl)))
5036 put_var_into_stack (decl);
5037 }
5038 \f
5039 /* Return the context-pointer register corresponding to DECL,
5040 or 0 if it does not need one. */
5041
5042 rtx
5043 lookup_static_chain (decl)
5044 tree decl;
5045 {
5046 tree context = decl_function_context (decl);
5047 tree link;
5048
5049 if (context == 0
5050 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5051 return 0;
5052
5053 /* We treat inline_function_decl as an alias for the current function
5054 because that is the inline function whose vars, types, etc.
5055 are being merged into the current function.
5056 See expand_inline_function. */
5057 if (context == current_function_decl || context == inline_function_decl)
5058 return virtual_stack_vars_rtx;
5059
5060 for (link = context_display; link; link = TREE_CHAIN (link))
5061 if (TREE_PURPOSE (link) == context)
5062 return RTL_EXPR_RTL (TREE_VALUE (link));
5063
5064 abort ();
5065 }
5066 \f
5067 /* Convert a stack slot address ADDR for variable VAR
5068 (from a containing function)
5069 into an address valid in this function (using a static chain). */
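/* For example, an address (plus (reg virtual-stack-vars) (const_int -12))
   referring to the containing function's frame becomes that frame's
   static chain value plus -12.  */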
5070
5071 rtx
5072 fix_lexical_addr (addr, var)
5073 rtx addr;
5074 tree var;
5075 {
5076 rtx basereg;
5077 HOST_WIDE_INT displacement;
5078 tree context = decl_function_context (var);
5079 struct function *fp;
5080 rtx base = 0;
5081
5082 /* If this is the present function, we need not do anything. */
5083 if (context == current_function_decl || context == inline_function_decl)
5084 return addr;
5085
5086 for (fp = outer_function_chain; fp; fp = fp->next)
5087 if (fp->decl == context)
5088 break;
5089
5090 if (fp == 0)
5091 abort ();
5092
5093 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5094 addr = XEXP (XEXP (addr, 0), 0);
5095
5096 /* Decode given address as base reg plus displacement. */
5097 if (GET_CODE (addr) == REG)
5098 basereg = addr, displacement = 0;
5099 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5100 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5101 else
5102 abort ();
5103
5104 /* We accept vars reached via the containing function's
5105 incoming arg pointer and via its stack variables pointer. */
5106 if (basereg == fp->internal_arg_pointer)
5107 {
5108 /* If reached via arg pointer, get the arg pointer value
5109 out of that function's stack frame.
5110
5111 There are two cases: If a separate ap is needed, allocate a
5112 slot in the outer function for it and dereference it that way.
5113 This is correct even if the real ap is actually a pseudo.
5114 Otherwise, just adjust the offset from the frame pointer to
5115 compensate. */
5116
5117 #ifdef NEED_SEPARATE_AP
5118 rtx addr;
5119
5120 if (fp->arg_pointer_save_area == 0)
5121 fp->arg_pointer_save_area
5122 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5123
5124 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5125 addr = memory_address (Pmode, addr);
5126
5127 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5128 #else
5129 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5130 base = lookup_static_chain (var);
5131 #endif
5132 }
5133
5134 else if (basereg == virtual_stack_vars_rtx)
5135 {
5136 /* This is the same code as lookup_static_chain, duplicated here to
5137 avoid an extra call to decl_function_context. */
5138 tree link;
5139
5140 for (link = context_display; link; link = TREE_CHAIN (link))
5141 if (TREE_PURPOSE (link) == context)
5142 {
5143 base = RTL_EXPR_RTL (TREE_VALUE (link));
5144 break;
5145 }
5146 }
5147
5148 if (base == 0)
5149 abort ();
5150
5151 /* Use same offset, relative to appropriate static chain or argument
5152 pointer. */
5153 return plus_constant (base, displacement);
5154 }
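/* A sketch (not compiler code) of the case handled above, using the
   GNU C nested-function extension:

	int outer (int x)
	{
	  int inner (void) { return x; }
	  return inner ();
	}

   Inside INNER, the reference to X arrives as an address in OUTER's
   frame, e.g. (plus (reg virtual_stack_vars) (const_int 12)) with a
   hypothetical displacement of 12.  fix_lexical_addr rewrites the
   base into OUTER's frame pointer as reached through the static
   chain, leaving the displacement unchanged.  */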
5155 \f
5156 /* Return the address of the trampoline for entering nested fn FUNCTION.
5157 If necessary, allocate a trampoline (in the stack frame)
5158 and emit rtl to initialize its contents (at entry to this function). */
5159
5160 rtx
5161 trampoline_address (function)
5162 tree function;
5163 {
5164 tree link;
5165 tree rtlexp;
5166 rtx tramp;
5167 struct function *fp;
5168 tree fn_context;
5169
5170 /* Find an existing trampoline and return it. */
5171 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5172 if (TREE_PURPOSE (link) == function)
5173 return
5174 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5175
5176 for (fp = outer_function_chain; fp; fp = fp->next)
5177 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5178 if (TREE_PURPOSE (link) == function)
5179 {
5180 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5181 function);
5182 return round_trampoline_addr (tramp);
5183 }
5184
5185 /* None exists; we must make one. */
5186
5187 /* Find the `struct function' for the function containing FUNCTION. */
5188 fp = 0;
5189 fn_context = decl_function_context (function);
5190 if (fn_context != current_function_decl
5191 && fn_context != inline_function_decl)
5192 for (fp = outer_function_chain; fp; fp = fp->next)
5193 if (fp->decl == fn_context)
5194 break;
5195
5196 /* Allocate run-time space for this trampoline
5197 (usually in the defining function's stack frame). */
5198 #ifdef ALLOCATE_TRAMPOLINE
5199 tramp = ALLOCATE_TRAMPOLINE (fp);
5200 #else
5201 /* If rounding is needed, allocate extra space
5202 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5203 #ifdef TRAMPOLINE_ALIGNMENT
5204 #define TRAMPOLINE_REAL_SIZE \
5205 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5206 #else
5207 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5208 #endif
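/* A worked example, assuming TRAMPOLINE_SIZE is 26 and
   TRAMPOLINE_ALIGNMENT is 64 bits: TRAMPOLINE_REAL_SIZE is then
   26 + 8 - 1 = 33 bytes, enough to carve an aligned 26-byte
   trampoline out of the slot no matter where the slot itself
   starts.  */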
5209 if (fp != 0)
5210 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5211 else
5212 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5213 #endif
5214
5215 /* Record the trampoline for reuse and note it for later initialization
5216 by expand_function_end. */
5217 if (fp != 0)
5218 {
5219 push_obstacks (fp->function_maybepermanent_obstack,
5220 fp->function_maybepermanent_obstack);
5221 rtlexp = make_node (RTL_EXPR);
5222 RTL_EXPR_RTL (rtlexp) = tramp;
5223 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5224 pop_obstacks ();
5225 }
5226 else
5227 {
5228 /* Make the RTL_EXPR node temporary, not momentary, so that the
5229 trampoline_list doesn't become garbage. */
5230 int momentary = suspend_momentary ();
5231 rtlexp = make_node (RTL_EXPR);
5232 resume_momentary (momentary);
5233
5234 RTL_EXPR_RTL (rtlexp) = tramp;
5235 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5236 }
5237
5238 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5239 return round_trampoline_addr (tramp);
5240 }
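/* A sketch (not compiler code) of when the above is used: the GNU C
   extension

	void outer (void)
	{
	  int x = 42;
	  void inner (void) { consume (x); }
	  register_callback (inner);
	}

   passes INNER where a plain function pointer is expected, so a small
   stack-resident stub is built that loads OUTER's static chain and
   jumps to INNER's code; trampoline_address returns the aligned
   address of that stub.  CONSUME and REGISTER_CALLBACK are
   hypothetical placeholders.  */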
5241
5242 /* Given a trampoline address,
5243 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
5244
5245 static rtx
5246 round_trampoline_addr (tramp)
5247 rtx tramp;
5248 {
5249 #ifdef TRAMPOLINE_ALIGNMENT
5250 /* Round address up to desired boundary. */
5251 rtx temp = gen_reg_rtx (Pmode);
5252 temp = expand_binop (Pmode, add_optab, tramp,
5253 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5254 temp, 0, OPTAB_LIB_WIDEN);
5255 tramp = expand_binop (Pmode, and_optab, temp,
5256 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5257 temp, 0, OPTAB_LIB_WIDEN);
5258 #endif
5259 return tramp;
5260 }
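/* A worked example of the rounding above, assuming
   TRAMPOLINE_ALIGNMENT is 32 bits (4 bytes): for TRAMP == 0x1005,
   adding 4 - 1 = 3 gives 0x1008, and ANDing with -4 (i.e. ~3)
   leaves 0x1008, the next multiple of 4.  An already-aligned
   address such as 0x1004 maps to itself.  */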
5261 \f
5262 /* The functions identify_blocks and reorder_blocks provide a way to
5263 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5264 duplicate portions of the RTL code. Call identify_blocks before
5265 changing the RTL, and call reorder_blocks after. */
5266
5267 /* Put all this function's BLOCK nodes, including those that are chained
5268 onto the first block, into a vector, and return it.
5269 Also store in each NOTE for the beginning or end of a block
5270 the index of that block in the vector.
5271 The arguments are BLOCK, the chain of top-level blocks of the function,
5272 and INSNS, the insn chain of the function. */
5273
5274 tree *
5275 identify_blocks (block, insns)
5276 tree block;
5277 rtx insns;
5278 {
5279 int n_blocks;
5280 tree *block_vector;
5281 int *block_stack;
5282 int depth = 0;
5283 int next_block_number = 1;
5284 int current_block_number = 1;
5285 rtx insn;
5286
5287 if (block == 0)
5288 return 0;
5289
5290 n_blocks = all_blocks (block, 0);
5291 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5292 block_stack = (int *) alloca (n_blocks * sizeof (int));
5293
5294 all_blocks (block, block_vector);
5295
5296 for (insn = insns; insn; insn = NEXT_INSN (insn))
5297 if (GET_CODE (insn) == NOTE)
5298 {
5299 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5300 {
5301 block_stack[depth++] = current_block_number;
5302 current_block_number = next_block_number;
5303 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5304 }
5305 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5306 {
5307 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5308 current_block_number = block_stack[--depth];
5309 }
5310 }
5311
5312 if (n_blocks != next_block_number)
5313 abort ();
5314
5315 return block_vector;
5316 }
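/* A sketch of the numbering above: for a function body shaped like

	{				vector slot 0 (outermost)
	  { int a; ... }		vector slot 1
	  { int b;			vector slot 2
	    { int c; ... }		vector slot 3
	  }
	}

   all_blocks lays the BLOCK nodes out in the vector in pre-order,
   and the loop above stamps the matching BLOCK_BEG and BLOCK_END
   notes with those same indices, so the notes can be mapped back to
   BLOCK nodes after the RTL is reshuffled.  */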
5317
5318 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5319 and a revised instruction chain, rebuild the tree structure
5320 of BLOCK nodes to correspond to the new order of RTL.
5321 The new block tree is inserted below BLOCK.
5322 Returns the current top-level block. */
5323
5324 tree
5325 reorder_blocks (block_vector, block, insns)
5326 tree *block_vector;
5327 tree block;
5328 rtx insns;
5329 {
5330 tree current_block = block;
5331 rtx insn;
5332
5333 if (block_vector == 0)
5334 return block;
5335
5336 /* Prune the old trees away, so that they don't get in the way. */
5337 BLOCK_SUBBLOCKS (current_block) = 0;
5338 BLOCK_CHAIN (current_block) = 0;
5339
5340 for (insn = insns; insn; insn = NEXT_INSN (insn))
5341 if (GET_CODE (insn) == NOTE)
5342 {
5343 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5344 {
5345 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5346 /* If we have seen this block before, copy it. */
5347 if (TREE_ASM_WRITTEN (block))
5348 block = copy_node (block);
5349 BLOCK_SUBBLOCKS (block) = 0;
5350 TREE_ASM_WRITTEN (block) = 1;
5351 BLOCK_SUPERCONTEXT (block) = current_block;
5352 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5353 BLOCK_SUBBLOCKS (current_block) = block;
5354 current_block = block;
5355 NOTE_SOURCE_FILE (insn) = 0;
5356 }
5357 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5358 {
5359 BLOCK_SUBBLOCKS (current_block)
5360 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5361 current_block = BLOCK_SUPERCONTEXT (current_block);
5362 NOTE_SOURCE_FILE (insn) = 0;
5363 }
5364 }
5365
5366 BLOCK_SUBBLOCKS (current_block)
5367 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5368 return current_block;
5369 }
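/* A sketch of why the copy above is needed: if an optimizer
   duplicates a region, the BLOCK_BEG/BLOCK_END notes inside it are
   duplicated too, so the same vector index appears twice in the insn
   stream.  TREE_ASM_WRITTEN marks a BLOCK the first time it is seen;
   on the second appearance the node is copied, giving each run of
   notes its own BLOCK and making the rebuilt tree match the new RTL
   order.  */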
5370
5371 /* Reverse the order of elements in the chain T of blocks,
5372 and return the new head of the chain (old last element). */
5373
5374 static tree
5375 blocks_nreverse (t)
5376 tree t;
5377 {
5378 register tree prev = 0, decl, next;
5379 for (decl = t; decl; decl = next)
5380 {
5381 next = BLOCK_CHAIN (decl);
5382 BLOCK_CHAIN (decl) = prev;
5383 prev = decl;
5384 }
5385 return prev;
5386 }
5387
5388 /* Count the blocks in the chain starting with BLOCK, together with
5389 all their subblocks recursively, and store them into the vector
5390 VECTOR if it is nonzero. Also clear TREE_ASM_WRITTEN in all blocks. */
5391
5392 static int
5393 all_blocks (block, vector)
5394 tree block;
5395 tree *vector;
5396 {
5397 int n_blocks = 0;
5398
5399 while (block)
5400 {
5401 TREE_ASM_WRITTEN (block) = 0;
5402
5403 /* Record this block. */
5404 if (vector)
5405 vector[n_blocks] = block;
5406
5407 ++n_blocks;
5408
5409 /* Record the subblocks, and their subblocks... */
5410 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5411 vector ? vector + n_blocks : 0);
5412 block = BLOCK_CHAIN (block);
5413 }
5414
5415 return n_blocks;
5416 }
5417 \f
5418 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5419 and initialize static variables for generating RTL for the statements
5420 of the function. */
5421
5422 void
5423 init_function_start (subr, filename, line)
5424 tree subr;
5425 char *filename;
5426 int line;
5427 {
5428 init_stmt_for_function ();
5429
5430 cse_not_expected = ! optimize;
5431
5432 /* Caller save not needed yet. */
5433 caller_save_needed = 0;
5434
5435 /* No stack slots have been made yet. */
5436 stack_slot_list = 0;
5437
5438 /* There is no stack slot for handling nonlocal gotos. */
5439 nonlocal_goto_handler_slot = 0;
5440 nonlocal_goto_stack_level = 0;
5441
5442 /* No labels have been declared for nonlocal use. */
5443 nonlocal_labels = 0;
5444
5445 /* No function calls so far in this function. */
5446 function_call_count = 0;
5447
5448 /* No parm regs have been allocated.
5449 (This is important for output_inline_function.) */
5450 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5451
5452 /* Initialize the RTL mechanism. */
5453 init_emit ();
5454
5455 /* Initialize the queue of pending postincrements and postdecrements,
5456 and some other info in expr.c. */
5457 init_expr ();
5458
5459 /* We haven't done register allocation yet. */
5460 reg_renumber = 0;
5461
5462 init_const_rtx_hash_table ();
5463
5464 current_function_name = (*decl_printable_name) (subr, 2);
5465
5466 /* Nonzero if this is a nested function that uses a static chain. */
5467
5468 current_function_needs_context
5469 = (decl_function_context (current_function_decl) != 0
5470 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5471
5472 /* Set if a call to setjmp is seen. */
5473 current_function_calls_setjmp = 0;
5474
5475 /* Set if a call to longjmp is seen. */
5476 current_function_calls_longjmp = 0;
5477
5478 current_function_calls_alloca = 0;
5479 current_function_has_nonlocal_label = 0;
5480 current_function_has_nonlocal_goto = 0;
5481 current_function_contains_functions = 0;
5482 current_function_sp_is_unchanging = 0;
5483 current_function_is_thunk = 0;
5484
5485 current_function_returns_pcc_struct = 0;
5486 current_function_returns_struct = 0;
5487 current_function_epilogue_delay_list = 0;
5488 current_function_uses_const_pool = 0;
5489 current_function_uses_pic_offset_table = 0;
5490 current_function_cannot_inline = 0;
5491
5492 /* We have not yet needed to make a label to jump to for tail-recursion. */
5493 tail_recursion_label = 0;
5494
5495 /* We haven't had a need to make a save area for ap yet. */
5496
5497 arg_pointer_save_area = 0;
5498
5499 /* No stack slots allocated yet. */
5500 frame_offset = 0;
5501
5502 /* No SAVE_EXPRs in this function yet. */
5503 save_expr_regs = 0;
5504
5505 /* No RTL_EXPRs in this function yet. */
5506 rtl_expr_chain = 0;
5507
5508 /* Set up to allocate temporaries. */
5509 init_temp_slots ();
5510
5511 /* Within function body, compute a type's size as soon as it is laid out. */
5512 immediate_size_expand++;
5513
5514 /* We haven't made any trampolines for this function yet. */
5515 trampoline_list = 0;
5516
5517 init_pending_stack_adjust ();
5518 inhibit_defer_pop = 0;
5519
5520 current_function_outgoing_args_size = 0;
5521
5522 /* Prevent ever trying to delete the first instruction of a function.
5523 Also tell final how to output a linenum before the function prologue.
5524 Note linenums could be missing, e.g. when compiling a Java .class file. */
5525 if (line > 0)
5526 emit_line_note (filename, line);
5527
5528 /* Make sure first insn is a note even if we don't want linenums.
5529 This makes sure the first insn will never be deleted.
5530 Also, final expects a note to appear there. */
5531 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5532
5533 /* Set flags used by final.c. */
5534 if (aggregate_value_p (DECL_RESULT (subr)))
5535 {
5536 #ifdef PCC_STATIC_STRUCT_RETURN
5537 current_function_returns_pcc_struct = 1;
5538 #endif
5539 current_function_returns_struct = 1;
5540 }
5541
5542 /* Warn if the return value is an aggregate type,
5543 regardless of which calling convention we are using for it. */
5544 if (warn_aggregate_return
5545 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5546 warning ("function returns an aggregate");
5547
5548 current_function_returns_pointer
5549 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5550
5551 /* Indicate that we need to distinguish between the return value of the
5552 present function and the return value of a function being called. */
5553 rtx_equal_function_value_matters = 1;
5554
5555 /* Indicate that we have not instantiated virtual registers yet. */
5556 virtuals_instantiated = 0;
5557
5558 /* Indicate we have no need of a frame pointer yet. */
5559 frame_pointer_needed = 0;
5560
5561 /* By default assume not varargs or stdarg. */
5562 current_function_varargs = 0;
5563 current_function_stdarg = 0;
5564 }
5565
5566 /* Indicate that the current function uses extra args
5567 not explicitly mentioned in the argument list in any fashion. */
5568
5569 void
5570 mark_varargs ()
5571 {
5572 current_function_varargs = 1;
5573 }
5574
5575 /* Expand a call to __main at the beginning of a possible main function. */
5576
5577 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5578 #undef HAS_INIT_SECTION
5579 #define HAS_INIT_SECTION
5580 #endif
5581
5582 void
5583 expand_main_function ()
5584 {
5585 #if !defined (HAS_INIT_SECTION)
5586 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5587 VOIDmode, 0);
5588 #endif /* not HAS_INIT_SECTION */
5589 }
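/* A sketch of the effect on targets without an init section:
   compiling

	int main (void) { return 0; }

   gets a call to __main emitted at the top of MAIN's body; the
   libgcc __main runs the global constructors before user code.
   When HAS_INIT_SECTION is defined, the startup code handles this
   instead and no call is emitted.  */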
5590 \f
5591 extern struct obstack permanent_obstack;
5592
5593 /* Start the RTL for a new function, and set variables used for
5594 emitting RTL.
5595 SUBR is the FUNCTION_DECL node.
5596 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5597 the function's parameters, which must be run at any return statement. */
5598
5599 void
5600 expand_function_start (subr, parms_have_cleanups)
5601 tree subr;
5602 int parms_have_cleanups;
5603 {
5604 register int i;
5605 tree tem;
5606 rtx last_ptr = NULL_RTX;
5607
5608 /* Make sure volatile mem refs aren't considered
5609 valid operands of arithmetic insns. */
5610 init_recog_no_volatile ();
5611
5612 /* Set this before generating any memory accesses. */
5613 current_function_check_memory_usage
5614 = (flag_check_memory_usage
5615 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5616
5617 current_function_instrument_entry_exit
5618 = (flag_instrument_function_entry_exit
5619 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5620
5621 /* If function gets a static chain arg, store it in the stack frame.
5622 Do this first, so it gets the first stack slot offset. */
5623 if (current_function_needs_context)
5624 {
5625 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5626
5627 /* Delay copying static chain if it is not a register to avoid
5628 conflicts with regs used for parameters. */
5629 if (! SMALL_REGISTER_CLASSES
5630 || GET_CODE (static_chain_incoming_rtx) == REG)
5631 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5632 }
5633
5634 /* If the parameters of this function need cleaning up, get a label
5635 for the beginning of the code which executes those cleanups. This must
5636 be done before doing anything with return_label. */
5637 if (parms_have_cleanups)
5638 cleanup_label = gen_label_rtx ();
5639 else
5640 cleanup_label = 0;
5641
5642 /* Make the label for return statements to jump to, if this machine
5643 does not have a one-instruction return and uses an epilogue,
5644 or if it returns a structure, or if it has parm cleanups. */
5645 #ifdef HAVE_return
5646 if (cleanup_label == 0 && HAVE_return
5647 && ! current_function_instrument_entry_exit
5648 && ! current_function_returns_pcc_struct
5649 && ! (current_function_returns_struct && ! optimize))
5650 return_label = 0;
5651 else
5652 return_label = gen_label_rtx ();
5653 #else
5654 return_label = gen_label_rtx ();
5655 #endif
5656
5657 /* Initialize rtx used to return the value. */
5658 /* Do this before assign_parms so that we copy the struct value address
5659 before any library calls that assign parms might generate. */
5660
5661 /* Decide whether to return the value in memory or in a register. */
5662 if (aggregate_value_p (DECL_RESULT (subr)))
5663 {
5664 /* Returning something that won't go in a register. */
5665 register rtx value_address = 0;
5666
5667 #ifdef PCC_STATIC_STRUCT_RETURN
5668 if (current_function_returns_pcc_struct)
5669 {
5670 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5671 value_address = assemble_static_space (size);
5672 }
5673 else
5674 #endif
5675 {
5676 /* Expect to be passed the address of a place to store the value.
5677 If it is passed as an argument, assign_parms will take care of
5678 it. */
5679 if (struct_value_incoming_rtx)
5680 {
5681 value_address = gen_reg_rtx (Pmode);
5682 emit_move_insn (value_address, struct_value_incoming_rtx);
5683 }
5684 }
5685 if (value_address)
5686 {
5687 DECL_RTL (DECL_RESULT (subr))
5688 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5689 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5690 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5691 }
5692 }
5693 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5694 /* If return mode is void, this decl rtl should not be used. */
5695 DECL_RTL (DECL_RESULT (subr)) = 0;
5696 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5697 {
5698 /* If function will end with cleanup code for parms,
5699 compute the return value into a pseudo reg,
5700 which we will copy into the true return register
5701 after the cleanups are done. */
5702
5703 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5704
5705 #ifdef PROMOTE_FUNCTION_RETURN
5706 tree type = TREE_TYPE (DECL_RESULT (subr));
5707 int unsignedp = TREE_UNSIGNED (type);
5708
5709 mode = promote_mode (type, mode, &unsignedp, 1);
5710 #endif
5711
5712 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5713 }
5714 else
5715 /* Scalar, returned in a register. */
5716 {
5717 #ifdef FUNCTION_OUTGOING_VALUE
5718 DECL_RTL (DECL_RESULT (subr))
5719 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5720 #else
5721 DECL_RTL (DECL_RESULT (subr))
5722 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5723 #endif
5724
5725 /* Mark this reg as the function's return value. */
5726 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5727 {
5728 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5729 /* Needed because we may need to move this to memory
5730 in case it's a named return value whose address is taken. */
5731 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5732 }
5733 }
5734
5735 /* Initialize rtx for parameters and local variables.
5736 In some cases this requires emitting insns. */
5737
5738 assign_parms (subr, 0);
5739
5740 /* Copy the static chain now if it wasn't a register. The delay is to
5741 avoid conflicts with the parameter passing registers. */
5742
5743 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5744 if (GET_CODE (static_chain_incoming_rtx) != REG)
5745 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5746
5747 /* The following was moved from init_function_start.
5748 The move is supposed to make sdb output more accurate. */
5749 /* Indicate the beginning of the function body,
5750 as opposed to parm setup. */
5751 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5752
5753 /* If doing stupid allocation, mark parms as born here. */
5754
5755 if (GET_CODE (get_last_insn ()) != NOTE)
5756 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5757 parm_birth_insn = get_last_insn ();
5758
5759 if (obey_regdecls)
5760 {
5761 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5762 use_variable (regno_reg_rtx[i]);
5763
5764 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5765 use_variable (current_function_internal_arg_pointer);
5766 }
5767
5768 context_display = 0;
5769 if (current_function_needs_context)
5770 {
5771 /* Fetch static chain values for containing functions. */
5772 tem = decl_function_context (current_function_decl);
5773 /* If not doing stupid register allocation, copy the static chain
5774 pointer into a pseudo. If we have small register classes, copy
5775 the value from memory if static_chain_incoming_rtx is a REG. If
5776 we do stupid register allocation, we use the stack address
5777 generated above. */
5778 if (tem && ! obey_regdecls)
5779 {
5780 /* If the static chain originally came in a register, put it back
5781 there, then move it out in the next insn. The reason for
5782 this peculiar code is to satisfy function integration. */
5783 if (SMALL_REGISTER_CLASSES
5784 && GET_CODE (static_chain_incoming_rtx) == REG)
5785 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5786 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5787 }
5788
5789 while (tem)
5790 {
5791 tree rtlexp = make_node (RTL_EXPR);
5792
5793 RTL_EXPR_RTL (rtlexp) = last_ptr;
5794 context_display = tree_cons (tem, rtlexp, context_display);
5795 tem = decl_function_context (tem);
5796 if (tem == 0)
5797 break;
5798 /* Chain thru stack frames, assuming pointer to next lexical frame
5799 is found at the place we always store it. */
5800 #ifdef FRAME_GROWS_DOWNWARD
5801 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5802 #endif
5803 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5804 memory_address (Pmode, last_ptr)));
5805
5806 /* If we are not optimizing, ensure that we know that this
5807 piece of context is live over the entire function. */
5808 if (! optimize)
5809 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5810 save_expr_regs);
5811 }
5812 }
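  /* A sketch of the result: for a function nested two levels deep,
     the loop above first captures the immediately enclosing frame
     pointer, then loads from the fixed slot in that frame to reach
     the next frame out, producing one context_display entry per
     enclosing function for lookup_static_chain to search.  */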
5813
5814 if (current_function_instrument_entry_exit)
5815 {
5816 rtx fun = DECL_RTL (current_function_decl);
5817 if (GET_CODE (fun) == MEM)
5818 fun = XEXP (fun, 0);
5819 else
5820 abort ();
5821 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
5822 fun, Pmode,
5823 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5824 0,
5825 hard_frame_pointer_rtx),
5826 Pmode);
5827 }
5828
5829 /* The point after the display initializations is where the tail-recursion
5830 label should go, if we end up needing one. Ensure we have a NOTE here,
5831 since some things (like trampolines) get placed before this. */
5832 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5833
5834 /* Evaluate now the sizes of any types declared among the arguments. */
5835 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5836 {
5837 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5838 EXPAND_MEMORY_USE_BAD);
5839 /* Flush the queue in case this parameter declaration has
5840 side-effects. */
5841 emit_queue ();
5842 }
5843
5844 /* Make sure there is a line number after the function entry setup code. */
5845 force_next_line_note ();
5846 }
5847 \f
5848 /* Generate RTL for the end of the current function.
5849 FILENAME and LINE are the current position in the source file.
5850
5851 It is up to language-specific callers to do cleanups for parameters;
5852 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5853
5854 void
5855 expand_function_end (filename, line, end_bindings)
5856 char *filename;
5857 int line;
5858 int end_bindings;
5859 {
5860 register int i;
5861 tree link;
5862
5863 #ifdef TRAMPOLINE_TEMPLATE
5864 static rtx initial_trampoline;
5865 #endif
5866
5867 #ifdef NON_SAVING_SETJMP
5868 /* Don't put any variables in registers if we call setjmp
5869 on a machine that fails to restore the registers. */
5870 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5871 {
5872 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5873 setjmp_protect (DECL_INITIAL (current_function_decl));
5874
5875 setjmp_protect_args ();
5876 }
5877 #endif
5878
5879 /* Save the argument pointer if a save area was made for it. */
5880 if (arg_pointer_save_area)
5881 {
5882 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5883 emit_insn_before (x, tail_recursion_reentry);
5884 }
5885
5886 /* Initialize any trampolines required by this function. */
5887 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5888 {
5889 tree function = TREE_PURPOSE (link);
5890 rtx context = lookup_static_chain (function);
5891 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5892 #ifdef TRAMPOLINE_TEMPLATE
5893 rtx blktramp;
5894 #endif
5895 rtx seq;
5896
5897 #ifdef TRAMPOLINE_TEMPLATE
5898 /* First make sure this compilation has a template for
5899 initializing trampolines. */
5900 if (initial_trampoline == 0)
5901 {
5902 end_temporary_allocation ();
5903 initial_trampoline
5904 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5905 resume_temporary_allocation ();
5906 }
5907 #endif
5908
5909 /* Generate insns to initialize the trampoline. */
5910 start_sequence ();
5911 tramp = round_trampoline_addr (XEXP (tramp, 0));
5912 #ifdef TRAMPOLINE_TEMPLATE
5913 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5914 emit_block_move (blktramp, initial_trampoline,
5915 GEN_INT (TRAMPOLINE_SIZE),
5916 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5917 #endif
5918 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5919 seq = get_insns ();
5920 end_sequence ();
5921
5922 /* Put those insns at entry to the containing function (this one). */
5923 emit_insns_before (seq, tail_recursion_reentry);
5924 }
5925
5926 /* If we are doing stack checking and this function makes calls,
5927 do a stack probe at the start of the function to ensure we have enough
5928 space for another stack frame. */
5929 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5930 {
5931 rtx insn, seq;
5932
5933 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5934 if (GET_CODE (insn) == CALL_INSN)
5935 {
5936 start_sequence ();
5937 probe_stack_range (STACK_CHECK_PROTECT,
5938 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5939 seq = get_insns ();
5940 end_sequence ();
5941 emit_insns_before (seq, tail_recursion_reentry);
5942 break;
5943 }
5944 }
5945
5946 /* Warn about unused parms if extra warnings were specified. */
5947 if (warn_unused && extra_warnings)
5948 {
5949 tree decl;
5950
5951 for (decl = DECL_ARGUMENTS (current_function_decl);
5952 decl; decl = TREE_CHAIN (decl))
5953 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5954 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5955 warning_with_decl (decl, "unused parameter `%s'");
5956 }
5957
5958 /* Delete handlers for nonlocal gotos if nothing uses them. */
5959 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5960 delete_handlers ();
5961
5962 /* End any sequences that failed to be closed due to syntax errors. */
5963 while (in_sequence_p ())
5964 end_sequence ();
5965
5966 /* Outside function body, can't compute type's actual size
5967 until next function's body starts. */
5968 immediate_size_expand--;
5969
5970 /* If doing stupid register allocation,
5971 mark register parms as dying here. */
5972
5973 if (obey_regdecls)
5974 {
5975 rtx tem;
5976 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5977 use_variable (regno_reg_rtx[i]);
5978
5979 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5980
5981 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5982 {
5983 use_variable (XEXP (tem, 0));
5984 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5985 }
5986
5987 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5988 use_variable (current_function_internal_arg_pointer);
5989 }
5990
5991 clear_pending_stack_adjust ();
5992 do_pending_stack_adjust ();
5993
5994 /* Mark the end of the function body.
5995 If control reaches this insn, the function can drop through
5996 without returning a value. */
5997 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5998
5999 /* Must mark the last line number note in the function, so that the test
6000 coverage code can avoid counting the last line twice. This just tells
6001 the code to ignore the immediately following line note, since there
6002 already exists a copy of this note somewhere above. This line number
6003 note is still needed for debugging though, so we can't delete it. */
6004 if (flag_test_coverage)
6005 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6006
6007 /* Output a linenumber for the end of the function.
6008 SDB depends on this. */
6009 emit_line_note_force (filename, line);
6010
6011 /* Output the label for the actual return from the function,
6012 if one is expected. This happens either because a function epilogue
6013 is used instead of a return instruction, or because a return was done
6014 with a goto in order to run local cleanups, or because of pcc-style
6015 structure returning. */
6016
6017 if (return_label)
6018 emit_label (return_label);
6019
6020 /* C++ uses this. */
6021 if (end_bindings)
6022 expand_end_bindings (0, 0, 0);
6023
6024 /* Now handle any leftover exception regions that may have been
6025 created for the parameters. */
6026 {
6027 rtx last = get_last_insn ();
6028 rtx label;
6029
6030 expand_leftover_cleanups ();
6031
6032 /* If the above emitted any code, make sure we jump around it. */
6033 if (last != get_last_insn ())
6034 {
6035 label = gen_label_rtx ();
6036 last = emit_jump_insn_after (gen_jump (label), last);
6037 last = emit_barrier_after (last);
6038 emit_label (label);
6039 }
6040 }
6041
6042 if (current_function_instrument_entry_exit)
6043 {
6044 rtx fun = DECL_RTL (current_function_decl);
6045 if (GET_CODE (fun) == MEM)
6046 fun = XEXP (fun, 0);
6047 else
6048 abort ();
6049 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6050 fun, Pmode,
6051 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6052 0,
6053 hard_frame_pointer_rtx),
6054 Pmode);
6055 }
6056
6057 /* If we had calls to alloca, and this machine needs
6058 an accurate stack pointer to exit the function,
6059 insert some code to save and restore the stack pointer. */
6060 #ifdef EXIT_IGNORE_STACK
6061 if (! EXIT_IGNORE_STACK)
6062 #endif
6063 if (current_function_calls_alloca)
6064 {
6065 rtx tem = 0;
6066
6067 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6068 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6069 }
6070
6071 /* If scalar return value was computed in a pseudo-reg,
6072 copy that to the hard return register. */
6073 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6074 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6075 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6076 >= FIRST_PSEUDO_REGISTER))
6077 {
6078 rtx real_decl_result;
6079
6080 #ifdef FUNCTION_OUTGOING_VALUE
6081 real_decl_result
6082 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6083 current_function_decl);
6084 #else
6085 real_decl_result
6086 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6087 current_function_decl);
6088 #endif
6089 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6090 /* If this is a BLKmode structure being returned in registers, then use
6091 the mode computed in expand_return. */
6092 if (GET_MODE (real_decl_result) == BLKmode)
6093 PUT_MODE (real_decl_result,
6094 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6095 emit_move_insn (real_decl_result,
6096 DECL_RTL (DECL_RESULT (current_function_decl)));
6097 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6098
6099 /* The delay slot scheduler assumes that current_function_return_rtx
6100 holds the hard register containing the return value, not a temporary
6101 pseudo. */
6102 current_function_return_rtx = real_decl_result;
6103 }
6104
6105 /* If returning a structure, arrange to return the address of the value
6106 in a place where debuggers expect to find it.
6107
6108 If returning a structure PCC style,
6109 the caller also depends on this value.
6110 And current_function_returns_pcc_struct is not necessarily set. */
6111 if (current_function_returns_struct
6112 || current_function_returns_pcc_struct)
6113 {
6114 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6115 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6116 #ifdef FUNCTION_OUTGOING_VALUE
6117 rtx outgoing
6118 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6119 current_function_decl);
6120 #else
6121 rtx outgoing
6122 = FUNCTION_VALUE (build_pointer_type (type),
6123 current_function_decl);
6124 #endif
6125
6126 /* Mark this as a function return value so integrate will delete the
6127 assignment and USE below when inlining this function. */
6128 REG_FUNCTION_VALUE_P (outgoing) = 1;
6129
6130 emit_move_insn (outgoing, value_address);
6131 use_variable (outgoing);
6132 }
6133
6134 /* If this is an implementation of __throw, do what's necessary to
6135 communicate between __builtin_eh_return and the epilogue. */
6136 expand_eh_return ();
6137
6138 /* Output a return insn if we are using one.
6139 Otherwise, let the rtl chain end here, to drop through
6140 into the epilogue. */
6141
6142 #ifdef HAVE_return
6143 if (HAVE_return)
6144 {
6145 emit_jump_insn (gen_return ());
6146 emit_barrier ();
6147 }
6148 #endif
6149
6150 /* Fix up any gotos that jumped out to the outermost
6151 binding level of the function.
6152 Must follow emitting RETURN_LABEL. */
6153
6154 /* If you have any cleanups to do at this point,
6155 and they need to create temporary variables,
6156 then you will lose. */
6157 expand_fixups (get_insns ());
6158 }
6159 \f
6160 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
6161
6162 static int *prologue;
6163 static int *epilogue;
6164
6165 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6166 or a single insn). */
6167
6168 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6169 static int *
6170 record_insns (insns)
6171 rtx insns;
6172 {
6173 int *vec;
6174
6175 if (GET_CODE (insns) == SEQUENCE)
6176 {
6177 int len = XVECLEN (insns, 0);
6178 vec = (int *) oballoc ((len + 1) * sizeof (int));
6179 vec[len] = 0;
6180 while (--len >= 0)
6181 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6182 }
6183 else
6184 {
6185 vec = (int *) oballoc (2 * sizeof (int));
6186 vec[0] = INSN_UID (insns);
6187 vec[1] = 0;
6188 }
6189 return vec;
6190 }
6191
6192 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6193
6194 static int
6195 contains (insn, vec)
6196 rtx insn;
6197 int *vec;
6198 {
6199 register int i, j;
6200
6201 if (GET_CODE (insn) == INSN
6202 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6203 {
6204 int count = 0;
6205 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6206 for (j = 0; vec[j]; j++)
6207 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6208 count++;
6209 return count;
6210 }
6211 else
6212 {
6213 for (j = 0; vec[j]; j++)
6214 if (INSN_UID (insn) == vec[j])
6215 return 1;
6216 }
6217 return 0;
6218 }
6219 #endif /* HAVE_prologue || HAVE_epilogue */
6220
6221 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6222 this into place with notes indicating where the prologue ends and where
6223 the epilogue begins. Update the basic block information when possible. */
6224
6225 void
6226 thread_prologue_and_epilogue_insns (f)
6227 rtx f ATTRIBUTE_UNUSED;
6228 {
6229 #ifdef HAVE_prologue
6230 if (HAVE_prologue)
6231 {
6232 rtx head, seq;
6233
6234 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6235 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6236 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6237 seq = gen_prologue ();
6238 head = emit_insn_after (seq, f);
6239
6240 /* Include the new prologue insns in the first block. Ignore them
6241 if they form a basic block unto themselves. */
6242 if (basic_block_head && n_basic_blocks
6243 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
6244 basic_block_head[0] = NEXT_INSN (f);
6245
6246 /* Retain a map of the prologue insns. */
6247 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6248 }
6249 else
6250 #endif
6251 prologue = 0;
6252
6253 #ifdef HAVE_epilogue
6254 if (HAVE_epilogue)
6255 {
6256 rtx insn = get_last_insn ();
6257 rtx prev = prev_nonnote_insn (insn);
6258
6259 /* If we end with a BARRIER, we don't need an epilogue. */
6260 if (! (prev && GET_CODE (prev) == BARRIER))
6261 {
6262 rtx tail, seq, tem;
6263 rtx first_use = 0;
6264 rtx last_use = 0;
6265
6266 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6267 epilogue insns, the USE insns at the end of a function,
6268 the jump insn that returns, and then a BARRIER. */
6269
6270 /* Move the USE insns at the end of a function onto a list. */
6271 while (prev
6272 && GET_CODE (prev) == INSN
6273 && GET_CODE (PATTERN (prev)) == USE)
6274 {
6275 tem = prev;
6276 prev = prev_nonnote_insn (prev);
6277
6278 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6279 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6280 if (first_use)
6281 {
6282 NEXT_INSN (tem) = first_use;
6283 PREV_INSN (first_use) = tem;
6284 }
6285 first_use = tem;
6286 if (!last_use)
6287 last_use = tem;
6288 }
6289
6290 emit_barrier_after (insn);
6291
6292 seq = gen_epilogue ();
6293 tail = emit_jump_insn_after (seq, insn);
6294
6295 /* Insert the USE insns immediately before the return insn, which
6296 must be the first instruction before the final barrier. */
6297 if (first_use)
6298 {
6299 tem = prev_nonnote_insn (get_last_insn ());
6300 NEXT_INSN (PREV_INSN (tem)) = first_use;
6301 PREV_INSN (first_use) = PREV_INSN (tem);
6302 PREV_INSN (tem) = last_use;
6303 NEXT_INSN (last_use) = tem;
6304 }
6305
6306 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6307
6308 /* Include the new epilogue insns in the last block. Ignore
6309 them if they form a basic block unto themselves. */
6310 if (basic_block_end && n_basic_blocks
6311 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
6312 basic_block_end[n_basic_blocks - 1] = tail;
6313
6314 /* Retain a map of the epilogue insns. */
6315 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6316 return;
6317 }
6318 }
6319 #endif
6320 epilogue = 0;
6321 }
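/* A sketch of the resulting insn stream on a machine with both
   patterns:

	NOTE_INSN_DELETED		(permanent first insn)
	prologue insns
	NOTE_INSN_PROLOGUE_END
	... function body ...
	NOTE_INSN_EPILOGUE_BEG
	epilogue insns
	USE insns for return values
	(jump_insn (return))
	BARRIER

   This is the layout whose note positions
   reposition_prologue_and_epilogue_notes below re-establishes after
   scheduling has moved insns around.  */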
6322
6323 /* Reposition the prologue-end and epilogue-begin notes after instruction
6324 scheduling and delayed branch scheduling. */
6325
6326 void
6327 reposition_prologue_and_epilogue_notes (f)
6328 rtx f ATTRIBUTE_UNUSED;
6329 {
6330 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6331 /* Reposition the prologue and epilogue notes. */
6332 if (n_basic_blocks)
6333 {
6334 rtx next, prev;
6335 int len;
6336
6337 if (prologue)
6338 {
6339 register rtx insn, note = 0;
6340
6341 /* Scan from the beginning until we reach the last prologue insn.
6342 We apparently can't depend on basic_block_{head,end} after
6343 reorg has run. */
6344 for (len = 0; prologue[len]; len++)
6345 ;
6346 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6347 {
6348 if (GET_CODE (insn) == NOTE)
6349 {
6350 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6351 note = insn;
6352 }
6353 else if ((len -= contains (insn, prologue)) == 0)
6354 {
6355 /* Find the prologue-end note if we haven't already, and
6356 move it to just after the last prologue insn. */
6357 if (note == 0)
6358 {
6359 for (note = insn; (note = NEXT_INSN (note));)
6360 if (GET_CODE (note) == NOTE
6361 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6362 break;
6363 }
6364
6365 next = NEXT_INSN (note);
6366 prev = PREV_INSN (note);
6367 if (prev)
6368 NEXT_INSN (prev) = next;
6369 if (next)
6370 PREV_INSN (next) = prev;
6371
6372 /* Whether or not we can depend on basic_block_head,
6373 attempt to keep it up-to-date. */
6374 if (basic_block_head[0] == note)
6375 basic_block_head[0] = next;
6376
6377 add_insn_after (note, insn);
6378 }
6379 }
6380 }
6381
6382 if (epilogue)
6383 {
6384 register rtx insn, note = 0;
6385
6386 /* Scan from the end until we reach the first epilogue insn.
6387 We apparently can't depend on basic_block_{head,end} after
6388 reorg has run. */
6389 for (len = 0; epilogue[len]; len++)
6390 ;
6391 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6392 {
6393 if (GET_CODE (insn) == NOTE)
6394 {
6395 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6396 note = insn;
6397 }
6398 else if ((len -= contains (insn, epilogue)) == 0)
6399 {
6400 /* Find the epilogue-begin note if we haven't already, and
6401 move it to just before the first epilogue insn. */
6402 if (note == 0)
6403 {
6404 for (note = insn; (note = PREV_INSN (note));)
6405 if (GET_CODE (note) == NOTE
6406 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6407 break;
6408 }
6409 next = NEXT_INSN (note);
6410 prev = PREV_INSN (note);
6411 if (prev)
6412 NEXT_INSN (prev) = next;
6413 if (next)
6414 PREV_INSN (next) = prev;
6415
6416 /* Whether or not we can depend on basic_block_head,
6417 attempt to keep it up-to-date. */
6418 if (n_basic_blocks
6419 && basic_block_head[n_basic_blocks-1] == insn)
6420 basic_block_head[n_basic_blocks-1] = note;
6421
6422 add_insn_before (note, insn);
6423 }
6424 }
6425 }
6426 }
6427 #endif /* HAVE_prologue or HAVE_epilogue */
6428 }