(assign_parms): Tighten up code that makes REG_EQUIV notes for parms.
[gcc.git] / gcc / function.c
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg,
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "bytecode.h"
#include "bc-emit.h"

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macro NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest multiple of the alignment
   that is not less than the value.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
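
/* For example, with an alignment of 8:
     FLOOR_ROUND (13, 8) == 13 & ~7 == 8
     FLOOR_ROUND (-5, 8) == -5 & ~7 == -8
     CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 16
     CEIL_ROUND (-5, 8) == (-5 + 7) & ~7 == 0
   so both macros behave sensibly even for the negative offsets that
   arise when the frame grows downward.  */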

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if not optimizing.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if not optimizing.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that the parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
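
/* For example, in GNU C a statement expression such as

     struct S x = ({ struct S tem = foo (); tem; });

   may leave its result in a temporary slot; that slot must survive
   past the end of the inner statement, which is arranged by
   pretending it was allocated at the previous nesting level.  */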

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  int base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  int full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.
   Not an rtx.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};
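
/* For example, an insn whose pattern contains a MATCH_DUP may mention
   VAR twice; if one occurrence is replaced by a new pseudo, the other
   occurrence must be replaced by the same pseudo, so each replacement
   is recorded in this list and reused.  */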

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address  PROTO((rtx));
static void put_reg_into_stack  PROTO((struct function *, rtx, tree,
                                       enum machine_mode, enum machine_mode,
                                       int));
static void fixup_var_refs      PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement       PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1    PROTO((rtx, enum machine_mode, rtx *, rtx,
                                       struct fixup_replacement **));
static rtx fixup_memory_subreg  PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg  PROTO((rtx, rtx, int));
static rtx fixup_stack_1        PROTO((rtx, rtx));
static void optimize_bit_field  PROTO((rtx, rtx, rtx *));
static void instantiate_decls   PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl    PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers     PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below           PROTO((struct args_size *, enum machine_mode,
                                       tree));
static tree round_down          PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse     PROTO((tree));
static int all_blocks           PROTO((tree, tree *));
static int *record_insns        PROTO((rtx));
static int contains             PROTO((rtx, int *));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
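
/* A sketch of how these are used when compiling a function F nested
   inside G: the language front end calls push_function_context when
   it reaches F's body, compiles F to rtl as usual, and then calls
   pop_function_context to resume compiling G where it left off.  */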
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
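
/* For example, to allocate a word-sized slot with the alignment
   implied by its mode:

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   The MEM returned addresses the slot relative to the virtual
   stack-vars register, or relative to the frame pointer once virtual
   registers have been instantiated.  */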

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx (MEM, BLKmode,
                                 plus_constant (XEXP (best_p->slot, 0),
                                                rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
                                         stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }
  return p->slot;
}
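
/* For example, two consecutive calls

     rtx t1 = assign_stack_temp (SImode, 4, 0);
     rtx t2 = assign_stack_temp (SImode, 4, 0);

   return distinct slots while both are in use; after free_temp_slots
   releases them, the same stack space can be handed out again.  */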
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      int size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
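
/* For example, for a small scalar type assign_temp normally returns a
   (possibly promoted) pseudo register, while

     rtx tem = assign_temp (type, 0, 1, 0);

   forces the temporary into addressable stack memory, as needed when
   its address will be taken.  */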
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}
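
/* For example, two free BLKmode slots that are adjacent, one with
   base_offset 16 and full_size 8 and the other with base_offset 24
   and full_size 8, are merged here into a single free slot with
   base_offset 16 and full_size 16.  */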
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x)
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since such a temporary can
   be reused while generating the same RTL_EXPR, but this is complex
   and probably not worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
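
/* A sketch of the usual discipline around these calls:

     push_temp_slots ();
     ... expand an expression, possibly calling assign_stack_temp ...
     free_temp_slots ();
     pop_temp_slots ();

   free_temp_slots releases the statement-lifetime temporaries made at
   the current level; pop_temp_slots then returns to the enclosing
   level.  */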

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
                        promoted_mode, decl_mode, TREE_SIDE_EFFECTS (decl));
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
}
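
/* For example, given

     int f () { int i = 0; int *p = &i; ... }

   the front end may first give I a pseudo register; when &I is seen,
   put_var_into_stack replaces I's rtl with a stack slot, and the
   insns already emitted for I are corrected by fixup_var_refs.  */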

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
{
  rtx new = 0;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          else if (toplevel
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

#ifdef SMALL_REGISTER_CLASSES
              /* If the insn that copies the results of a CALL_INSN
                 into a pseudo now references VAR, we have to use an
                 intermediate pseudo since we want the life of the
                 return value register to be only a single insn.

                 If we don't use an intermediate pseudo, such things as
                 address computations to make the address of VAR valid
                 if it is not can be placed between the CALL_INSN and INSN.

                 To make sure this doesn't happen, we record the destination
                 of the CALL_INSN and see if the next insn uses both that
                 and VAR.  */

              if (call_dest != 0 && GET_CODE (insn) == INSN
                  && reg_mentioned_p (var, PATTERN (insn))
                  && reg_mentioned_p (call_dest, PATTERN (insn)))
                {
                  rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                  emit_insn_before (gen_move_insn (temp, call_dest), insn);

                  PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                call_dest, temp);
                }

              if (GET_CODE (insn) == CALL_INSN
                  && GET_CODE (PATTERN (insn)) == SET)
                call_dest = SET_DEST (PATTERN (insn));
              else if (GET_CODE (insn) == CALL_INSN
                       && GET_CODE (PATTERN (insn)) == PARALLEL
                       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
              else
                call_dest = 0;
#endif

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}
1662 \f
1663 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1664 See if the rtx expression at *LOC in INSN needs to be changed.
1665
1666 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1667 contain a list of original rtx's and replacements. If we find that we need
1668 to modify this insn by replacing a memory reference with a pseudo or by
1669 making a new MEM to implement a SUBREG, we consult that list to see if
1670 we have already chosen a replacement. If none has already been allocated,
1671 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1672 or the SUBREG, as appropriate, to the pseudo. */
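/* A hypothetical illustration: suppose VAR is now (mem:SI (plus (reg fp)
   (const_int -8))) and the insn (set (reg r) (mult:SI VAR VAR)) does not
   accept memory operands.  Both occurrences of VAR are then replaced by
   one new pseudo P (so that a MATCH_DUP still matches), and the caller,
   fixup_var_refs_insns, emits (set P VAR) in front of the insn.  */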
1673
1674 static void
1675 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1676 register rtx var;
1677 enum machine_mode promoted_mode;
1678 register rtx *loc;
1679 rtx insn;
1680 struct fixup_replacement **replacements;
1681 {
1682 register int i;
1683 register rtx x = *loc;
1684 RTX_CODE code = GET_CODE (x);
1685 register char *fmt;
1686 register rtx tem, tem1;
1687 struct fixup_replacement *replacement;
1688
1689 switch (code)
1690 {
1691 case MEM:
1692 if (var == x)
1693 {
1694 /* If we already have a replacement, use it. Otherwise,
1695 try to fix up this address in case it is invalid. */
1696
1697 replacement = find_fixup_replacement (replacements, var);
1698 if (replacement->new)
1699 {
1700 *loc = replacement->new;
1701 return;
1702 }
1703
1704 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1705
1706 /* Unless we are forcing memory to register or we changed the mode,
1707 we can leave things the way they are if the insn is valid. */
1708
1709 INSN_CODE (insn) = -1;
1710 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1711 && recog_memoized (insn) >= 0)
1712 return;
1713
1714 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1715 return;
1716 }
1717
1718 /* If X contains VAR, we need to unshare it here so that we update
1719 each occurrence separately. But all identical MEMs in one insn
1720 must be replaced with the same rtx because of the possibility of
1721 MATCH_DUPs. */
1722
1723 if (reg_mentioned_p (var, x))
1724 {
1725 replacement = find_fixup_replacement (replacements, x);
1726 if (replacement->new == 0)
1727 replacement->new = copy_most_rtx (x, var);
1728
1729 *loc = x = replacement->new;
1730 }
1731 break;
1732
1733 case REG:
1734 case CC0:
1735 case PC:
1736 case CONST_INT:
1737 case CONST:
1738 case SYMBOL_REF:
1739 case LABEL_REF:
1740 case CONST_DOUBLE:
1741 return;
1742
1743 case SIGN_EXTRACT:
1744 case ZERO_EXTRACT:
1745 /* Note that in some cases those types of expressions are altered
1746 by optimize_bit_field, and do not survive to get here. */
1747 if (XEXP (x, 0) == var
1748 || (GET_CODE (XEXP (x, 0)) == SUBREG
1749 && SUBREG_REG (XEXP (x, 0)) == var))
1750 {
1751 /* Get TEM as a valid MEM in the mode presently in the insn.
1752
1753 We don't worry about the possibility of MATCH_DUP here; it
1754 is highly unlikely and would be tricky to handle. */
1755
1756 tem = XEXP (x, 0);
1757 if (GET_CODE (tem) == SUBREG)
1758 tem = fixup_memory_subreg (tem, insn, 1);
1759 tem = fixup_stack_1 (tem, insn);
1760
1761 /* Unless we want to load from memory, get TEM into the proper mode
1762 for an extract from memory. This can only be done if the
1763 extract is at a constant position and length. */
1764
1765 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1766 && GET_CODE (XEXP (x, 2)) == CONST_INT
1767 && ! mode_dependent_address_p (XEXP (tem, 0))
1768 && ! MEM_VOLATILE_P (tem))
1769 {
1770 enum machine_mode wanted_mode = VOIDmode;
1771 enum machine_mode is_mode = GET_MODE (tem);
1772 int width = INTVAL (XEXP (x, 1));
1773 int pos = INTVAL (XEXP (x, 2));
1774
1775 #ifdef HAVE_extzv
1776 if (GET_CODE (x) == ZERO_EXTRACT)
1777 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1778 #endif
1779 #ifdef HAVE_extv
1780 if (GET_CODE (x) == SIGN_EXTRACT)
1781 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1782 #endif
1783 /* If we have a narrower mode, we can do something. */
1784 if (wanted_mode != VOIDmode
1785 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1786 {
1787 int offset = pos / BITS_PER_UNIT;
1788 rtx old_pos = XEXP (x, 2);
1789 rtx newmem;
1790
1791 /* If the bytes and bits are counted differently, we
1792 must adjust the offset. */
1793 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1794 offset = (GET_MODE_SIZE (is_mode)
1795 - GET_MODE_SIZE (wanted_mode) - offset);
1796
1797 pos %= GET_MODE_BITSIZE (wanted_mode);
1798
1799 newmem = gen_rtx (MEM, wanted_mode,
1800 plus_constant (XEXP (tem, 0), offset));
1801 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1802 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1803 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1804
1805 /* Make the change and see if the insn remains valid. */
1806 INSN_CODE (insn) = -1;
1807 XEXP (x, 0) = newmem;
1808 XEXP (x, 2) = GEN_INT (pos);
1809
1810 if (recog_memoized (insn) >= 0)
1811 return;
1812
1813 /* Otherwise, restore old position. XEXP (x, 0) will be
1814 restored later. */
1815 XEXP (x, 2) = old_pos;
1816 }
1817 }
1818
1819 /* If we get here, the bitfield extract insn can't accept a memory
1820 reference. Copy the input into a register. */
1821
1822 tem1 = gen_reg_rtx (GET_MODE (tem));
1823 emit_insn_before (gen_move_insn (tem1, tem), insn);
1824 XEXP (x, 0) = tem1;
1825 return;
1826 }
1827 break;
1828
1829 case SUBREG:
1830 if (SUBREG_REG (x) == var)
1831 {
1832 /* If this is a special SUBREG made because VAR was promoted
1833 to a wider mode, replace it with VAR and call ourself
1834 recursively, this time saying that the object previously
1835 had its current mode (by virtue of the SUBREG). */
1836
1837 if (SUBREG_PROMOTED_VAR_P (x))
1838 {
1839 *loc = var;
1840 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1841 return;
1842 }
1843
1844 /* If this SUBREG makes VAR wider, it has become a paradoxical
1845 SUBREG with VAR in memory, but these aren't allowed at this
1846 stage of the compilation. So load VAR into a pseudo and take
1847 a SUBREG of that pseudo. */
1848 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1849 {
1850 replacement = find_fixup_replacement (replacements, var);
1851 if (replacement->new == 0)
1852 replacement->new = gen_reg_rtx (GET_MODE (var));
1853 SUBREG_REG (x) = replacement->new;
1854 return;
1855 }
1856
1857 /* See if we have already found a replacement for this SUBREG.
1858 If so, use it. Otherwise, make a MEM and see if the insn
1859 is recognized. If not, or if we should force MEM into a register,
1860 make a pseudo for this SUBREG. */
1861 replacement = find_fixup_replacement (replacements, x);
1862 if (replacement->new)
1863 {
1864 *loc = replacement->new;
1865 return;
1866 }
1867
1868 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1869
1870 INSN_CODE (insn) = -1;
1871 if (! flag_force_mem && recog_memoized (insn) >= 0)
1872 return;
1873
1874 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1875 return;
1876 }
1877 break;
1878
1879 case SET:
1880 /* First do special simplification of bit-field references. */
1881 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1882 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1883 optimize_bit_field (x, insn, 0);
1884 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1885 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1886 optimize_bit_field (x, insn, NULL_PTR);
1887
1888 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1889 insn into a pseudo and store the low part of the pseudo into VAR. */
1890 if (GET_CODE (SET_DEST (x)) == SUBREG
1891 && SUBREG_REG (SET_DEST (x)) == var
1892 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1893 > GET_MODE_SIZE (GET_MODE (var))))
1894 {
1895 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1896 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1897 tem)),
1898 insn);
1899 break;
1900 }
1901
1902 {
1903 rtx dest = SET_DEST (x);
1904 rtx src = SET_SRC (x);
1905 rtx outerdest = dest;
1906
1907 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1908 || GET_CODE (dest) == SIGN_EXTRACT
1909 || GET_CODE (dest) == ZERO_EXTRACT)
1910 dest = XEXP (dest, 0);
1911
1912 if (GET_CODE (src) == SUBREG)
1913 src = XEXP (src, 0);
1914
1915 /* If VAR does not appear at the top level of the SET,
1916 just scan the lower levels of the tree. */
1917
1918 if (src != var && dest != var)
1919 break;
1920
1921 /* We will need to rerecognize this insn. */
1922 INSN_CODE (insn) = -1;
1923
1924 #ifdef HAVE_insv
1925 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1926 {
1927 /* Since this case will return, ensure we fixup all the
1928 operands here. */
1929 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1930 insn, replacements);
1931 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1932 insn, replacements);
1933 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1934 insn, replacements);
1935
1936 tem = XEXP (outerdest, 0);
1937
1938 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1939 that may appear inside a ZERO_EXTRACT.
1940 This was legitimate when the MEM was a REG. */
1941 if (GET_CODE (tem) == SUBREG
1942 && SUBREG_REG (tem) == var)
1943 tem = fixup_memory_subreg (tem, insn, 1);
1944 else
1945 tem = fixup_stack_1 (tem, insn);
1946
1947 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1948 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1949 && ! mode_dependent_address_p (XEXP (tem, 0))
1950 && ! MEM_VOLATILE_P (tem))
1951 {
1952 enum machine_mode wanted_mode
1953 = insn_operand_mode[(int) CODE_FOR_insv][0];
1954 enum machine_mode is_mode = GET_MODE (tem);
1955 int width = INTVAL (XEXP (outerdest, 1));
1956 int pos = INTVAL (XEXP (outerdest, 2));
1957
1958 /* If we have a narrower mode, we can do something. */
1959 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1960 {
1961 int offset = pos / BITS_PER_UNIT;
1962 rtx old_pos = XEXP (outerdest, 2);
1963 rtx newmem;
1964
1965 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1966 offset = (GET_MODE_SIZE (is_mode)
1967 - GET_MODE_SIZE (wanted_mode) - offset);
1968
1969 pos %= GET_MODE_BITSIZE (wanted_mode);
1970
1971 newmem = gen_rtx (MEM, wanted_mode,
1972 plus_constant (XEXP (tem, 0), offset));
1973 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1974 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1975 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1976
1977 /* Make the change and see if the insn remains valid. */
1978 INSN_CODE (insn) = -1;
1979 XEXP (outerdest, 0) = newmem;
1980 XEXP (outerdest, 2) = GEN_INT (pos);
1981
1982 if (recog_memoized (insn) >= 0)
1983 return;
1984
1985 /* Otherwise, restore old position. XEXP (x, 0) will be
1986 restored later. */
1987 XEXP (outerdest, 2) = old_pos;
1988 }
1989 }
1990
1991 /* If we get here, the bit-field store doesn't allow memory
1992 or isn't located at a constant position. Load the value into
1993 a register, do the store, and put it back into memory. */
1994
1995 tem1 = gen_reg_rtx (GET_MODE (tem));
1996 emit_insn_before (gen_move_insn (tem1, tem), insn);
1997 emit_insn_after (gen_move_insn (tem, tem1), insn);
1998 XEXP (outerdest, 0) = tem1;
1999 return;
2000 }
2001 #endif
2002
2003 /* STRICT_LOW_PART is a no-op on memory references
2004 and it can cause combinations to be unrecognizable,
2005 so eliminate it. */
2006
2007 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2008 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2009
2010 /* A valid insn to copy VAR into or out of a register
2011 must be left alone, to avoid an infinite loop here.
2012 If the reference to VAR is by a subreg, fix that up,
2013 since SUBREG is not valid for a memref.
2014 Also fix up the address of the stack slot.
2015
2016 Note that we must not try to recognize the insn until
2017 after we know that we have valid addresses and no
2018 (subreg (mem ...) ...) constructs, since these interfere
2019 with determining the validity of the insn. */
2020
2021 if ((SET_SRC (x) == var
2022 || (GET_CODE (SET_SRC (x)) == SUBREG
2023 && SUBREG_REG (SET_SRC (x)) == var))
2024 && (GET_CODE (SET_DEST (x)) == REG
2025 || (GET_CODE (SET_DEST (x)) == SUBREG
2026 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2027 && GET_MODE (var) == promoted_mode
2028 && x == single_set (insn))
2029 {
2030 rtx pat;
2031
2032 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2033 if (replacement->new)
2034 SET_SRC (x) = replacement->new;
2035 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2036 SET_SRC (x) = replacement->new
2037 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2038 else
2039 SET_SRC (x) = replacement->new
2040 = fixup_stack_1 (SET_SRC (x), insn);
2041
2042 if (recog_memoized (insn) >= 0)
2043 return;
2044
2045 /* INSN is not valid, but we know that we want to
2046 copy SET_SRC (x) to SET_DEST (x) in some way. So
2047 we generate the move and see whether it requires more
2048 than one insn. If it does, we emit those insns and
2049 delete INSN. Otherwise, we can just replace the pattern
2050 of INSN; we have already verified above that INSN has
2051 no other function than to do X. */
2052
2053 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2054 if (GET_CODE (pat) == SEQUENCE)
2055 {
2056 emit_insn_after (pat, insn);
2057 PUT_CODE (insn, NOTE);
2058 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2059 NOTE_SOURCE_FILE (insn) = 0;
2060 }
2061 else
2062 PATTERN (insn) = pat;
2063
2064 return;
2065 }
2066
2067 if ((SET_DEST (x) == var
2068 || (GET_CODE (SET_DEST (x)) == SUBREG
2069 && SUBREG_REG (SET_DEST (x)) == var))
2070 && (GET_CODE (SET_SRC (x)) == REG
2071 || (GET_CODE (SET_SRC (x)) == SUBREG
2072 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2073 && GET_MODE (var) == promoted_mode
2074 && x == single_set (insn))
2075 {
2076 rtx pat;
2077
2078 if (GET_CODE (SET_DEST (x)) == SUBREG)
2079 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2080 else
2081 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2082
2083 if (recog_memoized (insn) >= 0)
2084 return;
2085
2086 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2087 if (GET_CODE (pat) == SEQUENCE)
2088 {
2089 emit_insn_after (pat, insn);
2090 PUT_CODE (insn, NOTE);
2091 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2092 NOTE_SOURCE_FILE (insn) = 0;
2093 }
2094 else
2095 PATTERN (insn) = pat;
2096
2097 return;
2098 }
2099
2100 /* Otherwise, storing into VAR must be handled specially
2101 by storing into a temporary and copying that into VAR
2102 with a new insn after this one. Note that this case
2103 will be used when storing into a promoted scalar since
2104 the insn will now have different modes on the input
2105 and output and hence will be invalid (except for the case
2106 of setting it to a constant, which does not need any
2107 change if it is valid). We generate extra code in that case,
2108 but combine.c will eliminate it. */
2109
2110 if (dest == var)
2111 {
2112 rtx temp;
2113 rtx fixeddest = SET_DEST (x);
2114
2115 /* STRICT_LOW_PART can be discarded around a MEM. */
2116 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2117 fixeddest = XEXP (fixeddest, 0);
2118 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2119 if (GET_CODE (fixeddest) == SUBREG)
2120 {
2121 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2122 promoted_mode = GET_MODE (fixeddest);
2123 }
2124 else
2125 fixeddest = fixup_stack_1 (fixeddest, insn);
2126
2127 temp = gen_reg_rtx (promoted_mode);
2128
2129 emit_insn_after (gen_move_insn (fixeddest,
2130 gen_lowpart (GET_MODE (fixeddest),
2131 temp)),
2132 insn);
2133
2134 SET_DEST (x) = temp;
2135 }
2136 }
2137 }
2138
2139 /* Nothing special about this RTX; fix its operands. */
2140
2141 fmt = GET_RTX_FORMAT (code);
2142 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2143 {
2144 if (fmt[i] == 'e')
2145 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2146 if (fmt[i] == 'E')
2147 {
2148 register int j;
2149 for (j = 0; j < XVECLEN (x, i); j++)
2150 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2151 insn, replacements);
2152 }
2153 }
2154 }
2155 \f
2156 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2157 return an rtx (MEM:m1 newaddr) which is equivalent.
2158 If any insns must be emitted to compute NEWADDR, put them before INSN.
2159
2160 UNCRITICAL nonzero means accept paradoxical subregs.
2161 This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES. */
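/* A minimal illustration, assuming little-endian byte order and 4-byte
   words (UNITS_PER_WORD == 4):
     (subreg:SI (mem:DI (reg fp)) 1)
   becomes
     (mem:SI (plus (reg fp) (const_int 4))).  */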
2162
2163 static rtx
2164 fixup_memory_subreg (x, insn, uncritical)
2165 rtx x;
2166 rtx insn;
2167 int uncritical;
2168 {
2169 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2170 rtx addr = XEXP (SUBREG_REG (x), 0);
2171 enum machine_mode mode = GET_MODE (x);
2172 rtx saved, result;
2173
2174 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2175 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2176 && ! uncritical)
2177 abort ();
2178
2179 if (BYTES_BIG_ENDIAN)
2180 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2181 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2182 addr = plus_constant (addr, offset);
2183 if (!flag_force_addr && memory_address_p (mode, addr))
2184 /* Shortcut if no insns need be emitted. */
2185 return change_address (SUBREG_REG (x), mode, addr);
2186 start_sequence ();
2187 result = change_address (SUBREG_REG (x), mode, addr);
2188 emit_insn_before (gen_sequence (), insn);
2189 end_sequence ();
2190 return result;
2191 }
2192
2193 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2194 Replace subexpressions of X in place.
2195 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2196 Otherwise return X, with its contents possibly altered.
2197
2198 If any insns must be emitted to compute NEWADDR, put them before INSN.
2199
2200 UNCRITICAL is as in fixup_memory_subreg. */
2201
2202 static rtx
2203 walk_fixup_memory_subreg (x, insn, uncritical)
2204 register rtx x;
2205 rtx insn;
2206 int uncritical;
2207 {
2208 register enum rtx_code code;
2209 register char *fmt;
2210 register int i;
2211
2212 if (x == 0)
2213 return 0;
2214
2215 code = GET_CODE (x);
2216
2217 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2218 return fixup_memory_subreg (x, insn, uncritical);
2219
2220 /* Nothing special about this RTX; fix its operands. */
2221
2222 fmt = GET_RTX_FORMAT (code);
2223 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2224 {
2225 if (fmt[i] == 'e')
2226 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2227 if (fmt[i] == 'E')
2228 {
2229 register int j;
2230 for (j = 0; j < XVECLEN (x, i); j++)
2231 XVECEXP (x, i, j)
2232 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2233 }
2234 }
2235 return x;
2236 }
2237 \f
2238 /* For each memory ref within X, if it refers to a stack slot
2239 with an out of range displacement, put the address in a temp register
2240 (emitting new insns before INSN to load these registers)
2241 and alter the memory ref to use that register.
2242 Replace each such MEM rtx with a copy, to avoid clobberage. */
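/* For example (illustrative only; the valid displacement range is
   machine-dependent): given
     (mem:SI (plus (reg vfp) (const_int 100000)))
   whose displacement is out of range, the sum is computed into a fresh
   pseudo in front of INSN and the reference becomes (mem:SI (reg tmp)).  */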
2243
2244 static rtx
2245 fixup_stack_1 (x, insn)
2246 rtx x;
2247 rtx insn;
2248 {
2249 register int i;
2250 register RTX_CODE code = GET_CODE (x);
2251 register char *fmt;
2252
2253 if (code == MEM)
2254 {
2255 register rtx ad = XEXP (x, 0);
2256 /* If we have the address of a stack slot but it's not valid
2257 (displacement is too large), compute the sum in a register. */
2258 if (GET_CODE (ad) == PLUS
2259 && GET_CODE (XEXP (ad, 0)) == REG
2260 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2261 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2262 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2263 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2264 {
2265 rtx temp, seq;
2266 if (memory_address_p (GET_MODE (x), ad))
2267 return x;
2268
2269 start_sequence ();
2270 temp = copy_to_reg (ad);
2271 seq = gen_sequence ();
2272 end_sequence ();
2273 emit_insn_before (seq, insn);
2274 return change_address (x, VOIDmode, temp);
2275 }
2276 return x;
2277 }
2278
2279 fmt = GET_RTX_FORMAT (code);
2280 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2281 {
2282 if (fmt[i] == 'e')
2283 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2284 if (fmt[i] == 'E')
2285 {
2286 register int j;
2287 for (j = 0; j < XVECLEN (x, i); j++)
2288 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2289 }
2290 }
2291 return x;
2292 }
2293 \f
2294 /* Optimization: a bit-field instruction whose field
2295 happens to be a byte or halfword in memory
2296 can be changed to a move instruction.
2297
2298 We call here when INSN is an insn to examine or store into a bit-field.
2299 BODY is the SET-rtx to be altered.
2300
2301 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2302 (Currently this is called only from function.c, and EQUIV_MEM
2303 is always 0.) */
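/* A hypothetical example, assuming BITS_BIG_ENDIAN == BYTES_BIG_ENDIAN:
     (set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8))
          (reg:SI r))
   stores a byte-sized, byte-aligned field, so it can be rewritten as the
   plain move
     (set (mem:QI (plus ADDR (const_int 1))) (subreg:QI (reg:SI r) 0)).  */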
2304
2305 static void
2306 optimize_bit_field (body, insn, equiv_mem)
2307 rtx body;
2308 rtx insn;
2309 rtx *equiv_mem;
2310 {
2311 register rtx bitfield;
2312 int destflag;
2313 rtx seq = 0;
2314 enum machine_mode mode;
2315
2316 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2317 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2318 bitfield = SET_DEST (body), destflag = 1;
2319 else
2320 bitfield = SET_SRC (body), destflag = 0;
2321
2322 /* First check that the field being stored has constant size and position
2323 and is in fact a byte or halfword suitably aligned. */
2324
2325 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2326 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2327 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2328 != BLKmode)
2329 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2330 {
2331 register rtx memref = 0;
2332
2333 /* Now check that the containing word is memory, not a register,
2334 and that it is safe to change the machine mode. */
2335
2336 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2337 memref = XEXP (bitfield, 0);
2338 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2339 && equiv_mem != 0)
2340 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2341 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2342 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2343 memref = SUBREG_REG (XEXP (bitfield, 0));
2344 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2345 && equiv_mem != 0
2346 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2347 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2348
2349 if (memref
2350 && ! mode_dependent_address_p (XEXP (memref, 0))
2351 && ! MEM_VOLATILE_P (memref))
2352 {
2353 /* Now adjust the address, first for any subreg'ing
2354 that we are now getting rid of,
2355 and then for which byte of the word is wanted. */
2356
2357 register int offset = INTVAL (XEXP (bitfield, 2));
2358 rtx insns;
2359
2360 /* Adjust OFFSET to count bits from low-address byte. */
2361 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2362 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2363 - offset - INTVAL (XEXP (bitfield, 1)));
2364
2365 /* Adjust OFFSET to count bytes from low-address byte. */
2366 offset /= BITS_PER_UNIT;
2367 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2368 {
2369 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2370 if (BYTES_BIG_ENDIAN)
2371 offset -= (MIN (UNITS_PER_WORD,
2372 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2373 - MIN (UNITS_PER_WORD,
2374 GET_MODE_SIZE (GET_MODE (memref))));
2375 }
2376
2377 start_sequence ();
2378 memref = change_address (memref, mode,
2379 plus_constant (XEXP (memref, 0), offset));
2380 insns = get_insns ();
2381 end_sequence ();
2382 emit_insns_before (insns, insn);
2383
2384 /* Store this memory reference where
2385 we found the bit field reference. */
2386
2387 if (destflag)
2388 {
2389 validate_change (insn, &SET_DEST (body), memref, 1);
2390 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2391 {
2392 rtx src = SET_SRC (body);
2393 while (GET_CODE (src) == SUBREG
2394 && SUBREG_WORD (src) == 0)
2395 src = SUBREG_REG (src);
2396 if (GET_MODE (src) != GET_MODE (memref))
2397 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2398 validate_change (insn, &SET_SRC (body), src, 1);
2399 }
2400 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2401 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2402 /* This shouldn't happen because anything that didn't have
2403 one of these modes should have been converted explicitly
2404 and then referenced through a subreg.
2405 This is so because the original bit-field was
2406 handled by agg_mode and so its tree structure had
2407 the same mode that memref now has. */
2408 abort ();
2409 }
2410 else
2411 {
2412 rtx dest = SET_DEST (body);
2413
2414 while (GET_CODE (dest) == SUBREG
2415 && SUBREG_WORD (dest) == 0
2416 && (GET_MODE_CLASS (GET_MODE (dest))
2417 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2418 dest = SUBREG_REG (dest);
2419
2420 validate_change (insn, &SET_DEST (body), dest, 1);
2421
2422 if (GET_MODE (dest) == GET_MODE (memref))
2423 validate_change (insn, &SET_SRC (body), memref, 1);
2424 else
2425 {
2426 /* Convert the mem ref to the destination mode. */
2427 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2428
2429 start_sequence ();
2430 convert_move (newreg, memref,
2431 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2432 seq = get_insns ();
2433 end_sequence ();
2434
2435 validate_change (insn, &SET_SRC (body), newreg, 1);
2436 }
2437 }
2438
2439 /* See if we can convert this extraction or insertion into
2440 a simple move insn. We might not be able to do so if this
2441 was, for example, part of a PARALLEL.
2442
2443 If we succeed, write out any needed conversions. If we fail,
2444 it is hard to guess why we failed, so don't do anything
2445 special; just let the optimization be suppressed. */
2446
2447 if (apply_change_group () && seq)
2448 emit_insns_before (seq, insn);
2449 }
2450 }
2451 }
2452 \f
2453 /* These routines are responsible for converting virtual register references
2454 to the actual hard register references once RTL generation is complete.
2455
2456 The following four variables are used for communication between the
2457 routines. They contain the offsets of the virtual registers from their
2458 respective hard registers. */
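/* For instance, once var_offset is known, a reference to
   virtual_stack_vars_rtx is rewritten as
     (plus frame_pointer_rtx (const_int var_offset))
   and likewise for the other three virtual registers.  */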
2459
2460 static int in_arg_offset;
2461 static int var_offset;
2462 static int dynamic_offset;
2463 static int out_arg_offset;
2464
2465 /* In most machines, the stack pointer register is equivalent to the bottom
2466 of the stack. */
2467
2468 #ifndef STACK_POINTER_OFFSET
2469 #define STACK_POINTER_OFFSET 0
2470 #endif
2471
2472 /* If not defined, pick an appropriate default for the offset of dynamically
2473 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2474 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2475
2476 #ifndef STACK_DYNAMIC_OFFSET
2477
2478 #ifdef ACCUMULATE_OUTGOING_ARGS
2479 /* The bottom of the stack points to the actual arguments. If
2480 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2481 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2482 stack space for register parameters is not pushed by the caller, but is
2483 rather part of the fixed stack areas and hence not included in
2484 `current_function_outgoing_args_size'. Nevertheless, we must allow
2485 for it when allocating stack dynamic objects. */
2486
2487 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2488 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2489 (current_function_outgoing_args_size \
2490 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2491
2492 #else
2493 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2494 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2495 #endif
2496
2497 #else
2498 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2499 #endif
2500 #endif
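/* Thus, as an illustration: on a machine that accumulates outgoing
   arguments and defines REG_PARM_STACK_SPACE but not
   OUTGOING_REG_PARM_STACK_SPACE, dynamic allocations start beyond both
   the outgoing-argument block and the register-parameter save area.  */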
2501
2502 /* Pass through the INSNS of function FNDECL and convert virtual register
2503 references to hard register references. */
2504
2505 void
2506 instantiate_virtual_regs (fndecl, insns)
2507 tree fndecl;
2508 rtx insns;
2509 {
2510 rtx insn;
2511
2512 /* Compute the offsets to use for this function. */
2513 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2514 var_offset = STARTING_FRAME_OFFSET;
2515 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2516 out_arg_offset = STACK_POINTER_OFFSET;
2517
2518 /* Scan all variables and parameters of this function. For each that is
2519 in memory, instantiate all virtual registers if the result is a valid
2520 address. If not, we do it later. That will handle most uses of virtual
2521 regs on many machines. */
2522 instantiate_decls (fndecl, 1);
2523
2524 /* Initialize recognition, indicating that volatile is OK. */
2525 init_recog ();
2526
2527 /* Scan through all the insns, instantiating every virtual register still
2528 present. */
2529 for (insn = insns; insn; insn = NEXT_INSN (insn))
2530 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2531 || GET_CODE (insn) == CALL_INSN)
2532 {
2533 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2534 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2535 }
2536
2537 /* Now instantiate the remaining register equivalences for debugging info.
2538 These will not be valid addresses. */
2539 instantiate_decls (fndecl, 0);
2540
2541 /* Indicate that, from now on, assign_stack_local should use
2542 frame_pointer_rtx. */
2543 virtuals_instantiated = 1;
2544 }
2545
2546 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2547 all virtual registers in their DECL_RTL's.
2548
2549 If VALID_ONLY, do this only if the resulting address is still valid.
2550 Otherwise, always do it. */
2551
2552 static void
2553 instantiate_decls (fndecl, valid_only)
2554 tree fndecl;
2555 int valid_only;
2556 {
2557 tree decl;
2558
2559 if (DECL_SAVED_INSNS (fndecl))
2560 /* When compiling an inline function, the obstack used for
2561 rtl allocation is the maybepermanent_obstack. Calling
2562 `resume_temporary_allocation' switches us back to that
2563 obstack while we process this function's parameters. */
2564 resume_temporary_allocation ();
2565
2566 /* Process all parameters of the function. */
2567 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2568 {
2569 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2570 valid_only);
2571 instantiate_decl (DECL_INCOMING_RTL (decl),
2572 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2573 }
2574
2575 /* Now process all variables defined in the function or its subblocks. */
2576 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2577
2578 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2579 {
2580 /* Save all rtl allocated for this function by raising the
2581 high-water mark on the maybepermanent_obstack. */
2582 preserve_data ();
2583 /* All further rtl allocation is now done in the current_obstack. */
2584 rtl_in_current_obstack ();
2585 }
2586 }
2587
2588 /* Subroutine of instantiate_decls: Process all decls in the given
2589 BLOCK node and all its subblocks. */
2590
2591 static void
2592 instantiate_decls_1 (let, valid_only)
2593 tree let;
2594 int valid_only;
2595 {
2596 tree t;
2597
2598 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2599 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2600 valid_only);
2601
2602 /* Process all subblocks. */
2603 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2604 instantiate_decls_1 (t, valid_only);
2605 }
2606
2607 /* Subroutine of the preceding procedures: Given RTL representing a
2608 decl and the size of the object, do any instantiation required.
2609
2610 If VALID_ONLY is non-zero, it means that the RTL should only be
2611 changed if the new address is valid. */
2612
2613 static void
2614 instantiate_decl (x, size, valid_only)
2615 rtx x;
2616 int size;
2617 int valid_only;
2618 {
2619 enum machine_mode mode;
2620 rtx addr;
2621
2622 /* If this is not a MEM, no need to do anything. Similarly if the
2623 address is a constant or a register that is not a virtual register. */
2624
2625 if (x == 0 || GET_CODE (x) != MEM)
2626 return;
2627
2628 addr = XEXP (x, 0);
2629 if (CONSTANT_P (addr)
2630 || (GET_CODE (addr) == REG
2631 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2632 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2633 return;
2634
2635 /* If we should only do this if the address is valid, copy the address.
2636 We need to do this so we can undo any changes that might make the
2637 address invalid. This copy is unfortunate, but probably can't be
2638 avoided. */
2639
2640 if (valid_only)
2641 addr = copy_rtx (addr);
2642
2643 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2644
2645 if (! valid_only)
2646 return;
2647
2648 /* Now verify that the resulting address is valid for every integer or
2649 floating-point mode up to and including SIZE bytes long. We do this
2650 since the object might be accessed in any mode and frame addresses
2651 are shared. */
2652
2653 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2654 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2655 mode = GET_MODE_WIDER_MODE (mode))
2656 if (! memory_address_p (mode, addr))
2657 return;
2658
2659 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2660 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2661 mode = GET_MODE_WIDER_MODE (mode))
2662 if (! memory_address_p (mode, addr))
2663 return;
2664
2665 /* Otherwise, put back the address, now that we have updated it and we
2666 know it is valid. */
2667
2668 XEXP (x, 0) = addr;
2669 }
2670 \f
2671 /* Given a pointer to a piece of rtx and an optional pointer to the
2672 containing object, instantiate any virtual registers present in it.
2673
2674 If EXTRA_INSNS, we always do the replacement and generate
2675 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2676 is not valid.
2677
2678 Return 1 if we either had nothing to do or if we were able to do the
2679 needed replacement. Return 0 otherwise; we only return zero if
2680 EXTRA_INSNS is zero.
2681
2682 We first try some simple transformations to avoid the creation of extra
2683 pseudos. */
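/* As an example of the simple transformations, with illustrative numbers:
   if var_offset is 16, then
     (plus:SI (reg virtual_stack_vars) (const_int 4))
   is rewritten in place as
     (plus:SI (reg frame_pointer) (const_int 20)),
   creating no new pseudos, provided the containing insn remains valid.  */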
2684
2685 static int
2686 instantiate_virtual_regs_1 (loc, object, extra_insns)
2687 rtx *loc;
2688 rtx object;
2689 int extra_insns;
2690 {
2691 rtx x;
2692 RTX_CODE code;
2693 rtx new = 0;
2694 int offset;
2695 rtx temp;
2696 rtx seq;
2697 int i, j;
2698 char *fmt;
2699
2700 /* Re-start here to avoid recursion in common cases. */
2701 restart:
2702
2703 x = *loc;
2704 if (x == 0)
2705 return 1;
2706
2707 code = GET_CODE (x);
2708
2709 /* Check for some special cases. */
2710 switch (code)
2711 {
2712 case CONST_INT:
2713 case CONST_DOUBLE:
2714 case CONST:
2715 case SYMBOL_REF:
2716 case CODE_LABEL:
2717 case PC:
2718 case CC0:
2719 case ASM_INPUT:
2720 case ADDR_VEC:
2721 case ADDR_DIFF_VEC:
2722 case RETURN:
2723 return 1;
2724
2725 case SET:
2726 /* We are allowed to set the virtual registers. This means that
2727 the actual register should receive the source minus the
2728 appropriate offset. This is used, for example, in the handling
2729 of non-local gotos. */
2730 if (SET_DEST (x) == virtual_incoming_args_rtx)
2731 new = arg_pointer_rtx, offset = - in_arg_offset;
2732 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2733 new = frame_pointer_rtx, offset = - var_offset;
2734 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2735 new = stack_pointer_rtx, offset = - dynamic_offset;
2736 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2737 new = stack_pointer_rtx, offset = - out_arg_offset;
2738
2739 if (new)
2740 {
2741 /* The only valid sources here are PLUS or REG. Just do
2742 the simplest possible thing to handle them. */
2743 if (GET_CODE (SET_SRC (x)) != REG
2744 && GET_CODE (SET_SRC (x)) != PLUS)
2745 abort ();
2746
2747 start_sequence ();
2748 if (GET_CODE (SET_SRC (x)) != REG)
2749 temp = force_operand (SET_SRC (x), NULL_RTX);
2750 else
2751 temp = SET_SRC (x);
2752 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2753 seq = get_insns ();
2754 end_sequence ();
2755
2756 emit_insns_before (seq, object);
2757 SET_DEST (x) = new;
2758
2759 if (!validate_change (object, &SET_SRC (x), temp, 0)
2760 || ! extra_insns)
2761 abort ();
2762
2763 return 1;
2764 }
2765
2766 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2767 loc = &SET_SRC (x);
2768 goto restart;
2769
2770 case PLUS:
2771 /* Handle special case of virtual register plus constant. */
2772 if (CONSTANT_P (XEXP (x, 1)))
2773 {
2774 rtx old, new_offset;
2775
2776 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2777 if (GET_CODE (XEXP (x, 0)) == PLUS)
2778 {
2779 rtx inner = XEXP (XEXP (x, 0), 0);
2780
2781 if (inner == virtual_incoming_args_rtx)
2782 new = arg_pointer_rtx, offset = in_arg_offset;
2783 else if (inner == virtual_stack_vars_rtx)
2784 new = frame_pointer_rtx, offset = var_offset;
2785 else if (inner == virtual_stack_dynamic_rtx)
2786 new = stack_pointer_rtx, offset = dynamic_offset;
2787 else if (inner == virtual_outgoing_args_rtx)
2788 new = stack_pointer_rtx, offset = out_arg_offset;
2789 else
2790 {
2791 loc = &XEXP (x, 0);
2792 goto restart;
2793 }
2794
2795 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2796 extra_insns);
2797 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2798 }
2799
2800 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2801 new = arg_pointer_rtx, offset = in_arg_offset;
2802 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2803 new = frame_pointer_rtx, offset = var_offset;
2804 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2805 new = stack_pointer_rtx, offset = dynamic_offset;
2806 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2807 new = stack_pointer_rtx, offset = out_arg_offset;
2808 else
2809 {
2810 /* We know the second operand is a constant. Unless the
2811 first operand is a REG (which has already been checked),
2812 it needs to be checked. */
2813 if (GET_CODE (XEXP (x, 0)) != REG)
2814 {
2815 loc = &XEXP (x, 0);
2816 goto restart;
2817 }
2818 return 1;
2819 }
2820
2821 new_offset = plus_constant (XEXP (x, 1), offset);
2822
2823 /* If the new constant is zero, try to replace the sum with just
2824 the register. */
2825 if (new_offset == const0_rtx
2826 && validate_change (object, loc, new, 0))
2827 return 1;
2828
2829 /* Next try to replace the register and new offset.
2830 There are two changes to validate here and we can't assume that
2831 when the old offset equals the new one, merely changing the register
2832 will yield a valid insn. In the interests of a little efficiency,
2833 however, we only call validate change once (we don't queue up the
2834 changes and then call apply_change_group). */
2835
2836 old = XEXP (x, 0);
2837 if (offset == 0
2838 ? ! validate_change (object, &XEXP (x, 0), new, 0)
2839 : (XEXP (x, 0) = new,
2840 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
2841 {
2842 if (! extra_insns)
2843 {
2844 XEXP (x, 0) = old;
2845 return 0;
2846 }
2847
2848 /* Otherwise copy the new constant into a register and replace
2849 the constant with that register. */
2850 temp = gen_reg_rtx (Pmode);
2851 XEXP (x, 0) = new;
2852 if (validate_change (object, &XEXP (x, 1), temp, 0))
2853 emit_insn_before (gen_move_insn (temp, new_offset), object);
2854 else
2855 {
2856 /* If that didn't work, replace this expression with a
2857 register containing the sum. */
2858
2859 XEXP (x, 0) = old;
2860 new = gen_rtx (PLUS, Pmode, new, new_offset);
2861
2862 start_sequence ();
2863 temp = force_operand (new, NULL_RTX);
2864 seq = get_insns ();
2865 end_sequence ();
2866
2867 emit_insns_before (seq, object);
2868 if (! validate_change (object, loc, temp, 0)
2869 && ! validate_replace_rtx (x, temp, object))
2870 abort ();
2871 }
2872 }
2873
2874 return 1;
2875 }
2876
2877 /* Fall through to generic two-operand expression case. */
2878 case EXPR_LIST:
2879 case CALL:
2880 case COMPARE:
2881 case MINUS:
2882 case MULT:
2883 case DIV: case UDIV:
2884 case MOD: case UMOD:
2885 case AND: case IOR: case XOR:
2886 case ROTATERT: case ROTATE:
2887 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2888 case NE: case EQ:
2889 case GE: case GT: case GEU: case GTU:
2890 case LE: case LT: case LEU: case LTU:
2891 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2892 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2893 loc = &XEXP (x, 0);
2894 goto restart;
2895
2896 case MEM:
2897 /* Most cases of MEM that convert to valid addresses have already been
2898 handled by our scan of regno_reg_rtx. The only special handling we
2899 need here is to make a copy of the rtx to ensure it isn't being
2900 shared if we have to change it to a pseudo.
2901
2902 If the rtx is a simple reference to an address via a virtual register,
2903 it can potentially be shared. In such cases, first try to make it
2904 a valid address, which can also be shared. Otherwise, copy it and
2905 proceed normally.
2906
2907 First check for common cases that need no processing. These are
2908 usually due to instantiation already being done on a previous instance
2909 of a shared rtx. */
2910
2911 temp = XEXP (x, 0);
2912 if (CONSTANT_ADDRESS_P (temp)
2913 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2914 || temp == arg_pointer_rtx
2915 #endif
2916 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2917 || temp == hard_frame_pointer_rtx
2918 #endif
2919 || temp == frame_pointer_rtx)
2920 return 1;
2921
2922 if (GET_CODE (temp) == PLUS
2923 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2924 && (XEXP (temp, 0) == frame_pointer_rtx
2925 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2926 || XEXP (temp, 0) == hard_frame_pointer_rtx
2927 #endif
2928 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2929 || XEXP (temp, 0) == arg_pointer_rtx
2930 #endif
2931 ))
2932 return 1;
2933
2934 if (temp == virtual_stack_vars_rtx
2935 || temp == virtual_incoming_args_rtx
2936 || (GET_CODE (temp) == PLUS
2937 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2938 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2939 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2940 {
2941 /* This MEM may be shared. If the substitution can be done without
2942 the need to generate new pseudos, we want to do it in place
2943 so all copies of the shared rtx benefit. The call below will
2944 only make substitutions if the resulting address is still
2945 valid.
2946
2947 Note that we cannot pass X as the object in the recursive call
2948 since the insn being processed may not allow all valid
2949 addresses. However, if we were not passed an object, we can
2950 only modify X without copying it if X will have a valid
2951 address.
2952
2953 ??? Also note that this can still lose if OBJECT is an insn that
2954 has fewer restrictions on an address than some other insn.
2955 In that case, we will modify the shared address. This case
2956 doesn't seem very likely, though. */
2957
2958 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2959 object ? object : x, 0))
2960 return 1;
2961
2962 /* Otherwise make a copy and process that copy. We copy the entire
2963 RTL expression since it might be a PLUS which could also be
2964 shared. */
2965 *loc = x = copy_rtx (x);
2966 }
2967
2968 /* Fall through to generic unary operation case. */
2969 case USE:
2970 case CLOBBER:
2971 case SUBREG:
2972 case STRICT_LOW_PART:
2973 case NEG: case NOT:
2974 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2975 case SIGN_EXTEND: case ZERO_EXTEND:
2976 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2977 case FLOAT: case FIX:
2978 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2979 case ABS:
2980 case SQRT:
2981 case FFS:
2982 /* These cases either have just one operand or we know that we need not
2983 check the rest of the operands. */
2984 loc = &XEXP (x, 0);
2985 goto restart;
2986
2987 case REG:
2988 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2989 in front of this insn and substitute the temporary. */
2990 if (x == virtual_incoming_args_rtx)
2991 new = arg_pointer_rtx, offset = in_arg_offset;
2992 else if (x == virtual_stack_vars_rtx)
2993 new = frame_pointer_rtx, offset = var_offset;
2994 else if (x == virtual_stack_dynamic_rtx)
2995 new = stack_pointer_rtx, offset = dynamic_offset;
2996 else if (x == virtual_outgoing_args_rtx)
2997 new = stack_pointer_rtx, offset = out_arg_offset;
2998
2999 if (new)
3000 {
3001 temp = plus_constant (new, offset);
3002 if (!validate_change (object, loc, temp, 0))
3003 {
3004 if (! extra_insns)
3005 return 0;
3006
3007 start_sequence ();
3008 temp = force_operand (temp, NULL_RTX);
3009 seq = get_insns ();
3010 end_sequence ();
3011
3012 emit_insns_before (seq, object);
3013 if (! validate_change (object, loc, temp, 0)
3014 && ! validate_replace_rtx (x, temp, object))
3015 abort ();
3016 }
3017 }
3018
3019 return 1;
3020 }
3021
3022 /* Scan all subexpressions. */
3023 fmt = GET_RTX_FORMAT (code);
3024 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3025 if (*fmt == 'e')
3026 {
3027 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3028 return 0;
3029 }
3030 else if (*fmt == 'E')
3031 for (j = 0; j < XVECLEN (x, i); j++)
3032 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3033 extra_insns))
3034 return 0;
3035
3036 return 1;
3037 }
3038 \f
3039 /* Optimization: assuming this function does not receive nonlocal gotos,
3040 delete the handlers for such, as well as the insns to establish
3041 and disestablish them. */
3042
3043 static void
3044 delete_handlers ()
3045 {
3046 rtx insn;
3047 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3048 {
3049 /* Delete the handler by turning off the flag that would
3050 prevent jump_optimize from deleting it.
3051 Also permit deletion of the nonlocal labels themselves
3052 if nothing local refers to them. */
3053 if (GET_CODE (insn) == CODE_LABEL)
3054 {
3055 tree t, last_t;
3056
3057 LABEL_PRESERVE_P (insn) = 0;
3058
3059 /* Remove it from the nonlocal_label list, to avoid confusing
3060 flow. */
3061 for (t = nonlocal_labels, last_t = 0; t;
3062 last_t = t, t = TREE_CHAIN (t))
3063 if (DECL_RTL (TREE_VALUE (t)) == insn)
3064 break;
3065 if (t)
3066 {
3067 if (! last_t)
3068 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3069 else
3070 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3071 }
3072 }
3073 if (GET_CODE (insn) == INSN
3074 && ((nonlocal_goto_handler_slot != 0
3075 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3076 || (nonlocal_goto_stack_level != 0
3077 && reg_mentioned_p (nonlocal_goto_stack_level,
3078 PATTERN (insn)))))
3079 delete_insn (insn);
3080 }
3081 }
3082
3083 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3084 of the current function. */
3085
3086 rtx
3087 nonlocal_label_rtx_list ()
3088 {
3089 tree t;
3090 rtx x = 0;
3091
3092 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3093 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3094
3095 return x;
3096 }
3097 \f
3098 /* Output a USE for any register use in RTL.
3099 This is used with -noreg to mark the extent of the lifespan
3100 of any registers used in a user-visible variable's DECL_RTL. */
3101
3102 void
3103 use_variable (rtl)
3104 rtx rtl;
3105 {
3106 if (GET_CODE (rtl) == REG)
3107 /* This is a register variable. */
3108 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3109 else if (GET_CODE (rtl) == MEM
3110 && GET_CODE (XEXP (rtl, 0)) == REG
3111 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3112 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3113 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3114 /* This is a variable-sized structure. */
3115 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3116 }
3117
3118 /* Like use_variable except that it outputs the USEs after INSN
3119 instead of at the end of the insn-chain. */
3120
3121 void
3122 use_variable_after (rtl, insn)
3123 rtx rtl, insn;
3124 {
3125 if (GET_CODE (rtl) == REG)
3126 /* This is a register variable. */
3127 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3128 else if (GET_CODE (rtl) == MEM
3129 && GET_CODE (XEXP (rtl, 0)) == REG
3130 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3131 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3132 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3133 /* This is a variable-sized structure. */
3134 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3135 }
3136 \f
3137 int
3138 max_parm_reg_num ()
3139 {
3140 return max_parm_reg;
3141 }
3142
3143 /* Return the first insn following those generated by `assign_parms'. */
3144
3145 rtx
3146 get_first_nonparm_insn ()
3147 {
3148 if (last_parm_insn)
3149 return NEXT_INSN (last_parm_insn);
3150 return get_insns ();
3151 }
3152
3153 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3154 Crash if there is none. */
3155
3156 rtx
3157 get_first_block_beg ()
3158 {
3159 register rtx searcher;
3160 register rtx insn = get_first_nonparm_insn ();
3161
3162 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3163 if (GET_CODE (searcher) == NOTE
3164 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3165 return searcher;
3166
3167 abort (); /* Invalid call to this function. (See comments above.) */
3168 return NULL_RTX;
3169 }
3170
3171 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3172 This means a type for which function calls must pass an address to the
3173 function or get an address back from the function.
3174 EXP may be a type node or an expression (whose type is tested). */
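/* For example, under -fpcc-struct-return every structure or union type
   is treated here as an aggregate value, regardless of its size.  */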
3175
3176 int
3177 aggregate_value_p (exp)
3178 tree exp;
3179 {
3180 int i, regno, nregs;
3181 rtx reg;
3182 tree type;
3183 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3184 type = exp;
3185 else
3186 type = TREE_TYPE (exp);
3187
3188 if (RETURN_IN_MEMORY (type))
3189 return 1;
3190 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3191 and thus can't be returned in registers. */
3192 if (TREE_ADDRESSABLE (type))
3193 return 1;
3194 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3195 return 1;
3196 /* Make sure we have suitable call-clobbered regs to return
3197 the value in; if not, we must return it in memory. */
3198 reg = hard_function_value (type, 0);
3199 regno = REGNO (reg);
3200 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3201 for (i = 0; i < nregs; i++)
3202 if (! call_used_regs[regno + i])
3203 return 1;
3204 return 0;
3205 }
3206 \f
3207 /* Assign RTL expressions to the function's parameters.
3208 This may involve copying them into registers and using
3209 those registers as the RTL for them.
3210
3211 If SECOND_TIME is non-zero it means that this function is being
3212 called a second time. This is done by integrate.c when a function's
3213 compilation is deferred. We need to come back here in case the
3214 FUNCTION_ARG macro computes items needed for the rest of the compilation
3215 (such as changing which registers are fixed or caller-saved). But suppress
3216 writing any insns or setting DECL_RTL of anything in this case. */
3217
3218 void
3219 assign_parms (fndecl, second_time)
3220 tree fndecl;
3221 int second_time;
3222 {
3223 register tree parm;
3224 register rtx entry_parm = 0;
3225 register rtx stack_parm = 0;
3226 CUMULATIVE_ARGS args_so_far;
3227 enum machine_mode promoted_mode, passed_mode;
3228 enum machine_mode nominal_mode, promoted_nominal_mode;
3229 int unsignedp;
3230 /* Total space needed so far for args on the stack,
3231 given as a constant and a tree-expression. */
3232 struct args_size stack_args_size;
3233 tree fntype = TREE_TYPE (fndecl);
3234 tree fnargs = DECL_ARGUMENTS (fndecl);
3235 /* This is used for the arg pointer when referring to stack args. */
3236 rtx internal_arg_pointer;
3237 /* This is a dummy PARM_DECL that we use for the function result if
3238 the function returns a structure. */
3239 tree function_result_decl = 0;
3240 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
3241 int varargs_setup = 0;
3242 rtx conversion_insns = 0;
3243
3244 /* Nonzero if the last arg is named `__builtin_va_alist',
3245 which is used on some machines for old-fashioned non-ANSI varargs.h;
3246 this should be stuck onto the stack as if it had arrived there. */
3247 int hide_last_arg
3248 = (current_function_varargs
3249 && fnargs
3250 && (parm = tree_last (fnargs)) != 0
3251 && DECL_NAME (parm)
3252 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3253 "__builtin_va_alist")));
3254
3255 /* Nonzero if function takes extra anonymous args.
3256 This means the last named arg must be on the stack
3257 right before the anonymous ones. */
3258 int stdarg
3259 = (TYPE_ARG_TYPES (fntype) != 0
3260 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3261 != void_type_node));
3262
3263 current_function_stdarg = stdarg;
3264
3265 /* If the reg that the virtual arg pointer will be translated into is
3266 not a fixed reg or is the stack pointer, make a copy of the virtual
3267 arg pointer, and address parms via the copy. The frame pointer is
3268 considered fixed even though it is not marked as such.
3269
3270 The second time through, simply use ap to avoid generating rtx. */
3271
3272 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3273 || ! (fixed_regs[ARG_POINTER_REGNUM]
3274 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3275 && ! second_time)
3276 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3277 else
3278 internal_arg_pointer = virtual_incoming_args_rtx;
3279 current_function_internal_arg_pointer = internal_arg_pointer;
3280
3281 stack_args_size.constant = 0;
3282 stack_args_size.var = 0;
3283
3284 /* If the struct value address is treated as the first argument, make it so. */
3285 if (aggregate_value_p (DECL_RESULT (fndecl))
3286 && ! current_function_returns_pcc_struct
3287 && struct_value_incoming_rtx == 0)
3288 {
3289 tree type = build_pointer_type (TREE_TYPE (fntype));
3290
3291 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3292
3293 DECL_ARG_TYPE (function_result_decl) = type;
3294 TREE_CHAIN (function_result_decl) = fnargs;
3295 fnargs = function_result_decl;
3296 }
3297
3298 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
3299 bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));
3300
3301 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3302 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3303 #else
3304 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3305 #endif
3306
3307 /* We haven't yet found an argument that we must push and pretend the
3308 caller did. */
3309 current_function_pretend_args_size = 0;
3310
3311 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3312 {
3313 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3314 struct args_size stack_offset;
3315 struct args_size arg_size;
3316 int passed_pointer = 0;
3317 int did_conversion = 0;
3318 tree passed_type = DECL_ARG_TYPE (parm);
3319 tree nominal_type = TREE_TYPE (parm);
3320
3321 /* Set LAST_NAMED if this is the last named arg before some
3322 anonymous args. We treat it as if it were anonymous too. */
3323 int last_named = ((TREE_CHAIN (parm) == 0
3324 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3325 && (stdarg || current_function_varargs));
3326
3327 if (TREE_TYPE (parm) == error_mark_node
3328 /* This can happen after weird syntax errors
3329 or if an enum type is defined among the parms. */
3330 || TREE_CODE (parm) != PARM_DECL
3331 || passed_type == NULL)
3332 {
3333 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3334 const0_rtx);
3335 TREE_USED (parm) = 1;
3336 continue;
3337 }
3338
3339 /* For a varargs.h function, save info about regs and stack space
3340 used by the individual args, not including the va_alist arg. */
3341 if (hide_last_arg && last_named)
3342 current_function_args_info = args_so_far;
3343
3344 /* Find mode of arg as it is passed, and mode of arg
3345 as it should be during execution of this function. */
3346 passed_mode = TYPE_MODE (passed_type);
3347 nominal_mode = TYPE_MODE (nominal_type);
3348
3349 /* If the parm's mode is VOID, its value doesn't matter,
3350 so avoid the usual things like emit_move_insn that could crash. */
3351 if (nominal_mode == VOIDmode)
3352 {
3353 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3354 continue;
3355 }
3356
3357 /* If the parm is to be passed as a transparent union, use the
3358 type of the first field for the tests below. We have already
3359 verified that the modes are the same. */
3360 if (DECL_TRANSPARENT_UNION (parm)
3361 || TYPE_TRANSPARENT_UNION (passed_type))
3362 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3363
3364 /* See if this arg was passed by invisible reference. It is if
3365 it is an object whose size depends on the contents of the
3366 object itself or if the machine requires these objects be passed
3367 that way. */
3368
3369 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3370 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3371 || TREE_ADDRESSABLE (passed_type)
3372 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3373 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3374 passed_type, ! last_named)
3375 #endif
3376 )
3377 {
3378 passed_type = nominal_type = build_pointer_type (passed_type);
3379 passed_pointer = 1;
3380 passed_mode = nominal_mode = Pmode;
3381 }
3382
3383 promoted_mode = passed_mode;
3384
3385 #ifdef PROMOTE_FUNCTION_ARGS
3386 /* Compute the mode to which the arg is actually extended. */
3387 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3388 #endif
3389
3390 /* Let machine desc say which reg (if any) the parm arrives in.
3391 0 means it arrives on the stack. */
3392 #ifdef FUNCTION_INCOMING_ARG
3393 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3394 passed_type, ! last_named);
3395 #else
3396 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3397 passed_type, ! last_named);
3398 #endif
3399
3400 if (entry_parm == 0)
3401 promoted_mode = passed_mode;
3402
3403 #ifdef SETUP_INCOMING_VARARGS
3404 /* If this is the last named parameter, do any required setup for
3405 varargs or stdargs. We must handle the case of this being an
3406 addressable type, in which case we skip the registers it
3407 would have arrived in.
3408
3409 For stdargs, LAST_NAMED will be set for two parameters, the one that
3410 is actually the last named, and the dummy parameter. We only
3411 want to do this action once.
3412
3413 Also, indicate when RTL generation is to be suppressed. */
3414 if (last_named && !varargs_setup)
3415 {
3416 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3417 current_function_pretend_args_size,
3418 second_time);
3419 varargs_setup = 1;
3420 }
3421 #endif
3422
3423 /* Determine parm's home in the stack,
3424 in case it arrives in the stack or we should pretend it did.
3425
3426 Compute the stack position and rtx where the argument arrives
3427 and its size.
3428
3429 There is one complexity here: If this was a parameter that would
3430 have been passed in registers, but wasn't only because it is
3431 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3432 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3433 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3434 0 as it was the previous time. */
3435
3436 locate_and_pad_parm (promoted_mode, passed_type,
3437 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3438 1,
3439 #else
3440 #ifdef FUNCTION_INCOMING_ARG
3441 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3442 passed_type,
3443 (! last_named
3444 || varargs_setup)) != 0,
3445 #else
3446 FUNCTION_ARG (args_so_far, promoted_mode,
3447 passed_type,
3448 ! last_named || varargs_setup) != 0,
3449 #endif
3450 #endif
3451 fndecl, &stack_args_size, &stack_offset, &arg_size);
3452
3453 if (! second_time)
3454 {
3455 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3456
3457 if (offset_rtx == const0_rtx)
3458 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3459 else
3460 stack_parm = gen_rtx (MEM, promoted_mode,
3461 gen_rtx (PLUS, Pmode,
3462 internal_arg_pointer, offset_rtx));
3463
3464 /* If this is a memory ref that contains aggregate components,
3465 mark it as such for cse and loop optimize. Likewise if it
3466 is readonly. */
3467 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3468 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3469 }
3470
3471 /* If this parameter was passed both in registers and in the stack,
3472 use the copy on the stack. */
3473 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3474 entry_parm = 0;
3475
3476 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3477 /* If this parm was passed part in regs and part in memory,
3478 pretend it arrived entirely in memory
3479 by pushing the register-part onto the stack.
3480
3481 In the special case of a DImode or DFmode that is split,
3482 we could put it together in a pseudoreg directly,
3483 but for now that's not worth bothering with. */
3484
3485 if (entry_parm)
3486 {
3487 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3488 passed_type, ! last_named);
3489
3490 if (nregs > 0)
3491 {
3492 current_function_pretend_args_size
3493 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3494 / (PARM_BOUNDARY / BITS_PER_UNIT)
3495 * (PARM_BOUNDARY / BITS_PER_UNIT));
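/* A worked example, with hypothetical target values: if UNITS_PER_WORD
   is 4 and PARM_BOUNDARY is 64 bits (8 bytes), then nregs == 3 gives
   3 * 4 == 12 bytes, and the expression above rounds this up to
   ((12 + 8 - 1) / 8) * 8 == 16 bytes of pretend argument space.  */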
3496
3497 if (! second_time)
3498 move_block_from_reg (REGNO (entry_parm),
3499 validize_mem (stack_parm), nregs,
3500 int_size_in_bytes (TREE_TYPE (parm)));
3501 entry_parm = stack_parm;
3502 }
3503 }
3504 #endif
3505
3506 /* If we didn't decide this parm came in a register,
3507 by default it came on the stack. */
3508 if (entry_parm == 0)
3509 entry_parm = stack_parm;
3510
3511 /* Record permanently how this parm was passed. */
3512 if (! second_time)
3513 DECL_INCOMING_RTL (parm) = entry_parm;
3514
3515 /* If there is actually space on the stack for this parm,
3516 count it in stack_args_size; otherwise set stack_parm to 0
3517 to indicate there is no preallocated stack slot for the parm. */
3518
3519 if (entry_parm == stack_parm
3520 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3521 /* On some machines, even if a parm value arrives in a register
3522 there is still an (uninitialized) stack slot allocated for it.
3523
3524 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3525 whether this parameter already has a stack slot allocated,
3526 because an arg block exists only if current_function_args_size
3527 is larger than some threshold, and we haven't calculated that
3528 yet. So, for now, we just assume that stack slots never exist
3529 in this case. */
3530 || REG_PARM_STACK_SPACE (fndecl) > 0
3531 #endif
3532 )
3533 {
3534 stack_args_size.constant += arg_size.constant;
3535 if (arg_size.var)
3536 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3537 }
3538 else
3539 /* No stack slot was pushed for this parm. */
3540 stack_parm = 0;
3541
3542 /* Update info on where next arg arrives in registers. */
3543
3544 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3545 passed_type, ! last_named);
3546
3547 /* If this is our second time through, we are done with this parm. */
3548 if (second_time)
3549 continue;
3550
3551 /* If we can't trust the parm stack slot to be aligned enough
3552 for its ultimate type, don't use that slot after entry.
3553 We'll make another stack slot, if we need one. */
3554 {
3555 int thisparm_boundary
3556 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3557
3558 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3559 stack_parm = 0;
3560 }
3561
3562 /* If parm was passed in memory, and we need to convert it on entry,
3563 don't store it back in that same slot. */
3564 if (entry_parm != 0
3565 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3566 stack_parm = 0;
3567
3568 #if 0
3569 /* Now adjust STACK_PARM to the mode and precise location
3570 where this parameter should live during execution,
3571 if we discover that it must live in the stack during execution.
3572 To make debuggers happier on big-endian machines, we store
3573 the value in the last bytes of the space available. */
3574
3575 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3576 && stack_parm != 0)
3577 {
3578 rtx offset_rtx;
3579
3580 if (BYTES_BIG_ENDIAN
3581 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3582 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3583 - GET_MODE_SIZE (nominal_mode));
3584
3585 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3586 if (offset_rtx == const0_rtx)
3587 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3588 else
3589 stack_parm = gen_rtx (MEM, nominal_mode,
3590 gen_rtx (PLUS, Pmode,
3591 internal_arg_pointer, offset_rtx));
3592
3593 /* If this is a memory ref that contains aggregate components,
3594 mark it as such for cse and loop optimize. */
3595 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3596 }
3597 #endif /* 0 */
3598
3599 #ifdef STACK_REGS
3600 /* We need this "use" info, because the gcc-register->stack-register
3601 converter in reg-stack.c needs to know which registers are active
3602 at the start of the function call. The actual parameter loading
3603 instructions are not always available by then, since they might
3604 have been optimized away. */
3605
3606 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3607 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
3608 #endif
3609
3610 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3611 in the mode in which it arrives.
3612 STACK_PARM is an RTX for a stack slot where the parameter can live
3613 during the function (in case we want to put it there).
3614 STACK_PARM is 0 if no stack slot was pushed for it.
3615
3616 Now output code if necessary to convert ENTRY_PARM to
3617 the type in which this function declares it,
3618 and store that result in an appropriate place,
3619 which may be a pseudo reg, may be STACK_PARM,
3620 or may be a local stack slot if STACK_PARM is 0.
3621
3622 Set DECL_RTL to that place. */
3623
3624 if (nominal_mode == BLKmode)
3625 {
3626 /* If a BLKmode arrives in registers, copy it to a stack slot. */
3627 if (GET_CODE (entry_parm) == REG)
3628 {
3629 int size_stored
3630 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3631 UNITS_PER_WORD);
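/* For example, with a hypothetical UNITS_PER_WORD of 4, a 10-byte
   BLKmode parm gives size_stored == CEIL_ROUND (10, 4) == 12,
   i.e. three whole words are copied out of the entry registers.  */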
3632
3633 /* Note that we will be storing an integral number of words.
3634 So we have to be careful to ensure that we allocate an
3635 integral number of words. We do this below in the
3636 assign_stack_local if space was not allocated in the argument
3637 list. If it was, this will not work if PARM_BOUNDARY is not
3638 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3639 if it becomes a problem. */
3640
3641 if (stack_parm == 0)
3642 {
3643 stack_parm
3644 = assign_stack_local (GET_MODE (entry_parm),
3645 size_stored, 0);
3646
3647 /* If this is a memory ref that contains aggregate
3648 components, mark it as such for cse and loop optimize. */
3649 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3650 }
3651
3652 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3653 abort ();
3654
3655 if (TREE_READONLY (parm))
3656 RTX_UNCHANGING_P (stack_parm) = 1;
3657
3658 move_block_from_reg (REGNO (entry_parm),
3659 validize_mem (stack_parm),
3660 size_stored / UNITS_PER_WORD,
3661 int_size_in_bytes (TREE_TYPE (parm)));
3662 }
3663 DECL_RTL (parm) = stack_parm;
3664 }
3665 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3666 && ! DECL_INLINE (fndecl))
3667 /* layout_decl may set this. */
3668 || TREE_ADDRESSABLE (parm)
3669 || TREE_SIDE_EFFECTS (parm)
3670 /* If -ffloat-store specified, don't put explicit
3671 float variables into registers. */
3672 || (flag_float_store
3673 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3674 /* Always assign pseudo to structure return or item passed
3675 by invisible reference. */
3676 || passed_pointer || parm == function_result_decl)
3677 {
3678 /* Store the parm in a pseudoregister during the function, but we
3679 may need to do it in a wider mode. */
3680
3681 register rtx parmreg;
3682 int regno, regnoi, regnor;
3683
3684 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3685
3686 promoted_nominal_mode
3687 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
3688
3689 parmreg = gen_reg_rtx (promoted_nominal_mode);
3690 REG_USERVAR_P (parmreg) = 1;
3691
3692 /* If this was an item that we received a pointer to, set DECL_RTL
3693 appropriately. */
3694 if (passed_pointer)
3695 {
3696 DECL_RTL (parm)
3697 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3698 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3699 }
3700 else
3701 DECL_RTL (parm) = parmreg;
3702
3703 /* Copy the value into the register. */
3704 if (nominal_mode != passed_mode
3705 || promoted_nominal_mode != promoted_mode)
3706 {
3707 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3708 mode, by the caller. We now have to convert it to
3709 NOMINAL_MODE, if different. However, PARMREG may be in
3710 a different mode from NOMINAL_MODE if it is being stored
3711 promoted.
3712
3713 If ENTRY_PARM is a hard register, it might be in a register
3714 not valid for operating in its mode (e.g., an odd-numbered
3715 register for a DFmode). In that case, moves are the only
3716 thing valid, so we can't do a convert from there. This
3717 occurs when the calling sequence allow such misaligned
3718 usages.
3719
3720 In addition, the conversion may involve a call, which could
3721 clobber parameters which haven't been copied to pseudo
3722 registers yet. Therefore, we must first copy the parm to
3723 a pseudo reg here, and save the conversion until after all
3724 parameters have been moved. */
3725
3726 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3727
3728 emit_move_insn (tempreg, validize_mem (entry_parm));
3729
3730 push_to_sequence (conversion_insns);
3731 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
3732
3733 expand_assignment (parm,
3734 make_tree (nominal_type, tempreg), 0, 0);
3735 conversion_insns = get_insns ();
3736 did_conversion = 1;
3737 end_sequence ();
3738 }
3739 else
3740 emit_move_insn (parmreg, validize_mem (entry_parm));
3741
3742 /* If we were passed a pointer but the actual value
3743 can safely live in a register, put it in one. */
3744 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3745 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3746 && ! DECL_INLINE (fndecl))
3747 /* layout_decl may set this. */
3748 || TREE_ADDRESSABLE (parm)
3749 || TREE_SIDE_EFFECTS (parm)
3750 /* If -ffloat-store specified, don't put explicit
3751 float variables into registers. */
3752 || (flag_float_store
3753 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3754 {
3755 /* We can't use nominal_mode, because it will have been set to
3756 Pmode above. We must use the actual mode of the parm. */
3757 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3758 REG_USERVAR_P (parmreg) = 1;
3759 emit_move_insn (parmreg, DECL_RTL (parm));
3760 DECL_RTL (parm) = parmreg;
3761 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3762 now the parm. */
3763 stack_parm = 0;
3764 }
3765 #ifdef FUNCTION_ARG_CALLEE_COPIES
3766 /* If we are passed an arg by reference and it is our responsibility
3767 to make a copy, do it now.
3768 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3769 original argument, so we must recreate them in the call to
3770 FUNCTION_ARG_CALLEE_COPIES. */
3771 /* ??? Later add code to avoid making the copy when the argument
3772 isn't modified. */
3773
3774 else if (passed_pointer
3775 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3776 TYPE_MODE (DECL_ARG_TYPE (parm)),
3777 DECL_ARG_TYPE (parm),
3778 ! last_named)
3779 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
3780 {
3781 rtx copy;
3782 tree type = DECL_ARG_TYPE (parm);
3783
3784 /* This sequence may involve a library call perhaps clobbering
3785 registers that haven't been copied to pseudos yet. */
3786
3787 push_to_sequence (conversion_insns);
3788
3789 if (TYPE_SIZE (type) == 0
3790 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3791 /* This is a variable sized object. */
3792 copy = gen_rtx (MEM, BLKmode,
3793 allocate_dynamic_stack_space
3794 (expr_size (parm), NULL_RTX,
3795 TYPE_ALIGN (type)));
3796 else
3797 copy = assign_stack_temp (TYPE_MODE (type),
3798 int_size_in_bytes (type), 1);
3799 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
3800
3801 store_expr (parm, copy, 0);
3802 emit_move_insn (parmreg, XEXP (copy, 0));
3803 conversion_insns = get_insns ();
3804 did_conversion = 1;
3805 end_sequence ();
3806 }
3807 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3808
3809 /* In any case, record the parm's desired stack location
3810 in case we later discover it must live in the stack.
3811
3812 If it is a COMPLEX value, store the stack location for both
3813 halves. */
3814
3815 if (GET_CODE (parmreg) == CONCAT)
3816 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
3817 else
3818 regno = REGNO (parmreg);
3819
3820 if (regno >= nparmregs)
3821 {
3822 rtx *new;
3823 int old_nparmregs = nparmregs;
3824
3825 nparmregs = regno + 5;
3826 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3827 bcopy ((char *) parm_reg_stack_loc, (char *) new,
3828 old_nparmregs * sizeof (rtx));
3829 bzero ((char *) (new + old_nparmregs),
3830 (nparmregs - old_nparmregs) * sizeof (rtx));
3831 parm_reg_stack_loc = new;
3832 }
3833
3834 if (GET_CODE (parmreg) == CONCAT)
3835 {
3836 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
3837
3838 regnor = REGNO (gen_realpart (submode, parmreg));
3839 regnoi = REGNO (gen_imagpart (submode, parmreg));
3840
3841 if (stack_parm != 0)
3842 {
3843 parm_reg_stack_loc[regnor]
3844 = gen_realpart (submode, stack_parm);
3845 parm_reg_stack_loc[regnoi]
3846 = gen_imagpart (submode, stack_parm);
3847 }
3848 else
3849 {
3850 parm_reg_stack_loc[regnor] = 0;
3851 parm_reg_stack_loc[regnoi] = 0;
3852 }
3853 }
3854 else
3855 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3856
3857 /* Mark the register as eliminable if we did no conversion
3858 and it was copied from memory at a fixed offset,
3859 and the arg pointer was not copied to a pseudo-reg.
3860 If the arg pointer is a pseudo reg or the offset formed
3861 an invalid address, such memory-equivalences
3862 as we make here would screw up life analysis for it. */
3863 if (nominal_mode == passed_mode
3864 && ! did_conversion
3865 && GET_CODE (entry_parm) == MEM
3866 && entry_parm == stack_parm
3867 && stack_offset.var == 0
3868 && reg_mentioned_p (virtual_incoming_args_rtx,
3869 XEXP (entry_parm, 0)))
3870 {
3871 rtx linsn = get_last_insn ();
3872 rtx sinsn, set;
3873
3874 /* Mark complex types separately. */
3875 if (GET_CODE (parmreg) == CONCAT)
3876 /* Scan backwards for the set of the real and
3877 imaginary parts. */
3878 for (sinsn = linsn; sinsn != 0;
3879 sinsn = prev_nonnote_insn (sinsn))
3880 {
3881 set = single_set (sinsn);
3882 if (set != 0
3883 && SET_DEST (set) == regno_reg_rtx [regnoi])
3884 REG_NOTES (sinsn)
3885 = gen_rtx (EXPR_LIST, REG_EQUIV,
3886 parm_reg_stack_loc[regnoi],
3887 REG_NOTES (sinsn));
3888 else if (set != 0
3889 && SET_DEST (set) == regno_reg_rtx [regnor])
3890 REG_NOTES (sinsn)
3891 = gen_rtx (EXPR_LIST, REG_EQUIV,
3892 parm_reg_stack_loc[regnor],
3893 REG_NOTES (sinsn));
3894 }
3895 else if ((set = single_set (linsn)) != 0
3896 && SET_DEST (set) == parmreg)
3897 REG_NOTES (linsn)
3898 = gen_rtx (EXPR_LIST, REG_EQUIV,
3899 entry_parm, REG_NOTES (linsn));
3900 }
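/* A sketch of the effect: if the last insn is
   (set (reg/v 105) (mem (plus (reg virtual-incoming-args) (const_int 4)))),
   the REG_EQUIV note records that memory equivalence, so later passes
   may delete the load entirely if reg 105 ends up being replaced by
   that stack slot.  The regno 105 here is purely illustrative.  */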
3901
3902 /* For pointer data type, suggest pointer register. */
3903 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3904 mark_reg_pointer (parmreg,
3905 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
3906 / BITS_PER_UNIT));
3907 }
3908 else
3909 {
3910 /* Value must be stored in the stack slot STACK_PARM
3911 during function execution. */
3912
3913 if (promoted_mode != nominal_mode)
3914 {
3915 /* Conversion is required. */
3916 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3917
3918 emit_move_insn (tempreg, validize_mem (entry_parm));
3919
3920 push_to_sequence (conversion_insns);
3921 entry_parm = convert_to_mode (nominal_mode, tempreg,
3922 TREE_UNSIGNED (TREE_TYPE (parm)));
3923 conversion_insns = get_insns ();
3924 did_conversion = 1;
3925 end_sequence ();
3926 }
3927
3928 if (entry_parm != stack_parm)
3929 {
3930 if (stack_parm == 0)
3931 {
3932 stack_parm
3933 = assign_stack_local (GET_MODE (entry_parm),
3934 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3935 /* If this is a memory ref that contains aggregate components,
3936 mark it as such for cse and loop optimize. */
3937 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3938 }
3939
3940 if (promoted_mode != nominal_mode)
3941 {
3942 push_to_sequence (conversion_insns);
3943 emit_move_insn (validize_mem (stack_parm),
3944 validize_mem (entry_parm));
3945 conversion_insns = get_insns ();
3946 end_sequence ();
3947 }
3948 else
3949 emit_move_insn (validize_mem (stack_parm),
3950 validize_mem (entry_parm));
3951 }
3952
3953 DECL_RTL (parm) = stack_parm;
3954 }
3955
3956 /* If this "parameter" was the place where we are receiving the
3957 function's incoming structure pointer, set up the result. */
3958 if (parm == function_result_decl)
3959 {
3960 tree result = DECL_RESULT (fndecl);
3961 tree restype = TREE_TYPE (result);
3962
3963 DECL_RTL (result)
3964 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
3965
3966 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
3967 }
3968
3969 if (TREE_THIS_VOLATILE (parm))
3970 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3971 if (TREE_READONLY (parm))
3972 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3973 }
3974
3975 /* Output all parameter conversion instructions (possibly including calls)
3976 now that all parameters have been copied out of hard registers. */
3977 emit_insns (conversion_insns);
3978
3979 max_parm_reg = max_reg_num ();
3980 last_parm_insn = get_last_insn ();
3981
3982 current_function_args_size = stack_args_size.constant;
3983
3984 /* Adjust function incoming argument size for alignment and
3985 minimum length. */
3986
3987 #ifdef REG_PARM_STACK_SPACE
3988 #ifndef MAYBE_REG_PARM_STACK_SPACE
3989 current_function_args_size = MAX (current_function_args_size,
3990 REG_PARM_STACK_SPACE (fndecl));
3991 #endif
3992 #endif
3993
3994 #ifdef STACK_BOUNDARY
3995 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3996
3997 current_function_args_size
3998 = ((current_function_args_size + STACK_BYTES - 1)
3999 / STACK_BYTES) * STACK_BYTES;
4000 #endif
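/* For illustration, assuming a hypothetical STACK_BOUNDARY of 64 bits:
   STACK_BYTES is then 8, and an args size of 20 bytes is rounded up to
   ((20 + 8 - 1) / 8) * 8 == 24 bytes.  */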
4001
4002 #ifdef ARGS_GROW_DOWNWARD
4003 current_function_arg_offset_rtx
4004 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4005 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4006 size_int (-stack_args_size.constant)),
4007 NULL_RTX, VOIDmode, 0));
4008 #else
4009 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4010 #endif
4011
4012 /* See how many bytes, if any, of its args a function should try to pop
4013 on return. */
4014
4015 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4016 current_function_args_size);
4017
4018 /* For a stdarg.h function, save info about
4019 regs and stack space used by the named args. */
4020
4021 if (!hide_last_arg)
4022 current_function_args_info = args_so_far;
4023
4024 /* Set the rtx used for the function return value. Put this in its
4025 own variable so any optimizers that need this information don't have
4026 to include tree.h. Do this here so it gets done when an inlined
4027 function gets output. */
4028
4029 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4030 }
4031 \f
4032 /* Indicate whether REGNO is an incoming argument to the current function
4033 that was promoted to a wider mode. If so, return the RTX for the
4034 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4035 that REGNO is promoted from and whether the promotion was signed or
4036 unsigned. */
4037
4038 #ifdef PROMOTE_FUNCTION_ARGS
4039
4040 rtx
4041 promoted_input_arg (regno, pmode, punsignedp)
4042 int regno;
4043 enum machine_mode *pmode;
4044 int *punsignedp;
4045 {
4046 tree arg;
4047
4048 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4049 arg = TREE_CHAIN (arg))
4050 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4051 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4052 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4053 {
4054 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4055 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4056
4057 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4058 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4059 && mode != DECL_MODE (arg))
4060 {
4061 *pmode = DECL_MODE (arg);
4062 *punsignedp = unsignedp;
4063 return DECL_INCOMING_RTL (arg);
4064 }
4065 }
4066
4067 return 0;
4068 }
4069
4070 #endif
4071 \f
4072 /* Compute the size and offset from the start of the stacked arguments for a
4073 parm passed in mode PASSED_MODE and with type TYPE.
4074
4075 INITIAL_OFFSET_PTR points to the current offset into the stacked
4076 arguments.
4077
4078 The starting offset and size for this parm are returned in *OFFSET_PTR
4079 and *ARG_SIZE_PTR, respectively.
4080
4081 IN_REGS is non-zero if the argument will be passed in registers. It will
4082 never be set if REG_PARM_STACK_SPACE is not defined.
4083
4084 FNDECL is the function in which the argument was defined.
4085
4086 There are two types of rounding that are done. The first, controlled by
4087 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4088 list to be aligned to the specific boundary (in bits). This rounding
4089 affects the initial and starting offsets, but not the argument size.
4090
4091 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4092 optionally rounds the size of the parm to PARM_BOUNDARY. The
4093 initial offset is not affected by this rounding, while the size always
4094 is and the starting offset may be. */
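/* A sketch of the two roundings, using hypothetical values: if
   FUNCTION_ARG_BOUNDARY yields 64 bits and the offset so far is 20
   bytes, the starting offset is first aligned up to 24; if
   PARM_BOUNDARY is 32 bits and the parm occupies 6 bytes, the
   parm's size is then rounded up to 8 bytes.  */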
4095
4096 /* offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
4097 initial_offset_ptr is positive because locate_and_pad_parm's
4098 callers pass in the total size of args so far as
4099 initial_offset_ptr. arg_size_ptr is always positive. */
4100
4101 void
4102 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4103 initial_offset_ptr, offset_ptr, arg_size_ptr)
4104 enum machine_mode passed_mode;
4105 tree type;
4106 int in_regs;
4107 tree fndecl;
4108 struct args_size *initial_offset_ptr;
4109 struct args_size *offset_ptr;
4110 struct args_size *arg_size_ptr;
4111 {
4112 tree sizetree
4113 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4114 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4115 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4116 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4117 int reg_parm_stack_space = 0;
4118
4119 #ifdef REG_PARM_STACK_SPACE
4120 /* If we have found a stack parm before we reach the end of the
4121 area reserved for registers, skip that area. */
4122 if (! in_regs)
4123 {
4124 #ifdef MAYBE_REG_PARM_STACK_SPACE
4125 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4126 #else
4127 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4128 #endif
4129 if (reg_parm_stack_space > 0)
4130 {
4131 if (initial_offset_ptr->var)
4132 {
4133 initial_offset_ptr->var
4134 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4135 size_int (reg_parm_stack_space));
4136 initial_offset_ptr->constant = 0;
4137 }
4138 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4139 initial_offset_ptr->constant = reg_parm_stack_space;
4140 }
4141 }
4142 #endif /* REG_PARM_STACK_SPACE */
4143
4144 arg_size_ptr->var = 0;
4145 arg_size_ptr->constant = 0;
4146
4147 #ifdef ARGS_GROW_DOWNWARD
4148 if (initial_offset_ptr->var)
4149 {
4150 offset_ptr->constant = 0;
4151 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4152 initial_offset_ptr->var);
4153 }
4154 else
4155 {
4156 offset_ptr->constant = - initial_offset_ptr->constant;
4157 offset_ptr->var = 0;
4158 }
4159 if (where_pad != none
4160 && (TREE_CODE (sizetree) != INTEGER_CST
4161 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4162 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4163 SUB_PARM_SIZE (*offset_ptr, sizetree);
4164 if (where_pad != downward)
4165 pad_to_arg_alignment (offset_ptr, boundary);
4166 if (initial_offset_ptr->var)
4167 {
4168 arg_size_ptr->var = size_binop (MINUS_EXPR,
4169 size_binop (MINUS_EXPR,
4170 integer_zero_node,
4171 initial_offset_ptr->var),
4172 offset_ptr->var);
4173 }
4174 else
4175 {
4176 arg_size_ptr->constant = (- initial_offset_ptr->constant -
4177 offset_ptr->constant);
4178 }
4179 #else /* !ARGS_GROW_DOWNWARD */
4180 pad_to_arg_alignment (initial_offset_ptr, boundary);
4181 *offset_ptr = *initial_offset_ptr;
4182
4183 #ifdef PUSH_ROUNDING
4184 if (passed_mode != BLKmode)
4185 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4186 #endif
4187
4188 /* Pad_below needs the pre-rounded size to know how much to pad below,
4189 so this must be done before rounding up. */
4190 if (where_pad == downward
4191 /* However, BLKmode args passed in regs have their padding done elsewhere.
4192 The stack slot must be able to hold the entire register. */
4193 && !(in_regs && passed_mode == BLKmode))
4194 pad_below (offset_ptr, passed_mode, sizetree);
4195
4196 if (where_pad != none
4197 && (TREE_CODE (sizetree) != INTEGER_CST
4198 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4199 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4200
4201 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4202 #endif /* ARGS_GROW_DOWNWARD */
4203 }
4204
4205 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4206 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
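/* For example, with a hypothetical BOUNDARY of 64 bits,
   boundary_in_bytes is 8: a constant offset of 20 becomes
   CEIL_ROUND (20, 8) == 24, or FLOOR_ROUND (-20, 8) == -24 in the
   ARGS_GROW_DOWNWARD case, where offsets are negative.  */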
4207
4208 static void
4209 pad_to_arg_alignment (offset_ptr, boundary)
4210 struct args_size *offset_ptr;
4211 int boundary;
4212 {
4213 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4214
4215 if (boundary > BITS_PER_UNIT)
4216 {
4217 if (offset_ptr->var)
4218 {
4219 offset_ptr->var =
4220 #ifdef ARGS_GROW_DOWNWARD
4221 round_down
4222 #else
4223 round_up
4224 #endif
4225 (ARGS_SIZE_TREE (*offset_ptr),
4226 boundary / BITS_PER_UNIT);
4227 offset_ptr->constant = 0; /*?*/
4228 }
4229 else
4230 offset_ptr->constant =
4231 #ifdef ARGS_GROW_DOWNWARD
4232 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4233 #else
4234 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4235 #endif
4236 }
4237 }
4238
4239 static void
4240 pad_below (offset_ptr, passed_mode, sizetree)
4241 struct args_size *offset_ptr;
4242 enum machine_mode passed_mode;
4243 tree sizetree;
4244 {
4245 if (passed_mode != BLKmode)
4246 {
4247 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4248 offset_ptr->constant
4249 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4250 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4251 - GET_MODE_SIZE (passed_mode));
4252 }
4253 else
4254 {
4255 if (TREE_CODE (sizetree) != INTEGER_CST
4256 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4257 {
4258 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4259 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4260 /* Add it in. */
4261 ADD_PARM_SIZE (*offset_ptr, s2);
4262 SUB_PARM_SIZE (*offset_ptr, sizetree);
4263 }
4264 }
4265 }
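/* An illustrative case, with hypothetical values: for a 16-bit
   passed_mode and PARM_BOUNDARY == 32, the slot occupies 4 bytes but
   the value only 2, so the offset is advanced by 4 - 2 == 2 bytes,
   placing the value in the upper (padded) end of its slot.  */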
4266
4267 static tree
4268 round_down (value, divisor)
4269 tree value;
4270 int divisor;
4271 {
4272 return size_binop (MULT_EXPR,
4273 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4274 size_int (divisor));
4275 }
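/* For instance, round_down applied to a size tree of 13 with divisor 8
   yields (13 / 8) * 8 == 8, whereas round_up (defined elsewhere)
   would yield 16.  */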
4276 \f
4277 /* Walk the tree of blocks describing the binding levels within a function
4278 and warn about uninitialized variables.
4279 This is done after calling flow_analysis and before global_alloc
4280 clobbers the pseudo-regs to hard regs. */
4281
4282 void
4283 uninitialized_vars_warning (block)
4284 tree block;
4285 {
4286 register tree decl, sub;
4287 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4288 {
4289 if (TREE_CODE (decl) == VAR_DECL
4290 /* These warnings are unreliable for aggregates
4291 because assigning the fields one by one can fail to convince
4292 flow.c that the entire aggregate was initialized.
4293 Unions are troublesome because members may be shorter. */
4294 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4295 && DECL_RTL (decl) != 0
4296 && GET_CODE (DECL_RTL (decl)) == REG
4297 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4298 warning_with_decl (decl,
4299 "`%s' might be used uninitialized in this function");
4300 if (TREE_CODE (decl) == VAR_DECL
4301 && DECL_RTL (decl) != 0
4302 && GET_CODE (DECL_RTL (decl)) == REG
4303 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4304 warning_with_decl (decl,
4305 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4306 }
4307 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4308 uninitialized_vars_warning (sub);
4309 }
4310
4311 /* Do the appropriate part of uninitialized_vars_warning
4312 but for arguments instead of local variables. */
4313
4314 void
4315 setjmp_args_warning ()
4316 {
4317 register tree decl;
4318 for (decl = DECL_ARGUMENTS (current_function_decl);
4319 decl; decl = TREE_CHAIN (decl))
4320 if (DECL_RTL (decl) != 0
4321 && GET_CODE (DECL_RTL (decl)) == REG
4322 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4323 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4324 }
4325
4326 /* If this function calls setjmp, put all vars into the stack
4327 unless they were declared `register'. */
4328
4329 void
4330 setjmp_protect (block)
4331 tree block;
4332 {
4333 register tree decl, sub;
4334 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4335 if ((TREE_CODE (decl) == VAR_DECL
4336 || TREE_CODE (decl) == PARM_DECL)
4337 && DECL_RTL (decl) != 0
4338 && GET_CODE (DECL_RTL (decl)) == REG
4339 /* If this variable came from an inline function, it must be
4340 that its life doesn't overlap the setjmp. If there was a
4341 setjmp in the function, it would already be in memory. We
4342 must exclude such variables because their DECL_RTL might be
4343 set to strange things such as virtual_stack_vars_rtx. */
4344 && ! DECL_FROM_INLINE (decl)
4345 && (
4346 #ifdef NON_SAVING_SETJMP
4347 /* If longjmp doesn't restore the registers,
4348 don't put anything in them. */
4349 NON_SAVING_SETJMP
4350 ||
4351 #endif
4352 ! DECL_REGISTER (decl)))
4353 put_var_into_stack (decl);
4354 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4355 setjmp_protect (sub);
4356 }
4357 \f
4358 /* Like the previous function, but for args instead of local variables. */
4359
4360 void
4361 setjmp_protect_args ()
4362 {
4363 register tree decl, sub;
4364 for (decl = DECL_ARGUMENTS (current_function_decl);
4365 decl; decl = TREE_CHAIN (decl))
4366 if ((TREE_CODE (decl) == VAR_DECL
4367 || TREE_CODE (decl) == PARM_DECL)
4368 && DECL_RTL (decl) != 0
4369 && GET_CODE (DECL_RTL (decl)) == REG
4370 && (
4371 /* If longjmp doesn't restore the registers,
4372 don't put anything in them. */
4373 #ifdef NON_SAVING_SETJMP
4374 NON_SAVING_SETJMP
4375 ||
4376 #endif
4377 ! DECL_REGISTER (decl)))
4378 put_var_into_stack (decl);
4379 }
4380 \f
4381 /* Return the context-pointer register corresponding to DECL,
4382 or 0 if it does not need one. */
4383
4384 rtx
4385 lookup_static_chain (decl)
4386 tree decl;
4387 {
4388 tree context = decl_function_context (decl);
4389 tree link;
4390
4391 if (context == 0
4392 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4393 return 0;
4394
4395 /* We treat inline_function_decl as an alias for the current function
4396 because that is the inline function whose vars, types, etc.
4397 are being merged into the current function.
4398 See expand_inline_function. */
4399 if (context == current_function_decl || context == inline_function_decl)
4400 return virtual_stack_vars_rtx;
4401
4402 for (link = context_display; link; link = TREE_CHAIN (link))
4403 if (TREE_PURPOSE (link) == context)
4404 return RTL_EXPR_RTL (TREE_VALUE (link));
4405
4406 abort ();
4407 }
4408 \f
4409 /* Convert a stack slot address ADDR for variable VAR
4410 (from a containing function)
4411 into an address valid in this function (using a static chain). */
4412
4413 rtx
4414 fix_lexical_addr (addr, var)
4415 rtx addr;
4416 tree var;
4417 {
4418 rtx basereg;
4419 int displacement;
4420 tree context = decl_function_context (var);
4421 struct function *fp;
4422 rtx base = 0;
4423
4424 /* If this is the present function, we need not do anything. */
4425 if (context == current_function_decl || context == inline_function_decl)
4426 return addr;
4427
4428 for (fp = outer_function_chain; fp; fp = fp->next)
4429 if (fp->decl == context)
4430 break;
4431
4432 if (fp == 0)
4433 abort ();
4434
4435 /* Decode given address as base reg plus displacement. */
4436 if (GET_CODE (addr) == REG)
4437 basereg = addr, displacement = 0;
4438 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4439 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4440 else
4441 abort ();
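/* For example, an ADDR of (plus (reg fp) (const_int 8)) decodes to
   basereg == (reg fp) with displacement == 8, while a bare (reg ap)
   decodes with displacement 0.  */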
4442
4443 /* We accept vars reached via the containing function's
4444 incoming arg pointer and via its stack variables pointer. */
4445 if (basereg == fp->internal_arg_pointer)
4446 {
4447 /* If reached via arg pointer, get the arg pointer value
4448 out of that function's stack frame.
4449
4450 There are two cases: If a separate ap is needed, allocate a
4451 slot in the outer function for it and dereference it that way.
4452 This is correct even if the real ap is actually a pseudo.
4453 Otherwise, just adjust the offset from the frame pointer to
4454 compensate. */
4455
4456 #ifdef NEED_SEPARATE_AP
4457 rtx addr;
4458
4459 if (fp->arg_pointer_save_area == 0)
4460 fp->arg_pointer_save_area
4461 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4462
4463 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4464 addr = memory_address (Pmode, addr);
4465
4466 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4467 #else
4468 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4469 base = lookup_static_chain (var);
4470 #endif
4471 }
4472
4473 else if (basereg == virtual_stack_vars_rtx)
4474 {
4475 /* This is the same code as lookup_static_chain, duplicated here to
4476 avoid an extra call to decl_function_context. */
4477 tree link;
4478
4479 for (link = context_display; link; link = TREE_CHAIN (link))
4480 if (TREE_PURPOSE (link) == context)
4481 {
4482 base = RTL_EXPR_RTL (TREE_VALUE (link));
4483 break;
4484 }
4485 }
4486
4487 if (base == 0)
4488 abort ();
4489
4490 /* Use same offset, relative to appropriate static chain or argument
4491 pointer. */
4492 return plus_constant (base, displacement);
4493 }
4494 \f
4495 /* Return the address of the trampoline for entering nested fn FUNCTION.
4496 If necessary, allocate a trampoline (in the stack frame)
4497 and emit rtl to initialize its contents (at entry to this function). */
4498
4499 rtx
4500 trampoline_address (function)
4501 tree function;
4502 {
4503 tree link;
4504 tree rtlexp;
4505 rtx tramp;
4506 struct function *fp;
4507 tree fn_context;
4508
4509 /* Find an existing trampoline and return it. */
4510 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4511 if (TREE_PURPOSE (link) == function)
4512 return
4513 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4514
4515 for (fp = outer_function_chain; fp; fp = fp->next)
4516 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4517 if (TREE_PURPOSE (link) == function)
4518 {
4519 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4520 function);
4521 return round_trampoline_addr (tramp);
4522 }
4523
4524 /* None exists; we must make one. */
4525
4526 /* Find the `struct function' for the function containing FUNCTION. */
4527 fp = 0;
4528 fn_context = decl_function_context (function);
4529 if (fn_context != current_function_decl)
4530 for (fp = outer_function_chain; fp; fp = fp->next)
4531 if (fp->decl == fn_context)
4532 break;
4533
4534 /* Allocate run-time space for this trampoline
4535 (usually in the defining function's stack frame). */
4536 #ifdef ALLOCATE_TRAMPOLINE
4537 tramp = ALLOCATE_TRAMPOLINE (fp);
4538 #else
4539 /* If rounding needed, allocate extra space
4540 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4541 #ifdef TRAMPOLINE_ALIGNMENT
4542 #define TRAMPOLINE_REAL_SIZE \
4543 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4544 #else
4545 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4546 #endif
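/* E.g., with a hypothetical TRAMPOLINE_SIZE of 16 and
   TRAMPOLINE_ALIGNMENT of 64 bits, TRAMPOLINE_REAL_SIZE is
   16 + 8 - 1 == 23 bytes, so 16 aligned bytes remain available
   after the address is rounded up to an 8-byte boundary.  */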
4547 if (fp != 0)
4548 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4549 else
4550 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4551 #endif
4552
4553 /* Record the trampoline for reuse and note it for later initialization
4554 by expand_function_end. */
4555 if (fp != 0)
4556 {
4557 push_obstacks (fp->function_maybepermanent_obstack,
4558 fp->function_maybepermanent_obstack);
4559 rtlexp = make_node (RTL_EXPR);
4560 RTL_EXPR_RTL (rtlexp) = tramp;
4561 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4562 pop_obstacks ();
4563 }
4564 else
4565 {
4566 /* Make the RTL_EXPR node temporary, not momentary, so that the
4567 trampoline_list doesn't become garbage. */
4568 int momentary = suspend_momentary ();
4569 rtlexp = make_node (RTL_EXPR);
4570 resume_momentary (momentary);
4571
4572 RTL_EXPR_RTL (rtlexp) = tramp;
4573 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4574 }
4575
4576 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4577 return round_trampoline_addr (tramp);
4578 }
4579
4580 /* Given a trampoline address,
4581 round it to multiple of TRAMPOLINE_ALIGNMENT. */
4582
4583 static rtx
4584 round_trampoline_addr (tramp)
4585 rtx tramp;
4586 {
4587 #ifdef TRAMPOLINE_ALIGNMENT
4588 /* Round address up to desired boundary. */
4589 rtx temp = gen_reg_rtx (Pmode);
4590 temp = expand_binop (Pmode, add_optab, tramp,
4591 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4592 temp, 0, OPTAB_LIB_WIDEN);
4593 tramp = expand_binop (Pmode, and_optab, temp,
4594 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4595 temp, 0, OPTAB_LIB_WIDEN);
4596 #endif
4597 return tramp;
4598 }
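/* A worked example, assuming a hypothetical TRAMPOLINE_ALIGNMENT of 64:
   the two expand_binop calls above compute (tramp + 7) & -8, so an
   address of 0x1003 is rounded up to 0x1008.  */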
4599 \f
4600 /* The functions identify_blocks and reorder_blocks provide a way to
4601 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4602 duplicate portions of the RTL code. Call identify_blocks before
4603 changing the RTL, and call reorder_blocks after. */
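/* A typical calling pattern, sketched (error handling omitted):

   tree *vec = identify_blocks (block, insns);
   ... transformations that duplicate or reorder the RTL ...
   block = reorder_blocks (vec, block, insns);

   after which the BLOCK tree again matches the rewritten insn chain.  */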
4604
4605 /* Put all this function's BLOCK nodes including those that are chained
4606 onto the first block into a vector, and return it.
4607 Also store in each NOTE for the beginning or end of a block
4608 the index of that block in the vector.
4609 The arguments are BLOCK, the chain of top-level blocks of the function,
4610 and INSNS, the insn chain of the function. */
4611
4612 tree *
4613 identify_blocks (block, insns)
4614 tree block;
4615 rtx insns;
4616 {
4617 int n_blocks;
4618 tree *block_vector;
4619 int *block_stack;
4620 int depth = 0;
4621 int next_block_number = 1;
4622 int current_block_number = 1;
4623 rtx insn;
4624
4625 if (block == 0)
4626 return 0;
4627
4628 n_blocks = all_blocks (block, 0);
4629 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4630 block_stack = (int *) alloca (n_blocks * sizeof (int));
4631
4632 all_blocks (block, block_vector);
4633
4634 for (insn = insns; insn; insn = NEXT_INSN (insn))
4635 if (GET_CODE (insn) == NOTE)
4636 {
4637 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4638 {
4639 block_stack[depth++] = current_block_number;
4640 current_block_number = next_block_number;
4641 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4642 }
4643 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4644 {
4645 current_block_number = block_stack[--depth];
4646 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4647 }
4648 }
4649
4650 if (n_blocks != next_block_number)
4651 abort ();
4652
4653 return block_vector;
4654 }
4655
4656 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4657 and a revised instruction chain, rebuild the tree structure
4658 of BLOCK nodes to correspond to the new order of RTL.
4659 The new block tree is inserted below BLOCK.
4660 Returns the current top-level block. */
4661
4662 tree
4663 reorder_blocks (block_vector, block, insns)
4664 tree *block_vector;
4665 tree block;
4666 rtx insns;
4667 {
4668 tree current_block = block;
4669 rtx insn;
4670
4671 if (block_vector == 0)
4672 return block;
4673
4674 /* Prune the old trees away, so that they don't get in the way. */
4675 BLOCK_SUBBLOCKS (current_block) = 0;
4676 BLOCK_CHAIN (current_block) = 0;
4677
4678 for (insn = insns; insn; insn = NEXT_INSN (insn))
4679 if (GET_CODE (insn) == NOTE)
4680 {
4681 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4682 {
4683 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4684 /* If we have seen this block before, copy it. */
4685 if (TREE_ASM_WRITTEN (block))
4686 block = copy_node (block);
4687 BLOCK_SUBBLOCKS (block) = 0;
4688 TREE_ASM_WRITTEN (block) = 1;
4689 BLOCK_SUPERCONTEXT (block) = current_block;
4690 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4691 BLOCK_SUBBLOCKS (current_block) = block;
4692 current_block = block;
4693 NOTE_SOURCE_FILE (insn) = 0;
4694 }
4695 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4696 {
4697 BLOCK_SUBBLOCKS (current_block)
4698 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4699 current_block = BLOCK_SUPERCONTEXT (current_block);
4700 NOTE_SOURCE_FILE (insn) = 0;
4701 }
4702 }
4703
4704 BLOCK_SUBBLOCKS (current_block)
4705 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4706 return current_block;
4707 }
4708
4709 /* Reverse the order of elements in the chain T of blocks,
4710 and return the new head of the chain (old last element). */
4711
4712 static tree
4713 blocks_nreverse (t)
4714 tree t;
4715 {
4716 register tree prev = 0, decl, next;
4717 for (decl = t; decl; decl = next)
4718 {
4719 next = BLOCK_CHAIN (decl);
4720 BLOCK_CHAIN (decl) = prev;
4721 prev = decl;
4722 }
4723 return prev;
4724 }
4725
4726 /* Count the subblocks of the list starting with BLOCK, and list them
4727 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
4728 blocks. */
4729
4730 static int
4731 all_blocks (block, vector)
4732 tree block;
4733 tree *vector;
4734 {
4735 int n_blocks = 0;
4736
4737 while (block)
4738 {
4739 TREE_ASM_WRITTEN (block) = 0;
4740
4741 /* Record this block. */
4742 if (vector)
4743 vector[n_blocks] = block;
4744
4745 ++n_blocks;
4746
4747 /* Record the subblocks, and their subblocks... */
4748 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4749 vector ? vector + n_blocks : 0);
4750 block = BLOCK_CHAIN (block);
4751 }
4752
4753 return n_blocks;
4754 }
4755 \f
4756 /* Build bytecode call descriptor for function SUBR. */
4757
4758 rtx
4759 bc_build_calldesc (subr)
4760 tree subr;
4761 {
4762 tree calldesc = 0, arg;
4763 int nargs = 0;
4764
4765 /* Build the argument description vector in reverse order. */
4766 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4767 nargs = 0;
4768
4769 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4770 {
4771 ++nargs;
4772
4773 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4774 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4775 }
4776
4777 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4778
4779 /* Prepend the function's return type. */
4780 calldesc = tree_cons ((tree) 0,
4781 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4782 calldesc);
4783
4784 calldesc = tree_cons ((tree) 0,
4785 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4786 calldesc);
4787
4788 /* Prepend the arg count. */
4789 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
4790
4791 /* Output the call description vector and get its address. */
4792 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4793 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4794 build_index_type (build_int_2 (nargs * 2, 0)));
4795
4796 return output_constant_def (calldesc);
4797 }
4798
4799
4800 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4801 and initialize static variables for generating RTL for the statements
4802 of the function. */
4803
4804 void
4805 init_function_start (subr, filename, line)
4806 tree subr;
4807 char *filename;
4808 int line;
4809 {
4810 char *junk;
4811
4812 if (output_bytecode)
4813 {
4814 this_function_decl = subr;
4815 this_function_calldesc = bc_build_calldesc (subr);
4816 local_vars_size = 0;
4817 stack_depth = 0;
4818 max_stack_depth = 0;
4819 stmt_expr_depth = 0;
4820 return;
4821 }
4822
4823 init_stmt_for_function ();
4824
4825 cse_not_expected = ! optimize;
4826
4827 /* Caller save not needed yet. */
4828 caller_save_needed = 0;
4829
4830 /* No stack slots have been made yet. */
4831 stack_slot_list = 0;
4832
4833 /* There is no stack slot for handling nonlocal gotos. */
4834 nonlocal_goto_handler_slot = 0;
4835 nonlocal_goto_stack_level = 0;
4836
4837 /* No labels have been declared for nonlocal use. */
4838 nonlocal_labels = 0;
4839
4840 /* No function calls so far in this function. */
4841 function_call_count = 0;
4842
4843 /* No parm regs have been allocated.
4844 (This is important for output_inline_function.) */
4845 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4846
4847 /* Initialize the RTL mechanism. */
4848 init_emit ();
4849
4850 /* Initialize the queue of pending postincrement and postdecrements,
4851 and some other info in expr.c. */
4852 init_expr ();
4853
4854 /* We haven't done register allocation yet. */
4855 reg_renumber = 0;
4856
4857 init_const_rtx_hash_table ();
4858
4859 current_function_name = (*decl_printable_name) (subr, &junk);
4860
4861 /* Nonzero if this is a nested function that uses a static chain. */
4862
4863 current_function_needs_context
4864 = (decl_function_context (current_function_decl) != 0
4865 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
4866
4867 /* Set if a call to setjmp is seen. */
4868 current_function_calls_setjmp = 0;
4869
4870 /* Set if a call to longjmp is seen. */
4871 current_function_calls_longjmp = 0;
4872
4873 current_function_calls_alloca = 0;
4874 current_function_has_nonlocal_label = 0;
4875 current_function_has_nonlocal_goto = 0;
4876 current_function_contains_functions = 0;
4877
4878 current_function_returns_pcc_struct = 0;
4879 current_function_returns_struct = 0;
4880 current_function_epilogue_delay_list = 0;
4881 current_function_uses_const_pool = 0;
4882 current_function_uses_pic_offset_table = 0;
4883
4884 /* We have not yet needed to make a label to jump to for tail-recursion. */
4885 tail_recursion_label = 0;
4886
4887 /* We haven't had a need to make a save area for ap yet. */
4888
4889 arg_pointer_save_area = 0;
4890
4891 /* No stack slots allocated yet. */
4892 frame_offset = 0;
4893
4894 /* No SAVE_EXPRs in this function yet. */
4895 save_expr_regs = 0;
4896
4897 /* No RTL_EXPRs in this function yet. */
4898 rtl_expr_chain = 0;
4899
4900 /* Set up to allocate temporaries. */
4901 init_temp_slots ();
4902
4903 /* Within the function body, compute a type's size as soon as it is laid out. */
4904 immediate_size_expand++;
4905
4906 /* We haven't made any trampolines for this function yet. */
4907 trampoline_list = 0;
4908
4909 init_pending_stack_adjust ();
4910 inhibit_defer_pop = 0;
4911
4912 current_function_outgoing_args_size = 0;
4913
4914 /* Prevent ever trying to delete the first instruction of a function.
4915 Also tell final how to output a linenum before the function prologue. */
4916 emit_line_note (filename, line);
4917
4918 /* Make sure first insn is a note even if we don't want linenums.
4919 This makes sure the first insn will never be deleted.
4920 Also, final expects a note to appear there. */
4921 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4922
4923 /* Set flags used by final.c. */
4924 if (aggregate_value_p (DECL_RESULT (subr)))
4925 {
4926 #ifdef PCC_STATIC_STRUCT_RETURN
4927 current_function_returns_pcc_struct = 1;
4928 #endif
4929 current_function_returns_struct = 1;
4930 }
4931
4932 /* Warn if this value is an aggregate type,
4933 regardless of which calling convention we are using for it. */
4934 if (warn_aggregate_return
4935 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4936 warning ("function returns an aggregate");
4937
4938 current_function_returns_pointer
4939 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
4940
4941 /* Indicate that we need to distinguish between the return value of the
4942 present function and the return value of a function being called. */
4943 rtx_equal_function_value_matters = 1;
4944
4945 /* Indicate that we have not instantiated virtual registers yet. */
4946 virtuals_instantiated = 0;
4947
4948 /* Indicate we have no need of a frame pointer yet. */
4949 frame_pointer_needed = 0;
4950
4951 /* By default assume not varargs or stdarg. */
4952 current_function_varargs = 0;
4953 current_function_stdarg = 0;
4954 }
4955
4956 /* Indicate that the current function uses extra args
4957 not explicitly mentioned in the argument list in any fashion. */
4958
4959 void
4960 mark_varargs ()
4961 {
4962 current_function_varargs = 1;
4963 }
4964
4965 /* Expand a call to __main at the beginning of a possible main function. */
4966
4967 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
4968 #undef HAS_INIT_SECTION
4969 #define HAS_INIT_SECTION
4970 #endif
4971
4972 void
4973 expand_main_function ()
4974 {
4975 if (!output_bytecode)
4976 {
4977 /* The zero below avoids a possible parse error */
4978 0;
4979 #if !defined (HAS_INIT_SECTION)
4980 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
4981 VOIDmode, 0);
4982 #endif /* not HAS_INIT_SECTION */
4983 }
4984 }
4985 \f
4986 extern struct obstack permanent_obstack;
4987
4988 /* Expand start of bytecode function. See comment at
4989 expand_function_start below for details. */
4990
4991 void
4992 bc_expand_function_start (subr, parms_have_cleanups)
4993 tree subr;
4994 int parms_have_cleanups;
4995 {
4996 char label[20], *name;
4997 static int nlab;
4998 tree thisarg;
4999 int argsz;
5000
5001 if (TREE_PUBLIC (subr))
5002 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
5003
5004 #ifdef DEBUG_PRINT_CODE
5005 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
5006 #endif
5007
5008 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
5009 {
5010 if (DECL_RTL (thisarg))
5011 abort (); /* Should be NULL here I think. */
5012 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
5013 {
5014 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5015 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
5016 }
5017 else
5018 {
5019 /* Variable-sized objects are pointers to their storage. */
5020 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5021 argsz += POINTER_SIZE;
5022 }
5023 }
5024
5025 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
5026
5027 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
5028
5029 ++nlab;
5030 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
5031 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
5032 this_function_bytecode =
5033 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
5034 }
5035
5036
5037 /* Expand end of bytecode function. For details, see the comment at
5038 expand_function_end (), below. */
5039
5040 void
5041 bc_expand_function_end ()
5042 {
5043 char *ptrconsts;
5044
5045 expand_null_return ();
5046
5047 /* Emit any fixup code. This must be done before the call to
5048 BC_END_FUNCTION (), since that will cause the bytecode
5049 segment to be finished off and closed. */
5050
5051 expand_fixups (NULL_RTX);
5052
5053 ptrconsts = bc_end_function ();
5054
5055 bc_align_const (2 /* INT_ALIGN */);
5056
5057 /* If this changes also make sure to change bc-interp.h! */
5058
5059 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
5060 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
5061 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
5062 bc_emit_const_labelref (this_function_bytecode, 0);
5063 bc_emit_const_labelref (ptrconsts, 0);
5064 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
5065 }


/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr;

  if (output_bytecode)
    {
      bc_expand_function_start (subr, parms_have_cleanups);
      return;
    }

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

#ifdef SMALL_REGISTER_CLASSES
      /* Delay copying static chain if it is not a register to avoid
	 conflicts with regs used for parameters.  */
      if (GET_CODE (static_chain_incoming_rtx) == REG)
#endif
	emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	{
	  DECL_RTL (DECL_RESULT (subr))
	    = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
	  MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
	    = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups)
    {
      /* If the function will end with cleanup code for parms,
	 compute the return value into a pseudo reg,
	 which we will copy into the true return register
	 after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
	{
	  REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
	  /* Needed because we may need to move this to memory
	     in case it's a named return value whose address is taken.  */
	  DECL_REGISTER (DECL_RESULT (subr)) = 1;
	}
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

#ifdef SMALL_REGISTER_CLASSES
  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);
#endif

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation, copy the static chain
	 pointer into a pseudo.  If we have small register classes, copy
	 the value from memory if static_chain_incoming_rtx is a REG.  If
	 we do stupid register allocation, we use the stack address
	 generated above.  */
      if (tem && ! obey_regdecls)
	{
#ifdef SMALL_REGISTER_CLASSES
	  /* If the static chain originally came in a register, put it back
	     there, then move it out in the next insn.  The reason for
	     this peculiar code is to satisfy function integration.  */
	  if (GET_CODE (static_chain_incoming_rtx) == REG)
	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
#endif

	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);

	  RTL_EXPR_RTL (rtlexp) = last_ptr;
	  context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
	  /* Chain through stack frames, assuming the pointer to the next
	     lexical frame is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
	  last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
					   memory_address (Pmode, last_ptr)));

	  /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
				      save_expr_regs);
	}
    }

  /* The tail-recursion label, if we end up needing one, goes after the
     display initializations.  Ensure we have a NOTE here since some
     things (like trampolines) get placed before this point.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
\f
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  if (output_bytecode)
    {
      bc_expand_function_end ();
      return;
    }

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
	setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx blktramp;
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
	 initializing trampolines.  */
      if (initial_trampoline == 0)
	{
	  end_temporary_allocation ();
	  initial_trampoline
	    = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
	  resume_temporary_allocation ();
	}
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
		       GEN_INT (TRAMPOLINE_SIZE),
		       FUNCTION_BOUNDARY / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
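
  /* Each trampoline is a small block of code materialized on this
     function's stack: calling through it loads the static-chain register
     with the nested function's context and then jumps to the nested
     function's real code.  The machine-specific details are supplied by
     TRAMPOLINE_TEMPLATE and INITIALIZE_TRAMPOLINE above.  */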

  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl; decl = TREE_CHAIN (decl))
	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	  warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
	use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
	{
	  use_variable (XEXP (tem, 0));
	  use_variable_after (XEXP (tem, 0), parm_birth_insn);
	}

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
	use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
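
  /* clear_pending_stack_adjust discards any queued stack-pointer
     adjustment when it is safe to let the epilogue pop everything at
     once; do_pending_stack_adjust then emits whatever adjustment
     remains, so the exit code below starts from a known stack
     pointer.  */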

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Output a line number for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
	rtx tem = 0;

	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
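
  /* Note that emit_stack_save plants the save insn back at
     PARM_BIRTH_INSN, i.e. at function entry, while the restore is
     emitted here at the exit point, so anything alloca did to the
     stack pointer in between is undone before the function returns.  */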

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
	  >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
	= FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
				   current_function_decl);
#else
      real_decl_result
	= FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
			  current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      emit_move_insn (real_decl_result,
		      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
				   current_function_decl);
#else
      rtx outgoing
	= FUNCTION_VALUE (build_pointer_type (type),
			  current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
\f
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
	vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}
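
/* The vector is zero-terminated instead of carrying an explicit length,
   which relies on INSN_UID never being zero.  Callers recover the length
   with a scan such as

       for (len = 0; prologue[len]; len++)
	 ;

   as done in reposition_prologue_and_epilogue_notes, below.  */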

/* Count how many of the INSN_UIDs in the zero-terminated vector VEC
   occur in INSN, which may hold a SEQUENCE.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	for (j = 0; vec[j]; j++)
	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
	    count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
	if (INSN_UID (insn) == vec[j])
	  return 1;
    }
  return 0;
}

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq, insn;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
	 prologue insns and a NOTE_INSN_PROLOGUE_END.  */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block.  Ignore them
	 if they form a basic block unto themselves.  */
      if (basic_block_head && n_basic_blocks
	  && GET_CODE (basic_block_head[0]) != CODE_LABEL)
	basic_block_head[0] = NEXT_INSN (f);

      /* Retain a map of the prologue insns.  */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue.  */
      if (! (prev && GET_CODE (prev) == BARRIER))
	{
	  rtx tail, seq, tem;
	  rtx first_use = 0;
	  rtx last_use = 0;

	  /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
	     epilogue insns, the USE insns at the end of a function,
	     the jump insn that returns, and then a BARRIER.  */

	  /* Move the USE insns at the end of a function onto a list.  */
	  while (prev
		 && GET_CODE (prev) == INSN
		 && GET_CODE (PATTERN (prev)) == USE)
	    {
	      tem = prev;
	      prev = prev_nonnote_insn (prev);

	      NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
	      PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
	      if (first_use)
		{
		  NEXT_INSN (tem) = first_use;
		  PREV_INSN (first_use) = tem;
		}
	      first_use = tem;
	      if (!last_use)
		last_use = tem;
	    }

	  emit_barrier_after (insn);

	  seq = gen_epilogue ();
	  tail = emit_jump_insn_after (seq, insn);

	  /* Insert the USE insns immediately before the return insn, which
	     must be the first instruction before the final barrier.  */
	  if (first_use)
	    {
	      tem = prev_nonnote_insn (get_last_insn ());
	      NEXT_INSN (PREV_INSN (tem)) = first_use;
	      PREV_INSN (first_use) = PREV_INSN (tem);
	      PREV_INSN (tem) = last_use;
	      NEXT_INSN (last_use) = tem;
	    }

	  emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

	  /* Include the new epilogue insns in the last block.  Ignore
	     them if they form a basic block unto themselves.  */
	  if (basic_block_end && n_basic_blocks
	      && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
	    basic_block_end[n_basic_blocks - 1] = tail;

	  /* Retain a map of the epilogue insns.  */
	  epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
	  return;
	}
    }
#endif
  epilogue = 0;
}
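
/* When both halves are generated, the resulting insn stream looks
   roughly like this:

       NOTE_INSN_DELETED              <- first insn, F
       prologue insns
       NOTE_INSN_PROLOGUE_END
       ...function body...
       NOTE_INSN_EPILOGUE_BEG
       epilogue insns
       USE insns moved from the old end of the function
       return jump insn
       BARRIER

   Either half is omitted when the target does not define HAVE_prologue
   or HAVE_epilogue.  */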

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      rtx next, prev;
      int len;

      if (prologue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the beginning until we reach the last prologue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; prologue[len]; len++)
	    ;
	  for (insn = f; len && insn; insn = NEXT_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
		    note = insn;
		}
	      else if ((len -= contains (insn, prologue)) == 0)
		{
		  /* Find the prologue-end note if we haven't already, and
		     move it to just after the last prologue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; note = NEXT_INSN (note);)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
			  break;
		    }
		  next = NEXT_INSN (note);
		  prev = PREV_INSN (note);
		  if (prev)
		    NEXT_INSN (prev) = next;
		  if (next)
		    PREV_INSN (next) = prev;
		  add_insn_after (note, insn);
		}
	    }
	}

      if (epilogue)
	{
	  register rtx insn, note = 0;

	  /* Scan from the end until we reach the first epilogue insn.
	     We apparently can't depend on basic_block_{head,end} after
	     reorg has run.  */
	  for (len = 0; epilogue[len]; len++)
	    ;
	  for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
	    {
	      if (GET_CODE (insn) == NOTE)
		{
		  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
		    note = insn;
		}
	      else if ((len -= contains (insn, epilogue)) == 0)
		{
		  /* Find the epilogue-begin note if we haven't already, and
		     move it to just before the first epilogue insn.  */
		  if (note == 0)
		    {
		      for (note = insn; note = PREV_INSN (note);)
			if (GET_CODE (note) == NOTE
			    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
			  break;
		    }
		  next = NEXT_INSN (note);
		  prev = PREV_INSN (note);
		  if (prev)
		    NEXT_INSN (prev) = next;
		  if (next)
		    PREV_INSN (next) = prev;
		  add_insn_after (note, PREV_INSN (insn));
		}
	    }
	}
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}