/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "bytecode.h"
#include "bc-emit.h"

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that is not greater than it.  Avoid using division in case the value
   is negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest multiple of the alignment that
   is not less than the value.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
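
/* For instance, with ALIGN == 8: FLOOR_ROUND (13, 8) == 8 and
   FLOOR_ROUND (-13, 8) == -16, while CEIL_ROUND (13, 8) == 16 and
   CEIL_ROUND (-13, 8) == -8; both behave correctly for negative
   values, which division-based rounding would not guarantee.  */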

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for that location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */
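
/* A sketch of the typical usage pattern for the entry points defined
   below (hypothetical caller code, not taken from this file):

	push_temp_slots ();
	slot = assign_stack_temp (mode, size, 0);
	... emit insns that use SLOT ...
	preserve_temp_slots (result);	   only if RESULT may be in SLOT
	free_temp_slots ();
	pop_temp_slots ();  */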

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  int base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  int full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.
   Not an rtx.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
				      enum machine_mode, enum machine_mode,
				      int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
					rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
				    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below PROTO((struct args_size *, enum machine_mode,
			     tree));
static tree round_down PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
			  (frame_offset + bigend_correction
			   + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
			  frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
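
/* For example, a caller wanting one word of frame space with the default
   alignment for SImode might write (a hypothetical sketch, not a call
   that appears in this file):

	rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   The returned MEM addresses the slot via virtual_stack_vars_rtx until
   virtual registers have been instantiated.  */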

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
		 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
	alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
			function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer-term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
	  && (best_p == 0 || best_p->size > p->size))
	best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
	  int rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx (MEM, BLKmode,
				 plus_constant (XEXP (best_p->slot, 0),
						rounded_size));
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
					 stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
	 use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }
  return p->slot;
}
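
/* Note that, because of the search loops above, a slot released by
   free_temp_slots is normally handed back by the next request for the
   same MODE and exact SIZE, so successive statements that each need a
   scratch slot tend to share one stack location instead of growing the
   frame.  */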
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      int size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
	size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
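
/* For example, if one free BLKmode slot has base_offset 16 and full_size
   16, and another has base_offset 32 and full_size 16, the loop below
   merges them into a single free slot covering offsets 16 through 47.  */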

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		if (p->base_offset + p->full_size == q->base_offset)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
	continue;
      else if (XEXP (p->slot, 0) == x
	       || p->address == x)
	return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
	for (next = p->address; next; next = XEXP (next, 1))
	  if (XEXP (next, 0) == x)
	    return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
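
/* As an illustration (a hypothetical source fragment, not from the
   compiler): in "s = ({ struct big b = f (); b; });" the value of the
   ({...}) grouping may live in a temporary allocated while expanding the
   inner statement, and preserve_temp_slots is what keeps that temporary
   from being freed before the enclosing assignment has copied it out.  */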

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
	&& p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
	break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
			promoted_mode, decl_mode, TREE_SIDE_EFFECTS (decl));
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl));
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
			  part_mode, TREE_SIDE_EFFECTS (decl));
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
	 Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
}
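
/* For example (a hypothetical source fragment), given

	int i;  int *p = &i;

   the front end may first give I a pseudo register; when the &I is seen,
   put_var_into_stack rehomes I in a stack slot, and fixup_var_refs below
   repairs every insn already emitted that mentioned the old register.  */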

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
{
  rtx new = 0;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
	new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
					0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
	new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
	new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
		     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
	 on the saveable obstack.  */
      temp
	= (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
			    stack->first, stack->next != 0);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
	  end_sequence ();
	}
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries,
   and X is some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
1487
1488 /* Scan the insn-chain starting with INSN for refs to VAR
1489 and fix them up. TOPLEVEL is nonzero if this chain is the
1490 main chain of insns for the current function. */
1491
1492 static void
1493 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1494 rtx var;
1495 enum machine_mode promoted_mode;
1496 int unsignedp;
1497 rtx insn;
1498 int toplevel;
1499 {
1500 rtx call_dest = 0;
1501
1502 while (insn)
1503 {
1504 rtx next = NEXT_INSN (insn);
1505 rtx note;
1506 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1507 {
1508 /* If this is a CLOBBER of VAR, delete it.
1509
1510 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1511 and REG_RETVAL notes too. */
1512 if (GET_CODE (PATTERN (insn)) == CLOBBER
1513 && XEXP (PATTERN (insn), 0) == var)
1514 {
1515 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1516 /* The REG_LIBCALL note will go away since we are going to
1517 turn INSN into a NOTE, so just delete the
1518 corresponding REG_RETVAL note. */
1519 remove_note (XEXP (note, 0),
1520 find_reg_note (XEXP (note, 0), REG_RETVAL,
1521 NULL_RTX));
1522
1523 /* In unoptimized compilation, we shouldn't call delete_insn
1524 except in jump.c doing warnings. */
1525 PUT_CODE (insn, NOTE);
1526 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1527 NOTE_SOURCE_FILE (insn) = 0;
1528 }
1529
1530 /* The insn to load VAR from a home in the arglist
1531 is now a no-op. When we see it, just delete it. */
1532 else if (toplevel
1533 && GET_CODE (PATTERN (insn)) == SET
1534 && SET_DEST (PATTERN (insn)) == var
1535 /* If this represents the result of an insn group,
1536 don't delete the insn. */
1537 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1538 && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
1539 {
1540 /* In unoptimized compilation, we shouldn't call delete_insn
1541 except in jump.c doing warnings. */
1542 PUT_CODE (insn, NOTE);
1543 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1544 NOTE_SOURCE_FILE (insn) = 0;
1545 if (insn == last_parm_insn)
1546 last_parm_insn = PREV_INSN (next);
1547 }
1548 else
1549 {
1550 struct fixup_replacement *replacements = 0;
1551 rtx next_insn = NEXT_INSN (insn);
1552
1553 #ifdef SMALL_REGISTER_CLASSES
1554 /* If the insn that copies the results of a CALL_INSN
1555 into a pseudo now references VAR, we have to use an
1556 intermediate pseudo since we want the life of the
1557 return value register to be only a single insn.
1558
1559 If we don't use an intermediate pseudo, such things as
1560 address computations to make the address of VAR valid
1561 if it is not can be placed between the CALL_INSN and INSN.
1562
1563 To make sure this doesn't happen, we record the destination
1564 of the CALL_INSN and see if the next insn uses both that
1565 and VAR. */
1566
1567 if (call_dest != 0 && GET_CODE (insn) == INSN
1568 && reg_mentioned_p (var, PATTERN (insn))
1569 && reg_mentioned_p (call_dest, PATTERN (insn)))
1570 {
1571 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1572
1573 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1574
1575 PATTERN (insn) = replace_rtx (PATTERN (insn),
1576 call_dest, temp);
1577 }
1578
1579 if (GET_CODE (insn) == CALL_INSN
1580 && GET_CODE (PATTERN (insn)) == SET)
1581 call_dest = SET_DEST (PATTERN (insn));
1582 else if (GET_CODE (insn) == CALL_INSN
1583 && GET_CODE (PATTERN (insn)) == PARALLEL
1584 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1585 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1586 else
1587 call_dest = 0;
1588 #endif
1589
1590 /* See if we have to do anything to INSN now that VAR is in
1591 memory. If it needs to be loaded into a pseudo, use a single
1592 pseudo for the entire insn in case there is a MATCH_DUP
1593 between two operands. We pass a pointer to the head of
1594 a list of struct fixup_replacements. If fixup_var_refs_1
1595 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1596 it will record them in this list.
1597
1598 If it allocated a pseudo for any replacement, we copy into
1599 it here. */
1600
1601 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1602 &replacements);
1603
1604 /* If this is last_parm_insn, and any instructions were output
1605 after it to fix it up, then we must set last_parm_insn to
1606 the last such instruction emitted. */
1607 if (insn == last_parm_insn)
1608 last_parm_insn = PREV_INSN (next_insn);
1609
1610 while (replacements)
1611 {
1612 if (GET_CODE (replacements->new) == REG)
1613 {
1614 rtx insert_before;
1615 rtx seq;
1616
1617 /* OLD might be a (subreg (mem)). */
1618 if (GET_CODE (replacements->old) == SUBREG)
1619 replacements->old
1620 = fixup_memory_subreg (replacements->old, insn, 0);
1621 else
1622 replacements->old
1623 = fixup_stack_1 (replacements->old, insn);
1624
1625 insert_before = insn;
1626
1627 /* If we are changing the mode, do a conversion.
1628 This might be wasteful, but combine.c will
1629 eliminate much of the waste. */
1630
1631 if (GET_MODE (replacements->new)
1632 != GET_MODE (replacements->old))
1633 {
1634 start_sequence ();
1635 convert_move (replacements->new,
1636 replacements->old, unsignedp);
1637 seq = gen_sequence ();
1638 end_sequence ();
1639 }
1640 else
1641 seq = gen_move_insn (replacements->new,
1642 replacements->old);
1643
1644 emit_insn_before (seq, insert_before);
1645 }
1646
1647 replacements = replacements->next;
1648 }
1649 }
1650
1651 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1652 But don't touch other insns referred to by reg-notes;
1653 we will get them elsewhere. */
1654 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1655 if (GET_CODE (note) != INSN_LIST)
1656 XEXP (note, 0)
1657 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1658 }
1659 insn = next;
1660 }
1661 }
1662 \f
1663 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1664 See if the rtx expression at *LOC in INSN needs to be changed.
1665
1666 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1667 contain a list of original rtx's and replacements. If we find that we need
1668 to modify this insn by replacing a memory reference with a pseudo or by
1669 making a new MEM to implement a SUBREG, we consult that list to see if
1670 we have already chosen a replacement. If none has already been allocated,
1671 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1672 or the SUBREG, as appropriate, to the pseudo. */
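/* An illustrative sketch (register numbers hypothetical): if VAR is
   (mem:SI (plus (reg fp) (const_int -4))) and INSN mentions it twice,
   the first occurrence allocates an entry on *REPLACEMENTS with, say, a
   fresh (reg:SI 100); the second occurrence finds that same entry and
   reuses it, so any MATCH_DUP in the insn pattern still sees identical
   rtx for both operands.  */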
1673
1674 static void
1675 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1676 register rtx var;
1677 enum machine_mode promoted_mode;
1678 register rtx *loc;
1679 rtx insn;
1680 struct fixup_replacement **replacements;
1681 {
1682 register int i;
1683 register rtx x = *loc;
1684 RTX_CODE code = GET_CODE (x);
1685 register char *fmt;
1686 register rtx tem, tem1;
1687 struct fixup_replacement *replacement;
1688
1689 switch (code)
1690 {
1691 case MEM:
1692 if (var == x)
1693 {
1694 /* If we already have a replacement, use it. Otherwise,
1695 try to fix up this address in case it is invalid. */
1696
1697 replacement = find_fixup_replacement (replacements, var);
1698 if (replacement->new)
1699 {
1700 *loc = replacement->new;
1701 return;
1702 }
1703
1704 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1705
1706 /* Unless we are forcing memory to register or we changed the mode,
1707 we can leave things the way they are if the insn is valid. */
1708
1709 INSN_CODE (insn) = -1;
1710 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1711 && recog_memoized (insn) >= 0)
1712 return;
1713
1714 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1715 return;
1716 }
1717
1718 /* If X contains VAR, we need to unshare it here so that we update
1719 each occurrence separately. But all identical MEMs in one insn
1720 must be replaced with the same rtx because of the possibility of
1721 MATCH_DUPs. */
1722
1723 if (reg_mentioned_p (var, x))
1724 {
1725 replacement = find_fixup_replacement (replacements, x);
1726 if (replacement->new == 0)
1727 replacement->new = copy_most_rtx (x, var);
1728
1729 *loc = x = replacement->new;
1730 }
1731 break;
1732
1733 case REG:
1734 case CC0:
1735 case PC:
1736 case CONST_INT:
1737 case CONST:
1738 case SYMBOL_REF:
1739 case LABEL_REF:
1740 case CONST_DOUBLE:
1741 return;
1742
1743 case SIGN_EXTRACT:
1744 case ZERO_EXTRACT:
1745 /* Note that in some cases those types of expressions are altered
1746 by optimize_bit_field, and do not survive to get here. */
1747 if (XEXP (x, 0) == var
1748 || (GET_CODE (XEXP (x, 0)) == SUBREG
1749 && SUBREG_REG (XEXP (x, 0)) == var))
1750 {
1751 /* Get TEM as a valid MEM in the mode presently in the insn.
1752
1753 We don't worry about the possibility of MATCH_DUP here; it
1754 is highly unlikely and would be tricky to handle. */
1755
1756 tem = XEXP (x, 0);
1757 if (GET_CODE (tem) == SUBREG)
1758 tem = fixup_memory_subreg (tem, insn, 1);
1759 tem = fixup_stack_1 (tem, insn);
1760
1761 /* Unless we want to load from memory, get TEM into the proper mode
1762 for an extract from memory. This can only be done if the
1763 extract is at a constant position and length. */
1764
1765 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1766 && GET_CODE (XEXP (x, 2)) == CONST_INT
1767 && ! mode_dependent_address_p (XEXP (tem, 0))
1768 && ! MEM_VOLATILE_P (tem))
1769 {
1770 enum machine_mode wanted_mode = VOIDmode;
1771 enum machine_mode is_mode = GET_MODE (tem);
1772 int width = INTVAL (XEXP (x, 1));
1773 int pos = INTVAL (XEXP (x, 2));
1774
1775 #ifdef HAVE_extzv
1776 if (GET_CODE (x) == ZERO_EXTRACT)
1777 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1778 #endif
1779 #ifdef HAVE_extv
1780 if (GET_CODE (x) == SIGN_EXTRACT)
1781 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1782 #endif
1783 /* If we have a narrower mode, we can do something. */
1784 if (wanted_mode != VOIDmode
1785 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1786 {
1787 int offset = pos / BITS_PER_UNIT;
1788 rtx old_pos = XEXP (x, 2);
1789 rtx newmem;
1790
1791 /* If the bytes and bits are counted differently, we
1792 must adjust the offset. */
1793 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1794 offset = (GET_MODE_SIZE (is_mode)
1795 - GET_MODE_SIZE (wanted_mode) - offset);
1796
1797 pos %= GET_MODE_BITSIZE (wanted_mode);
1798
1799 newmem = gen_rtx (MEM, wanted_mode,
1800 plus_constant (XEXP (tem, 0), offset));
1801 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1802 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1803 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1804
1805 /* Make the change and see if the insn remains valid. */
1806 INSN_CODE (insn) = -1;
1807 XEXP (x, 0) = newmem;
1808 XEXP (x, 2) = GEN_INT (pos);
1809
1810 if (recog_memoized (insn) >= 0)
1811 return;
1812
1813 /* Otherwise, restore old position. XEXP (x, 0) will be
1814 restored later. */
1815 XEXP (x, 2) = old_pos;
1816 }
1817 }
1818
1819 /* If we get here, the bitfield extract insn can't accept a memory
1820 reference. Copy the input into a register. */
1821
1822 tem1 = gen_reg_rtx (GET_MODE (tem));
1823 emit_insn_before (gen_move_insn (tem1, tem), insn);
1824 XEXP (x, 0) = tem1;
1825 return;
1826 }
1827 break;
1828
1829 case SUBREG:
1830 if (SUBREG_REG (x) == var)
1831 {
1832 /* If this is a special SUBREG made because VAR was promoted
1833 from a wider mode, replace it with VAR and call ourself
1834 recursively, this time saying that the object previously
1835 had its current mode (by virtue of the SUBREG). */
1836
1837 if (SUBREG_PROMOTED_VAR_P (x))
1838 {
1839 *loc = var;
1840 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1841 return;
1842 }
1843
1844 /* If this SUBREG makes VAR wider, it has become a paradoxical
1845 SUBREG with VAR in memory, but these aren't allowed at this
1846 stage of the compilation. So load VAR into a pseudo and take
1847 a SUBREG of that pseudo. */
1848 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1849 {
1850 replacement = find_fixup_replacement (replacements, var);
1851 if (replacement->new == 0)
1852 replacement->new = gen_reg_rtx (GET_MODE (var));
1853 SUBREG_REG (x) = replacement->new;
1854 return;
1855 }
1856
1857 /* See if we have already found a replacement for this SUBREG.
1858 If so, use it. Otherwise, make a MEM and see if the insn
1859 is recognized. If not, or if we should force MEM into a register,
1860 make a pseudo for this SUBREG. */
1861 replacement = find_fixup_replacement (replacements, x);
1862 if (replacement->new)
1863 {
1864 *loc = replacement->new;
1865 return;
1866 }
1867
1868 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1869
1870 INSN_CODE (insn) = -1;
1871 if (! flag_force_mem && recog_memoized (insn) >= 0)
1872 return;
1873
1874 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1875 return;
1876 }
1877 break;
1878
1879 case SET:
1880 /* First do special simplification of bit-field references. */
1881 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1882 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1883 optimize_bit_field (x, insn, 0);
1884 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1885 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1886 optimize_bit_field (x, insn, NULL_PTR);
1887
1888 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1889 insn into a pseudo and store the low part of the pseudo into VAR. */
1890 if (GET_CODE (SET_DEST (x)) == SUBREG
1891 && SUBREG_REG (SET_DEST (x)) == var
1892 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1893 > GET_MODE_SIZE (GET_MODE (var))))
1894 {
1895 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1896 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1897 tem)),
1898 insn);
1899 break;
1900 }
1901
1902 {
1903 rtx dest = SET_DEST (x);
1904 rtx src = SET_SRC (x);
1905 rtx outerdest = dest;
1906
1907 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1908 || GET_CODE (dest) == SIGN_EXTRACT
1909 || GET_CODE (dest) == ZERO_EXTRACT)
1910 dest = XEXP (dest, 0);
1911
1912 if (GET_CODE (src) == SUBREG)
1913 src = XEXP (src, 0);
1914
1915 /* If VAR does not appear at the top level of the SET
1916 just scan the lower levels of the tree. */
1917
1918 if (src != var && dest != var)
1919 break;
1920
1921 /* We will need to rerecognize this insn. */
1922 INSN_CODE (insn) = -1;
1923
1924 #ifdef HAVE_insv
1925 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1926 {
1927 /* Since this case will return, ensure we fixup all the
1928 operands here. */
1929 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1930 insn, replacements);
1931 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1932 insn, replacements);
1933 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1934 insn, replacements);
1935
1936 tem = XEXP (outerdest, 0);
1937
1938 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1939 that may appear inside a ZERO_EXTRACT.
1940 This was legitimate when the MEM was a REG. */
1941 if (GET_CODE (tem) == SUBREG
1942 && SUBREG_REG (tem) == var)
1943 tem = fixup_memory_subreg (tem, insn, 1);
1944 else
1945 tem = fixup_stack_1 (tem, insn);
1946
1947 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1948 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1949 && ! mode_dependent_address_p (XEXP (tem, 0))
1950 && ! MEM_VOLATILE_P (tem))
1951 {
1952 enum machine_mode wanted_mode
1953 = insn_operand_mode[(int) CODE_FOR_insv][0];
1954 enum machine_mode is_mode = GET_MODE (tem);
1955 int width = INTVAL (XEXP (outerdest, 1));
1956 int pos = INTVAL (XEXP (outerdest, 2));
1957
1958 /* If we have a narrower mode, we can do something. */
1959 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1960 {
1961 int offset = pos / BITS_PER_UNIT;
1962 rtx old_pos = XEXP (outerdest, 2);
1963 rtx newmem;
1964
1965 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1966 offset = (GET_MODE_SIZE (is_mode)
1967 - GET_MODE_SIZE (wanted_mode) - offset);
1968
1969 pos %= GET_MODE_BITSIZE (wanted_mode);
1970
1971 newmem = gen_rtx (MEM, wanted_mode,
1972 plus_constant (XEXP (tem, 0), offset));
1973 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1974 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1975 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1976
1977 /* Make the change and see if the insn remains valid. */
1978 INSN_CODE (insn) = -1;
1979 XEXP (outerdest, 0) = newmem;
1980 XEXP (outerdest, 2) = GEN_INT (pos);
1981
1982 if (recog_memoized (insn) >= 0)
1983 return;
1984
1985 /* Otherwise, restore old position. XEXP (outerdest, 0) will be
1986 restored later. */
1987 XEXP (outerdest, 2) = old_pos;
1988 }
1989 }
1990
1991 /* If we get here, the bit-field store doesn't allow memory
1992 or isn't located at a constant position. Load the value into
1993 a register, do the store, and put it back into memory. */
1994
1995 tem1 = gen_reg_rtx (GET_MODE (tem));
1996 emit_insn_before (gen_move_insn (tem1, tem), insn);
1997 emit_insn_after (gen_move_insn (tem, tem1), insn);
1998 XEXP (outerdest, 0) = tem1;
1999 return;
2000 }
2001 #endif
2002
2003 /* STRICT_LOW_PART is a no-op on memory references
2004 and it can cause combinations to be unrecognizable,
2005 so eliminate it. */
2006
2007 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2008 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2009
2010 /* A valid insn to copy VAR into or out of a register
2011 must be left alone, to avoid an infinite loop here.
2012 If the reference to VAR is by a subreg, fix that up,
2013 since SUBREG is not valid for a memref.
2014 Also fix up the address of the stack slot.
2015
2016 Note that we must not try to recognize the insn until
2017 after we know that we have valid addresses and no
2018 (subreg (mem ...) ...) constructs, since these interfere
2019 with determining the validity of the insn. */
2020
2021 if ((SET_SRC (x) == var
2022 || (GET_CODE (SET_SRC (x)) == SUBREG
2023 && SUBREG_REG (SET_SRC (x)) == var))
2024 && (GET_CODE (SET_DEST (x)) == REG
2025 || (GET_CODE (SET_DEST (x)) == SUBREG
2026 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2027 && GET_MODE (var) == promoted_mode
2028 && x == single_set (insn))
2029 {
2030 rtx pat;
2031
2032 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2033 if (replacement->new)
2034 SET_SRC (x) = replacement->new;
2035 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2036 SET_SRC (x) = replacement->new
2037 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2038 else
2039 SET_SRC (x) = replacement->new
2040 = fixup_stack_1 (SET_SRC (x), insn);
2041
2042 if (recog_memoized (insn) >= 0)
2043 return;
2044
2045 /* INSN is not valid, but we know that we want to
2046 copy SET_SRC (x) to SET_DEST (x) in some way. So
2047 we generate the move and see whether it requires more
2048 than one insn. If it does, we emit those insns and
2049 delete INSN. Otherwise, we can just replace the pattern
2050 of INSN; we have already verified above that INSN has
2051 no other function than to do X. */
2052
2053 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2054 if (GET_CODE (pat) == SEQUENCE)
2055 {
2056 emit_insn_after (pat, insn);
2057 PUT_CODE (insn, NOTE);
2058 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2059 NOTE_SOURCE_FILE (insn) = 0;
2060 }
2061 else
2062 PATTERN (insn) = pat;
2063
2064 return;
2065 }
2066
2067 if ((SET_DEST (x) == var
2068 || (GET_CODE (SET_DEST (x)) == SUBREG
2069 && SUBREG_REG (SET_DEST (x)) == var))
2070 && (GET_CODE (SET_SRC (x)) == REG
2071 || (GET_CODE (SET_SRC (x)) == SUBREG
2072 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2073 && GET_MODE (var) == promoted_mode
2074 && x == single_set (insn))
2075 {
2076 rtx pat;
2077
2078 if (GET_CODE (SET_DEST (x)) == SUBREG)
2079 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2080 else
2081 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2082
2083 if (recog_memoized (insn) >= 0)
2084 return;
2085
2086 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2087 if (GET_CODE (pat) == SEQUENCE)
2088 {
2089 emit_insn_after (pat, insn);
2090 PUT_CODE (insn, NOTE);
2091 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2092 NOTE_SOURCE_FILE (insn) = 0;
2093 }
2094 else
2095 PATTERN (insn) = pat;
2096
2097 return;
2098 }
2099
2100 /* Otherwise, storing into VAR must be handled specially
2101 by storing into a temporary and copying that into VAR
2102 with a new insn after this one. Note that this case
2103 will be used when storing into a promoted scalar since
2104 the insn will now have different modes on the input
2105 and output and hence will be invalid (except for the case
2106 of setting it to a constant, which does not need any
2107 change if it is valid). We generate extra code in that case,
2108 but combine.c will eliminate it. */
2109
2110 if (dest == var)
2111 {
2112 rtx temp;
2113 rtx fixeddest = SET_DEST (x);
2114
2115 /* A STRICT_LOW_PART around a MEM can be discarded. */
2116 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2117 fixeddest = XEXP (fixeddest, 0);
2118 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2119 if (GET_CODE (fixeddest) == SUBREG)
2120 {
2121 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2122 promoted_mode = GET_MODE (fixeddest);
2123 }
2124 else
2125 fixeddest = fixup_stack_1 (fixeddest, insn);
2126
2127 temp = gen_reg_rtx (promoted_mode);
2128
2129 emit_insn_after (gen_move_insn (fixeddest,
2130 gen_lowpart (GET_MODE (fixeddest),
2131 temp)),
2132 insn);
2133
2134 SET_DEST (x) = temp;
2135 }
2136 }
2137 }
2138
2139 /* Nothing special about this RTX; fix its operands. */
2140
2141 fmt = GET_RTX_FORMAT (code);
2142 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2143 {
2144 if (fmt[i] == 'e')
2145 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2146 if (fmt[i] == 'E')
2147 {
2148 register int j;
2149 for (j = 0; j < XVECLEN (x, i); j++)
2150 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2151 insn, replacements);
2152 }
2153 }
2154 }
2155 \f
2156 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2157 return an rtx (MEM:m1 newaddr) which is equivalent.
2158 If any insns must be emitted to compute NEWADDR, put them before INSN.
2159
2160 UNCRITICAL nonzero means accept paradoxical subregs.
2161 This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES. */
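/* For example, assuming 32-bit words and a little-endian target,
   (subreg:SI (mem:DI addr) 1) is rewritten here as
   (mem:SI (plus addr (const_int 4))), with any insns needed to compute
   the new address emitted before INSN.  */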
2162
2163 static rtx
2164 fixup_memory_subreg (x, insn, uncritical)
2165 rtx x;
2166 rtx insn;
2167 int uncritical;
2168 {
2169 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2170 rtx addr = XEXP (SUBREG_REG (x), 0);
2171 enum machine_mode mode = GET_MODE (x);
2172 rtx saved, result;
2173
2174 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2175 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2176 && ! uncritical)
2177 abort ();
2178
2179 if (BYTES_BIG_ENDIAN)
2180 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2181 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2182 addr = plus_constant (addr, offset);
2183 if (!flag_force_addr && memory_address_p (mode, addr))
2184 /* Shortcut if no insns need be emitted. */
2185 return change_address (SUBREG_REG (x), mode, addr);
2186 start_sequence ();
2187 result = change_address (SUBREG_REG (x), mode, addr);
2188 emit_insn_before (gen_sequence (), insn);
2189 end_sequence ();
2190 return result;
2191 }
2192
2193 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2194 Replace subexpressions of X in place.
2195 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2196 Otherwise return X, with its contents possibly altered.
2197
2198 If any insns must be emitted to compute NEWADDR, put them before INSN.
2199
2200 UNCRITICAL is as in fixup_memory_subreg. */
2201
2202 static rtx
2203 walk_fixup_memory_subreg (x, insn, uncritical)
2204 register rtx x;
2205 rtx insn;
2206 int uncritical;
2207 {
2208 register enum rtx_code code;
2209 register char *fmt;
2210 register int i;
2211
2212 if (x == 0)
2213 return 0;
2214
2215 code = GET_CODE (x);
2216
2217 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2218 return fixup_memory_subreg (x, insn, uncritical);
2219
2220 /* Nothing special about this RTX; fix its operands. */
2221
2222 fmt = GET_RTX_FORMAT (code);
2223 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2224 {
2225 if (fmt[i] == 'e')
2226 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2227 if (fmt[i] == 'E')
2228 {
2229 register int j;
2230 for (j = 0; j < XVECLEN (x, i); j++)
2231 XVECEXP (x, i, j)
2232 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2233 }
2234 }
2235 return x;
2236 }
2237 \f
2238 /* For each memory ref within X, if it refers to a stack slot
2239 with an out of range displacement, put the address in a temp register
2240 (emitting new insns before INSN to load these registers)
2241 and alter the memory ref to use that register.
2242 Replace each such MEM rtx with a copy, to avoid clobberage. */
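/* A hypothetical example: if (mem:SI (plus (reg vap) (const_int 40000)))
   is invalid because the displacement is out of range for the machine,
   we emit (set (reg:SI temp) (plus (reg vap) (const_int 40000))) before
   INSN and return a fresh (mem:SI (reg:SI temp)); the displacement and
   register names here are purely illustrative.  */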
2243
2244 static rtx
2245 fixup_stack_1 (x, insn)
2246 rtx x;
2247 rtx insn;
2248 {
2249 register int i;
2250 register RTX_CODE code = GET_CODE (x);
2251 register char *fmt;
2252
2253 if (code == MEM)
2254 {
2255 register rtx ad = XEXP (x, 0);
2256 /* If we have the address of a stack slot but it's not valid
2257 (displacement is too large), compute the sum in a register. */
2258 if (GET_CODE (ad) == PLUS
2259 && GET_CODE (XEXP (ad, 0)) == REG
2260 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2261 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2262 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2263 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2264 {
2265 rtx temp, seq;
2266 if (memory_address_p (GET_MODE (x), ad))
2267 return x;
2268
2269 start_sequence ();
2270 temp = copy_to_reg (ad);
2271 seq = gen_sequence ();
2272 end_sequence ();
2273 emit_insn_before (seq, insn);
2274 return change_address (x, VOIDmode, temp);
2275 }
2276 return x;
2277 }
2278
2279 fmt = GET_RTX_FORMAT (code);
2280 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2281 {
2282 if (fmt[i] == 'e')
2283 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2284 if (fmt[i] == 'E')
2285 {
2286 register int j;
2287 for (j = 0; j < XVECLEN (x, i); j++)
2288 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2289 }
2290 }
2291 return x;
2292 }
2293 \f
2294 /* Optimization: a bit-field instruction whose field
2295 happens to be a byte or halfword in memory
2296 can be changed to a move instruction.
2297
2298 We call here when INSN is an insn to examine or store into a bit-field.
2299 BODY is the SET-rtx to be altered.
2300
2301 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2302 (Currently this is called only from function.c, and EQUIV_MEM
2303 is always 0.) */
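/* For instance, assuming 8-bit units and matching bit and byte
   endianness, (set (zero_extract:SI (mem:SI p) (const_int 8)
   (const_int 8)) (reg r)) stores one aligned byte and so can become the
   simple move (set (mem:QI (plus p (const_int 1))) (subreg:QI (reg r) 0));
   P and R are purely illustrative.  */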
2304
2305 static void
2306 optimize_bit_field (body, insn, equiv_mem)
2307 rtx body;
2308 rtx insn;
2309 rtx *equiv_mem;
2310 {
2311 register rtx bitfield;
2312 int destflag;
2313 rtx seq = 0;
2314 enum machine_mode mode;
2315
2316 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2317 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2318 bitfield = SET_DEST (body), destflag = 1;
2319 else
2320 bitfield = SET_SRC (body), destflag = 0;
2321
2322 /* First check that the field being stored has constant size and position
2323 and is in fact a byte or halfword suitably aligned. */
2324
2325 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2326 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2327 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2328 != BLKmode)
2329 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2330 {
2331 register rtx memref = 0;
2332
2333 /* Now check that the containing word is memory, not a register,
2334 and that it is safe to change the machine mode. */
2335
2336 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2337 memref = XEXP (bitfield, 0);
2338 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2339 && equiv_mem != 0)
2340 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2341 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2342 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2343 memref = SUBREG_REG (XEXP (bitfield, 0));
2344 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2345 && equiv_mem != 0
2346 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2347 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2348
2349 if (memref
2350 && ! mode_dependent_address_p (XEXP (memref, 0))
2351 && ! MEM_VOLATILE_P (memref))
2352 {
2353 /* Now adjust the address, first for any subreg'ing
2354 that we are now getting rid of,
2355 and then for which byte of the word is wanted. */
2356
2357 register int offset = INTVAL (XEXP (bitfield, 2));
2358 rtx insns;
2359
2360 /* Adjust OFFSET to count bits from low-address byte. */
2361 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2362 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2363 - offset - INTVAL (XEXP (bitfield, 1)));
2364
2365 /* Adjust OFFSET to count bytes from low-address byte. */
2366 offset /= BITS_PER_UNIT;
2367 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2368 {
2369 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2370 if (BYTES_BIG_ENDIAN)
2371 offset -= (MIN (UNITS_PER_WORD,
2372 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2373 - MIN (UNITS_PER_WORD,
2374 GET_MODE_SIZE (GET_MODE (memref))));
2375 }
2376
2377 start_sequence ();
2378 memref = change_address (memref, mode,
2379 plus_constant (XEXP (memref, 0), offset));
2380 insns = get_insns ();
2381 end_sequence ();
2382 emit_insns_before (insns, insn);
2383
2384 /* Store this memory reference where
2385 we found the bit field reference. */
2386
2387 if (destflag)
2388 {
2389 validate_change (insn, &SET_DEST (body), memref, 1);
2390 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2391 {
2392 rtx src = SET_SRC (body);
2393 while (GET_CODE (src) == SUBREG
2394 && SUBREG_WORD (src) == 0)
2395 src = SUBREG_REG (src);
2396 if (GET_MODE (src) != GET_MODE (memref))
2397 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2398 validate_change (insn, &SET_SRC (body), src, 1);
2399 }
2400 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2401 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2402 /* This shouldn't happen because anything that didn't have
2403 one of these modes should have been converted explicitly
2404 and then referenced through a subreg.
2405 This is so because the original bit-field was
2406 handled by agg_mode and so its tree structure had
2407 the same mode that memref now has. */
2408 abort ();
2409 }
2410 else
2411 {
2412 rtx dest = SET_DEST (body);
2413
2414 while (GET_CODE (dest) == SUBREG
2415 && SUBREG_WORD (dest) == 0
2416 && (GET_MODE_CLASS (GET_MODE (dest))
2417 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2418 dest = SUBREG_REG (dest);
2419
2420 validate_change (insn, &SET_DEST (body), dest, 1);
2421
2422 if (GET_MODE (dest) == GET_MODE (memref))
2423 validate_change (insn, &SET_SRC (body), memref, 1);
2424 else
2425 {
2426 /* Convert the mem ref to the destination mode. */
2427 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2428
2429 start_sequence ();
2430 convert_move (newreg, memref,
2431 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2432 seq = get_insns ();
2433 end_sequence ();
2434
2435 validate_change (insn, &SET_SRC (body), newreg, 1);
2436 }
2437 }
2438
2439 /* See if we can convert this extraction or insertion into
2440 a simple move insn. We might not be able to do so if this
2441 was, for example, part of a PARALLEL.
2442
2443 If we succeed, write out any needed conversions. If we fail,
2444 it is hard to guess why we failed, so don't do anything
2445 special; just let the optimization be suppressed. */
2446
2447 if (apply_change_group () && seq)
2448 emit_insns_before (seq, insn);
2449 }
2450 }
2451 }
2452 \f
2453 /* These routines are responsible for converting virtual register references
2454 to the actual hard register references once RTL generation is complete.
2455
2456 The following four variables are used for communication between the
2457 routines. They contain the offsets of the virtual registers from their
2458 respective hard registers. */
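/* The substitution sketched: an address such as
   (plus (reg virtual-stack-vars) (const_int 8)) becomes
   (plus (reg frame-pointer) (const_int (8 + var_offset))), and likewise
   for the other three virtual registers and their offsets.  */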
2459
2460 static int in_arg_offset;
2461 static int var_offset;
2462 static int dynamic_offset;
2463 static int out_arg_offset;
2464
2465 /* In most machines, the stack pointer register is equivalent to the bottom
2466 of the stack. */
2467
2468 #ifndef STACK_POINTER_OFFSET
2469 #define STACK_POINTER_OFFSET 0
2470 #endif
2471
2472 /* If not defined, pick an appropriate default for the offset of dynamically
2473 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2474 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2475
2476 #ifndef STACK_DYNAMIC_OFFSET
2477
2478 #ifdef ACCUMULATE_OUTGOING_ARGS
2479 /* The bottom of the stack points to the actual arguments. If
2480 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2481 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2482 stack space for register parameters is not pushed by the caller, but
2483 rather part of the fixed stack areas and hence not included in
2484 `current_function_outgoing_args_size'. Nevertheless, we must allow
2485 for it when allocating stack dynamic objects. */
2486
2487 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2488 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2489 (current_function_outgoing_args_size \
2490 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2491
2492 #else
2493 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2494 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2495 #endif
2496
2497 #else
2498 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2499 #endif
2500 #endif
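/* As a worked example with hypothetical sizes: under
   ACCUMULATE_OUTGOING_ARGS, with REG_PARM_STACK_SPACE defined but
   OUTGOING_REG_PARM_STACK_SPACE not, a function with 24 bytes of
   outgoing args and 16 bytes of register-parm space gets a dynamic
   offset of 24 + 16 + STACK_POINTER_OFFSET.  */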
2501
2502 /* Pass through the INSNS of function FNDECL and convert virtual register
2503 references to hard register references. */
2504
2505 void
2506 instantiate_virtual_regs (fndecl, insns)
2507 tree fndecl;
2508 rtx insns;
2509 {
2510 rtx insn;
2511
2512 /* Compute the offsets to use for this function. */
2513 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2514 var_offset = STARTING_FRAME_OFFSET;
2515 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2516 out_arg_offset = STACK_POINTER_OFFSET;
2517
2518 /* Scan all variables and parameters of this function. For each that is
2519 in memory, instantiate all virtual registers if the result is a valid
2520 address. If not, we do it later. That will handle most uses of virtual
2521 regs on many machines. */
2522 instantiate_decls (fndecl, 1);
2523
2524 /* Initialize recognition, indicating that volatile is OK. */
2525 init_recog ();
2526
2527 /* Scan through all the insns, instantiating every virtual register still
2528 present. */
2529 for (insn = insns; insn; insn = NEXT_INSN (insn))
2530 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2531 || GET_CODE (insn) == CALL_INSN)
2532 {
2533 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2534 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2535 }
2536
2537 /* Now instantiate the remaining register equivalences for debugging info.
2538 These will not be valid addresses. */
2539 instantiate_decls (fndecl, 0);
2540
2541 /* Indicate that, from now on, assign_stack_local should use
2542 frame_pointer_rtx. */
2543 virtuals_instantiated = 1;
2544 }
2545
2546 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2547 all virtual registers in their DECL_RTL's.
2548
2549 If VALID_ONLY, do this only if the resulting address is still valid.
2550 Otherwise, always do it. */
2551
2552 static void
2553 instantiate_decls (fndecl, valid_only)
2554 tree fndecl;
2555 int valid_only;
2556 {
2557 tree decl;
2558
2559 if (DECL_SAVED_INSNS (fndecl))
2560 /* When compiling an inline function, the obstack used for
2561 rtl allocation is the maybepermanent_obstack. Calling
2562 `resume_temporary_allocation' switches us back to that
2563 obstack while we process this function's parameters. */
2564 resume_temporary_allocation ();
2565
2566 /* Process all parameters of the function. */
2567 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2568 {
2569 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2570 valid_only);
2571 instantiate_decl (DECL_INCOMING_RTL (decl),
2572 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2573 }
2574
2575 /* Now process all variables defined in the function or its subblocks. */
2576 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2577
2578 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2579 {
2580 /* Save all rtl allocated for this function by raising the
2581 high-water mark on the maybepermanent_obstack. */
2582 preserve_data ();
2583 /* All further rtl allocation is now done in the current_obstack. */
2584 rtl_in_current_obstack ();
2585 }
2586 }
2587
2588 /* Subroutine of instantiate_decls: Process all decls in the given
2589 BLOCK node and all its subblocks. */
2590
2591 static void
2592 instantiate_decls_1 (let, valid_only)
2593 tree let;
2594 int valid_only;
2595 {
2596 tree t;
2597
2598 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2599 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2600 valid_only);
2601
2602 /* Process all subblocks. */
2603 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2604 instantiate_decls_1 (t, valid_only);
2605 }
2606
2607 /* Subroutine of the preceding procedures: Given RTL representing a
2608 decl and the size of the object, do any instantiation required.
2609
2610 If VALID_ONLY is non-zero, it means that the RTL should only be
2611 changed if the new address is valid. */
2612
2613 static void
2614 instantiate_decl (x, size, valid_only)
2615 rtx x;
2616 int size;
2617 int valid_only;
2618 {
2619 enum machine_mode mode;
2620 rtx addr;
2621
2622 /* If this is not a MEM, no need to do anything. Similarly if the
2623 address is a constant or a register that is not a virtual register. */
2624
2625 if (x == 0 || GET_CODE (x) != MEM)
2626 return;
2627
2628 addr = XEXP (x, 0);
2629 if (CONSTANT_P (addr)
2630 || (GET_CODE (addr) == REG
2631 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2632 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2633 return;
2634
2635 /* If we should only do this if the address is valid, copy the address.
2636 We need to do this so we can undo any changes that might make the
2637 address invalid. This copy is unfortunate, but probably can't be
2638 avoided. */
2639
2640 if (valid_only)
2641 addr = copy_rtx (addr);
2642
2643 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2644
2645 if (! valid_only)
2646 return;
2647
2648 /* Now verify that the resulting address is valid for every integer or
2649 floating-point mode up to and including SIZE bytes long. We do this
2650 since the object might be accessed in any mode and frame addresses
2651 are shared. */
2652
2653 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2654 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2655 mode = GET_MODE_WIDER_MODE (mode))
2656 if (! memory_address_p (mode, addr))
2657 return;
2658
2659 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2660 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2661 mode = GET_MODE_WIDER_MODE (mode))
2662 if (! memory_address_p (mode, addr))
2663 return;
2664
2665 /* Otherwise, put back the address, now that we have updated it and we
2666 know it is valid. */
2667
2668 XEXP (x, 0) = addr;
2669 }
2670 \f
2671 /* Given a pointer to a piece of rtx and an optional pointer to the
2672 containing object, instantiate any virtual registers present in it.
2673
2674 If EXTRA_INSNS, we always do the replacement and generate
2675 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2676 is not valid.
2677
2678 Return 1 if we either had nothing to do or if we were able to do the
2679 needed replacement. Return 0 otherwise; we only return zero if
2680 EXTRA_INSNS is zero.
2681
2682 We first try some simple transformations to avoid the creation of extra
2683 pseudos. */
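/* A worked example, assuming a hypothetical var_offset of -32: the REG
   case below turns (reg virtual-stack-vars) into
   (plus (reg frame-pointer) (const_int -32)); if the insn containing it
   will not accept that PLUS, the sum is instead computed into a new
   pseudo before OBJECT and the pseudo is substituted.  */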
2684
2685 static int
2686 instantiate_virtual_regs_1 (loc, object, extra_insns)
2687 rtx *loc;
2688 rtx object;
2689 int extra_insns;
2690 {
2691 rtx x;
2692 RTX_CODE code;
2693 rtx new = 0;
2694 int offset;
2695 rtx temp;
2696 rtx seq;
2697 int i, j;
2698 char *fmt;
2699
2700 /* Re-start here to avoid recursion in common cases. */
2701 restart:
2702
2703 x = *loc;
2704 if (x == 0)
2705 return 1;
2706
2707 code = GET_CODE (x);
2708
2709 /* Check for some special cases. */
2710 switch (code)
2711 {
2712 case CONST_INT:
2713 case CONST_DOUBLE:
2714 case CONST:
2715 case SYMBOL_REF:
2716 case CODE_LABEL:
2717 case PC:
2718 case CC0:
2719 case ASM_INPUT:
2720 case ADDR_VEC:
2721 case ADDR_DIFF_VEC:
2722 case RETURN:
2723 return 1;
2724
2725 case SET:
2726 /* We are allowed to set the virtual registers. This means that
2727 the actual register should receive the source minus the
2728 appropriate offset. This is used, for example, in the handling
2729 of non-local gotos. */
2730 if (SET_DEST (x) == virtual_incoming_args_rtx)
2731 new = arg_pointer_rtx, offset = - in_arg_offset;
2732 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2733 new = frame_pointer_rtx, offset = - var_offset;
2734 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2735 new = stack_pointer_rtx, offset = - dynamic_offset;
2736 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2737 new = stack_pointer_rtx, offset = - out_arg_offset;
2738
2739 if (new)
2740 {
2741 /* The only valid sources here are PLUS or REG. Just do
2742 the simplest possible thing to handle them. */
2743 if (GET_CODE (SET_SRC (x)) != REG
2744 && GET_CODE (SET_SRC (x)) != PLUS)
2745 abort ();
2746
2747 start_sequence ();
2748 if (GET_CODE (SET_SRC (x)) != REG)
2749 temp = force_operand (SET_SRC (x), NULL_RTX);
2750 else
2751 temp = SET_SRC (x);
2752 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2753 seq = get_insns ();
2754 end_sequence ();
2755
2756 emit_insns_before (seq, object);
2757 SET_DEST (x) = new;
2758
2759 if (!validate_change (object, &SET_SRC (x), temp, 0)
2760 || ! extra_insns)
2761 abort ();
2762
2763 return 1;
2764 }
2765
2766 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2767 loc = &SET_SRC (x);
2768 goto restart;
2769
2770 case PLUS:
2771 /* Handle special case of virtual register plus constant. */
2772 if (CONSTANT_P (XEXP (x, 1)))
2773 {
2774 rtx old, new_offset;
2775
2776 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2777 if (GET_CODE (XEXP (x, 0)) == PLUS)
2778 {
2779 rtx inner = XEXP (XEXP (x, 0), 0);
2780
2781 if (inner == virtual_incoming_args_rtx)
2782 new = arg_pointer_rtx, offset = in_arg_offset;
2783 else if (inner == virtual_stack_vars_rtx)
2784 new = frame_pointer_rtx, offset = var_offset;
2785 else if (inner == virtual_stack_dynamic_rtx)
2786 new = stack_pointer_rtx, offset = dynamic_offset;
2787 else if (inner == virtual_outgoing_args_rtx)
2788 new = stack_pointer_rtx, offset = out_arg_offset;
2789 else
2790 {
2791 loc = &XEXP (x, 0);
2792 goto restart;
2793 }
2794
2795 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2796 extra_insns);
2797 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2798 }
2799
2800 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2801 new = arg_pointer_rtx, offset = in_arg_offset;
2802 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2803 new = frame_pointer_rtx, offset = var_offset;
2804 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2805 new = stack_pointer_rtx, offset = dynamic_offset;
2806 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2807 new = stack_pointer_rtx, offset = out_arg_offset;
2808 else
2809 {
2810 /* We know the second operand is a constant. Unless the
2811 first operand is a REG (which has already been checked),
2812 it needs to be checked. */
2813 if (GET_CODE (XEXP (x, 0)) != REG)
2814 {
2815 loc = &XEXP (x, 0);
2816 goto restart;
2817 }
2818 return 1;
2819 }
2820
2821 new_offset = plus_constant (XEXP (x, 1), offset);
2822
2823 /* If the new constant is zero, try to replace the sum with just
2824 the register. */
2825 if (new_offset == const0_rtx
2826 && validate_change (object, loc, new, 0))
2827 return 1;
2828
2829 /* Next try to replace the register and new offset.
2830 There are two changes to validate here and we can't assume that
2831 when the old offset equals the new one, just changing the register
2832 will yield a valid insn. In the interests of a little efficiency,
2833 however, we only call validate_change once (we don't queue up the
2834 changes and then call apply_change_group). */
2835
2836 old = XEXP (x, 0);
2837 if (offset == 0
2838 ? ! validate_change (object, &XEXP (x, 0), new, 0)
2839 : (XEXP (x, 0) = new,
2840 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
2841 {
2842 if (! extra_insns)
2843 {
2844 XEXP (x, 0) = old;
2845 return 0;
2846 }
2847
2848 /* Otherwise copy the new constant into a register and replace
2849 the constant with that register. */
2850 temp = gen_reg_rtx (Pmode);
2851 XEXP (x, 0) = new;
2852 if (validate_change (object, &XEXP (x, 1), temp, 0))
2853 emit_insn_before (gen_move_insn (temp, new_offset), object);
2854 else
2855 {
2856 /* If that didn't work, replace this expression with a
2857 register containing the sum. */
2858
2859 XEXP (x, 0) = old;
2860 new = gen_rtx (PLUS, Pmode, new, new_offset);
2861
2862 start_sequence ();
2863 temp = force_operand (new, NULL_RTX);
2864 seq = get_insns ();
2865 end_sequence ();
2866
2867 emit_insns_before (seq, object);
2868 if (! validate_change (object, loc, temp, 0)
2869 && ! validate_replace_rtx (x, temp, object))
2870 abort ();
2871 }
2872 }
2873
2874 return 1;
2875 }
2876
2877 /* Fall through to generic two-operand expression case. */
2878 case EXPR_LIST:
2879 case CALL:
2880 case COMPARE:
2881 case MINUS:
2882 case MULT:
2883 case DIV: case UDIV:
2884 case MOD: case UMOD:
2885 case AND: case IOR: case XOR:
2886 case ROTATERT: case ROTATE:
2887 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2888 case NE: case EQ:
2889 case GE: case GT: case GEU: case GTU:
2890 case LE: case LT: case LEU: case LTU:
2891 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2892 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2893 loc = &XEXP (x, 0);
2894 goto restart;
2895
2896 case MEM:
2897 /* Most cases of MEM that convert to valid addresses have already been
2898 handled by our scan of regno_reg_rtx. The only special handling we
2899 need here is to make a copy of the rtx to ensure it isn't being
2900 shared if we have to change it to a pseudo.
2901
2902 If the rtx is a simple reference to an address via a virtual register,
2903 it can potentially be shared. In such cases, first try to make it
2904 a valid address, which can also be shared. Otherwise, copy it and
2905 proceed normally.
2906
2907 First check for common cases that need no processing. These are
2908 usually due to instantiation already being done on a previous instance
2909 of a shared rtx. */
2910
2911 temp = XEXP (x, 0);
2912 if (CONSTANT_ADDRESS_P (temp)
2913 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2914 || temp == arg_pointer_rtx
2915 #endif
2916 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2917 || temp == hard_frame_pointer_rtx
2918 #endif
2919 || temp == frame_pointer_rtx)
2920 return 1;
2921
2922 if (GET_CODE (temp) == PLUS
2923 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2924 && (XEXP (temp, 0) == frame_pointer_rtx
2925 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2926 || XEXP (temp, 0) == hard_frame_pointer_rtx
2927 #endif
2928 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2929 || XEXP (temp, 0) == arg_pointer_rtx
2930 #endif
2931 ))
2932 return 1;
2933
2934 if (temp == virtual_stack_vars_rtx
2935 || temp == virtual_incoming_args_rtx
2936 || (GET_CODE (temp) == PLUS
2937 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2938 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2939 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2940 {
2941 /* This MEM may be shared. If the substitution can be done without
2942 the need to generate new pseudos, we want to do it in place
2943 so all copies of the shared rtx benefit. The call below will
2944 only make substitutions if the resulting address is still
2945 valid.
2946
2947 Note that we cannot pass X as the object in the recursive call
2948 since the insn being processed may not allow all valid
2949 addresses. However, if we were not passed an object, we can
2950 only modify X without copying it if X will have a valid
2951 address.
2952
2953 ??? Also note that this can still lose if OBJECT is an insn that
2954 has fewer restrictions on an address than some other insn.
2955 In that case, we will modify the shared address. This case
2956 doesn't seem very likely, though. */
2957
2958 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2959 object ? object : x, 0))
2960 return 1;
2961
2962 /* Otherwise make a copy and process that copy. We copy the entire
2963 RTL expression since it might be a PLUS which could also be
2964 shared. */
2965 *loc = x = copy_rtx (x);
2966 }
2967
2968 /* Fall through to generic unary operation case. */
2969 case USE:
2970 case CLOBBER:
2971 case SUBREG:
2972 case STRICT_LOW_PART:
2973 case NEG: case NOT:
2974 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2975 case SIGN_EXTEND: case ZERO_EXTEND:
2976 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2977 case FLOAT: case FIX:
2978 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2979 case ABS:
2980 case SQRT:
2981 case FFS:
2982 /* These cases either have just one operand or we know that we need not
2983 check the rest of the operands. */
2984 loc = &XEXP (x, 0);
2985 goto restart;
2986
2987 case REG:
2988 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2989 in front of this insn and substitute the temporary. */
2990 if (x == virtual_incoming_args_rtx)
2991 new = arg_pointer_rtx, offset = in_arg_offset;
2992 else if (x == virtual_stack_vars_rtx)
2993 new = frame_pointer_rtx, offset = var_offset;
2994 else if (x == virtual_stack_dynamic_rtx)
2995 new = stack_pointer_rtx, offset = dynamic_offset;
2996 else if (x == virtual_outgoing_args_rtx)
2997 new = stack_pointer_rtx, offset = out_arg_offset;
2998
2999 if (new)
3000 {
3001 temp = plus_constant (new, offset);
3002 if (!validate_change (object, loc, temp, 0))
3003 {
3004 if (! extra_insns)
3005 return 0;
3006
3007 start_sequence ();
3008 temp = force_operand (temp, NULL_RTX);
3009 seq = get_insns ();
3010 end_sequence ();
3011
3012 emit_insns_before (seq, object);
3013 if (! validate_change (object, loc, temp, 0)
3014 && ! validate_replace_rtx (x, temp, object))
3015 abort ();
3016 }
3017 }
3018
3019 return 1;
3020 }
3021
3022 /* Scan all subexpressions. */
3023 fmt = GET_RTX_FORMAT (code);
3024 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3025 if (*fmt == 'e')
3026 {
3027 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3028 return 0;
3029 }
3030 else if (*fmt == 'E')
3031 for (j = 0; j < XVECLEN (x, i); j++)
3032 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3033 extra_insns))
3034 return 0;
3035
3036 return 1;
3037 }
3038 \f
3039 /* Optimization: assuming this function does not receive nonlocal gotos,
3040 delete the handlers for such, as well as the insns to establish
3041 and disestablish them. */
3042
3043 static void
3044 delete_handlers ()
3045 {
3046 rtx insn;
3047 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3048 {
3049 /* Delete the handler by turning off the flag that would
3050 prevent jump_optimize from deleting it.
3051 Also permit deletion of the nonlocal labels themselves
3052 if nothing local refers to them. */
3053 if (GET_CODE (insn) == CODE_LABEL)
3054 {
3055 tree t, last_t;
3056
3057 LABEL_PRESERVE_P (insn) = 0;
3058
3059 /* Remove it from the nonlocal_label list, to avoid confusing
3060 flow. */
3061 for (t = nonlocal_labels, last_t = 0; t;
3062 last_t = t, t = TREE_CHAIN (t))
3063 if (DECL_RTL (TREE_VALUE (t)) == insn)
3064 break;
3065 if (t)
3066 {
3067 if (! last_t)
3068 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3069 else
3070 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3071 }
3072 }
3073 if (GET_CODE (insn) == INSN
3074 && ((nonlocal_goto_handler_slot != 0
3075 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3076 || (nonlocal_goto_stack_level != 0
3077 && reg_mentioned_p (nonlocal_goto_stack_level,
3078 PATTERN (insn)))))
3079 delete_insn (insn);
3080 }
3081 }
3082
3083 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3084 of the current function. */
3085
3086 rtx
3087 nonlocal_label_rtx_list ()
3088 {
3089 tree t;
3090 rtx x = 0;
3091
3092 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3093 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3094
3095 return x;
3096 }
3097 \f
3098 /* Output a USE for any register use in RTL.
3099 This is used with -noreg to mark the extent of lifespan
3100 of any registers used in a user-visible variable's DECL_RTL. */
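/* For instance, if RTL is (reg 57) this emits (use (reg 57)); for a
   variable-sized structure whose RTL is (mem (reg 58)) it emits
   (use (reg 58)).  Register numbers are illustrative.  */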
3101
3102 void
3103 use_variable (rtl)
3104 rtx rtl;
3105 {
3106 if (GET_CODE (rtl) == REG)
3107 /* This is a register variable. */
3108 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3109 else if (GET_CODE (rtl) == MEM
3110 && GET_CODE (XEXP (rtl, 0)) == REG
3111 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3112 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3113 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3114 /* This is a variable-sized structure. */
3115 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3116 }
3117
3118 /* Like use_variable except that it outputs the USEs after INSN
3119 instead of at the end of the insn-chain. */
3120
3121 void
3122 use_variable_after (rtl, insn)
3123 rtx rtl, insn;
3124 {
3125 if (GET_CODE (rtl) == REG)
3126 /* This is a register variable. */
3127 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3128 else if (GET_CODE (rtl) == MEM
3129 && GET_CODE (XEXP (rtl, 0)) == REG
3130 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3131 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3132 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3133 /* This is a variable-sized structure. */
3134 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3135 }
3136 \f
3137 int
3138 max_parm_reg_num ()
3139 {
3140 return max_parm_reg;
3141 }
3142
3143 /* Return the first insn following those generated by `assign_parms'. */
3144
3145 rtx
3146 get_first_nonparm_insn ()
3147 {
3148 if (last_parm_insn)
3149 return NEXT_INSN (last_parm_insn);
3150 return get_insns ();
3151 }
3152
3153 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3154 Crash if there is none. */
3155
3156 rtx
3157 get_first_block_beg ()
3158 {
3159 register rtx searcher;
3160 register rtx insn = get_first_nonparm_insn ();
3161
3162 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3163 if (GET_CODE (searcher) == NOTE
3164 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3165 return searcher;
3166
3167 abort (); /* Invalid call to this function. (See comments above.) */
3168 return NULL_RTX;
3169 }
3170
3171 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3172 This means a type for which function calls must pass an address to the
3173 function or get an address back from the function.
3174 EXP may be a type node or an expression (whose type is tested). */
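/* For example, any type for which RETURN_IN_MEMORY holds, any
   TREE_ADDRESSABLE type, and (under -fpcc-struct-return) any aggregate
   type answer 1; a scalar returned in call-clobbered registers
   answers 0.  */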
3175
3176 int
3177 aggregate_value_p (exp)
3178 tree exp;
3179 {
3180 int i, regno, nregs;
3181 rtx reg;
3182 tree type;
3183 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3184 type = exp;
3185 else
3186 type = TREE_TYPE (exp);
3187
3188 if (RETURN_IN_MEMORY (type))
3189 return 1;
3190 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3191 and thus can't be returned in registers. */
3192 if (TREE_ADDRESSABLE (type))
3193 return 1;
3194 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3195 return 1;
3196 /* Make sure we have suitable call-clobbered regs to return
3197 the value in; if not, we must return it in memory. */
3198 reg = hard_function_value (type, 0);
3199 regno = REGNO (reg);
3200 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3201 for (i = 0; i < nregs; i++)
3202 if (! call_used_regs[regno + i])
3203 return 1;
3204 return 0;
3205 }
3206 \f
3207 /* Assign RTL expressions to the function's parameters.
3208 This may involve copying them into registers and using
3209 those registers as the RTL for them.
3210
3211 If SECOND_TIME is non-zero it means that this function is being
3212 called a second time. This is done by integrate.c when a function's
3213 compilation is deferred. We need to come back here in case the
3214 FUNCTION_ARG macro computes items needed for the rest of the compilation
3215 (such as changing which registers are fixed or caller-saved). But suppress
3216 writing any insns or setting DECL_RTL of anything in this case. */
3217
3218 void
3219 assign_parms (fndecl, second_time)
3220 tree fndecl;
3221 int second_time;
3222 {
3223 register tree parm;
3224 register rtx entry_parm = 0;
3225 register rtx stack_parm = 0;
3226 CUMULATIVE_ARGS args_so_far;
3227 enum machine_mode promoted_mode, passed_mode;
3228 enum machine_mode nominal_mode, promoted_nominal_mode;
3229 int unsignedp;
3230 /* Total space needed so far for args on the stack,
3231 given as a constant and a tree-expression. */
3232 struct args_size stack_args_size;
3233 tree fntype = TREE_TYPE (fndecl);
3234 tree fnargs = DECL_ARGUMENTS (fndecl);
3235 /* This is used for the arg pointer when referring to stack args. */
3236 rtx internal_arg_pointer;
3237 /* This is a dummy PARM_DECL that we used for the function result if
3238 the function returns a structure. */
3239 tree function_result_decl = 0;
3240 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
3241 int varargs_setup = 0;
3242 rtx conversion_insns = 0;
3243
3244 /* Nonzero if the last arg is named `__builtin_va_alist',
3245 which is used on some machines for old-fashioned non-ANSI varargs.h;
3246 this should be stuck onto the stack as if it had arrived there. */
3247 int hide_last_arg
3248 = (current_function_varargs
3249 && fnargs
3250 && (parm = tree_last (fnargs)) != 0
3251 && DECL_NAME (parm)
3252 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3253 "__builtin_va_alist")));
3254
3255 /* Nonzero if function takes extra anonymous args.
3256 This means the last named arg must be on the stack
3257 right before the anonymous ones. */
3258 int stdarg
3259 = (TYPE_ARG_TYPES (fntype) != 0
3260 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3261 != void_type_node));
3262
3263 current_function_stdarg = stdarg;
3264
3265 /* If the reg that the virtual arg pointer will be translated into is
3266 not a fixed reg or is the stack pointer, make a copy of the virtual
3267 arg pointer, and address parms via the copy. The frame pointer is
3268 considered fixed even though it is not marked as such.
3269
3270 The second time through, simply use ap to avoid generating rtx. */
3271
3272 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3273 || ! (fixed_regs[ARG_POINTER_REGNUM]
3274 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3275 && ! second_time)
3276 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3277 else
3278 internal_arg_pointer = virtual_incoming_args_rtx;
3279 current_function_internal_arg_pointer = internal_arg_pointer;
3280
3281 stack_args_size.constant = 0;
3282 stack_args_size.var = 0;
3283
3284 /* If struct value address is treated as the first argument, make it so. */
3285 if (aggregate_value_p (DECL_RESULT (fndecl))
3286 && ! current_function_returns_pcc_struct
3287 && struct_value_incoming_rtx == 0)
3288 {
3289 tree type = build_pointer_type (TREE_TYPE (fntype));
3290
3291 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3292
3293 DECL_ARG_TYPE (function_result_decl) = type;
3294 TREE_CHAIN (function_result_decl) = fnargs;
3295 fnargs = function_result_decl;
3296 }
3297
3298 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
3299 bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));
3300
3301 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3302 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3303 #else
3304 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3305 #endif
3306
3307 /* We haven't yet found an argument that we must push and pretend the
3308 caller did. */
3309 current_function_pretend_args_size = 0;
3310
3311 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3312 {
3313 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3314 struct args_size stack_offset;
3315 struct args_size arg_size;
3316 int passed_pointer = 0;
3317 int did_conversion = 0;
3318 tree passed_type = DECL_ARG_TYPE (parm);
3319 tree nominal_type = TREE_TYPE (parm);
3320
3321 /* Set LAST_NAMED if this is the last named arg before some
3322 anonymous args. We treat it as if it were anonymous too. */
3323 int last_named = ((TREE_CHAIN (parm) == 0
3324 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3325 && (stdarg || current_function_varargs));
3326
3327 if (TREE_TYPE (parm) == error_mark_node
3328 /* This can happen after weird syntax errors
3329 or if an enum type is defined among the parms. */
3330 || TREE_CODE (parm) != PARM_DECL
3331 || passed_type == NULL)
3332 {
3333 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3334 const0_rtx);
3335 TREE_USED (parm) = 1;
3336 continue;
3337 }
3338
3339 /* For varargs.h function, save info about regs and stack space
3340 used by the individual args, not including the va_alist arg. */
3341 if (hide_last_arg && last_named)
3342 current_function_args_info = args_so_far;
3343
3344 /* Find mode of arg as it is passed, and mode of arg
3345 as it should be during execution of this function. */
3346 passed_mode = TYPE_MODE (passed_type);
3347 nominal_mode = TYPE_MODE (nominal_type);
3348
3349 /* If the parm's mode is VOID, its value doesn't matter,
3350 so avoid the usual things like emit_move_insn that could crash.
3351 if (nominal_mode == VOIDmode)
3352 {
3353 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3354 continue;
3355 }
3356
3357 /* If the parm is to be passed as a transparent union, use the
3358 type of the first field for the tests below. We have already
3359 verified that the modes are the same. */
3360 if (DECL_TRANSPARENT_UNION (parm)
3361 || TYPE_TRANSPARENT_UNION (passed_type))
3362 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3363
3364 /* See if this arg was passed by invisible reference. It is if
3365 it is an object whose size depends on the contents of the
3366 object itself or if the machine requires these objects be passed
3367 that way. */
3368
3369 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3370 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3371 || TREE_ADDRESSABLE (passed_type)
3372 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3373 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3374 passed_type, ! last_named)
3375 #endif
3376 )
3377 {
3378 passed_type = nominal_type = build_pointer_type (passed_type);
3379 passed_pointer = 1;
3380 passed_mode = nominal_mode = Pmode;
3381 }
3382
3383 promoted_mode = passed_mode;
3384
3385 #ifdef PROMOTE_FUNCTION_ARGS
3386 /* Compute the mode to which the arg is actually extended. */
3387 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3388 #endif
3389
3390 /* Let machine desc say which reg (if any) the parm arrives in.
3391 0 means it arrives on the stack. */
3392 #ifdef FUNCTION_INCOMING_ARG
3393 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3394 passed_type, ! last_named);
3395 #else
3396 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3397 passed_type, ! last_named);
3398 #endif
3399
3400 if (entry_parm == 0)
3401 promoted_mode = passed_mode;
3402
3403 #ifdef SETUP_INCOMING_VARARGS
3404 /* If this is the last named parameter, do any required setup for
3405 varargs or stdargs. We need to know about the case of this being an
3406 addressable type, in which case we skip the registers it
3407 would have arrived in.
3408
3409 For stdargs, LAST_NAMED will be set for two parameters, the one that
3410 is actually the last named, and the dummy parameter. We only
3411 want to do this action once.
3412
3413 Also, indicate when RTL generation is to be suppressed. */
3414 if (last_named && !varargs_setup)
3415 {
3416 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3417 current_function_pretend_args_size,
3418 second_time);
3419 varargs_setup = 1;
3420 }
3421 #endif
3422
3423 /* Determine parm's home in the stack,
3424 in case it arrives in the stack or we should pretend it did.
3425
3426 Compute the stack position and rtx where the argument arrives
3427 and its size.
3428
3429 There is one complexity here: If this was a parameter that would
3430 have been passed in registers, but wasn't only because it is
3431 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3432 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3433 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3434 0 as it was the previous time. */
3435
3436 locate_and_pad_parm (promoted_mode, passed_type,
3437 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3438 1,
3439 #else
3440 #ifdef FUNCTION_INCOMING_ARG
3441 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3442 passed_type,
3443 (! last_named
3444 || varargs_setup)) != 0,
3445 #else
3446 FUNCTION_ARG (args_so_far, promoted_mode,
3447 passed_type,
3448 ! last_named || varargs_setup) != 0,
3449 #endif
3450 #endif
3451 fndecl, &stack_args_size, &stack_offset, &arg_size);
3452
3453 if (! second_time)
3454 {
3455 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3456
3457 if (offset_rtx == const0_rtx)
3458 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3459 else
3460 stack_parm = gen_rtx (MEM, promoted_mode,
3461 gen_rtx (PLUS, Pmode,
3462 internal_arg_pointer, offset_rtx));
3463
3464 /* If this is a memory ref that contains aggregate components,
3465 mark it as such for cse and loop optimize. Likewise if it
3466 is readonly. */
3467 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3468 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3469 }
3470
3471 /* If this parameter was passed both in registers and in the stack,
3472 use the copy on the stack. */
3473 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3474 entry_parm = 0;
3475
3476 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3477 /* If this parm was passed part in regs and part in memory,
3478 pretend it arrived entirely in memory
3479 by pushing the register-part onto the stack.
3480
3481 In the special case of a DImode or DFmode that is split,
3482 we could put it together in a pseudoreg directly,
3483 but for now that's not worth bothering with. */
3484
3485 if (entry_parm)
3486 {
3487 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3488 passed_type, ! last_named);
3489
3490 if (nregs > 0)
3491 {
3492 current_function_pretend_args_size
3493 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3494 / (PARM_BOUNDARY / BITS_PER_UNIT)
3495 * (PARM_BOUNDARY / BITS_PER_UNIT));
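	      /* A hypothetical worked example of the rounding above,
		 assuming 4-byte words and a 64-bit PARM_BOUNDARY: with
		 NREGS == 3, the 12 register bytes round up to the next
		 8-byte multiple, (12 + 8 - 1) / 8 * 8 == 16, so 16 bytes
		 of pretend argument space are recorded.  */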
3496
3497 if (! second_time)
3498 {
3499 /* Handle calls that pass values in multiple non-contiguous
3500 locations. The Irix 6 ABI has examples of this. */
3501 if (GET_CODE (entry_parm) == PARALLEL)
3502 emit_group_store (validize_mem (stack_parm),
3503 entry_parm);
3504 else
3505 move_block_from_reg (REGNO (entry_parm),
3506 validize_mem (stack_parm), nregs,
3507 int_size_in_bytes (TREE_TYPE (parm)));
3508 }
3509 entry_parm = stack_parm;
3510 }
3511 }
3512 #endif
3513
3514 /* If we didn't decide this parm came in a register,
3515 by default it came on the stack. */
3516 if (entry_parm == 0)
3517 entry_parm = stack_parm;
3518
3519 /* Record permanently how this parm was passed. */
3520 if (! second_time)
3521 DECL_INCOMING_RTL (parm) = entry_parm;
3522
3523 /* If there is actually space on the stack for this parm,
3524 count it in stack_args_size; otherwise set stack_parm to 0
3525 to indicate there is no preallocated stack slot for the parm. */
3526
3527 if (entry_parm == stack_parm
3528 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3529 /* On some machines, even if a parm value arrives in a register
3530 there is still an (uninitialized) stack slot allocated for it.
3531
3532 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3533 whether this parameter already has a stack slot allocated,
3534 because an arg block exists only if current_function_args_size
3535 is larger than some threshold, and we haven't calculated that
3536 yet. So, for now, we just assume that stack slots never exist
3537 in this case. */
3538 || REG_PARM_STACK_SPACE (fndecl) > 0
3539 #endif
3540 )
3541 {
3542 stack_args_size.constant += arg_size.constant;
3543 if (arg_size.var)
3544 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3545 }
3546 else
3547 /* No stack slot was pushed for this parm. */
3548 stack_parm = 0;
3549
3550 /* Update info on where next arg arrives in registers. */
3551
3552 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3553 passed_type, ! last_named);
3554
3555 /* If this is our second time through, we are done with this parm. */
3556 if (second_time)
3557 continue;
3558
3559 /* If we can't trust the parm stack slot to be aligned enough
3560 for its ultimate type, don't use that slot after entry.
3561 We'll make another stack slot, if we need one. */
3562 {
3563 int thisparm_boundary
3564 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3565
3566 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3567 stack_parm = 0;
3568 }
3569
3570 /* If parm was passed in memory, and we need to convert it on entry,
3571 don't store it back in that same slot. */
3572 if (entry_parm != 0
3573 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3574 stack_parm = 0;
3575
3576 #if 0
3577 /* Now adjust STACK_PARM to the mode and precise location
3578 where this parameter should live during execution,
3579 if we discover that it must live in the stack during execution.
3580 To make debuggers happier on big-endian machines, we store
3581 the value in the last bytes of the space available. */
3582
3583 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3584 && stack_parm != 0)
3585 {
3586 rtx offset_rtx;
3587
3588 if (BYTES_BIG_ENDIAN
3589 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3590 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3591 - GET_MODE_SIZE (nominal_mode));
3592
3593 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3594 if (offset_rtx == const0_rtx)
3595 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3596 else
3597 stack_parm = gen_rtx (MEM, nominal_mode,
3598 gen_rtx (PLUS, Pmode,
3599 internal_arg_pointer, offset_rtx));
3600
3601 /* If this is a memory ref that contains aggregate components,
3602 mark it as such for cse and loop optimize. */
3603 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3604 }
3605 #endif /* 0 */
3606
3607 #ifdef STACK_REGS
3608 /* We need this "use" info, because the gcc-register->stack-register
3609 converter in reg-stack.c needs to know which registers are active
3610 at the start of the function call. The actual parameter loading
3611      instructions are not always available by then, since they might
3612      have been optimized away.  */
3613
3614 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3615 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
3616 #endif
3617
3618 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3619 in the mode in which it arrives.
3620 STACK_PARM is an RTX for a stack slot where the parameter can live
3621 during the function (in case we want to put it there).
3622 STACK_PARM is 0 if no stack slot was pushed for it.
3623
3624 Now output code if necessary to convert ENTRY_PARM to
3625 the type in which this function declares it,
3626 and store that result in an appropriate place,
3627 which may be a pseudo reg, may be STACK_PARM,
3628 or may be a local stack slot if STACK_PARM is 0.
3629
3630 Set DECL_RTL to that place. */
3631
3632 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
3633 {
3634 /* If a BLKmode arrives in registers, copy it to a stack slot.
3635 Handle calls that pass values in multiple non-contiguous
3636 locations. The Irix 6 ABI has examples of this. */
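	 A sketch of the PARALLEL case: ENTRY_PARM is, roughly,
	   (parallel [(expr_list (reg:DF 32) (const_int 0))
		      (expr_list (reg:DI 5) (const_int 8))]),
	 i.e. a list of (register, byte offset) pairs that
	 emit_group_store unpacks into STACK_PARM; the particular
	 registers and offsets shown here are hypothetical.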
3637 if (GET_CODE (entry_parm) == REG
3638 || GET_CODE (entry_parm) == PARALLEL)
3639 {
3640 int size_stored
3641 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3642 UNITS_PER_WORD);
3643
3644 /* Note that we will be storing an integral number of words.
3645 So we have to be careful to ensure that we allocate an
3646 integral number of words. We do this below in the
3647 assign_stack_local if space was not allocated in the argument
3648 list. If it was, this will not work if PARM_BOUNDARY is not
3649 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3650 if it becomes a problem. */
3651
3652 if (stack_parm == 0)
3653 {
3654 stack_parm
3655 = assign_stack_local (GET_MODE (entry_parm),
3656 size_stored, 0);
3657
3658 /* If this is a memory ref that contains aggregate
3659 components, mark it as such for cse and loop optimize. */
3660 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3661 }
3662
3663 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3664 abort ();
3665
3666 if (TREE_READONLY (parm))
3667 RTX_UNCHANGING_P (stack_parm) = 1;
3668
3669 /* Handle calls that pass values in multiple non-contiguous
3670 locations. The Irix 6 ABI has examples of this. */
3671 if (GET_CODE (entry_parm) == PARALLEL)
3672 emit_group_store (validize_mem (stack_parm), entry_parm);
3673 else
3674 move_block_from_reg (REGNO (entry_parm),
3675 validize_mem (stack_parm),
3676 size_stored / UNITS_PER_WORD,
3677 int_size_in_bytes (TREE_TYPE (parm)));
3678 }
3679 DECL_RTL (parm) = stack_parm;
3680 }
3681 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3682 && ! DECL_INLINE (fndecl))
3683 /* layout_decl may set this. */
3684 || TREE_ADDRESSABLE (parm)
3685 || TREE_SIDE_EFFECTS (parm)
3686 /* If -ffloat-store specified, don't put explicit
3687 float variables into registers. */
3688 || (flag_float_store
3689 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3690 /* Always assign pseudo to structure return or item passed
3691 by invisible reference. */
3692 || passed_pointer || parm == function_result_decl)
3693 {
3694 /* Store the parm in a pseudoregister during the function, but we
3695 may need to do it in a wider mode. */
3696
3697 register rtx parmreg;
3698 int regno, regnoi, regnor;
3699
3700 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3701
3702 promoted_nominal_mode
3703 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
3704
3705 parmreg = gen_reg_rtx (promoted_nominal_mode);
3706 REG_USERVAR_P (parmreg) = 1;
3707
3708 /* If this was an item that we received a pointer to, set DECL_RTL
3709 appropriately. */
3710 if (passed_pointer)
3711 {
3712 DECL_RTL (parm)
3713 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3714 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3715 }
3716 else
3717 DECL_RTL (parm) = parmreg;
3718
3719 /* Copy the value into the register. */
3720 if (nominal_mode != passed_mode
3721 || promoted_nominal_mode != promoted_mode)
3722 {
3723 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3724 mode, by the caller. We now have to convert it to
3725 NOMINAL_MODE, if different. However, PARMREG may be in
3726 	     a different mode than NOMINAL_MODE if it is being stored
3727 promoted.
3728
3729 If ENTRY_PARM is a hard register, it might be in a register
3730 not valid for operating in its mode (e.g., an odd-numbered
3731 register for a DFmode). In that case, moves are the only
3732 thing valid, so we can't do a convert from there. This
3733 	     occurs when the calling sequence allows such misaligned
3734 usages.
3735
3736 In addition, the conversion may involve a call, which could
3737 clobber parameters which haven't been copied to pseudo
3738 registers yet. Therefore, we must first copy the parm to
3739 a pseudo reg here, and save the conversion until after all
3740 parameters have been moved. */
3741
3742 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3743
3744 emit_move_insn (tempreg, validize_mem (entry_parm));
3745
3746 push_to_sequence (conversion_insns);
3747 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
3748
3749 expand_assignment (parm,
3750 make_tree (nominal_type, tempreg), 0, 0);
3751 conversion_insns = get_insns ();
3752 did_conversion = 1;
3753 end_sequence ();
3754 }
3755 else
3756 emit_move_insn (parmreg, validize_mem (entry_parm));
3757
3758 /* If we were passed a pointer but the actual value
3759 can safely live in a register, put it in one. */
3760 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3761 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3762 && ! DECL_INLINE (fndecl))
3763 /* layout_decl may set this. */
3764 || TREE_ADDRESSABLE (parm)
3765 || TREE_SIDE_EFFECTS (parm)
3766 /* If -ffloat-store specified, don't put explicit
3767 float variables into registers. */
3768 || (flag_float_store
3769 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3770 {
3771 /* We can't use nominal_mode, because it will have been set to
3772 Pmode above. We must use the actual mode of the parm. */
3773 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3774 REG_USERVAR_P (parmreg) = 1;
3775 emit_move_insn (parmreg, DECL_RTL (parm));
3776 DECL_RTL (parm) = parmreg;
3777 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3778 now the parm. */
3779 stack_parm = 0;
3780 }
3781 #ifdef FUNCTION_ARG_CALLEE_COPIES
3782 /* If we are passed an arg by reference and it is our responsibility
3783 to make a copy, do it now.
3784 	 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3785 original argument, so we must recreate them in the call to
3786 FUNCTION_ARG_CALLEE_COPIES. */
3787       /* ??? Later add code to avoid making the copy if the argument
3788 	 isn't modified.  */
3789
3790 else if (passed_pointer
3791 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3792 TYPE_MODE (DECL_ARG_TYPE (parm)),
3793 DECL_ARG_TYPE (parm),
3794 ! last_named)
3795 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
3796 {
3797 rtx copy;
3798 tree type = DECL_ARG_TYPE (parm);
3799
3800 /* This sequence may involve a library call perhaps clobbering
3801 registers that haven't been copied to pseudos yet. */
3802
3803 push_to_sequence (conversion_insns);
3804
3805 if (TYPE_SIZE (type) == 0
3806 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3807 /* This is a variable sized object. */
3808 copy = gen_rtx (MEM, BLKmode,
3809 allocate_dynamic_stack_space
3810 (expr_size (parm), NULL_RTX,
3811 TYPE_ALIGN (type)));
3812 else
3813 copy = assign_stack_temp (TYPE_MODE (type),
3814 int_size_in_bytes (type), 1);
3815 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
3816
3817 store_expr (parm, copy, 0);
3818 emit_move_insn (parmreg, XEXP (copy, 0));
3819 conversion_insns = get_insns ();
3820 did_conversion = 1;
3821 end_sequence ();
3822 }
3823 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3824
3825 /* In any case, record the parm's desired stack location
3826 in case we later discover it must live in the stack.
3827
3828 If it is a COMPLEX value, store the stack location for both
3829 halves. */
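	  /* For instance, a __complex__ double parm arrives here as a
	     CONCAT of two DFmode registers; gen_realpart and gen_imagpart,
	     used below, pick the halves apart so each can be recorded
	     with its own stack location.  */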
3830
3831 if (GET_CODE (parmreg) == CONCAT)
3832 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
3833 else
3834 regno = REGNO (parmreg);
3835
3836 if (regno >= nparmregs)
3837 {
3838 rtx *new;
3839 int old_nparmregs = nparmregs;
3840
3841 nparmregs = regno + 5;
3842 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3843 bcopy ((char *) parm_reg_stack_loc, (char *) new,
3844 old_nparmregs * sizeof (rtx));
3845 bzero ((char *) (new + old_nparmregs),
3846 (nparmregs - old_nparmregs) * sizeof (rtx));
3847 parm_reg_stack_loc = new;
3848 }
3849
3850 if (GET_CODE (parmreg) == CONCAT)
3851 {
3852 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
3853
3854 regnor = REGNO (gen_realpart (submode, parmreg));
3855 regnoi = REGNO (gen_imagpart (submode, parmreg));
3856
3857 if (stack_parm != 0)
3858 {
3859 parm_reg_stack_loc[regnor]
3860 = gen_realpart (submode, stack_parm);
3861 parm_reg_stack_loc[regnoi]
3862 = gen_imagpart (submode, stack_parm);
3863 }
3864 else
3865 {
3866 parm_reg_stack_loc[regnor] = 0;
3867 parm_reg_stack_loc[regnoi] = 0;
3868 }
3869 }
3870 else
3871 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3872
3873 /* Mark the register as eliminable if we did no conversion
3874 and it was copied from memory at a fixed offset,
3875 and the arg pointer was not copied to a pseudo-reg.
3876 If the arg pointer is a pseudo reg or the offset formed
3877 an invalid address, such memory-equivalences
3878 as we make here would screw up life analysis for it. */
3879 if (nominal_mode == passed_mode
3880 && ! did_conversion
3881 && GET_CODE (entry_parm) == MEM
3882 && entry_parm == stack_parm
3883 && stack_offset.var == 0
3884 && reg_mentioned_p (virtual_incoming_args_rtx,
3885 XEXP (entry_parm, 0)))
3886 {
3887 rtx linsn = get_last_insn ();
3888 rtx sinsn, set;
3889
3890 /* Mark complex types separately. */
3891 if (GET_CODE (parmreg) == CONCAT)
3892 /* Scan backwards for the set of the real and
3893 imaginary parts. */
3894 for (sinsn = linsn; sinsn != 0;
3895 sinsn = prev_nonnote_insn (sinsn))
3896 {
3897 set = single_set (sinsn);
3898 if (set != 0
3899 && SET_DEST (set) == regno_reg_rtx [regnoi])
3900 REG_NOTES (sinsn)
3901 = gen_rtx (EXPR_LIST, REG_EQUIV,
3902 parm_reg_stack_loc[regnoi],
3903 REG_NOTES (sinsn));
3904 else if (set != 0
3905 && SET_DEST (set) == regno_reg_rtx [regnor])
3906 REG_NOTES (sinsn)
3907 = gen_rtx (EXPR_LIST, REG_EQUIV,
3908 parm_reg_stack_loc[regnor],
3909 REG_NOTES (sinsn));
3910 }
3911 else if ((set = single_set (linsn)) != 0
3912 && SET_DEST (set) == parmreg)
3913 REG_NOTES (linsn)
3914 = gen_rtx (EXPR_LIST, REG_EQUIV,
3915 entry_parm, REG_NOTES (linsn));
3916 }
3917
3918 /* For pointer data type, suggest pointer register. */
3919 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3920 mark_reg_pointer (parmreg,
3921 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
3922 / BITS_PER_UNIT));
3923 }
3924 else
3925 {
3926 /* Value must be stored in the stack slot STACK_PARM
3927 during function execution. */
3928
3929 if (promoted_mode != nominal_mode)
3930 {
3931 /* Conversion is required. */
3932 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3933
3934 emit_move_insn (tempreg, validize_mem (entry_parm));
3935
3936 push_to_sequence (conversion_insns);
3937 entry_parm = convert_to_mode (nominal_mode, tempreg,
3938 TREE_UNSIGNED (TREE_TYPE (parm)));
3939 conversion_insns = get_insns ();
3940 did_conversion = 1;
3941 end_sequence ();
3942 }
3943
3944 if (entry_parm != stack_parm)
3945 {
3946 if (stack_parm == 0)
3947 {
3948 stack_parm
3949 = assign_stack_local (GET_MODE (entry_parm),
3950 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3951 /* If this is a memory ref that contains aggregate components,
3952 mark it as such for cse and loop optimize. */
3953 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3954 }
3955
3956 if (promoted_mode != nominal_mode)
3957 {
3958 push_to_sequence (conversion_insns);
3959 emit_move_insn (validize_mem (stack_parm),
3960 validize_mem (entry_parm));
3961 conversion_insns = get_insns ();
3962 end_sequence ();
3963 }
3964 else
3965 emit_move_insn (validize_mem (stack_parm),
3966 validize_mem (entry_parm));
3967 }
3968
3969 DECL_RTL (parm) = stack_parm;
3970 }
3971
3972 /* If this "parameter" was the place where we are receiving the
3973 function's incoming structure pointer, set up the result. */
3974 if (parm == function_result_decl)
3975 {
3976 tree result = DECL_RESULT (fndecl);
3977 tree restype = TREE_TYPE (result);
3978
3979 DECL_RTL (result)
3980 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
3981
3982 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
3983 }
3984
3985 if (TREE_THIS_VOLATILE (parm))
3986 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3987 if (TREE_READONLY (parm))
3988 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3989 }
3990
3991 /* Output all parameter conversion instructions (possibly including calls)
3992 now that all parameters have been copied out of hard registers. */
3993 emit_insns (conversion_insns);
3994
3995 max_parm_reg = max_reg_num ();
3996 last_parm_insn = get_last_insn ();
3997
3998 current_function_args_size = stack_args_size.constant;
3999
4000 /* Adjust function incoming argument size for alignment and
4001 minimum length. */
4002
4003 #ifdef REG_PARM_STACK_SPACE
4004 #ifndef MAYBE_REG_PARM_STACK_SPACE
4005 current_function_args_size = MAX (current_function_args_size,
4006 REG_PARM_STACK_SPACE (fndecl));
4007 #endif
4008 #endif
4009
4010 #ifdef STACK_BOUNDARY
4011 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4012
4013 current_function_args_size
4014 = ((current_function_args_size + STACK_BYTES - 1)
4015 / STACK_BYTES) * STACK_BYTES;
4016 #endif
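  /* For example, with a 64-bit STACK_BOUNDARY, STACK_BYTES is 8, and an
     args size of 9 bytes rounds up to (9 + 7) / 8 * 8 == 16.  */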
4017
4018 #ifdef ARGS_GROW_DOWNWARD
4019 current_function_arg_offset_rtx
4020 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4021 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4022 size_int (-stack_args_size.constant)),
4023 NULL_RTX, VOIDmode, 0));
4024 #else
4025 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4026 #endif
4027
4028 /* See how many bytes, if any, of its args a function should try to pop
4029 on return. */
4030
4031 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4032 current_function_args_size);
4033
4034   /* For a stdarg.h function, save info about
4035 regs and stack space used by the named args. */
4036
4037 if (!hide_last_arg)
4038 current_function_args_info = args_so_far;
4039
4040 /* Set the rtx used for the function return value. Put this in its
4041 own variable so any optimizers that need this information don't have
4042 to include tree.h. Do this here so it gets done when an inlined
4043 function gets output. */
4044
4045 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4046 }
4047 \f
4048 /* Indicate whether REGNO is an incoming argument to the current function
4049 that was promoted to a wider mode. If so, return the RTX for the
4050 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4051 that REGNO is promoted from and whether the promotion was signed or
4052 unsigned. */
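/* As a hypothetical example, on a machine that promotes HImode args to
   SImode, an argument declared short that arrives in (reg:SI 3) makes
   promoted_input_arg (3, &mode, &unsignedp) return that register with
   *PMODE set to HImode and *PUNSIGNEDP to the argument's signedness.  */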
4053
4054 #ifdef PROMOTE_FUNCTION_ARGS
4055
4056 rtx
4057 promoted_input_arg (regno, pmode, punsignedp)
4058 int regno;
4059 enum machine_mode *pmode;
4060 int *punsignedp;
4061 {
4062 tree arg;
4063
4064 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4065 arg = TREE_CHAIN (arg))
4066 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4067 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4068 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4069 {
4070 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4071 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4072
4073 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4074 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4075 && mode != DECL_MODE (arg))
4076 {
4077 *pmode = DECL_MODE (arg);
4078 *punsignedp = unsignedp;
4079 return DECL_INCOMING_RTL (arg);
4080 }
4081 }
4082
4083 return 0;
4084 }
4085
4086 #endif
4087 \f
4088 /* Compute the size and offset from the start of the stacked arguments for a
4089 parm passed in mode PASSED_MODE and with type TYPE.
4090
4091 INITIAL_OFFSET_PTR points to the current offset into the stacked
4092 arguments.
4093
4094 The starting offset and size for this parm are returned in *OFFSET_PTR
4095 and *ARG_SIZE_PTR, respectively.
4096
4097 IN_REGS is non-zero if the argument will be passed in registers. It will
4098 never be set if REG_PARM_STACK_SPACE is not defined.
4099
4100 FNDECL is the function in which the argument was defined.
4101
4102 There are two types of rounding that are done. The first, controlled by
4103 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4104 list to be aligned to the specific boundary (in bits). This rounding
4105 affects the initial and starting offsets, but not the argument size.
4106
4107 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4108 optionally rounds the size of the parm to PARM_BOUNDARY. The
4109 initial offset is not affected by this rounding, while the size always
4110 is and the starting offset may be. */
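/* As a hypothetical example of the two roundings, with a PARM_BOUNDARY of
   32 bits and a FUNCTION_ARG_BOUNDARY of 64 bits for this arg: an initial
   offset of 4 bytes is padded up to 8, which becomes the starting offset,
   and a 1-byte arg has its size rounded up to 4 bytes, the next
   PARM_BOUNDARY multiple.  */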
4111
4112 /* In the ARGS_GROW_DOWNWARD case, offset_ptr will be negative;
4113    initial_offset_ptr is positive because locate_and_pad_parm's
4114    callers pass in the total size of args so far as
4115    initial_offset_ptr.  arg_size_ptr is always positive.  */
4116
4117 void
4118 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4119 initial_offset_ptr, offset_ptr, arg_size_ptr)
4120 enum machine_mode passed_mode;
4121 tree type;
4122 int in_regs;
4123 tree fndecl;
4124 struct args_size *initial_offset_ptr;
4125 struct args_size *offset_ptr;
4126 struct args_size *arg_size_ptr;
4127 {
4128 tree sizetree
4129 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4130 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4131 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4132 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4133 int reg_parm_stack_space = 0;
4134
4135 #ifdef REG_PARM_STACK_SPACE
4136 /* If we have found a stack parm before we reach the end of the
4137 area reserved for registers, skip that area. */
4138 if (! in_regs)
4139 {
4140 #ifdef MAYBE_REG_PARM_STACK_SPACE
4141 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4142 #else
4143 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4144 #endif
4145 if (reg_parm_stack_space > 0)
4146 {
4147 if (initial_offset_ptr->var)
4148 {
4149 initial_offset_ptr->var
4150 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4151 size_int (reg_parm_stack_space));
4152 initial_offset_ptr->constant = 0;
4153 }
4154 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4155 initial_offset_ptr->constant = reg_parm_stack_space;
4156 }
4157 }
4158 #endif /* REG_PARM_STACK_SPACE */
4159
4160 arg_size_ptr->var = 0;
4161 arg_size_ptr->constant = 0;
4162
4163 #ifdef ARGS_GROW_DOWNWARD
4164 if (initial_offset_ptr->var)
4165 {
4166 offset_ptr->constant = 0;
4167 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4168 initial_offset_ptr->var);
4169 }
4170 else
4171 {
4172 offset_ptr->constant = - initial_offset_ptr->constant;
4173 offset_ptr->var = 0;
4174 }
4175 if (where_pad != none
4176 && (TREE_CODE (sizetree) != INTEGER_CST
4177 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4178 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4179 SUB_PARM_SIZE (*offset_ptr, sizetree);
4180 if (where_pad != downward)
4181 pad_to_arg_alignment (offset_ptr, boundary);
4182 if (initial_offset_ptr->var)
4183 {
4184 arg_size_ptr->var = size_binop (MINUS_EXPR,
4185 size_binop (MINUS_EXPR,
4186 integer_zero_node,
4187 initial_offset_ptr->var),
4188 offset_ptr->var);
4189 }
4190 else
4191 {
4192 arg_size_ptr->constant = (- initial_offset_ptr->constant -
4193 offset_ptr->constant);
4194 }
4195 #else /* !ARGS_GROW_DOWNWARD */
4196 pad_to_arg_alignment (initial_offset_ptr, boundary);
4197 *offset_ptr = *initial_offset_ptr;
4198
4199 #ifdef PUSH_ROUNDING
4200 if (passed_mode != BLKmode)
4201 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4202 #endif
4203
4204   /* Pad_below needs the pre-rounded size to know how much to pad below,
4205 so this must be done before rounding up. */
4206 if (where_pad == downward
4207 /* However, BLKmode args passed in regs have their padding done elsewhere.
4208 The stack slot must be able to hold the entire register. */
4209 && !(in_regs && passed_mode == BLKmode))
4210 pad_below (offset_ptr, passed_mode, sizetree);
4211
4212 if (where_pad != none
4213 && (TREE_CODE (sizetree) != INTEGER_CST
4214 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4215 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4216
4217 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4218 #endif /* ARGS_GROW_DOWNWARD */
4219 }
4220
4221 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4222 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4223
4224 static void
4225 pad_to_arg_alignment (offset_ptr, boundary)
4226 struct args_size *offset_ptr;
4227 int boundary;
4228 {
4229 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4230
4231 if (boundary > BITS_PER_UNIT)
4232 {
4233 if (offset_ptr->var)
4234 {
4235 offset_ptr->var =
4236 #ifdef ARGS_GROW_DOWNWARD
4237 round_down
4238 #else
4239 round_up
4240 #endif
4241 (ARGS_SIZE_TREE (*offset_ptr),
4242 boundary / BITS_PER_UNIT);
4243 offset_ptr->constant = 0; /*?*/
4244 }
4245 else
4246 offset_ptr->constant =
4247 #ifdef ARGS_GROW_DOWNWARD
4248 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4249 #else
4250 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4251 #endif
4252 }
4253 }
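/* For example, with a boundary of 64 bits (8 bytes), a constant offset of
   13 becomes CEIL_ROUND (13, 8) == 16, or FLOOR_ROUND (13, 8) == 8 in the
   ARGS_GROW_DOWNWARD case.  */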
4254
4255 static void
4256 pad_below (offset_ptr, passed_mode, sizetree)
4257 struct args_size *offset_ptr;
4258 enum machine_mode passed_mode;
4259 tree sizetree;
4260 {
4261 if (passed_mode != BLKmode)
4262 {
4263 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4264 offset_ptr->constant
4265 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4266 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4267 - GET_MODE_SIZE (passed_mode));
4268 }
4269 else
4270 {
4271 if (TREE_CODE (sizetree) != INTEGER_CST
4272 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4273 {
4274 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4275 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4276 /* Add it in. */
4277 ADD_PARM_SIZE (*offset_ptr, s2);
4278 SUB_PARM_SIZE (*offset_ptr, sizetree);
4279 }
4280 }
4281 }
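/* For example, padding a 2-byte HImode arg with a 32-bit PARM_BOUNDARY
   bumps the offset by 4 - 2 == 2 bytes, leaving that much padding below
   the value in its 4-byte slot.  */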
4282
4283 static tree
4284 round_down (value, divisor)
4285 tree value;
4286 int divisor;
4287 {
4288 return size_binop (MULT_EXPR,
4289 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4290 size_int (divisor));
4291 }
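/* For example, round_down on a size of 11 with a divisor of 4 yields
   11 / 4 * 4 == 8.  */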
4292 \f
4293 /* Walk the tree of blocks describing the binding levels within a function
4294 and warn about uninitialized variables.
4295 This is done after calling flow_analysis and before global_alloc
4296 clobbers the pseudo-regs to hard regs. */
4297
4298 void
4299 uninitialized_vars_warning (block)
4300 tree block;
4301 {
4302 register tree decl, sub;
4303 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4304 {
4305 if (TREE_CODE (decl) == VAR_DECL
4306 	  /* These warnings are unreliable for aggregates
4307 because assigning the fields one by one can fail to convince
4308 flow.c that the entire aggregate was initialized.
4309 Unions are troublesome because members may be shorter. */
4310 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4311 && DECL_RTL (decl) != 0
4312 && GET_CODE (DECL_RTL (decl)) == REG
4313 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4314 warning_with_decl (decl,
4315 "`%s' might be used uninitialized in this function");
4316 if (TREE_CODE (decl) == VAR_DECL
4317 && DECL_RTL (decl) != 0
4318 && GET_CODE (DECL_RTL (decl)) == REG
4319 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4320 warning_with_decl (decl,
4321 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4322 }
4323 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4324 uninitialized_vars_warning (sub);
4325 }
4326
4327 /* Do the appropriate part of uninitialized_vars_warning
4328 but for arguments instead of local variables. */
4329
4330 void
4331 setjmp_args_warning ()
4332 {
4333 register tree decl;
4334 for (decl = DECL_ARGUMENTS (current_function_decl);
4335 decl; decl = TREE_CHAIN (decl))
4336 if (DECL_RTL (decl) != 0
4337 && GET_CODE (DECL_RTL (decl)) == REG
4338 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4339 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4340 }
4341
4342 /* If this function calls setjmp, put all vars into the stack
4343 unless they were declared `register'. */
4344
4345 void
4346 setjmp_protect (block)
4347 tree block;
4348 {
4349 register tree decl, sub;
4350 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4351 if ((TREE_CODE (decl) == VAR_DECL
4352 || TREE_CODE (decl) == PARM_DECL)
4353 && DECL_RTL (decl) != 0
4354 && GET_CODE (DECL_RTL (decl)) == REG
4355 /* If this variable came from an inline function, it must be
4356 	   that its life doesn't overlap the setjmp.  If there was a
4357 	   setjmp in the function, it would already be in memory.  We
4358 	   must exclude such variables because their DECL_RTL might be
4359 set to strange things such as virtual_stack_vars_rtx. */
4360 && ! DECL_FROM_INLINE (decl)
4361 && (
4362 #ifdef NON_SAVING_SETJMP
4363 /* If longjmp doesn't restore the registers,
4364 don't put anything in them. */
4365 NON_SAVING_SETJMP
4366 ||
4367 #endif
4368 ! DECL_REGISTER (decl)))
4369 put_var_into_stack (decl);
4370 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4371 setjmp_protect (sub);
4372 }
4373 \f
4374 /* Like the previous function, but for args instead of local variables. */
4375
4376 void
4377 setjmp_protect_args ()
4378 {
4379 register tree decl, sub;
4380 for (decl = DECL_ARGUMENTS (current_function_decl);
4381 decl; decl = TREE_CHAIN (decl))
4382 if ((TREE_CODE (decl) == VAR_DECL
4383 || TREE_CODE (decl) == PARM_DECL)
4384 && DECL_RTL (decl) != 0
4385 && GET_CODE (DECL_RTL (decl)) == REG
4386 && (
4387 /* If longjmp doesn't restore the registers,
4388 don't put anything in them. */
4389 #ifdef NON_SAVING_SETJMP
4390 NON_SAVING_SETJMP
4391 ||
4392 #endif
4393 ! DECL_REGISTER (decl)))
4394 put_var_into_stack (decl);
4395 }
4396 \f
4397 /* Return the context-pointer register corresponding to DECL,
4398 or 0 if it does not need one. */
4399
4400 rtx
4401 lookup_static_chain (decl)
4402 tree decl;
4403 {
4404 tree context = decl_function_context (decl);
4405 tree link;
4406
4407 if (context == 0
4408 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4409 return 0;
4410
4411 /* We treat inline_function_decl as an alias for the current function
4412 because that is the inline function whose vars, types, etc.
4413 are being merged into the current function.
4414 See expand_inline_function. */
4415 if (context == current_function_decl || context == inline_function_decl)
4416 return virtual_stack_vars_rtx;
4417
4418 for (link = context_display; link; link = TREE_CHAIN (link))
4419 if (TREE_PURPOSE (link) == context)
4420 return RTL_EXPR_RTL (TREE_VALUE (link));
4421
4422 abort ();
4423 }
4424 \f
4425 /* Convert a stack slot address ADDR for variable VAR
4426 (from a containing function)
4427 into an address valid in this function (using a static chain). */
4428
4429 rtx
4430 fix_lexical_addr (addr, var)
4431 rtx addr;
4432 tree var;
4433 {
4434 rtx basereg;
4435 int displacement;
4436 tree context = decl_function_context (var);
4437 struct function *fp;
4438 rtx base = 0;
4439
4440 /* If this is the present function, we need not do anything. */
4441 if (context == current_function_decl || context == inline_function_decl)
4442 return addr;
4443
4444 for (fp = outer_function_chain; fp; fp = fp->next)
4445 if (fp->decl == context)
4446 break;
4447
4448 if (fp == 0)
4449 abort ();
4450
4451 /* Decode given address as base reg plus displacement. */
4452 if (GET_CODE (addr) == REG)
4453 basereg = addr, displacement = 0;
4454 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4455 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4456 else
4457 abort ();
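  /* E.g. an ADDR of (plus:SI (reg fp) (const_int 8)) decodes to BASEREG
     == the frame pointer and DISPLACEMENT == 8.  */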
4458
4459 /* We accept vars reached via the containing function's
4460 incoming arg pointer and via its stack variables pointer. */
4461 if (basereg == fp->internal_arg_pointer)
4462 {
4463 /* If reached via arg pointer, get the arg pointer value
4464 out of that function's stack frame.
4465
4466 There are two cases: If a separate ap is needed, allocate a
4467 slot in the outer function for it and dereference it that way.
4468 This is correct even if the real ap is actually a pseudo.
4469 Otherwise, just adjust the offset from the frame pointer to
4470 compensate. */
4471
4472 #ifdef NEED_SEPARATE_AP
4473 rtx addr;
4474
4475 if (fp->arg_pointer_save_area == 0)
4476 fp->arg_pointer_save_area
4477 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4478
4479 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4480 addr = memory_address (Pmode, addr);
4481
4482 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4483 #else
4484 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4485 base = lookup_static_chain (var);
4486 #endif
4487 }
4488
4489 else if (basereg == virtual_stack_vars_rtx)
4490 {
4491 /* This is the same code as lookup_static_chain, duplicated here to
4492 avoid an extra call to decl_function_context. */
4493 tree link;
4494
4495 for (link = context_display; link; link = TREE_CHAIN (link))
4496 if (TREE_PURPOSE (link) == context)
4497 {
4498 base = RTL_EXPR_RTL (TREE_VALUE (link));
4499 break;
4500 }
4501 }
4502
4503 if (base == 0)
4504 abort ();
4505
4506 /* Use same offset, relative to appropriate static chain or argument
4507 pointer. */
4508 return plus_constant (base, displacement);
4509 }
4510 \f
4511 /* Return the address of the trampoline for entering nested fn FUNCTION.
4512 If necessary, allocate a trampoline (in the stack frame)
4513 and emit rtl to initialize its contents (at entry to this function). */
4514
4515 rtx
4516 trampoline_address (function)
4517 tree function;
4518 {
4519 tree link;
4520 tree rtlexp;
4521 rtx tramp;
4522 struct function *fp;
4523 tree fn_context;
4524
4525 /* Find an existing trampoline and return it. */
4526 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4527 if (TREE_PURPOSE (link) == function)
4528 return
4529 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4530
4531 for (fp = outer_function_chain; fp; fp = fp->next)
4532 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4533 if (TREE_PURPOSE (link) == function)
4534 {
4535 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4536 function);
4537 return round_trampoline_addr (tramp);
4538 }
4539
4540 /* None exists; we must make one. */
4541
4542 /* Find the `struct function' for the function containing FUNCTION. */
4543 fp = 0;
4544 fn_context = decl_function_context (function);
4545 if (fn_context != current_function_decl)
4546 for (fp = outer_function_chain; fp; fp = fp->next)
4547 if (fp->decl == fn_context)
4548 break;
4549
4550 /* Allocate run-time space for this trampoline
4551 (usually in the defining function's stack frame). */
4552 #ifdef ALLOCATE_TRAMPOLINE
4553 tramp = ALLOCATE_TRAMPOLINE (fp);
4554 #else
4555 /* If rounding needed, allocate extra space
4556 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4557 #ifdef TRAMPOLINE_ALIGNMENT
4558 #define TRAMPOLINE_REAL_SIZE \
4559 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4560 #else
4561 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4562 #endif
4563 if (fp != 0)
4564 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4565 else
4566 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4567 #endif
4568
4569 /* Record the trampoline for reuse and note it for later initialization
4570 by expand_function_end. */
4571 if (fp != 0)
4572 {
4573 push_obstacks (fp->function_maybepermanent_obstack,
4574 fp->function_maybepermanent_obstack);
4575 rtlexp = make_node (RTL_EXPR);
4576 RTL_EXPR_RTL (rtlexp) = tramp;
4577 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4578 pop_obstacks ();
4579 }
4580 else
4581 {
4582 /* Make the RTL_EXPR node temporary, not momentary, so that the
4583 trampoline_list doesn't become garbage. */
4584 int momentary = suspend_momentary ();
4585 rtlexp = make_node (RTL_EXPR);
4586 resume_momentary (momentary);
4587
4588 RTL_EXPR_RTL (rtlexp) = tramp;
4589 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4590 }
4591
4592 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4593 return round_trampoline_addr (tramp);
4594 }
4595
4596 /* Given a trampoline address,
4597    round it to a multiple of TRAMPOLINE_ALIGNMENT.  */
4598
4599 static rtx
4600 round_trampoline_addr (tramp)
4601 rtx tramp;
4602 {
4603 #ifdef TRAMPOLINE_ALIGNMENT
4604 /* Round address up to desired boundary. */
4605 rtx temp = gen_reg_rtx (Pmode);
4606 temp = expand_binop (Pmode, add_optab, tramp,
4607 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4608 temp, 0, OPTAB_LIB_WIDEN);
4609 tramp = expand_binop (Pmode, and_optab, temp,
4610 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4611 temp, 0, OPTAB_LIB_WIDEN);
4612 #endif
4613 return tramp;
4614 }
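/* For example, with a TRAMPOLINE_ALIGNMENT of 64 bits, an address of
   0x1005 has 7 added (giving 0x100c) and is then masked with -8,
   yielding the aligned address 0x1008.  */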
4615 \f
4616 /* The functions identify_blocks and reorder_blocks provide a way to
4617 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4618 duplicate portions of the RTL code. Call identify_blocks before
4619 changing the RTL, and call reorder_blocks after. */
4620
4621 /* Put all this function's BLOCK nodes including those that are chained
4622 onto the first block into a vector, and return it.
4623 Also store in each NOTE for the beginning or end of a block
4624 the index of that block in the vector.
4625 The arguments are BLOCK, the chain of top-level blocks of the function,
4626 and INSNS, the insn chain of the function. */
4627
4628 tree *
4629 identify_blocks (block, insns)
4630 tree block;
4631 rtx insns;
4632 {
4633 int n_blocks;
4634 tree *block_vector;
4635 int *block_stack;
4636 int depth = 0;
4637 int next_block_number = 1;
4638 int current_block_number = 1;
4639 rtx insn;
4640
4641 if (block == 0)
4642 return 0;
4643
4644 n_blocks = all_blocks (block, 0);
4645 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4646 block_stack = (int *) alloca (n_blocks * sizeof (int));
4647
4648 all_blocks (block, block_vector);
4649
4650 for (insn = insns; insn; insn = NEXT_INSN (insn))
4651 if (GET_CODE (insn) == NOTE)
4652 {
4653 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4654 {
4655 block_stack[depth++] = current_block_number;
4656 current_block_number = next_block_number;
4657 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4658 }
4659 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4660 {
4661 current_block_number = block_stack[--depth];
4662 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4663 }
4664 }
4665
4666 if (n_blocks != next_block_number)
4667 abort ();
4668
4669 return block_vector;
4670 }
4671
4672 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4673 and a revised instruction chain, rebuild the tree structure
4674 of BLOCK nodes to correspond to the new order of RTL.
4675    The new block tree is inserted below BLOCK.
4676 Returns the current top-level block. */
4677
4678 tree
4679 reorder_blocks (block_vector, block, insns)
4680 tree *block_vector;
4681 tree block;
4682 rtx insns;
4683 {
4684 tree current_block = block;
4685 rtx insn;
4686
4687 if (block_vector == 0)
4688 return block;
4689
4690   /* Prune the old trees away, so that they don't get in the way.  */
4691 BLOCK_SUBBLOCKS (current_block) = 0;
4692 BLOCK_CHAIN (current_block) = 0;
4693
4694 for (insn = insns; insn; insn = NEXT_INSN (insn))
4695 if (GET_CODE (insn) == NOTE)
4696 {
4697 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4698 {
4699 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4700 /* If we have seen this block before, copy it. */
4701 if (TREE_ASM_WRITTEN (block))
4702 block = copy_node (block);
4703 BLOCK_SUBBLOCKS (block) = 0;
4704 TREE_ASM_WRITTEN (block) = 1;
4705 BLOCK_SUPERCONTEXT (block) = current_block;
4706 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4707 BLOCK_SUBBLOCKS (current_block) = block;
4708 current_block = block;
4709 NOTE_SOURCE_FILE (insn) = 0;
4710 }
4711 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4712 {
4713 BLOCK_SUBBLOCKS (current_block)
4714 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4715 current_block = BLOCK_SUPERCONTEXT (current_block);
4716 NOTE_SOURCE_FILE (insn) = 0;
4717 }
4718 }
4719
4720 BLOCK_SUBBLOCKS (current_block)
4721 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4722 return current_block;
4723 }
4724
4725 /* Reverse the order of elements in the chain T of blocks,
4726 and return the new head of the chain (old last element). */
4727
4728 static tree
4729 blocks_nreverse (t)
4730 tree t;
4731 {
4732 register tree prev = 0, decl, next;
4733 for (decl = t; decl; decl = next)
4734 {
4735 next = BLOCK_CHAIN (decl);
4736 BLOCK_CHAIN (decl) = prev;
4737 prev = decl;
4738 }
4739 return prev;
4740 }
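/* E.g. a chain A -> B -> C comes back as C -> B -> A, relinked in place
   through the BLOCK_CHAIN fields.  */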
4741
4742 /* Count the subblocks of the list starting with BLOCK, and list them
4743 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
4744 blocks. */
4745
4746 static int
4747 all_blocks (block, vector)
4748 tree block;
4749 tree *vector;
4750 {
4751 int n_blocks = 0;
4752
4753 while (block)
4754 {
4755 TREE_ASM_WRITTEN (block) = 0;
4756
4757 /* Record this block. */
4758 if (vector)
4759 vector[n_blocks] = block;
4760
4761 ++n_blocks;
4762
4763 /* Record the subblocks, and their subblocks... */
4764 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4765 vector ? vector + n_blocks : 0);
4766 block = BLOCK_CHAIN (block);
4767 }
4768
4769 return n_blocks;
4770 }
4771 \f
4772 /* Build bytecode call descriptor for function SUBR. */
4773
4774 rtx
4775 bc_build_calldesc (subr)
4776 tree subr;
4777 {
4778 tree calldesc = 0, arg;
4779 int nargs = 0;
4780
4781 /* Build the argument description vector in reverse order. */
4782 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4783 nargs = 0;
4784
4785 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4786 {
4787 ++nargs;
4788
4789 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4790 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4791 }
4792
4793 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4794
4795 /* Prepend the function's return type. */
4796 calldesc = tree_cons ((tree) 0,
4797 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4798 calldesc);
4799
4800 calldesc = tree_cons ((tree) 0,
4801 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4802 calldesc);
4803
4804 /* Prepend the arg count. */
4805 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
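  /* The finished vector is thus, in order: the arg count, the return
     type's runtime type code and size, then a (type code, size) pair for
     each argument in declaration order.  */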
4806
4807 /* Output the call description vector and get its address. */
4808 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4809 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4810 build_index_type (build_int_2 (nargs * 2, 0)));
4811
4812 return output_constant_def (calldesc);
4813 }
4814
4815
4816 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4817 and initialize static variables for generating RTL for the statements
4818 of the function. */
4819
4820 void
4821 init_function_start (subr, filename, line)
4822 tree subr;
4823 char *filename;
4824 int line;
4825 {
4826 char *junk;
4827
4828 if (output_bytecode)
4829 {
4830 this_function_decl = subr;
4831 this_function_calldesc = bc_build_calldesc (subr);
4832 local_vars_size = 0;
4833 stack_depth = 0;
4834 max_stack_depth = 0;
4835 stmt_expr_depth = 0;
4836 return;
4837 }
4838
4839 init_stmt_for_function ();
4840
4841 cse_not_expected = ! optimize;
4842
4843 /* Caller save not needed yet. */
4844 caller_save_needed = 0;
4845
4846 /* No stack slots have been made yet. */
4847 stack_slot_list = 0;
4848
4849 /* There is no stack slot for handling nonlocal gotos. */
4850 nonlocal_goto_handler_slot = 0;
4851 nonlocal_goto_stack_level = 0;
4852
4853 /* No labels have been declared for nonlocal use. */
4854 nonlocal_labels = 0;
4855
4856 /* No function calls so far in this function. */
4857 function_call_count = 0;
4858
4859 /* No parm regs have been allocated.
4860 (This is important for output_inline_function.) */
4861 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4862
4863 /* Initialize the RTL mechanism. */
4864 init_emit ();
4865
4866   /* Initialize the queue of pending postincrements and postdecrements,
4867 and some other info in expr.c. */
4868 init_expr ();
4869
4870 /* We haven't done register allocation yet. */
4871 reg_renumber = 0;
4872
4873 init_const_rtx_hash_table ();
4874
4875 current_function_name = (*decl_printable_name) (subr, &junk);
4876
4877 /* Nonzero if this is a nested function that uses a static chain. */
4878
4879 current_function_needs_context
4880 = (decl_function_context (current_function_decl) != 0
4881 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
4882
4883 /* Set if a call to setjmp is seen. */
4884 current_function_calls_setjmp = 0;
4885
4886 /* Set if a call to longjmp is seen. */
4887 current_function_calls_longjmp = 0;
4888
4889 current_function_calls_alloca = 0;
4890 current_function_has_nonlocal_label = 0;
4891 current_function_has_nonlocal_goto = 0;
4892 current_function_contains_functions = 0;
4893
4894 current_function_returns_pcc_struct = 0;
4895 current_function_returns_struct = 0;
4896 current_function_epilogue_delay_list = 0;
4897 current_function_uses_const_pool = 0;
4898 current_function_uses_pic_offset_table = 0;
4899
4900 /* We have not yet needed to make a label to jump to for tail-recursion. */
4901 tail_recursion_label = 0;
4902
4903 /* We haven't had a need to make a save area for ap yet. */
4904
4905 arg_pointer_save_area = 0;
4906
4907 /* No stack slots allocated yet. */
4908 frame_offset = 0;
4909
4910 /* No SAVE_EXPRs in this function yet. */
4911 save_expr_regs = 0;
4912
4913 /* No RTL_EXPRs in this function yet. */
4914 rtl_expr_chain = 0;
4915
4916 /* Set up to allocate temporaries. */
4917 init_temp_slots ();
4918
4919   /* Within the function body, compute a type's size as soon as it is laid out.  */
4920 immediate_size_expand++;
4921
4922 /* We haven't made any trampolines for this function yet. */
4923 trampoline_list = 0;
4924
4925 init_pending_stack_adjust ();
4926 inhibit_defer_pop = 0;
4927
4928 current_function_outgoing_args_size = 0;
4929
4930 /* Prevent ever trying to delete the first instruction of a function.
4931 Also tell final how to output a linenum before the function prologue. */
4932 emit_line_note (filename, line);
4933
4934 /* Make sure first insn is a note even if we don't want linenums.
4935 This makes sure the first insn will never be deleted.
4936 Also, final expects a note to appear there. */
4937 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4938
4939 /* Set flags used by final.c. */
4940 if (aggregate_value_p (DECL_RESULT (subr)))
4941 {
4942 #ifdef PCC_STATIC_STRUCT_RETURN
4943 current_function_returns_pcc_struct = 1;
4944 #endif
4945 current_function_returns_struct = 1;
4946 }
4947
4948 /* Warn if this value is an aggregate type,
4949 regardless of which calling convention we are using for it. */
4950 if (warn_aggregate_return
4951 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4952 warning ("function returns an aggregate");
4953
4954 current_function_returns_pointer
4955 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
4956
4957 /* Indicate that we need to distinguish between the return value of the
4958 present function and the return value of a function being called. */
4959 rtx_equal_function_value_matters = 1;
4960
4961 /* Indicate that we have not instantiated virtual registers yet. */
4962 virtuals_instantiated = 0;
4963
4964 /* Indicate we have no need of a frame pointer yet. */
4965 frame_pointer_needed = 0;
4966
4967 /* By default assume not varargs or stdarg. */
4968 current_function_varargs = 0;
4969 current_function_stdarg = 0;
4970 }
4971
4972 /* Indicate that the current function uses extra args
4973 not explicitly mentioned in the argument list in any fashion. */
4974
4975 void
4976 mark_varargs ()
4977 {
4978 current_function_varargs = 1;
4979 }
4980
4981 /* Expand a call to __main at the beginning of a possible main function. */
4982
4983 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
4984 #undef HAS_INIT_SECTION
4985 #define HAS_INIT_SECTION
4986 #endif
4987
4988 void
4989 expand_main_function ()
4990 {
4991 if (!output_bytecode)
4992 {
4993 /* The zero below avoids a possible parse error */
4994 0;
4995 #if !defined (HAS_INIT_SECTION)
4996 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
4997 VOIDmode, 0);
4998 #endif /* not HAS_INIT_SECTION */
4999 }
5000 }
5001 \f
5002 extern struct obstack permanent_obstack;
5003
5004 /* Expand start of bytecode function. See comment at
5005 expand_function_start below for details. */
5006
5007 void
5008 bc_expand_function_start (subr, parms_have_cleanups)
5009 tree subr;
5010 int parms_have_cleanups;
5011 {
5012 char label[20], *name;
5013 static int nlab;
5014 tree thisarg;
5015 int argsz;
5016
5017 if (TREE_PUBLIC (subr))
5018 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
5019
5020 #ifdef DEBUG_PRINT_CODE
5021 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
5022 #endif
5023
5024 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
5025 {
5026 if (DECL_RTL (thisarg))
5027 abort (); /* Should be NULL here I think. */
5028 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
5029 {
5030 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5031 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
5032 }
5033 else
5034 {
5035 /* Variable-sized objects are pointers to their storage. */
5036 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5037 argsz += POINTER_SIZE;
5038 }
5039 }
5040
5041 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
5042
5043 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
5044
5045 ++nlab;
5046 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
5047 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
5048 this_function_bytecode =
5049 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
5050 }
5051
5052
5053 /* Expand end of bytecode function.  See the comment at
5054    expand_function_end, below, for details.  */
5055
5056 void
5057 bc_expand_function_end ()
5058 {
5059 char *ptrconsts;
5060
5061 expand_null_return ();
5062
5063 /* Emit any fixup code. This must be done before the call to
5064      BC_END_FUNCTION (), since that will cause the bytecode
5065 segment to be finished off and closed. */
5066
5067 expand_fixups (NULL_RTX);
5068
5069 ptrconsts = bc_end_function ();
5070
5071 bc_align_const (2 /* INT_ALIGN */);
5072
5073 /* If this changes also make sure to change bc-interp.h! */
5074
5075 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
5076 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
5077 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
5078 bc_emit_const_labelref (this_function_bytecode, 0);
5079 bc_emit_const_labelref (ptrconsts, 0);
5080 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
5081 }


/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr;

  if (output_bytecode)
    {
      bc_expand_function_start (subr, parms_have_cleanups);
      return;
    }

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

#ifdef SMALL_REGISTER_CLASSES
      /* Delay copying static chain if it is not a register to avoid
         conflicts with regs used for parameters.  */
      if (GET_CODE (static_chain_incoming_rtx) == REG)
#endif
        emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (struct_value_incoming_rtx)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, struct_value_incoming_rtx);
            }
        }
      if (value_address)
        {
          DECL_RTL (DECL_RESULT (subr))
            = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
          MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
            = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups)
    {
      /* If function will end with cleanup code for parms,
         compute the return value into a pseudo reg,
         which we will copy into the true return register
         after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
        {
          REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
          /* Needed because we may need to move this to memory
             in case it's a named return value whose address is taken.  */
          DECL_REGISTER (DECL_RESULT (subr)) = 1;
        }
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

#ifdef SMALL_REGISTER_CLASSES
  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);
#endif

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation, copy the static chain
         pointer into a pseudo.  If we have small register classes, copy
         the value from memory if static_chain_incoming_rtx is a REG.  If
         we do stupid register allocation, we use the stack address
         generated above.  */
      if (tem && ! obey_regdecls)
        {
#ifdef SMALL_REGISTER_CLASSES
          /* If the static chain originally came in a register, put it back
             there, then move it out in the next insn.  The reason for
             this peculiar code is to satisfy function integration.  */
          if (GET_CODE (static_chain_incoming_rtx) == REG)
            emit_move_insn (static_chain_incoming_rtx, last_ptr);
#endif

          last_ptr = copy_to_reg (static_chain_incoming_rtx);
        }

      while (tem)
        {
          tree rtlexp = make_node (RTL_EXPR);

          RTL_EXPR_RTL (rtlexp) = last_ptr;
          context_display = tree_cons (tem, rtlexp, context_display);
          tem = decl_function_context (tem);
          if (tem == 0)
            break;
          /* Chain through stack frames, assuming pointer to next lexical frame
             is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
          last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
          last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
                                           memory_address (Pmode, last_ptr)));

          /* If we are not optimizing, ensure that we know that this
             piece of context is live over the entire function.  */
          if (! optimize)
            save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
                                      save_expr_regs);
        }
    }

  /* The tail-recursion label, if we end up needing one, goes after the
     display initializations.  Ensure we have a NOTE here since some
     things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Now evaluate the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
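
/* To summarize the return-value setup above: an aggregate return value
   lives in memory at the incoming struct-value address (or in static
   space for pcc-style returns); a VOIDmode result gets no rtl at all;
   when parms have cleanups the value is computed into a pseudo and
   copied to the hard return register by expand_function_end; otherwise
   it is computed directly into the register given by
   FUNCTION_OUTGOING_VALUE or FUNCTION_VALUE.  */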
\f
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  if (output_bytecode)
    {
      bc_expand_function_end ();
      return;
    }

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
        setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx blktramp;
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
         initializing trampolines.  */
      if (initial_trampoline == 0)
        {
          end_temporary_allocation ();
          initial_trampoline
            = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
          resume_temporary_allocation ();
        }
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
                       GEN_INT (TRAMPOLINE_SIZE),
                       FUNCTION_BOUNDARY / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }
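
  /* A trampoline is a small block of code, materialized at run time,
     that loads the static chain for a nested function and then jumps
     to that function's real entry point.  The loop above copies the
     machine's template into each block and lets the target macro
     INITIALIZE_TRAMPOLINE store the function address and static chain
     into it, all at entry to the containing function.  */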

  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
        {
          use_variable (XEXP (tem, 0));
          use_variable_after (XEXP (tem, 0), parm_birth_insn);
        }

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Output a line number for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }

  /* If scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      rtx outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
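
/* In outline, the non-bytecode path above emits, in order: trampoline
   initialization insns (placed back at function entry), the
   FUNCTION_END note and a final line note, RETURN_LABEL if one is
   needed, a stack save/restore pair when alloca was called, the copy
   of a pseudo return value into the hard return register, the copy of
   an aggregate return address into the register debuggers and
   pcc-style callers expect, and finally an explicit return insn on
   machines that have one.  */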
\f
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
        vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}

/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = 0; vec[j]; j++)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
            count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
        if (INSN_UID (insn) == vec[j])
          return 1;
    }
  return 0;
}
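
#if 0
/* Standalone sketch (illustrative only) of the zero-terminated UID
   vector protocol shared by record_insns and contains above: the
   vector holds INSN_UIDs and ends with a 0 sentinel, so callers need
   not record its length.  The helper name and the example values are
   hypothetical.  */
static int
uid_vec_member (vec, uid)
     int *vec;
     int uid;
{
  int j;

  for (j = 0; vec[j]; j++)
    if (vec[j] == uid)
      return 1;
  return 0;
}

/* Usage:
     int v[] = { 42, 43, 44, 0 };
     uid_vec_member (v, 43)	-- returns 1
     uid_vec_member (v, 7)	-- returns 0  */
#endif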

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
         prologue insns and a NOTE_INSN_PROLOGUE_END.  */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block.  Ignore them
         if they form a basic block unto themselves.  */
      if (basic_block_head && n_basic_blocks
          && GET_CODE (basic_block_head[0]) != CODE_LABEL)
        basic_block_head[0] = NEXT_INSN (f);

      /* Retain a map of the prologue insns.  */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue.  */
      if (! (prev && GET_CODE (prev) == BARRIER))
        {
          rtx tail, seq, tem;
          rtx first_use = 0;
          rtx last_use = 0;

          /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
             epilogue insns, the USE insns at the end of a function,
             the jump insn that returns, and then a BARRIER.  */

          /* Move the USE insns at the end of a function onto a list.  */
          while (prev
                 && GET_CODE (prev) == INSN
                 && GET_CODE (PATTERN (prev)) == USE)
            {
              tem = prev;
              prev = prev_nonnote_insn (prev);

              NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
              PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
              if (first_use)
                {
                  NEXT_INSN (tem) = first_use;
                  PREV_INSN (first_use) = tem;
                }
              first_use = tem;
              if (!last_use)
                last_use = tem;
            }

          emit_barrier_after (insn);

          seq = gen_epilogue ();
          tail = emit_jump_insn_after (seq, insn);

          /* Insert the USE insns immediately before the return insn, which
             must be the first instruction before the final barrier.  */
          if (first_use)
            {
              tem = prev_nonnote_insn (get_last_insn ());
              NEXT_INSN (PREV_INSN (tem)) = first_use;
              PREV_INSN (first_use) = PREV_INSN (tem);
              PREV_INSN (tem) = last_use;
              NEXT_INSN (last_use) = tem;
            }

          emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

          /* Include the new epilogue insns in the last block.  Ignore
             them if they form a basic block unto themselves.  */
          if (basic_block_end && n_basic_blocks
              && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
            basic_block_end[n_basic_blocks - 1] = tail;

          /* Retain a map of the epilogue insns.  */
          epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
          return;
        }
    }
#endif
  epilogue = 0;
}
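
/* The PROLOGUE and EPILOGUE maps recorded above are consumed by
   contains () and by reposition_prologue_and_epilogue_notes, below,
   once instruction scheduling and reorg may have moved the prologue
   and epilogue insns away from their notes.  */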

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      rtx next, prev;
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; len && insn; insn = NEXT_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                    note = insn;
                }
              else if ((len -= contains (insn, prologue)) == 0)
                {
                  /* Find the prologue-end note if we haven't already, and
                     move it to just after the last prologue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; note = NEXT_INSN (note);)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                          break;
                    }
                  next = NEXT_INSN (note);
                  prev = PREV_INSN (note);
                  if (prev)
                    NEXT_INSN (prev) = next;
                  if (next)
                    PREV_INSN (next) = prev;
                  add_insn_after (note, insn);
                }
            }
        }

      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                    note = insn;
                }
              else if ((len -= contains (insn, epilogue)) == 0)
                {
                  /* Find the epilogue-begin note if we haven't already, and
                     move it to just before the first epilogue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; note = PREV_INSN (note);)
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                          break;
                    }
                  next = NEXT_INSN (note);
                  prev = PREV_INSN (note);
                  if (prev)
                    NEXT_INSN (prev) = next;
                  if (next)
                    PREV_INSN (next) = prev;
                  add_insn_after (note, PREV_INSN (insn));
                }
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}