/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg,
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "bytecode.h"
#include "bc-emit.h"

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
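
/* For example, with ALIGN == 8: CEIL_ROUND (13, 8) == 16 and
   CEIL_ROUND (16, 8) == 16, while FLOOR_ROUND (13, 8) == 8 and
   FLOOR_ROUND (-13, 8) == -16, since the masking rounds toward
   negative infinity.  */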

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that the parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  int base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  int full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.
   Not an rtx.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
static tree round_down PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
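
/* A sketch (illustrative only, not code from this file) of how the
   language front ends are expected to bracket the expansion of a
   nested function with the entry points above:  */
#if 0
  push_function_context ();     /* save the containing function's state */
  /* ... generate RTL for the nested function ... */
  pop_function_context ();      /* restore the containing function */
#endif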
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}
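
/* For instance, on a machine where FRAME_GROWS_DOWNWARD is defined,
   allocating a 4-byte slot moves frame_offset from 0 to -4, and
   get_frame_size () then reports 4.  */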

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
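
/* A minimal usage sketch (illustrative, not called from this file):
   allocate one slot aligned for its mode, and one BLKmode slot whose
   size is rounded up and aligned to BIGGEST_ALIGNMENT.  */
#if 0
  rtx word_slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
  rtx blk_slot = assign_stack_local (BLKmode, 16, -1);
#endif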

/* Assign a stack slot in a containing function.
   The first three arguments are the same as in the preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer-term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;
  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx (MEM, BLKmode,
                                 plus_constant (XEXP (best_p->slot, 0),
                                                rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
                                         stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }
  return p->slot;
}
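
/* An illustrative use of KEEP (a sketch, not original code): a
   statement-lifetime scratch slot is allocated with KEEP == 0 and
   becomes reusable at the next free_temp_slots call.  */
#if 0
  rtx scratch = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);
  /* ... emit insns that use SCRATCH within this statement ... */
  free_temp_slots ();           /* SCRATCH may now be reused */
#endif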
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      int size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
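
/* A sketch of the two paths through assign_temp, where AGG_TYPE and
   INT_TYPE stand for hypothetical tree nodes: an aggregate (or any
   request with MEMORY_REQUIRED == 1) yields a stack slot, while a
   scalar yields a fresh, possibly promoted, pseudo register.  */
#if 0
  rtx mem_temp = assign_temp (agg_type, 0, 1, 0);   /* stack slot */
  rtx reg_temp = assign_temp (int_type, 0, 0, 0);   /* pseudo register */
#endif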
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
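
/* The nesting discipline, as a sketch (RESULT is hypothetical): each
   push_temp_slots starts a new level, preserve_temp_slots moves the
   slot holding RESULT up a level, and pop_temp_slots frees whatever
   the current level still holds.  */
#if 0
  push_temp_slots ();
  /* ... allocate temporaries with assign_stack_temp / assign_temp ... */
  preserve_temp_slots (result);
  pop_temp_slots ();
#endif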

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */
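
/* For example, once `int i;' has been given a pseudo register, a later
   `&i' in the source causes the front end to call put_var_into_stack on
   the VAR_DECL for `i'; the DECL_RTL becomes a stack slot and the insns
   emitted so far are patched to use it.  */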

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
                        promoted_mode, decl_mode, TREE_SIDE_EFFECTS (decl));
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
{
  rtx new = 0;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries,
   and X is some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          else if (toplevel
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

#ifdef SMALL_REGISTER_CLASSES
              /* If the insn that copies the results of a CALL_INSN
                 into a pseudo now references VAR, we have to use an
                 intermediate pseudo since we want the life of the
                 return value register to be only a single insn.

                 If we don't use an intermediate pseudo, such things as
                 address computations to make the address of VAR valid
                 if it is not can be placed between the CALL_INSN and INSN.

                 To make sure this doesn't happen, we record the destination
                 of the CALL_INSN and see if the next insn uses both that
                 and VAR.  */

              if (call_dest != 0 && GET_CODE (insn) == INSN
                  && reg_mentioned_p (var, PATTERN (insn))
                  && reg_mentioned_p (call_dest, PATTERN (insn)))
                {
                  rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                  emit_insn_before (gen_move_insn (temp, call_dest), insn);

                  PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                call_dest, temp);
                }

              if (GET_CODE (insn) == CALL_INSN
                  && GET_CODE (PATTERN (insn)) == SET)
                call_dest = SET_DEST (PATTERN (insn));
              else if (GET_CODE (insn) == CALL_INSN
                       && GET_CODE (PATTERN (insn)) == PARALLEL
                       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
              else
                call_dest = 0;
#endif

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}
\f
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements.  If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement.  If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insns will copy VAR
   or the SUBREG, as appropriate, to the pseudo.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
     register rtx var;
     enum machine_mode promoted_mode;
     register rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
{
  register int i;
  register rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  register char *fmt;
  register rtx tem, tem1;
  struct fixup_replacement *replacement;

  switch (code)
    {
    case MEM:
      if (var == x)
        {
          /* If we already have a replacement, use it.  Otherwise,
             try to fix up this address in case it is invalid.  */

          replacement = find_fixup_replacement (replacements, var);
          if (replacement->new)
            {
              *loc = replacement->new;
              return;
            }

          *loc = replacement->new = x = fixup_stack_1 (x, insn);

          /* Unless we are forcing memory to register or we changed the mode,
             we can leave things the way they are if the insn is valid.  */

          INSN_CODE (insn) = -1;
          if (! flag_force_mem && GET_MODE (x) == promoted_mode
              && recog_memoized (insn) >= 0)
            return;

          *loc = replacement->new = gen_reg_rtx (promoted_mode);
          return;
        }

      /* If X contains VAR, we need to unshare it here so that we update
         each occurrence separately.  But all identical MEMs in one insn
         must be replaced with the same rtx because of the possibility of
         MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
        {
          replacement = find_fixup_replacement (replacements, x);
          if (replacement->new == 0)
            replacement->new = copy_most_rtx (x, var);

          *loc = x = replacement->new;
        }
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
         by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
          || (GET_CODE (XEXP (x, 0)) == SUBREG
              && SUBREG_REG (XEXP (x, 0)) == var))
        {
          /* Get TEM as a valid MEM in the mode presently in the insn.

             We don't worry about the possibility of MATCH_DUP here; it
             is highly unlikely and would be tricky to handle.  */

          tem = XEXP (x, 0);
          if (GET_CODE (tem) == SUBREG)
            {
              if (GET_MODE_BITSIZE (GET_MODE (tem))
                  > GET_MODE_BITSIZE (GET_MODE (var)))
                {
                  replacement = find_fixup_replacement (replacements, var);
                  if (replacement->new == 0)
                    replacement->new = gen_reg_rtx (GET_MODE (var));
                  SUBREG_REG (tem) = replacement->new;
                }

              tem = fixup_memory_subreg (tem, insn, 0);
            }
          else
            tem = fixup_stack_1 (tem, insn);

          /* Unless we want to load from memory, get TEM into the proper mode
             for an extract from memory.  This can only be done if the
             extract is at a constant position and length.  */

          if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_CODE (XEXP (x, 2)) == CONST_INT
              && ! mode_dependent_address_p (XEXP (tem, 0))
              && ! MEM_VOLATILE_P (tem))
            {
              enum machine_mode wanted_mode = VOIDmode;
              enum machine_mode is_mode = GET_MODE (tem);
              int width = INTVAL (XEXP (x, 1));
              int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
              if (GET_CODE (x) == ZERO_EXTRACT)
                wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
              if (GET_CODE (x) == SIGN_EXTRACT)
1792 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1793 #endif
1794 /* If we have a narrower mode, we can do something. */
1795 if (wanted_mode != VOIDmode
1796 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1797 {
1798 int offset = pos / BITS_PER_UNIT;
1799 rtx old_pos = XEXP (x, 2);
1800 rtx newmem;
1801
1802 /* If the bytes and bits are counted differently, we
1803 must adjust the offset. */
1804 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1805 offset = (GET_MODE_SIZE (is_mode)
1806 - GET_MODE_SIZE (wanted_mode) - offset);
1807
1808 pos %= GET_MODE_BITSIZE (wanted_mode);
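/* For instance, with IS_MODE == SImode and WANTED_MODE == QImode,
   a field at bit position 25 (bits and bytes counted from the same
   end) is found in the byte at offset 3 and starts at bit 1 within
   that byte.  */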
1809
1810 newmem = gen_rtx (MEM, wanted_mode,
1811 plus_constant (XEXP (tem, 0), offset));
1812 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1813 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1814 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1815
1816 /* Make the change and see if the insn remains valid. */
1817 INSN_CODE (insn) = -1;
1818 XEXP (x, 0) = newmem;
1819 XEXP (x, 2) = GEN_INT (pos);
1820
1821 if (recog_memoized (insn) >= 0)
1822 return;
1823
1824 /* Otherwise, restore old position. XEXP (x, 0) will be
1825 restored later. */
1826 XEXP (x, 2) = old_pos;
1827 }
1828 }
1829
1830 /* If we get here, the bitfield extract insn can't accept a memory
1831 reference. Copy the input into a register. */
1832
1833 tem1 = gen_reg_rtx (GET_MODE (tem));
1834 emit_insn_before (gen_move_insn (tem1, tem), insn);
1835 XEXP (x, 0) = tem1;
1836 return;
1837 }
1838 break;
1839
1840 case SUBREG:
1841 if (SUBREG_REG (x) == var)
1842 {
1843 /* If this is a special SUBREG made because VAR was promoted
1844 from a wider mode, replace it with VAR and call ourself
1845 recursively, this time saying that the object previously
1846 had its current mode (by virtue of the SUBREG). */
1847
1848 if (SUBREG_PROMOTED_VAR_P (x))
1849 {
1850 *loc = var;
1851 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1852 return;
1853 }
1854
1855 /* If this SUBREG makes VAR wider, it has become a paradoxical
1856 SUBREG with VAR in memory, but these aren't allowed at this
1857 stage of the compilation. So load VAR into a pseudo and take
1858 a SUBREG of that pseudo. */
1859 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1860 {
1861 replacement = find_fixup_replacement (replacements, var);
1862 if (replacement->new == 0)
1863 replacement->new = gen_reg_rtx (GET_MODE (var));
1864 SUBREG_REG (x) = replacement->new;
1865 return;
1866 }
1867
1868 /* See if we have already found a replacement for this SUBREG.
1869 If so, use it. Otherwise, make a MEM and see if the insn
1870 is recognized. If not, or if we should force MEM into a register,
1871 make a pseudo for this SUBREG. */
1872 replacement = find_fixup_replacement (replacements, x);
1873 if (replacement->new)
1874 {
1875 *loc = replacement->new;
1876 return;
1877 }
1878
1879 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1880
1881 INSN_CODE (insn) = -1;
1882 if (! flag_force_mem && recog_memoized (insn) >= 0)
1883 return;
1884
1885 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1886 return;
1887 }
1888 break;
1889
1890 case SET:
1891 /* First do special simplification of bit-field references. */
1892 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1893 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1894 optimize_bit_field (x, insn, 0);
1895 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1896 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1897 optimize_bit_field (x, insn, NULL_PTR);
1898
1899 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1900 into a register and then store it back out. */
1901 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1902 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
1903 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
1904 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
1905 > GET_MODE_SIZE (GET_MODE (var))))
1906 {
1907 replacement = find_fixup_replacement (replacements, var);
1908 if (replacement->new == 0)
1909 replacement->new = gen_reg_rtx (GET_MODE (var));
1910
1911 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
1912 emit_insn_after (gen_move_insn (var, replacement->new), insn);
1913 }
1914
1915 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1916 insn into a pseudo and store the low part of the pseudo into VAR. */
1917 if (GET_CODE (SET_DEST (x)) == SUBREG
1918 && SUBREG_REG (SET_DEST (x)) == var
1919 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1920 > GET_MODE_SIZE (GET_MODE (var))))
1921 {
1922 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1923 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1924 tem)),
1925 insn);
1926 break;
1927 }
1928
1929 {
1930 rtx dest = SET_DEST (x);
1931 rtx src = SET_SRC (x);
1932 rtx outerdest = dest;
1933
1934 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1935 || GET_CODE (dest) == SIGN_EXTRACT
1936 || GET_CODE (dest) == ZERO_EXTRACT)
1937 dest = XEXP (dest, 0);
1938
1939 if (GET_CODE (src) == SUBREG)
1940 src = XEXP (src, 0);
1941
1942 /* If VAR does not appear at the top level of the SET,
1943 just scan the lower levels of the tree. */
1944
1945 if (src != var && dest != var)
1946 break;
1947
1948 /* We will need to rerecognize this insn. */
1949 INSN_CODE (insn) = -1;
1950
1951 #ifdef HAVE_insv
1952 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1953 {
1954 /* Since this case will return, ensure we fixup all the
1955 operands here. */
1956 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1957 insn, replacements);
1958 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1959 insn, replacements);
1960 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1961 insn, replacements);
1962
1963 tem = XEXP (outerdest, 0);
1964
1965 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1966 that may appear inside a ZERO_EXTRACT.
1967 This was legitimate when the MEM was a REG. */
1968 if (GET_CODE (tem) == SUBREG
1969 && SUBREG_REG (tem) == var)
1970 tem = fixup_memory_subreg (tem, insn, 0);
1971 else
1972 tem = fixup_stack_1 (tem, insn);
1973
1974 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1975 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1976 && ! mode_dependent_address_p (XEXP (tem, 0))
1977 && ! MEM_VOLATILE_P (tem))
1978 {
1979 enum machine_mode wanted_mode
1980 = insn_operand_mode[(int) CODE_FOR_insv][0];
1981 enum machine_mode is_mode = GET_MODE (tem);
1982 int width = INTVAL (XEXP (outerdest, 1));
1983 int pos = INTVAL (XEXP (outerdest, 2));
1984
1985 /* If we have a narrower mode, we can do something. */
1986 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1987 {
1988 int offset = pos / BITS_PER_UNIT;
1989 rtx old_pos = XEXP (outerdest, 2);
1990 rtx newmem;
1991
1992 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1993 offset = (GET_MODE_SIZE (is_mode)
1994 - GET_MODE_SIZE (wanted_mode) - offset);
1995
1996 pos %= GET_MODE_BITSIZE (wanted_mode);
1997
1998 newmem = gen_rtx (MEM, wanted_mode,
1999 plus_constant (XEXP (tem, 0), offset));
2000 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2001 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2002 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2003
2004 /* Make the change and see if the insn remains valid. */
2005 INSN_CODE (insn) = -1;
2006 XEXP (outerdest, 0) = newmem;
2007 XEXP (outerdest, 2) = GEN_INT (pos);
2008
2009 if (recog_memoized (insn) >= 0)
2010 return;
2011
2012 /* Otherwise, restore the old position.  XEXP (outerdest, 0) will be
2013 restored later. */
2014 XEXP (outerdest, 2) = old_pos;
2015 }
2016 }
2017
2018 /* If we get here, the bit-field store doesn't allow memory
2019 or isn't located at a constant position. Load the value into
2020 a register, do the store, and put it back into memory. */
2021
2022 tem1 = gen_reg_rtx (GET_MODE (tem));
2023 emit_insn_before (gen_move_insn (tem1, tem), insn);
2024 emit_insn_after (gen_move_insn (tem, tem1), insn);
2025 XEXP (outerdest, 0) = tem1;
2026 return;
2027 }
2028 #endif
2029
2030 /* STRICT_LOW_PART is a no-op on memory references
2031 and it can cause combinations to be unrecognizable,
2032 so eliminate it. */
2033
2034 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2035 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2036
2037 /* A valid insn to copy VAR into or out of a register
2038 must be left alone, to avoid an infinite loop here.
2039 If the reference to VAR is by a subreg, fix that up,
2040 since SUBREG is not valid for a memref.
2041 Also fix up the address of the stack slot.
2042
2043 Note that we must not try to recognize the insn until
2044 after we know that we have valid addresses and no
2045 (subreg (mem ...) ...) constructs, since these interfere
2046 with determining the validity of the insn. */
2047
2048 if ((SET_SRC (x) == var
2049 || (GET_CODE (SET_SRC (x)) == SUBREG
2050 && SUBREG_REG (SET_SRC (x)) == var))
2051 && (GET_CODE (SET_DEST (x)) == REG
2052 || (GET_CODE (SET_DEST (x)) == SUBREG
2053 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2054 && GET_MODE (var) == promoted_mode
2055 && x == single_set (insn))
2056 {
2057 rtx pat;
2058
2059 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2060 if (replacement->new)
2061 SET_SRC (x) = replacement->new;
2062 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2063 SET_SRC (x) = replacement->new
2064 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2065 else
2066 SET_SRC (x) = replacement->new
2067 = fixup_stack_1 (SET_SRC (x), insn);
2068
2069 if (recog_memoized (insn) >= 0)
2070 return;
2071
2072 /* INSN is not valid, but we know that we want to
2073 copy SET_SRC (x) to SET_DEST (x) in some way. So
2074 we generate the move and see whether it requires more
2075 than one insn. If it does, we emit those insns and
2076 delete INSN.  Otherwise, we can just replace the pattern
2077 of INSN; we have already verified above that INSN has
2078 no function other than to do X. */
2079
2080 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2081 if (GET_CODE (pat) == SEQUENCE)
2082 {
2083 emit_insn_after (pat, insn);
2084 PUT_CODE (insn, NOTE);
2085 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2086 NOTE_SOURCE_FILE (insn) = 0;
2087 }
2088 else
2089 PATTERN (insn) = pat;
2090
2091 return;
2092 }
2093
2094 if ((SET_DEST (x) == var
2095 || (GET_CODE (SET_DEST (x)) == SUBREG
2096 && SUBREG_REG (SET_DEST (x)) == var))
2097 && (GET_CODE (SET_SRC (x)) == REG
2098 || (GET_CODE (SET_SRC (x)) == SUBREG
2099 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2100 && GET_MODE (var) == promoted_mode
2101 && x == single_set (insn))
2102 {
2103 rtx pat;
2104
2105 if (GET_CODE (SET_DEST (x)) == SUBREG)
2106 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2107 else
2108 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2109
2110 if (recog_memoized (insn) >= 0)
2111 return;
2112
2113 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2114 if (GET_CODE (pat) == SEQUENCE)
2115 {
2116 emit_insn_after (pat, insn);
2117 PUT_CODE (insn, NOTE);
2118 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2119 NOTE_SOURCE_FILE (insn) = 0;
2120 }
2121 else
2122 PATTERN (insn) = pat;
2123
2124 return;
2125 }
2126
2127 /* Otherwise, storing into VAR must be handled specially
2128 by storing into a temporary and copying that into VAR
2129 with a new insn after this one. Note that this case
2130 will be used when storing into a promoted scalar since
2131 the insn will now have different modes on the input
2132 and output and hence will be invalid (except for the case
2133 of setting it to a constant, which does not need any
2134 change if it is valid). We generate extra code in that case,
2135 but combine.c will eliminate it. */
2136
2137 if (dest == var)
2138 {
2139 rtx temp;
2140 rtx fixeddest = SET_DEST (x);
2141
2142 /* STRICT_LOW_PART around a MEM can be discarded. */
2143 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2144 fixeddest = XEXP (fixeddest, 0);
2145 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2146 if (GET_CODE (fixeddest) == SUBREG)
2147 {
2148 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2149 promoted_mode = GET_MODE (fixeddest);
2150 }
2151 else
2152 fixeddest = fixup_stack_1 (fixeddest, insn);
2153
2154 temp = gen_reg_rtx (promoted_mode);
2155
2156 emit_insn_after (gen_move_insn (fixeddest,
2157 gen_lowpart (GET_MODE (fixeddest),
2158 temp)),
2159 insn);
2160
2161 SET_DEST (x) = temp;
2162 }
2163 }
2164 }
2165
2166 /* Nothing special about this RTX; fix its operands. */
2167
2168 fmt = GET_RTX_FORMAT (code);
2169 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2170 {
2171 if (fmt[i] == 'e')
2172 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2173 if (fmt[i] == 'E')
2174 {
2175 register int j;
2176 for (j = 0; j < XVECLEN (x, i); j++)
2177 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2178 insn, replacements);
2179 }
2180 }
2181 }
2182 \f
2183 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2184 return an rtx (MEM:m1 newaddr) which is equivalent.
2185 If any insns must be emitted to compute NEWADDR, put them before INSN.
2186
2187 UNCRITICAL nonzero means accept paradoxical subregs.
2188 This is used for subregs found inside REG_NOTES. */
2189
2190 static rtx
2191 fixup_memory_subreg (x, insn, uncritical)
2192 rtx x;
2193 rtx insn;
2194 int uncritical;
2195 {
2196 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2197 rtx addr = XEXP (SUBREG_REG (x), 0);
2198 enum machine_mode mode = GET_MODE (x);
2199 rtx saved, result;
2200
2201 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2202 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2203 && ! uncritical)
2204 abort ();
2205
2206 if (BYTES_BIG_ENDIAN)
2207 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2208 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
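/* For example, on a 32-bit big-endian machine,
   (subreg:QI (mem:SI ADDR) 0) refers to the low-order byte, which
   lives at ADDR + 3; the adjustment above yields that offset.  */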
2209 addr = plus_constant (addr, offset);
2210 if (!flag_force_addr && memory_address_p (mode, addr))
2211 /* Shortcut if no insns need be emitted. */
2212 return change_address (SUBREG_REG (x), mode, addr);
2213 start_sequence ();
2214 result = change_address (SUBREG_REG (x), mode, addr);
2215 emit_insn_before (gen_sequence (), insn);
2216 end_sequence ();
2217 return result;
2218 }
2219
2220 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2221 Replace subexpressions of X in place.
2222 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2223 Otherwise return X, with its contents possibly altered.
2224
2225 If any insns must be emitted to compute NEWADDR, put them before INSN.
2226
2227 UNCRITICAL is as in fixup_memory_subreg. */
2228
2229 static rtx
2230 walk_fixup_memory_subreg (x, insn, uncritical)
2231 register rtx x;
2232 rtx insn;
2233 int uncritical;
2234 {
2235 register enum rtx_code code;
2236 register char *fmt;
2237 register int i;
2238
2239 if (x == 0)
2240 return 0;
2241
2242 code = GET_CODE (x);
2243
2244 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2245 return fixup_memory_subreg (x, insn, uncritical);
2246
2247 /* Nothing special about this RTX; fix its operands. */
2248
2249 fmt = GET_RTX_FORMAT (code);
2250 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2251 {
2252 if (fmt[i] == 'e')
2253 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2254 if (fmt[i] == 'E')
2255 {
2256 register int j;
2257 for (j = 0; j < XVECLEN (x, i); j++)
2258 XVECEXP (x, i, j)
2259 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2260 }
2261 }
2262 return x;
2263 }
2264 \f
2265 /* For each memory ref within X, if it refers to a stack slot
2266 with an out of range displacement, put the address in a temp register
2267 (emitting new insns before INSN to load these registers)
2268 and alter the memory ref to use that register.
2269 Replace each such MEM rtx with a copy, to avoid clobberage. */
2270
2271 static rtx
2272 fixup_stack_1 (x, insn)
2273 rtx x;
2274 rtx insn;
2275 {
2276 register int i;
2277 register RTX_CODE code = GET_CODE (x);
2278 register char *fmt;
2279
2280 if (code == MEM)
2281 {
2282 register rtx ad = XEXP (x, 0);
2283 /* If we have the address of a stack slot but it's not valid
2284 (displacement is too large), compute the sum in a register. */
2285 if (GET_CODE (ad) == PLUS
2286 && GET_CODE (XEXP (ad, 0)) == REG
2287 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2288 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2289 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2290 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2291 {
2292 rtx temp, seq;
2293 if (memory_address_p (GET_MODE (x), ad))
2294 return x;
2295
2296 start_sequence ();
2297 temp = copy_to_reg (ad);
2298 seq = gen_sequence ();
2299 end_sequence ();
2300 emit_insn_before (seq, insn);
2301 return change_address (x, VOIDmode, temp);
2302 }
2303 return x;
2304 }
2305
2306 fmt = GET_RTX_FORMAT (code);
2307 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2308 {
2309 if (fmt[i] == 'e')
2310 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2311 if (fmt[i] == 'E')
2312 {
2313 register int j;
2314 for (j = 0; j < XVECLEN (x, i); j++)
2315 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2316 }
2317 }
2318 return x;
2319 }
2320 \f
2321 /* Optimization: a bit-field instruction whose field
2322 happens to be a byte or halfword in memory
2323 can be changed to a move instruction.
2324
2325 We call here when INSN is an insn to examine or store into a bit-field.
2326 BODY is the SET-rtx to be altered.
2327
2328 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2329 (Currently this is called only from function.c, and EQUIV_MEM
2330 is always 0.) */
2331
2332 static void
2333 optimize_bit_field (body, insn, equiv_mem)
2334 rtx body;
2335 rtx insn;
2336 rtx *equiv_mem;
2337 {
2338 register rtx bitfield;
2339 int destflag;
2340 rtx seq = 0;
2341 enum machine_mode mode;
2342
2343 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2344 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2345 bitfield = SET_DEST (body), destflag = 1;
2346 else
2347 bitfield = SET_SRC (body), destflag = 0;
2348
2349 /* First check that the field being stored has constant size and position
2350 and is in fact a byte or halfword suitably aligned. */
2351
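/* For example, (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8))
   names an aligned byte, so it can be rewritten as a QImode move.  */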
2352 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2353 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2354 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2355 != BLKmode)
2356 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2357 {
2358 register rtx memref = 0;
2359
2360 /* Now check that the containing word is memory, not a register,
2361 and that it is safe to change the machine mode. */
2362
2363 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2364 memref = XEXP (bitfield, 0);
2365 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2366 && equiv_mem != 0)
2367 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2368 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2369 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2370 memref = SUBREG_REG (XEXP (bitfield, 0));
2371 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2372 && equiv_mem != 0
2373 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2374 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2375
2376 if (memref
2377 && ! mode_dependent_address_p (XEXP (memref, 0))
2378 && ! MEM_VOLATILE_P (memref))
2379 {
2380 /* Now adjust the address, first for any subreg'ing
2381 that we are now getting rid of,
2382 and then for which byte of the word is wanted. */
2383
2384 register int offset = INTVAL (XEXP (bitfield, 2));
2385 rtx insns;
2386
2387 /* Adjust OFFSET to count bits from low-address byte. */
2388 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2389 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2390 - offset - INTVAL (XEXP (bitfield, 1)));
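/* E.g., an 8-bit field at bit 0 of a 32-bit operand, with bits
   numbered from the end opposite the bytes, starts 32 - 0 - 8 = 24
   bits from the low-address byte.  */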
2391
2392 /* Adjust OFFSET to count bytes from low-address byte. */
2393 offset /= BITS_PER_UNIT;
2394 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2395 {
2396 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2397 if (BYTES_BIG_ENDIAN)
2398 offset -= (MIN (UNITS_PER_WORD,
2399 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2400 - MIN (UNITS_PER_WORD,
2401 GET_MODE_SIZE (GET_MODE (memref))));
2402 }
2403
2404 start_sequence ();
2405 memref = change_address (memref, mode,
2406 plus_constant (XEXP (memref, 0), offset));
2407 insns = get_insns ();
2408 end_sequence ();
2409 emit_insns_before (insns, insn);
2410
2411 /* Store this memory reference where
2412 we found the bit field reference. */
2413
2414 if (destflag)
2415 {
2416 validate_change (insn, &SET_DEST (body), memref, 1);
2417 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2418 {
2419 rtx src = SET_SRC (body);
2420 while (GET_CODE (src) == SUBREG
2421 && SUBREG_WORD (src) == 0)
2422 src = SUBREG_REG (src);
2423 if (GET_MODE (src) != GET_MODE (memref))
2424 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2425 validate_change (insn, &SET_SRC (body), src, 1);
2426 }
2427 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2428 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2429 /* This shouldn't happen because anything that didn't have
2430 one of these modes should have got converted explicitly
2431 and then referenced through a subreg.
2432 This is so because the original bit-field was
2433 handled by agg_mode and so its tree structure had
2434 the same mode that memref now has. */
2435 abort ();
2436 }
2437 else
2438 {
2439 rtx dest = SET_DEST (body);
2440
2441 while (GET_CODE (dest) == SUBREG
2442 && SUBREG_WORD (dest) == 0
2443 && (GET_MODE_CLASS (GET_MODE (dest))
2444 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2445 dest = SUBREG_REG (dest);
2446
2447 validate_change (insn, &SET_DEST (body), dest, 1);
2448
2449 if (GET_MODE (dest) == GET_MODE (memref))
2450 validate_change (insn, &SET_SRC (body), memref, 1);
2451 else
2452 {
2453 /* Convert the mem ref to the destination mode. */
2454 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2455
2456 start_sequence ();
2457 convert_move (newreg, memref,
2458 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2459 seq = get_insns ();
2460 end_sequence ();
2461
2462 validate_change (insn, &SET_SRC (body), newreg, 1);
2463 }
2464 }
2465
2466 /* See if we can convert this extraction or insertion into
2467 a simple move insn. We might not be able to do so if this
2468 was, for example, part of a PARALLEL.
2469
2470 If we succeed, write out any needed conversions. If we fail,
2471 it is hard to guess why we failed, so don't do anything
2472 special; just let the optimization be suppressed. */
2473
2474 if (apply_change_group () && seq)
2475 emit_insns_before (seq, insn);
2476 }
2477 }
2478 }
2479 \f
2480 /* These routines are responsible for converting virtual register references
2481 to the actual hard register references once RTL generation is complete.
2482
2483 The following four variables are used for communication between the
2484 routines. They contain the offsets of the virtual registers from their
2485 respective hard registers. */
2486
2487 static int in_arg_offset;
2488 static int var_offset;
2489 static int dynamic_offset;
2490 static int out_arg_offset;
2491
2492 /* On most machines, the stack pointer register is equivalent to the bottom
2493 of the stack. */
2494
2495 #ifndef STACK_POINTER_OFFSET
2496 #define STACK_POINTER_OFFSET 0
2497 #endif
2498
2499 /* If not defined, pick an appropriate default for the offset of dynamically
2500 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2501 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2502
2503 #ifndef STACK_DYNAMIC_OFFSET
2504
2505 #ifdef ACCUMULATE_OUTGOING_ARGS
2506 /* The bottom of the stack points to the actual arguments. If
2507 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2508 parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2509 stack space for register parameters is not pushed by the caller, but
2510 rather is part of the fixed stack areas and hence not included in
2511 `current_function_outgoing_args_size'. Nevertheless, we must allow
2512 for it when allocating stack dynamic objects. */
2513
2514 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2515 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2516 (current_function_outgoing_args_size \
2517 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2518
2519 #else
2520 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2521 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2522 #endif
2523
2524 #else
2525 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2526 #endif
2527 #endif
2528
2529 /* Pass through the INSNS of function FNDECL and convert virtual register
2530 references to hard register references. */
2531
2532 void
2533 instantiate_virtual_regs (fndecl, insns)
2534 tree fndecl;
2535 rtx insns;
2536 {
2537 rtx insn;
2538
2539 /* Compute the offsets to use for this function. */
2540 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2541 var_offset = STARTING_FRAME_OFFSET;
2542 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2543 out_arg_offset = STACK_POINTER_OFFSET;
2544
2545 /* Scan all variables and parameters of this function. For each that is
2546 in memory, instantiate all virtual registers if the result is a valid
2547 address. If not, we do it later. That will handle most uses of virtual
2548 regs on many machines. */
2549 instantiate_decls (fndecl, 1);
2550
2551 /* Initialize recognition, indicating that volatile is OK. */
2552 init_recog ();
2553
2554 /* Scan through all the insns, instantiating every virtual register still
2555 present. */
2556 for (insn = insns; insn; insn = NEXT_INSN (insn))
2557 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2558 || GET_CODE (insn) == CALL_INSN)
2559 {
2560 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2561 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2562 }
2563
2564 /* Now instantiate the remaining register equivalences for debugging info.
2565 These will not be valid addresses. */
2566 instantiate_decls (fndecl, 0);
2567
2568 /* Indicate that, from now on, assign_stack_local should use
2569 frame_pointer_rtx. */
2570 virtuals_instantiated = 1;
2571 }
2572
2573 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2574 all virtual registers in their DECL_RTL's.
2575
2576 If VALID_ONLY, do this only if the resulting address is still valid.
2577 Otherwise, always do it. */
2578
2579 static void
2580 instantiate_decls (fndecl, valid_only)
2581 tree fndecl;
2582 int valid_only;
2583 {
2584 tree decl;
2585
2586 if (DECL_SAVED_INSNS (fndecl))
2587 /* When compiling an inline function, the obstack used for
2588 rtl allocation is the maybepermanent_obstack. Calling
2589 `resume_temporary_allocation' switches us back to that
2590 obstack while we process this function's parameters. */
2591 resume_temporary_allocation ();
2592
2593 /* Process all parameters of the function. */
2594 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2595 {
2596 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2597 valid_only);
2598 instantiate_decl (DECL_INCOMING_RTL (decl),
2599 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2600 }
2601
2602 /* Now process all variables defined in the function or its subblocks. */
2603 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2604
2605 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2606 {
2607 /* Save all rtl allocated for this function by raising the
2608 high-water mark on the maybepermanent_obstack. */
2609 preserve_data ();
2610 /* All further rtl allocation is now done in the current_obstack. */
2611 rtl_in_current_obstack ();
2612 }
2613 }
2614
2615 /* Subroutine of instantiate_decls: Process all decls in the given
2616 BLOCK node and all its subblocks. */
2617
2618 static void
2619 instantiate_decls_1 (let, valid_only)
2620 tree let;
2621 int valid_only;
2622 {
2623 tree t;
2624
2625 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2626 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2627 valid_only);
2628
2629 /* Process all subblocks. */
2630 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2631 instantiate_decls_1 (t, valid_only);
2632 }
2633
2634 /* Subroutine of the preceding procedures: Given RTL representing a
2635 decl and the size of the object, do any instantiation required.
2636
2637 If VALID_ONLY is non-zero, it means that the RTL should only be
2638 changed if the new address is valid. */
2639
2640 static void
2641 instantiate_decl (x, size, valid_only)
2642 rtx x;
2643 int size;
2644 int valid_only;
2645 {
2646 enum machine_mode mode;
2647 rtx addr;
2648
2649 /* If this is not a MEM, no need to do anything. Similarly if the
2650 address is a constant or a register that is not a virtual register. */
2651
2652 if (x == 0 || GET_CODE (x) != MEM)
2653 return;
2654
2655 addr = XEXP (x, 0);
2656 if (CONSTANT_P (addr)
2657 || (GET_CODE (addr) == REG
2658 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2659 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2660 return;
2661
2662 /* If we should only do this if the address is valid, copy the address.
2663 We need to do this so we can undo any changes that might make the
2664 address invalid. This copy is unfortunate, but probably can't be
2665 avoided. */
2666
2667 if (valid_only)
2668 addr = copy_rtx (addr);
2669
2670 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2671
2672 if (! valid_only)
2673 return;
2674
2675 /* Now verify that the resulting address is valid for every integer or
2676 floating-point mode up to and including SIZE bytes long. We do this
2677 since the object might be accessed in any mode and frame addresses
2678 are shared. */
2679
2680 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2681 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2682 mode = GET_MODE_WIDER_MODE (mode))
2683 if (! memory_address_p (mode, addr))
2684 return;
2685
2686 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2687 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2688 mode = GET_MODE_WIDER_MODE (mode))
2689 if (! memory_address_p (mode, addr))
2690 return;
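/* For instance, an 8-byte object must yield a valid address in each of
   QImode through DImode and, typically, SFmode and DFmode as well.  */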
2691
2692 /* Otherwise, put back the address, now that we have updated it and we
2693 know it is valid. */
2694
2695 XEXP (x, 0) = addr;
2696 }
2697 \f
2698 /* Given a pointer to a piece of rtx and an optional pointer to the
2699 containing object, instantiate any virtual registers present in it.
2700
2701 If EXTRA_INSNS, we always do the replacement and generate
2702 any extra insns before OBJECT.  If it is zero, we do nothing if the replacement
2703 is not valid.
2704
2705 Return 1 if we either had nothing to do or if we were able to do the
2706 needed replacement. Return 0 otherwise; we only return zero if
2707 EXTRA_INSNS is zero.
2708
2709 We first try some simple transformations to avoid the creation of extra
2710 pseudos. */
2711
2712 static int
2713 instantiate_virtual_regs_1 (loc, object, extra_insns)
2714 rtx *loc;
2715 rtx object;
2716 int extra_insns;
2717 {
2718 rtx x;
2719 RTX_CODE code;
2720 rtx new = 0;
2721 int offset;
2722 rtx temp;
2723 rtx seq;
2724 int i, j;
2725 char *fmt;
2726
2727 /* Re-start here to avoid recursion in common cases. */
2728 restart:
2729
2730 x = *loc;
2731 if (x == 0)
2732 return 1;
2733
2734 code = GET_CODE (x);
2735
2736 /* Check for some special cases. */
2737 switch (code)
2738 {
2739 case CONST_INT:
2740 case CONST_DOUBLE:
2741 case CONST:
2742 case SYMBOL_REF:
2743 case CODE_LABEL:
2744 case PC:
2745 case CC0:
2746 case ASM_INPUT:
2747 case ADDR_VEC:
2748 case ADDR_DIFF_VEC:
2749 case RETURN:
2750 return 1;
2751
2752 case SET:
2753 /* We are allowed to set the virtual registers.  This means that
2754 the actual register should receive the source minus the
2755 appropriate offset. This is used, for example, in the handling
2756 of non-local gotos. */
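/* In effect, (set virtual_stack_vars_rtx X) becomes
   (set frame_pointer_rtx (plus X (const_int -var_offset))).  */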
2757 if (SET_DEST (x) == virtual_incoming_args_rtx)
2758 new = arg_pointer_rtx, offset = - in_arg_offset;
2759 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2760 new = frame_pointer_rtx, offset = - var_offset;
2761 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2762 new = stack_pointer_rtx, offset = - dynamic_offset;
2763 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2764 new = stack_pointer_rtx, offset = - out_arg_offset;
2765
2766 if (new)
2767 {
2768 /* The only valid sources here are PLUS or REG. Just do
2769 the simplest possible thing to handle them. */
2770 if (GET_CODE (SET_SRC (x)) != REG
2771 && GET_CODE (SET_SRC (x)) != PLUS)
2772 abort ();
2773
2774 start_sequence ();
2775 if (GET_CODE (SET_SRC (x)) != REG)
2776 temp = force_operand (SET_SRC (x), NULL_RTX);
2777 else
2778 temp = SET_SRC (x);
2779 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2780 seq = get_insns ();
2781 end_sequence ();
2782
2783 emit_insns_before (seq, object);
2784 SET_DEST (x) = new;
2785
2786 if (!validate_change (object, &SET_SRC (x), temp, 0)
2787 || ! extra_insns)
2788 abort ();
2789
2790 return 1;
2791 }
2792
2793 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2794 loc = &SET_SRC (x);
2795 goto restart;
2796
2797 case PLUS:
2798 /* Handle special case of virtual register plus constant. */
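/* E.g., (plus virtual_stack_vars_rtx (const_int 4)) becomes
   (plus frame_pointer_rtx (const_int (4 + var_offset))).  */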
2799 if (CONSTANT_P (XEXP (x, 1)))
2800 {
2801 rtx old, new_offset;
2802
2803 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2804 if (GET_CODE (XEXP (x, 0)) == PLUS)
2805 {
2806 rtx inner = XEXP (XEXP (x, 0), 0);
2807
2808 if (inner == virtual_incoming_args_rtx)
2809 new = arg_pointer_rtx, offset = in_arg_offset;
2810 else if (inner == virtual_stack_vars_rtx)
2811 new = frame_pointer_rtx, offset = var_offset;
2812 else if (inner == virtual_stack_dynamic_rtx)
2813 new = stack_pointer_rtx, offset = dynamic_offset;
2814 else if (inner == virtual_outgoing_args_rtx)
2815 new = stack_pointer_rtx, offset = out_arg_offset;
2816 else
2817 {
2818 loc = &XEXP (x, 0);
2819 goto restart;
2820 }
2821
2822 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2823 extra_insns);
2824 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2825 }
2826
2827 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2828 new = arg_pointer_rtx, offset = in_arg_offset;
2829 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2830 new = frame_pointer_rtx, offset = var_offset;
2831 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2832 new = stack_pointer_rtx, offset = dynamic_offset;
2833 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2834 new = stack_pointer_rtx, offset = out_arg_offset;
2835 else
2836 {
2837 /* We know the second operand is a constant. Unless the
2838 first operand is a REG (which has already been checked),
2839 it needs to be checked. */
2840 if (GET_CODE (XEXP (x, 0)) != REG)
2841 {
2842 loc = &XEXP (x, 0);
2843 goto restart;
2844 }
2845 return 1;
2846 }
2847
2848 new_offset = plus_constant (XEXP (x, 1), offset);
2849
2850 /* If the new constant is zero, try to replace the sum with just
2851 the register. */
2852 if (new_offset == const0_rtx
2853 && validate_change (object, loc, new, 0))
2854 return 1;
2855
2856 /* Next try to replace the register and new offset.
2857 There are two changes to validate here and we can't assume that
2858 when the old offset equals the new one, just changing the register
2859 will yield a valid insn.  In the interests of a little efficiency,
2860 however, we only call validate change once (we don't queue up the
2861 changes and then call apply_change_group). */
2862
2863 old = XEXP (x, 0);
2864 if (offset == 0
2865 ? ! validate_change (object, &XEXP (x, 0), new, 0)
2866 : (XEXP (x, 0) = new,
2867 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
2868 {
2869 if (! extra_insns)
2870 {
2871 XEXP (x, 0) = old;
2872 return 0;
2873 }
2874
2875 /* Otherwise copy the new constant into a register and replace
2876 the constant with that register. */
2877 temp = gen_reg_rtx (Pmode);
2878 XEXP (x, 0) = new;
2879 if (validate_change (object, &XEXP (x, 1), temp, 0))
2880 emit_insn_before (gen_move_insn (temp, new_offset), object);
2881 else
2882 {
2883 /* If that didn't work, replace this expression with a
2884 register containing the sum. */
2885
2886 XEXP (x, 0) = old;
2887 new = gen_rtx (PLUS, Pmode, new, new_offset);
2888
2889 start_sequence ();
2890 temp = force_operand (new, NULL_RTX);
2891 seq = get_insns ();
2892 end_sequence ();
2893
2894 emit_insns_before (seq, object);
2895 if (! validate_change (object, loc, temp, 0)
2896 && ! validate_replace_rtx (x, temp, object))
2897 abort ();
2898 }
2899 }
2900
2901 return 1;
2902 }
2903
2904 /* Fall through to generic two-operand expression case. */
2905 case EXPR_LIST:
2906 case CALL:
2907 case COMPARE:
2908 case MINUS:
2909 case MULT:
2910 case DIV: case UDIV:
2911 case MOD: case UMOD:
2912 case AND: case IOR: case XOR:
2913 case ROTATERT: case ROTATE:
2914 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2915 case NE: case EQ:
2916 case GE: case GT: case GEU: case GTU:
2917 case LE: case LT: case LEU: case LTU:
2918 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2919 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2920 loc = &XEXP (x, 0);
2921 goto restart;
2922
2923 case MEM:
2924 /* Most cases of MEM that convert to valid addresses have already been
2925 handled by our scan of regno_reg_rtx. The only special handling we
2926 need here is to make a copy of the rtx to ensure it isn't being
2927 shared if we have to change it to a pseudo.
2928
2929 If the rtx is a simple reference to an address via a virtual register,
2930 it can potentially be shared. In such cases, first try to make it
2931 a valid address, which can also be shared. Otherwise, copy it and
2932 proceed normally.
2933
2934 First check for common cases that need no processing. These are
2935 usually due to instantiation already being done on a previous instance
2936 of a shared rtx. */
2937
2938 temp = XEXP (x, 0);
2939 if (CONSTANT_ADDRESS_P (temp)
2940 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2941 || temp == arg_pointer_rtx
2942 #endif
2943 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2944 || temp == hard_frame_pointer_rtx
2945 #endif
2946 || temp == frame_pointer_rtx)
2947 return 1;
2948
2949 if (GET_CODE (temp) == PLUS
2950 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2951 && (XEXP (temp, 0) == frame_pointer_rtx
2952 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2953 || XEXP (temp, 0) == hard_frame_pointer_rtx
2954 #endif
2955 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2956 || XEXP (temp, 0) == arg_pointer_rtx
2957 #endif
2958 ))
2959 return 1;
2960
2961 if (temp == virtual_stack_vars_rtx
2962 || temp == virtual_incoming_args_rtx
2963 || (GET_CODE (temp) == PLUS
2964 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2965 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2966 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2967 {
2968 /* This MEM may be shared. If the substitution can be done without
2969 the need to generate new pseudos, we want to do it in place
2970 so all copies of the shared rtx benefit. The call below will
2971 only make substitutions if the resulting address is still
2972 valid.
2973
2974 Note that we cannot pass X as the object in the recursive call
2975 since the insn being processed may not allow all valid
2976 addresses.  However, if we were not passed an object, we can
2977 only modify X without copying it if X will have a valid
2978 address.
2979
2980 ??? Also note that this can still lose if OBJECT is an insn that
2981 has fewer restrictions on an address than some other insn.
2982 In that case, we will modify the shared address. This case
2983 doesn't seem very likely, though. */
2984
2985 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2986 object ? object : x, 0))
2987 return 1;
2988
2989 /* Otherwise make a copy and process that copy. We copy the entire
2990 RTL expression since it might be a PLUS which could also be
2991 shared. */
2992 *loc = x = copy_rtx (x);
2993 }
2994
2995 /* Fall through to generic unary operation case. */
2996 case USE:
2997 case CLOBBER:
2998 case SUBREG:
2999 case STRICT_LOW_PART:
3000 case NEG: case NOT:
3001 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3002 case SIGN_EXTEND: case ZERO_EXTEND:
3003 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3004 case FLOAT: case FIX:
3005 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3006 case ABS:
3007 case SQRT:
3008 case FFS:
3009 /* These cases either have just one operand or we know that we need not
3010 check the rest of the operands. */
3011 loc = &XEXP (x, 0);
3012 goto restart;
3013
3014 case REG:
3015 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3016 in front of this insn and substitute the temporary. */
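/* E.g., a bare virtual_stack_vars_rtx is replaced by
   (plus frame_pointer_rtx (const_int var_offset)).  */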
3017 if (x == virtual_incoming_args_rtx)
3018 new = arg_pointer_rtx, offset = in_arg_offset;
3019 else if (x == virtual_stack_vars_rtx)
3020 new = frame_pointer_rtx, offset = var_offset;
3021 else if (x == virtual_stack_dynamic_rtx)
3022 new = stack_pointer_rtx, offset = dynamic_offset;
3023 else if (x == virtual_outgoing_args_rtx)
3024 new = stack_pointer_rtx, offset = out_arg_offset;
3025
3026 if (new)
3027 {
3028 temp = plus_constant (new, offset);
3029 if (!validate_change (object, loc, temp, 0))
3030 {
3031 if (! extra_insns)
3032 return 0;
3033
3034 start_sequence ();
3035 temp = force_operand (temp, NULL_RTX);
3036 seq = get_insns ();
3037 end_sequence ();
3038
3039 emit_insns_before (seq, object);
3040 if (! validate_change (object, loc, temp, 0)
3041 && ! validate_replace_rtx (x, temp, object))
3042 abort ();
3043 }
3044 }
3045
3046 return 1;
3047 }
3048
3049 /* Scan all subexpressions. */
3050 fmt = GET_RTX_FORMAT (code);
3051 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3052 if (*fmt == 'e')
3053 {
3054 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3055 return 0;
3056 }
3057 else if (*fmt == 'E')
3058 for (j = 0; j < XVECLEN (x, i); j++)
3059 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3060 extra_insns))
3061 return 0;
3062
3063 return 1;
3064 }
3065 \f
3066 /* Optimization: assuming this function does not receive nonlocal gotos,
3067 delete the handlers for them, as well as the insns to establish
3068 and disestablish them. */
3069
3070 static void
3071 delete_handlers ()
3072 {
3073 rtx insn;
3074 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3075 {
3076 /* Delete the handler by turning off the flag that would
3077 prevent jump_optimize from deleting it.
3078 Also permit deletion of the nonlocal labels themselves
3079 if nothing local refers to them. */
3080 if (GET_CODE (insn) == CODE_LABEL)
3081 {
3082 tree t, last_t;
3083
3084 LABEL_PRESERVE_P (insn) = 0;
3085
3086 /* Remove it from the nonlocal_label list, to avoid confusing
3087 flow. */
3088 for (t = nonlocal_labels, last_t = 0; t;
3089 last_t = t, t = TREE_CHAIN (t))
3090 if (DECL_RTL (TREE_VALUE (t)) == insn)
3091 break;
3092 if (t)
3093 {
3094 if (! last_t)
3095 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3096 else
3097 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3098 }
3099 }
3100 if (GET_CODE (insn) == INSN
3101 && ((nonlocal_goto_handler_slot != 0
3102 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3103 || (nonlocal_goto_stack_level != 0
3104 && reg_mentioned_p (nonlocal_goto_stack_level,
3105 PATTERN (insn)))))
3106 delete_insn (insn);
3107 }
3108 }
3109
3110 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3111 of the current function. */
3112
3113 rtx
3114 nonlocal_label_rtx_list ()
3115 {
3116 tree t;
3117 rtx x = 0;
3118
3119 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3120 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3121
3122 return x;
3123 }
3124 \f
3125 /* Output a USE for any register use in RTL.
3126 This is used with -noreg to mark the extent of the lifespan
3127 of any registers used in a user-visible variable's DECL_RTL. */
3128
3129 void
3130 use_variable (rtl)
3131 rtx rtl;
3132 {
3133 if (GET_CODE (rtl) == REG)
3134 /* This is a register variable. */
3135 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3136 else if (GET_CODE (rtl) == MEM
3137 && GET_CODE (XEXP (rtl, 0)) == REG
3138 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3139 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3140 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3141 /* This is a variable-sized structure. */
3142 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3143 }
3144
3145 /* Like use_variable except that it outputs the USEs after INSN
3146 instead of at the end of the insn-chain. */
3147
3148 void
3149 use_variable_after (rtl, insn)
3150 rtx rtl, insn;
3151 {
3152 if (GET_CODE (rtl) == REG)
3153 /* This is a register variable. */
3154 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3155 else if (GET_CODE (rtl) == MEM
3156 && GET_CODE (XEXP (rtl, 0)) == REG
3157 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3158 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3159 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3160 /* This is a variable-sized structure. */
3161 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3162 }
3163 \f
3164 int
3165 max_parm_reg_num ()
3166 {
3167 return max_parm_reg;
3168 }
3169
3170 /* Return the first insn following those generated by `assign_parms'. */
3171
3172 rtx
3173 get_first_nonparm_insn ()
3174 {
3175 if (last_parm_insn)
3176 return NEXT_INSN (last_parm_insn);
3177 return get_insns ();
3178 }
3179
3180 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3181 Crash if there is none. */
3182
3183 rtx
3184 get_first_block_beg ()
3185 {
3186 register rtx searcher;
3187 register rtx insn = get_first_nonparm_insn ();
3188
3189 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3190 if (GET_CODE (searcher) == NOTE
3191 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3192 return searcher;
3193
3194 abort (); /* Invalid call to this function. (See comments above.) */
3195 return NULL_RTX;
3196 }
3197
3198 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3199 This means a type for which function calls must pass an address to the
3200 function or get an address back from the function.
3201 EXP may be a type node or an expression (whose type is tested). */
3202
3203 int
3204 aggregate_value_p (exp)
3205 tree exp;
3206 {
3207 int i, regno, nregs;
3208 rtx reg;
3209 tree type;
3210 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3211 type = exp;
3212 else
3213 type = TREE_TYPE (exp);
3214
3215 if (RETURN_IN_MEMORY (type))
3216 return 1;
3217 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3218 and thus can't be returned in registers. */
3219 if (TREE_ADDRESSABLE (type))
3220 return 1;
3221 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3222 return 1;
3223 /* Make sure we have suitable call-clobbered regs to return
3224 the value in; if not, we must return it in memory. */
3225 reg = hard_function_value (type, 0);
3226
3227 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3228 it is OK. */
3229 if (GET_CODE (reg) != REG)
3230 return 0;
3231
3232 regno = REGNO (reg);
3233 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3234 for (i = 0; i < nregs; i++)
3235 if (! call_used_regs[regno + i])
3236 return 1;
3237 return 0;
3238 }
3239 \f
3240 /* Assign RTL expressions to the function's parameters.
3241 This may involve copying them into registers and using
3242 those registers as the RTL for them.
3243
3244 If SECOND_TIME is non-zero it means that this function is being
3245 called a second time. This is done by integrate.c when a function's
3246 compilation is deferred. We need to come back here in case the
3247 FUNCTION_ARG macro computes items needed for the rest of the compilation
3248 (such as changing which registers are fixed or caller-saved). But suppress
3249 writing any insns or setting DECL_RTL of anything in this case. */
3250
3251 void
3252 assign_parms (fndecl, second_time)
3253 tree fndecl;
3254 int second_time;
3255 {
3256 register tree parm;
3257 register rtx entry_parm = 0;
3258 register rtx stack_parm = 0;
3259 CUMULATIVE_ARGS args_so_far;
3260 enum machine_mode promoted_mode, passed_mode;
3261 enum machine_mode nominal_mode, promoted_nominal_mode;
3262 int unsignedp;
3263 /* Total space needed so far for args on the stack,
3264 given as a constant and a tree-expression. */
3265 struct args_size stack_args_size;
3266 tree fntype = TREE_TYPE (fndecl);
3267 tree fnargs = DECL_ARGUMENTS (fndecl);
3268 /* This is used for the arg pointer when referring to stack args. */
3269 rtx internal_arg_pointer;
3270 /* This is a dummy PARM_DECL that we use for the function result if
3271 the function returns a structure. */
3272 tree function_result_decl = 0;
3273 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
3274 int varargs_setup = 0;
3275 rtx conversion_insns = 0;
3276
3277 /* Nonzero if the last arg is named `__builtin_va_alist',
3278 which is used on some machines for old-fashioned non-ANSI varargs.h;
3279 this should be stuck onto the stack as if it had arrived there. */
3280 int hide_last_arg
3281 = (current_function_varargs
3282 && fnargs
3283 && (parm = tree_last (fnargs)) != 0
3284 && DECL_NAME (parm)
3285 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3286 "__builtin_va_alist")));
3287
3288 /* Nonzero if function takes extra anonymous args.
3289 This means the last named arg must be on the stack
3290 right before the anonymous ones. */
3291 int stdarg
3292 = (TYPE_ARG_TYPES (fntype) != 0
3293 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3294 != void_type_node));
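/* A prototype with a fixed argument list ends its TYPE_ARG_TYPES chain
   with void_type_node; a trailing `...' leaves that terminator off,
   which is what the test above detects.  */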
3295
3296 current_function_stdarg = stdarg;
3297
3298 /* If the reg that the virtual arg pointer will be translated into is
3299 not a fixed reg or is the stack pointer, make a copy of the virtual
3300 arg pointer, and address parms via the copy. The frame pointer is
3301 considered fixed even though it is not marked as such.
3302
3303 The second time through, simply use ap to avoid generating rtx. */
3304
3305 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3306 || ! (fixed_regs[ARG_POINTER_REGNUM]
3307 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3308 && ! second_time)
3309 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3310 else
3311 internal_arg_pointer = virtual_incoming_args_rtx;
3312 current_function_internal_arg_pointer = internal_arg_pointer;
3313
3314 stack_args_size.constant = 0;
3315 stack_args_size.var = 0;
3316
3317 /* If struct value address is treated as the first argument, make it so. */
3318 if (aggregate_value_p (DECL_RESULT (fndecl))
3319 && ! current_function_returns_pcc_struct
3320 && struct_value_incoming_rtx == 0)
3321 {
3322 tree type = build_pointer_type (TREE_TYPE (fntype));
3323
3324 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3325
3326 DECL_ARG_TYPE (function_result_decl) = type;
3327 TREE_CHAIN (function_result_decl) = fnargs;
3328 fnargs = function_result_decl;
3329 }
3330
3331 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
3332 bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));
3333
3334 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3335 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3336 #else
3337 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3338 #endif
3339
3340 /* We haven't yet found an argument that we must push and pretend the
3341 caller did. */
3342 current_function_pretend_args_size = 0;
3343
3344 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3345 {
3346 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3347 struct args_size stack_offset;
3348 struct args_size arg_size;
3349 int passed_pointer = 0;
3350 int did_conversion = 0;
3351 tree passed_type = DECL_ARG_TYPE (parm);
3352 tree nominal_type = TREE_TYPE (parm);
3353
3354 /* Set LAST_NAMED if this is the last named arg before some
3355 anonymous args. We treat it as if it were anonymous too. */
3356 int last_named = ((TREE_CHAIN (parm) == 0
3357 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3358 && (stdarg || current_function_varargs));
3359
3360 if (TREE_TYPE (parm) == error_mark_node
3361 /* This can happen after weird syntax errors
3362 or if an enum type is defined among the parms. */
3363 || TREE_CODE (parm) != PARM_DECL
3364 || passed_type == NULL)
3365 {
3366 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3367 const0_rtx);
3368 TREE_USED (parm) = 1;
3369 continue;
3370 }
3371
3372 /* For a varargs.h function, save info about regs and stack space
3373 used by the individual args, not including the va_alist arg. */
3374 if (hide_last_arg && last_named)
3375 current_function_args_info = args_so_far;
3376
3377 /* Find mode of arg as it is passed, and mode of arg
3378 as it should be during execution of this function. */
3379 passed_mode = TYPE_MODE (passed_type);
3380 nominal_mode = TYPE_MODE (nominal_type);
3381
3382 /* If the parm's mode is VOIDmode, its value doesn't matter,
3383 so avoid the usual things like emit_move_insn that could crash. */
3384 if (nominal_mode == VOIDmode)
3385 {
3386 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3387 continue;
3388 }
3389
3390 /* If the parm is to be passed as a transparent union, use the
3391 type of the first field for the tests below. We have already
3392 verified that the modes are the same. */
3393 if (DECL_TRANSPARENT_UNION (parm)
3394 || TYPE_TRANSPARENT_UNION (passed_type))
3395 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3396
3397 /* See if this arg was passed by invisible reference. It is if
3398 it is an object whose size depends on the contents of the
3399 object itself or if the machine requires these objects be passed
3400 that way. */
3401
3402 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3403 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3404 || TREE_ADDRESSABLE (passed_type)
3405 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3406 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3407 passed_type, ! last_named)
3408 #endif
3409 )
3410 {
3411 passed_type = nominal_type = build_pointer_type (passed_type);
3412 passed_pointer = 1;
3413 passed_mode = nominal_mode = Pmode;
3414 }
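/* E.g. (a sketch): on a target whose FUNCTION_ARG_PASS_BY_REFERENCE
   accepts large structures, a 64-byte struct parm is rewritten just
   above into a parm of pointer type in Pmode; the code below then
   sees only the pointer, with PASSED_POINTER recording the fact.  */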
3415
3416 promoted_mode = passed_mode;
3417
3418 #ifdef PROMOTE_FUNCTION_ARGS
3419 /* Compute the mode to which the arg is actually extended. */
3420 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3421 #endif
3422
3423 /* Let machine desc say which reg (if any) the parm arrives in.
3424 0 means it arrives on the stack. */
3425 #ifdef FUNCTION_INCOMING_ARG
3426 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3427 passed_type, ! last_named);
3428 #else
3429 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3430 passed_type, ! last_named);
3431 #endif
3432
3433 if (entry_parm == 0)
3434 promoted_mode = passed_mode;
3435
3436 #ifdef SETUP_INCOMING_VARARGS
3437 /* If this is the last named parameter, do any required setup for
3438 varargs or stdargs. We need to know about the case of this being an
3439 addressable type, in which case we skip the registers it
3440 would have arrived in.
3441
3442 For stdargs, LAST_NAMED will be set for two parameters, the one that
3443 is actually the last named, and the dummy parameter. We only
3444 want to do this action once.
3445
3446 Also, indicate when RTL generation is to be suppressed. */
3447 if (last_named && !varargs_setup)
3448 {
3449 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3450 current_function_pretend_args_size,
3451 second_time);
3452 varargs_setup = 1;
3453 }
3454 #endif
3455
3456 /* Determine parm's home in the stack,
3457 in case it arrives in the stack or we should pretend it did.
3458
3459 Compute the stack position and rtx where the argument arrives
3460 and its size.
3461
3462 There is one complexity here: If this was a parameter that would
3463 have been passed in registers, but wasn't only because it is
3464 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3465 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3466 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3467 0 as it was the previous time. */
3468
3469 locate_and_pad_parm (promoted_mode, passed_type,
3470 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3471 1,
3472 #else
3473 #ifdef FUNCTION_INCOMING_ARG
3474 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3475 passed_type,
3476 (! last_named
3477 || varargs_setup)) != 0,
3478 #else
3479 FUNCTION_ARG (args_so_far, promoted_mode,
3480 passed_type,
3481 ! last_named || varargs_setup) != 0,
3482 #endif
3483 #endif
3484 fndecl, &stack_args_size, &stack_offset, &arg_size);
3485
3486 if (! second_time)
3487 {
3488 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3489
3490 if (offset_rtx == const0_rtx)
3491 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3492 else
3493 stack_parm = gen_rtx (MEM, promoted_mode,
3494 gen_rtx (PLUS, Pmode,
3495 internal_arg_pointer, offset_rtx));
3496
3497 /* If this is a memory ref that contains aggregate components,
3498 mark it as such for cse and loop optimize. Likewise if it
3499 is readonly. */
3500 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3501 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3502 }
3503
3504 /* If this parameter was passed both in registers and in the stack,
3505 use the copy on the stack. */
3506 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3507 entry_parm = 0;
3508
3509 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3510 /* If this parm was passed part in regs and part in memory,
3511 pretend it arrived entirely in memory
3512 by pushing the register-part onto the stack.
3513
3514 In the special case of a DImode or DFmode that is split,
3515 we could put it together in a pseudoreg directly,
3516 but for now that's not worth bothering with. */
3517
3518 if (entry_parm)
3519 {
3520 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3521 passed_type, ! last_named);
3522
3523 if (nregs > 0)
3524 {
3525 current_function_pretend_args_size
3526 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3527 / (PARM_BOUNDARY / BITS_PER_UNIT)
3528 * (PARM_BOUNDARY / BITS_PER_UNIT));
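/* For instance (a sketch with hypothetical target values):
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 make a parm with
   nregs == 3 occupy 12 bytes of registers, which the expression
   above rounds up to the next 8-byte boundary, for a pretend
   size of 16.  */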
3529
3530 if (! second_time)
3531 {
3532 /* Handle calls that pass values in multiple non-contiguous
3533 locations. The Irix 6 ABI has examples of this. */
3534 if (GET_CODE (entry_parm) == PARALLEL)
3535 emit_group_store (validize_mem (stack_parm),
3536 entry_parm);
3537 else
3538 move_block_from_reg (REGNO (entry_parm),
3539 validize_mem (stack_parm), nregs,
3540 int_size_in_bytes (TREE_TYPE (parm)));
3541 }
3542 entry_parm = stack_parm;
3543 }
3544 }
3545 #endif
3546
3547 /* If we didn't decide this parm came in a register,
3548 by default it came on the stack. */
3549 if (entry_parm == 0)
3550 entry_parm = stack_parm;
3551
3552 /* Record permanently how this parm was passed. */
3553 if (! second_time)
3554 DECL_INCOMING_RTL (parm) = entry_parm;
3555
3556 /* If there is actually space on the stack for this parm,
3557 count it in stack_args_size; otherwise set stack_parm to 0
3558 to indicate there is no preallocated stack slot for the parm. */
3559
3560 if (entry_parm == stack_parm
3561 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3562 /* On some machines, even if a parm value arrives in a register
3563 there is still an (uninitialized) stack slot allocated for it.
3564
3565 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3566 whether this parameter already has a stack slot allocated,
3567 because an arg block exists only if current_function_args_size
3568 is larger than some threshold, and we haven't calculated that
3569 yet. So, for now, we just assume that stack slots never exist
3570 in this case. */
3571 || REG_PARM_STACK_SPACE (fndecl) > 0
3572 #endif
3573 )
3574 {
3575 stack_args_size.constant += arg_size.constant;
3576 if (arg_size.var)
3577 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3578 }
3579 else
3580 /* No stack slot was pushed for this parm. */
3581 stack_parm = 0;
3582
3583 /* Update info on where next arg arrives in registers. */
3584
3585 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3586 passed_type, ! last_named);
3587
3588 /* If this is our second time through, we are done with this parm. */
3589 if (second_time)
3590 continue;
3591
3592 /* If we can't trust the parm stack slot to be aligned enough
3593 for its ultimate type, don't use that slot after entry.
3594 We'll make another stack slot, if we need one. */
3595 {
3596 int thisparm_boundary
3597 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3598
3599 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3600 stack_parm = 0;
3601 }
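/* E.g. (a sketch): if FUNCTION_ARG_BOUNDARY aligned the slot to only
   32 bits but NOMINAL_MODE requires 64-bit alignment, STACK_PARM is
   zeroed here and a sufficiently aligned slot is made later if one
   turns out to be needed.  */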
3602
3603 /* If parm was passed in memory, and we need to convert it on entry,
3604 don't store it back in that same slot. */
3605 if (entry_parm != 0
3606 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3607 stack_parm = 0;
3608
3609 #if 0
3610 /* Now adjust STACK_PARM to the mode and precise location
3611 where this parameter should live during execution,
3612 if we discover that it must live in the stack during execution.
3613 To make debuggers happier on big-endian machines, we store
3614 the value in the last bytes of the space available. */
3615
3616 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3617 && stack_parm != 0)
3618 {
3619 rtx offset_rtx;
3620
3621 if (BYTES_BIG_ENDIAN
3622 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3623 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3624 - GET_MODE_SIZE (nominal_mode));
3625
3626 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3627 if (offset_rtx == const0_rtx)
3628 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3629 else
3630 stack_parm = gen_rtx (MEM, nominal_mode,
3631 gen_rtx (PLUS, Pmode,
3632 internal_arg_pointer, offset_rtx));
3633
3634 /* If this is a memory ref that contains aggregate components,
3635 mark it as such for cse and loop optimize. */
3636 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3637 }
3638 #endif /* 0 */
3639
3640 #ifdef STACK_REGS
3641 /* We need this "use" info, because the gcc-register->stack-register
3642 converter in reg-stack.c needs to know which registers are active
3643 at the start of the function call. The actual parameter loading
3644 instructions are not always available by that point, since they might
3645 have been optimized away. */
3646
3647 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3648 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
3649 #endif
3650
3651 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3652 in the mode in which it arrives.
3653 STACK_PARM is an RTX for a stack slot where the parameter can live
3654 during the function (in case we want to put it there).
3655 STACK_PARM is 0 if no stack slot was pushed for it.
3656
3657 Now output code if necessary to convert ENTRY_PARM to
3658 the type in which this function declares it,
3659 and store that result in an appropriate place,
3660 which may be a pseudo reg, may be STACK_PARM,
3661 or may be a local stack slot if STACK_PARM is 0.
3662
3663 Set DECL_RTL to that place. */
3664
3665 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
3666 {
3667 /* If a BLKmode arrives in registers, copy it to a stack slot.
3668 Handle calls that pass values in multiple non-contiguous
3669 locations. The Irix 6 ABI has examples of this. */
3670 if (GET_CODE (entry_parm) == REG
3671 || GET_CODE (entry_parm) == PARALLEL)
3672 {
3673 int size_stored
3674 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3675 UNITS_PER_WORD);
3676
3677 /* Note that we will be storing an integral number of words.
3678 So we have to be careful to ensure that we allocate an
3679 integral number of words. We do this below in the
3680 assign_stack_local if space was not allocated in the argument
3681 list. If it was, this will not work if PARM_BOUNDARY is not
3682 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3683 if it becomes a problem. */
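/* E.g. (a sketch): a 10-byte BLKmode parm with UNITS_PER_WORD == 4
   gets size_stored == CEIL_ROUND (10, 4) == 12, i.e. three whole
   words.  */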
3684
3685 if (stack_parm == 0)
3686 {
3687 stack_parm
3688 = assign_stack_local (GET_MODE (entry_parm),
3689 size_stored, 0);
3690
3691 /* If this is a memory ref that contains aggregate
3692 components, mark it as such for cse and loop optimize. */
3693 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3694 }
3695
3696 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3697 abort ();
3698
3699 if (TREE_READONLY (parm))
3700 RTX_UNCHANGING_P (stack_parm) = 1;
3701
3702 /* Handle calls that pass values in multiple non-contiguous
3703 locations. The Irix 6 ABI has examples of this. */
3704 if (GET_CODE (entry_parm) == PARALLEL)
3705 emit_group_store (validize_mem (stack_parm), entry_parm);
3706 else
3707 move_block_from_reg (REGNO (entry_parm),
3708 validize_mem (stack_parm),
3709 size_stored / UNITS_PER_WORD,
3710 int_size_in_bytes (TREE_TYPE (parm)));
3711 }
3712 DECL_RTL (parm) = stack_parm;
3713 }
3714 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3715 && ! DECL_INLINE (fndecl))
3716 /* layout_decl may set this. */
3717 || TREE_ADDRESSABLE (parm)
3718 || TREE_SIDE_EFFECTS (parm)
3719 /* If -ffloat-store specified, don't put explicit
3720 float variables into registers. */
3721 || (flag_float_store
3722 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3723 /* Always assign pseudo to structure return or item passed
3724 by invisible reference. */
3725 || passed_pointer || parm == function_result_decl)
3726 {
3727 /* Store the parm in a pseudoregister during the function, but we
3728 may need to do it in a wider mode. */
3729
3730 register rtx parmreg;
3731 int regno, regnoi, regnor;
3732
3733 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3734
3735 promoted_nominal_mode
3736 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
3737
3738 parmreg = gen_reg_rtx (promoted_nominal_mode);
3739 REG_USERVAR_P (parmreg) = 1;
3740
3741 /* If this was an item that we received a pointer to, set DECL_RTL
3742 appropriately. */
3743 if (passed_pointer)
3744 {
3745 DECL_RTL (parm)
3746 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3747 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3748 }
3749 else
3750 DECL_RTL (parm) = parmreg;
3751
3752 /* Copy the value into the register. */
3753 if (nominal_mode != passed_mode
3754 || promoted_nominal_mode != promoted_mode)
3755 {
3756 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3757 mode, by the caller. We now have to convert it to
3758 NOMINAL_MODE, if different. However, PARMREG may be in
3759 a different mode from NOMINAL_MODE if it is being stored
3760 promoted.
3761
3762 If ENTRY_PARM is a hard register, it might be in a register
3763 not valid for operating in its mode (e.g., an odd-numbered
3764 register for a DFmode). In that case, moves are the only
3765 thing valid, so we can't do a convert from there. This
3766 occurs when the calling sequence allows such misaligned
3767 usages.
3768
3769 In addition, the conversion may involve a call, which could
3770 clobber parameters which haven't been copied to pseudo
3771 registers yet. Therefore, we must first copy the parm to
3772 a pseudo reg here, and save the conversion until after all
3773 parameters have been moved. */
3774
3775 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3776
3777 emit_move_insn (tempreg, validize_mem (entry_parm));
3778
3779 push_to_sequence (conversion_insns);
3780 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
3781
3782 expand_assignment (parm,
3783 make_tree (nominal_type, tempreg), 0, 0);
3784 conversion_insns = get_insns ();
3785 did_conversion = 1;
3786 end_sequence ();
3787 }
3788 else
3789 emit_move_insn (parmreg, validize_mem (entry_parm));
3790
3791 /* If we were passed a pointer but the actual value
3792 can safely live in a register, put it in one. */
3793 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3794 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3795 && ! DECL_INLINE (fndecl))
3796 /* layout_decl may set this. */
3797 || TREE_ADDRESSABLE (parm)
3798 || TREE_SIDE_EFFECTS (parm)
3799 /* If -ffloat-store specified, don't put explicit
3800 float variables into registers. */
3801 || (flag_float_store
3802 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3803 {
3804 /* We can't use nominal_mode, because it will have been set to
3805 Pmode above. We must use the actual mode of the parm. */
3806 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3807 REG_USERVAR_P (parmreg) = 1;
3808 emit_move_insn (parmreg, DECL_RTL (parm));
3809 DECL_RTL (parm) = parmreg;
3810 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3811 now the parm. */
3812 stack_parm = 0;
3813 }
3814 #ifdef FUNCTION_ARG_CALLEE_COPIES
3815 /* If we are passed an arg by reference and it is our responsibility
3816 to make a copy, do it now.
3817 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3818 original argument, so we must recreate them in the call to
3819 FUNCTION_ARG_CALLEE_COPIES. */
3820 /* ??? Later add code to skip the copy if the argument isn't
3821 modified. */
3822
3823 else if (passed_pointer
3824 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3825 TYPE_MODE (DECL_ARG_TYPE (parm)),
3826 DECL_ARG_TYPE (parm),
3827 ! last_named)
3828 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
3829 {
3830 rtx copy;
3831 tree type = DECL_ARG_TYPE (parm);
3832
3833 /* This sequence may involve a library call perhaps clobbering
3834 registers that haven't been copied to pseudos yet. */
3835
3836 push_to_sequence (conversion_insns);
3837
3838 if (TYPE_SIZE (type) == 0
3839 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3840 /* This is a variable sized object. */
3841 copy = gen_rtx (MEM, BLKmode,
3842 allocate_dynamic_stack_space
3843 (expr_size (parm), NULL_RTX,
3844 TYPE_ALIGN (type)));
3845 else
3846 copy = assign_stack_temp (TYPE_MODE (type),
3847 int_size_in_bytes (type), 1);
3848 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
3849
3850 store_expr (parm, copy, 0);
3851 emit_move_insn (parmreg, XEXP (copy, 0));
3852 conversion_insns = get_insns ();
3853 did_conversion = 1;
3854 end_sequence ();
3855 }
3856 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3857
3858 /* In any case, record the parm's desired stack location
3859 in case we later discover it must live in the stack.
3860
3861 If it is a COMPLEX value, store the stack location for both
3862 halves. */
3863
3864 if (GET_CODE (parmreg) == CONCAT)
3865 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
3866 else
3867 regno = REGNO (parmreg);
3868
3869 if (regno >= nparmregs)
3870 {
3871 rtx *new;
3872 int old_nparmregs = nparmregs;
3873
3874 nparmregs = regno + 5;
3875 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3876 bcopy ((char *) parm_reg_stack_loc, (char *) new,
3877 old_nparmregs * sizeof (rtx));
3878 bzero ((char *) (new + old_nparmregs),
3879 (nparmregs - old_nparmregs) * sizeof (rtx));
3880 parm_reg_stack_loc = new;
3881 }
3882
3883 if (GET_CODE (parmreg) == CONCAT)
3884 {
3885 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
3886
3887 regnor = REGNO (gen_realpart (submode, parmreg));
3888 regnoi = REGNO (gen_imagpart (submode, parmreg));
3889
3890 if (stack_parm != 0)
3891 {
3892 parm_reg_stack_loc[regnor]
3893 = gen_realpart (submode, stack_parm);
3894 parm_reg_stack_loc[regnoi]
3895 = gen_imagpart (submode, stack_parm);
3896 }
3897 else
3898 {
3899 parm_reg_stack_loc[regnor] = 0;
3900 parm_reg_stack_loc[regnoi] = 0;
3901 }
3902 }
3903 else
3904 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3905
3906 /* Mark the register as eliminable if we did no conversion
3907 and it was copied from memory at a fixed offset,
3908 and the arg pointer was not copied to a pseudo-reg.
3909 If the arg pointer is a pseudo reg or the offset formed
3910 an invalid address, such memory-equivalences
3911 as we make here would screw up life analysis for it. */
3912 if (nominal_mode == passed_mode
3913 && ! did_conversion
3914 && GET_CODE (entry_parm) == MEM
3915 && entry_parm == stack_parm
3916 && stack_offset.var == 0
3917 && reg_mentioned_p (virtual_incoming_args_rtx,
3918 XEXP (entry_parm, 0)))
3919 {
3920 rtx linsn = get_last_insn ();
3921 rtx sinsn, set;
3922
3923 /* Mark complex types separately. */
3924 if (GET_CODE (parmreg) == CONCAT)
3925 /* Scan backwards for the set of the real and
3926 imaginary parts. */
3927 for (sinsn = linsn; sinsn != 0;
3928 sinsn = prev_nonnote_insn (sinsn))
3929 {
3930 set = single_set (sinsn);
3931 if (set != 0
3932 && SET_DEST (set) == regno_reg_rtx [regnoi])
3933 REG_NOTES (sinsn)
3934 = gen_rtx (EXPR_LIST, REG_EQUIV,
3935 parm_reg_stack_loc[regnoi],
3936 REG_NOTES (sinsn));
3937 else if (set != 0
3938 && SET_DEST (set) == regno_reg_rtx [regnor])
3939 REG_NOTES (sinsn)
3940 = gen_rtx (EXPR_LIST, REG_EQUIV,
3941 parm_reg_stack_loc[regnor],
3942 REG_NOTES (sinsn));
3943 }
3944 else if ((set = single_set (linsn)) != 0
3945 && SET_DEST (set) == parmreg)
3946 REG_NOTES (linsn)
3947 = gen_rtx (EXPR_LIST, REG_EQUIV,
3948 entry_parm, REG_NOTES (linsn));
3949 }
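/* The effect, sketched with a hypothetical register number: a load
   such as (set (reg 105) (mem (plus (reg ap) (const_int 8)))) gains
   a REG_EQUIV note for that MEM, so later passes may rematerialize
   reg 105 from its argument slot instead of spilling it.  */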
3950
3951 /* For a pointer data type, suggest a pointer register. */
3952 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3953 mark_reg_pointer (parmreg,
3954 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
3955 / BITS_PER_UNIT));
3956 }
3957 else
3958 {
3959 /* Value must be stored in the stack slot STACK_PARM
3960 during function execution. */
3961
3962 if (promoted_mode != nominal_mode)
3963 {
3964 /* Conversion is required. */
3965 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3966
3967 emit_move_insn (tempreg, validize_mem (entry_parm));
3968
3969 push_to_sequence (conversion_insns);
3970 entry_parm = convert_to_mode (nominal_mode, tempreg,
3971 TREE_UNSIGNED (TREE_TYPE (parm)));
3972 conversion_insns = get_insns ();
3973 did_conversion = 1;
3974 end_sequence ();
3975 }
3976
3977 if (entry_parm != stack_parm)
3978 {
3979 if (stack_parm == 0)
3980 {
3981 stack_parm
3982 = assign_stack_local (GET_MODE (entry_parm),
3983 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3984 /* If this is a memory ref that contains aggregate components,
3985 mark it as such for cse and loop optimize. */
3986 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3987 }
3988
3989 if (promoted_mode != nominal_mode)
3990 {
3991 push_to_sequence (conversion_insns);
3992 emit_move_insn (validize_mem (stack_parm),
3993 validize_mem (entry_parm));
3994 conversion_insns = get_insns ();
3995 end_sequence ();
3996 }
3997 else
3998 emit_move_insn (validize_mem (stack_parm),
3999 validize_mem (entry_parm));
4000 }
4001
4002 DECL_RTL (parm) = stack_parm;
4003 }
4004
4005 /* If this "parameter" was the place where we are receiving the
4006 function's incoming structure pointer, set up the result. */
4007 if (parm == function_result_decl)
4008 {
4009 tree result = DECL_RESULT (fndecl);
4010 tree restype = TREE_TYPE (result);
4011
4012 DECL_RTL (result)
4013 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
4014
4015 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4016 }
4017
4018 if (TREE_THIS_VOLATILE (parm))
4019 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4020 if (TREE_READONLY (parm))
4021 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4022 }
4023
4024 /* Output all parameter conversion instructions (possibly including calls)
4025 now that all parameters have been copied out of hard registers. */
4026 emit_insns (conversion_insns);
4027
4028 max_parm_reg = max_reg_num ();
4029 last_parm_insn = get_last_insn ();
4030
4031 current_function_args_size = stack_args_size.constant;
4032
4033 /* Adjust function incoming argument size for alignment and
4034 minimum length. */
4035
4036 #ifdef REG_PARM_STACK_SPACE
4037 #ifndef MAYBE_REG_PARM_STACK_SPACE
4038 current_function_args_size = MAX (current_function_args_size,
4039 REG_PARM_STACK_SPACE (fndecl));
4040 #endif
4041 #endif
4042
4043 #ifdef STACK_BOUNDARY
4044 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4045
4046 current_function_args_size
4047 = ((current_function_args_size + STACK_BYTES - 1)
4048 / STACK_BYTES) * STACK_BYTES;
4049 #endif
4050
4051 #ifdef ARGS_GROW_DOWNWARD
4052 current_function_arg_offset_rtx
4053 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4054 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4055 size_int (-stack_args_size.constant)),
4056 NULL_RTX, VOIDmode, 0));
4057 #else
4058 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4059 #endif
4060
4061 /* See how many bytes, if any, of its args a function should try to pop
4062 on return. */
4063
4064 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4065 current_function_args_size);
4066
4067 /* For a stdarg.h function, save info about
4068 regs and stack space used by the named args. */
4069
4070 if (!hide_last_arg)
4071 current_function_args_info = args_so_far;
4072
4073 /* Set the rtx used for the function return value. Put this in its
4074 own variable so any optimizers that need this information don't have
4075 to include tree.h. Do this here so it gets done when an inlined
4076 function gets output. */
4077
4078 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4079 }
4080 \f
4081 /* Indicate whether REGNO is an incoming argument to the current function
4082 that was promoted to a wider mode. If so, return the RTX for the
4083 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4084 that REGNO is promoted from and whether the promotion was signed or
4085 unsigned. */
4086
4087 #ifdef PROMOTE_FUNCTION_ARGS
4088
4089 rtx
4090 promoted_input_arg (regno, pmode, punsignedp)
4091 int regno;
4092 enum machine_mode *pmode;
4093 int *punsignedp;
4094 {
4095 tree arg;
4096
4097 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4098 arg = TREE_CHAIN (arg))
4099 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4100 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4101 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4102 {
4103 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4104 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4105
4106 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4107 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4108 && mode != DECL_MODE (arg))
4109 {
4110 *pmode = DECL_MODE (arg);
4111 *punsignedp = unsignedp;
4112 return DECL_INCOMING_RTL (arg);
4113 }
4114 }
4115
4116 return 0;
4117 }
4118
4119 #endif
4120 \f
4121 /* Compute the size and offset from the start of the stacked arguments for a
4122 parm passed in mode PASSED_MODE and with type TYPE.
4123
4124 INITIAL_OFFSET_PTR points to the current offset into the stacked
4125 arguments.
4126
4127 The starting offset and size for this parm are returned in *OFFSET_PTR
4128 and *ARG_SIZE_PTR, respectively.
4129
4130 IN_REGS is non-zero if the argument will be passed in registers. It will
4131 never be set if REG_PARM_STACK_SPACE is not defined.
4132
4133 FNDECL is the function in which the argument was defined.
4134
4135 There are two types of rounding that are done. The first, controlled by
4136 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4137 list to be aligned to the specific boundary (in bits). This rounding
4138 affects the initial and starting offsets, but not the argument size.
4139
4140 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4141 optionally rounds the size of the parm to PARM_BOUNDARY. The
4142 initial offset is not affected by this rounding, while the size always
4143 is and the starting offset may be. */
4144
4145 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4146 initial_offset_ptr is positive because locate_and_pad_parm's
4147 callers pass in the total size of the args so far as
4148 initial_offset_ptr. arg_size_ptr is always positive. */
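/* An illustrative walk-through with hypothetical values: suppose args
   grow upward, PARM_BOUNDARY == 32, and FUNCTION_ARG_BOUNDARY returns
   64 for a DFmode parm.  An incoming *INITIAL_OFFSET_PTR of 4 is
   first rounded up to 8 by pad_to_arg_alignment, so the parm starts
   at offset 8; its 8-byte size is already a multiple of
   PARM_BOUNDARY, so the next parm would start at offset 16.  */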
4149
4150 void
4151 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4152 initial_offset_ptr, offset_ptr, arg_size_ptr)
4153 enum machine_mode passed_mode;
4154 tree type;
4155 int in_regs;
4156 tree fndecl;
4157 struct args_size *initial_offset_ptr;
4158 struct args_size *offset_ptr;
4159 struct args_size *arg_size_ptr;
4160 {
4161 tree sizetree
4162 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4163 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4164 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4165 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4166 int reg_parm_stack_space = 0;
4167
4168 #ifdef REG_PARM_STACK_SPACE
4169 /* If we have found a stack parm before we reach the end of the
4170 area reserved for registers, skip that area. */
4171 if (! in_regs)
4172 {
4173 #ifdef MAYBE_REG_PARM_STACK_SPACE
4174 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4175 #else
4176 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4177 #endif
4178 if (reg_parm_stack_space > 0)
4179 {
4180 if (initial_offset_ptr->var)
4181 {
4182 initial_offset_ptr->var
4183 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4184 size_int (reg_parm_stack_space));
4185 initial_offset_ptr->constant = 0;
4186 }
4187 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4188 initial_offset_ptr->constant = reg_parm_stack_space;
4189 }
4190 }
4191 #endif /* REG_PARM_STACK_SPACE */
4192
4193 arg_size_ptr->var = 0;
4194 arg_size_ptr->constant = 0;
4195
4196 #ifdef ARGS_GROW_DOWNWARD
4197 if (initial_offset_ptr->var)
4198 {
4199 offset_ptr->constant = 0;
4200 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4201 initial_offset_ptr->var);
4202 }
4203 else
4204 {
4205 offset_ptr->constant = - initial_offset_ptr->constant;
4206 offset_ptr->var = 0;
4207 }
4208 if (where_pad != none
4209 && (TREE_CODE (sizetree) != INTEGER_CST
4210 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4211 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4212 SUB_PARM_SIZE (*offset_ptr, sizetree);
4213 if (where_pad != downward)
4214 pad_to_arg_alignment (offset_ptr, boundary);
4215 if (initial_offset_ptr->var)
4216 {
4217 arg_size_ptr->var = size_binop (MINUS_EXPR,
4218 size_binop (MINUS_EXPR,
4219 integer_zero_node,
4220 initial_offset_ptr->var),
4221 offset_ptr->var);
4222 }
4223 else
4224 {
4225 arg_size_ptr->constant = (- initial_offset_ptr->constant -
4226 offset_ptr->constant);
4227 }
4228 #else /* !ARGS_GROW_DOWNWARD */
4229 pad_to_arg_alignment (initial_offset_ptr, boundary);
4230 *offset_ptr = *initial_offset_ptr;
4231
4232 #ifdef PUSH_ROUNDING
4233 if (passed_mode != BLKmode)
4234 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4235 #endif
4236
4237 /* Pad_below needs the pre-rounded size to know how much to pad below,
4238 so this must be done before rounding up. */
4239 if (where_pad == downward
4240 /* However, BLKmode args passed in regs have their padding done elsewhere.
4241 The stack slot must be able to hold the entire register. */
4242 && !(in_regs && passed_mode == BLKmode))
4243 pad_below (offset_ptr, passed_mode, sizetree);
4244
4245 if (where_pad != none
4246 && (TREE_CODE (sizetree) != INTEGER_CST
4247 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4248 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4249
4250 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4251 #endif /* ARGS_GROW_DOWNWARD */
4252 }
4253
4254 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4255 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4256
4257 static void
4258 pad_to_arg_alignment (offset_ptr, boundary)
4259 struct args_size *offset_ptr;
4260 int boundary;
4261 {
4262 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4263
4264 if (boundary > BITS_PER_UNIT)
4265 {
4266 if (offset_ptr->var)
4267 {
4268 offset_ptr->var =
4269 #ifdef ARGS_GROW_DOWNWARD
4270 round_down
4271 #else
4272 round_up
4273 #endif
4274 (ARGS_SIZE_TREE (*offset_ptr),
4275 boundary / BITS_PER_UNIT);
4276 offset_ptr->constant = 0; /*?*/
4277 }
4278 else
4279 offset_ptr->constant =
4280 #ifdef ARGS_GROW_DOWNWARD
4281 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4282 #else
4283 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4284 #endif
4285 }
4286 }
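/* E.g. (a sketch): CEIL_ROUND (5, 4) == 8, while in the
   ARGS_GROW_DOWNWARD case offsets are negative and
   FLOOR_ROUND (-5, 4) == -8; either way the offset moves away from
   zero to the next 4-byte boundary.  */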
4287
4288 static void
4289 pad_below (offset_ptr, passed_mode, sizetree)
4290 struct args_size *offset_ptr;
4291 enum machine_mode passed_mode;
4292 tree sizetree;
4293 {
4294 if (passed_mode != BLKmode)
4295 {
4296 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4297 offset_ptr->constant
4298 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4299 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4300 - GET_MODE_SIZE (passed_mode));
4301 }
4302 else
4303 {
4304 if (TREE_CODE (sizetree) != INTEGER_CST
4305 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4306 {
4307 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4308 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4309 /* Add it in. */
4310 ADD_PARM_SIZE (*offset_ptr, s2);
4311 SUB_PARM_SIZE (*offset_ptr, sizetree);
4312 }
4313 }
4314 }
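/* E.g. (a sketch, with BITS_PER_UNIT == 8): a QImode parm with
   PARM_BOUNDARY == 32 has an 8-bit mode, so the first branch above
   advances the offset by 4 - 1 == 3 bytes, leaving the value in the
   last byte of its word.  */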
4315
4316 static tree
4317 round_down (value, divisor)
4318 tree value;
4319 int divisor;
4320 {
4321 return size_binop (MULT_EXPR,
4322 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4323 size_int (divisor));
4324 }
4325 \f
4326 /* Walk the tree of blocks describing the binding levels within a function
4327 and warn about uninitialized variables.
4328 This is done after calling flow_analysis and before global_alloc
4329 clobbers the pseudo-regs to hard regs. */
4330
4331 void
4332 uninitialized_vars_warning (block)
4333 tree block;
4334 {
4335 register tree decl, sub;
4336 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4337 {
4338 if (TREE_CODE (decl) == VAR_DECL
4339 /* These warnings are unreliable for aggregates
4340 because assigning the fields one by one can fail to convince
4341 flow.c that the entire aggregate was initialized.
4342 Unions are troublesome because members may be shorter. */
4343 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4344 && DECL_RTL (decl) != 0
4345 && GET_CODE (DECL_RTL (decl)) == REG
4346 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4347 warning_with_decl (decl,
4348 "`%s' might be used uninitialized in this function");
4349 if (TREE_CODE (decl) == VAR_DECL
4350 && DECL_RTL (decl) != 0
4351 && GET_CODE (DECL_RTL (decl)) == REG
4352 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4353 warning_with_decl (decl,
4354 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4355 }
4356 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4357 uninitialized_vars_warning (sub);
4358 }
4359
4360 /* Do the appropriate part of uninitialized_vars_warning
4361 but for arguments instead of local variables. */
4362
4363 void
4364 setjmp_args_warning ()
4365 {
4366 register tree decl;
4367 for (decl = DECL_ARGUMENTS (current_function_decl);
4368 decl; decl = TREE_CHAIN (decl))
4369 if (DECL_RTL (decl) != 0
4370 && GET_CODE (DECL_RTL (decl)) == REG
4371 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4372 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4373 }
4374
4375 /* If this function calls setjmp, put all vars into the stack
4376 unless they were declared `register'. */
4377
4378 void
4379 setjmp_protect (block)
4380 tree block;
4381 {
4382 register tree decl, sub;
4383 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4384 if ((TREE_CODE (decl) == VAR_DECL
4385 || TREE_CODE (decl) == PARM_DECL)
4386 && DECL_RTL (decl) != 0
4387 && GET_CODE (DECL_RTL (decl)) == REG
4388 /* If this variable came from an inline function, it must be
4389 that its life doesn't overlap the setjmp. If there was a
4390 setjmp in the function, it would already be in memory. We
4391 must exclude such variables because their DECL_RTL might be
4392 set to strange things such as virtual_stack_vars_rtx. */
4393 && ! DECL_FROM_INLINE (decl)
4394 && (
4395 #ifdef NON_SAVING_SETJMP
4396 /* If longjmp doesn't restore the registers,
4397 don't put anything in them. */
4398 NON_SAVING_SETJMP
4399 ||
4400 #endif
4401 ! DECL_REGISTER (decl)))
4402 put_var_into_stack (decl);
4403 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4404 setjmp_protect (sub);
4405 }
4406 \f
4407 /* Like the previous function, but for args instead of local variables. */
4408
4409 void
4410 setjmp_protect_args ()
4411 {
4412 register tree decl, sub;
4413 for (decl = DECL_ARGUMENTS (current_function_decl);
4414 decl; decl = TREE_CHAIN (decl))
4415 if ((TREE_CODE (decl) == VAR_DECL
4416 || TREE_CODE (decl) == PARM_DECL)
4417 && DECL_RTL (decl) != 0
4418 && GET_CODE (DECL_RTL (decl)) == REG
4419 && (
4420 /* If longjmp doesn't restore the registers,
4421 don't put anything in them. */
4422 #ifdef NON_SAVING_SETJMP
4423 NON_SAVING_SETJMP
4424 ||
4425 #endif
4426 ! DECL_REGISTER (decl)))
4427 put_var_into_stack (decl);
4428 }
4429 \f
4430 /* Return the context-pointer register corresponding to DECL,
4431 or 0 if it does not need one. */
4432
4433 rtx
4434 lookup_static_chain (decl)
4435 tree decl;
4436 {
4437 tree context = decl_function_context (decl);
4438 tree link;
4439
4440 if (context == 0
4441 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4442 return 0;
4443
4444 /* We treat inline_function_decl as an alias for the current function
4445 because that is the inline function whose vars, types, etc.
4446 are being merged into the current function.
4447 See expand_inline_function. */
4448 if (context == current_function_decl || context == inline_function_decl)
4449 return virtual_stack_vars_rtx;
4450
4451 for (link = context_display; link; link = TREE_CHAIN (link))
4452 if (TREE_PURPOSE (link) == context)
4453 return RTL_EXPR_RTL (TREE_VALUE (link));
4454
4455 abort ();
4456 }
4457 \f
4458 /* Convert a stack slot address ADDR for variable VAR
4459 (from a containing function)
4460 into an address valid in this function (using a static chain). */
4461
4462 rtx
4463 fix_lexical_addr (addr, var)
4464 rtx addr;
4465 tree var;
4466 {
4467 rtx basereg;
4468 int displacement;
4469 tree context = decl_function_context (var);
4470 struct function *fp;
4471 rtx base = 0;
4472
4473 /* If this is the present function, we need not do anything. */
4474 if (context == current_function_decl || context == inline_function_decl)
4475 return addr;
4476
4477 for (fp = outer_function_chain; fp; fp = fp->next)
4478 if (fp->decl == context)
4479 break;
4480
4481 if (fp == 0)
4482 abort ();
4483
4484 /* Decode given address as base reg plus displacement. */
4485 if (GET_CODE (addr) == REG)
4486 basereg = addr, displacement = 0;
4487 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4488 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4489 else
4490 abort ();
4491
4492 /* We accept vars reached via the containing function's
4493 incoming arg pointer and via its stack variables pointer. */
4494 if (basereg == fp->internal_arg_pointer)
4495 {
4496 /* If reached via arg pointer, get the arg pointer value
4497 out of that function's stack frame.
4498
4499 There are two cases: If a separate ap is needed, allocate a
4500 slot in the outer function for it and dereference it that way.
4501 This is correct even if the real ap is actually a pseudo.
4502 Otherwise, just adjust the offset from the frame pointer to
4503 compensate. */
4504
4505 #ifdef NEED_SEPARATE_AP
4506 rtx addr;
4507
4508 if (fp->arg_pointer_save_area == 0)
4509 fp->arg_pointer_save_area
4510 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4511
4512 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4513 addr = memory_address (Pmode, addr);
4514
4515 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4516 #else
4517 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4518 base = lookup_static_chain (var);
4519 #endif
4520 }
4521
4522 else if (basereg == virtual_stack_vars_rtx)
4523 {
4524 /* This is the same code as lookup_static_chain, duplicated here to
4525 avoid an extra call to decl_function_context. */
4526 tree link;
4527
4528 for (link = context_display; link; link = TREE_CHAIN (link))
4529 if (TREE_PURPOSE (link) == context)
4530 {
4531 base = RTL_EXPR_RTL (TREE_VALUE (link));
4532 break;
4533 }
4534 }
4535
4536 if (base == 0)
4537 abort ();
4538
4539 /* Use same offset, relative to appropriate static chain or argument
4540 pointer. */
4541 return plus_constant (base, displacement);
4542 }
4543 \f
4544 /* Return the address of the trampoline for entering nested fn FUNCTION.
4545 If necessary, allocate a trampoline (in the stack frame)
4546 and emit rtl to initialize its contents (at entry to this function). */
4547
4548 rtx
4549 trampoline_address (function)
4550 tree function;
4551 {
4552 tree link;
4553 tree rtlexp;
4554 rtx tramp;
4555 struct function *fp;
4556 tree fn_context;
4557
4558 /* Find an existing trampoline and return it. */
4559 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4560 if (TREE_PURPOSE (link) == function)
4561 return
4562 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4563
4564 for (fp = outer_function_chain; fp; fp = fp->next)
4565 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4566 if (TREE_PURPOSE (link) == function)
4567 {
4568 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4569 function);
4570 return round_trampoline_addr (tramp);
4571 }
4572
4573 /* None exists; we must make one. */
4574
4575 /* Find the `struct function' for the function containing FUNCTION. */
4576 fp = 0;
4577 fn_context = decl_function_context (function);
4578 if (fn_context != current_function_decl)
4579 for (fp = outer_function_chain; fp; fp = fp->next)
4580 if (fp->decl == fn_context)
4581 break;
4582
4583 /* Allocate run-time space for this trampoline
4584 (usually in the defining function's stack frame). */
4585 #ifdef ALLOCATE_TRAMPOLINE
4586 tramp = ALLOCATE_TRAMPOLINE (fp);
4587 #else
4588 /* If rounding is needed, allocate extra space
4589 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4590 #ifdef TRAMPOLINE_ALIGNMENT
4591 #define TRAMPOLINE_REAL_SIZE \
4592 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4593 #else
4594 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4595 #endif
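/* E.g. (a sketch): TRAMPOLINE_SIZE == 10 with a 32-bit
   TRAMPOLINE_ALIGNMENT allocates 10 + 4 - 1 == 13 bytes, so 10 bytes
   still remain after round_trampoline_addr advances the start by up
   to 3 bytes.  */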
4596 if (fp != 0)
4597 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4598 else
4599 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4600 #endif
4601
4602 /* Record the trampoline for reuse and note it for later initialization
4603 by expand_function_end. */
4604 if (fp != 0)
4605 {
4606 push_obstacks (fp->function_maybepermanent_obstack,
4607 fp->function_maybepermanent_obstack);
4608 rtlexp = make_node (RTL_EXPR);
4609 RTL_EXPR_RTL (rtlexp) = tramp;
4610 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4611 pop_obstacks ();
4612 }
4613 else
4614 {
4615 /* Make the RTL_EXPR node temporary, not momentary, so that the
4616 trampoline_list doesn't become garbage. */
4617 int momentary = suspend_momentary ();
4618 rtlexp = make_node (RTL_EXPR);
4619 resume_momentary (momentary);
4620
4621 RTL_EXPR_RTL (rtlexp) = tramp;
4622 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4623 }
4624
4625 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4626 return round_trampoline_addr (tramp);
4627 }
4628
4629 /* Given a trampoline address,
4630 round it to a multiple of TRAMPOLINE_ALIGNMENT. */
4631
4632 static rtx
4633 round_trampoline_addr (tramp)
4634 rtx tramp;
4635 {
4636 #ifdef TRAMPOLINE_ALIGNMENT
4637 /* Round address up to desired boundary. */
4638 rtx temp = gen_reg_rtx (Pmode);
4639 temp = expand_binop (Pmode, add_optab, tramp,
4640 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4641 temp, 0, OPTAB_LIB_WIDEN);
4642 tramp = expand_binop (Pmode, and_optab, temp,
4643 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4644 temp, 0, OPTAB_LIB_WIDEN);
4645 #endif
4646 return tramp;
4647 }
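/* E.g. (a sketch): with TRAMPOLINE_ALIGNMENT == 32, an address of
   0x1001 becomes (0x1001 + 3) & -4 == 0x1004, while an already
   aligned 0x1000 is left unchanged.  */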
4648 \f
4649 /* The functions identify_blocks and reorder_blocks provide a way to
4650 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4651 duplicate portions of the RTL code. Call identify_blocks before
4652 changing the RTL, and call reorder_blocks after. */
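/* A hypothetical caller (a sketch only; the real callers live in the
   optimizers) would bracket its rewriting of the insn chain like so:

	tree *vec = identify_blocks (block, get_insns ());
	... reshuffle or duplicate insns, block notes included ...
	block = reorder_blocks (vec, block, get_insns ());
	free (vec);

   where BLOCK is the function's top-level block.  */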
4653
4654 /* Put all this function's BLOCK nodes, including those that are chained
4655 onto the first block, into a vector, and return it.
4656 Also store in each NOTE for the beginning or end of a block
4657 the index of that block in the vector.
4658 The arguments are BLOCK, the chain of top-level blocks of the function,
4659 and INSNS, the insn chain of the function. */
4660
4661 tree *
4662 identify_blocks (block, insns)
4663 tree block;
4664 rtx insns;
4665 {
4666 int n_blocks;
4667 tree *block_vector;
4668 int *block_stack;
4669 int depth = 0;
4670 int next_block_number = 1;
4671 int current_block_number = 1;
4672 rtx insn;
4673
4674 if (block == 0)
4675 return 0;
4676
4677 n_blocks = all_blocks (block, 0);
4678 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4679 block_stack = (int *) alloca (n_blocks * sizeof (int));
4680
4681 all_blocks (block, block_vector);
4682
4683 for (insn = insns; insn; insn = NEXT_INSN (insn))
4684 if (GET_CODE (insn) == NOTE)
4685 {
4686 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4687 {
4688 block_stack[depth++] = current_block_number;
4689 current_block_number = next_block_number;
4690 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4691 }
4692 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4693 {
4694 current_block_number = block_stack[--depth];
4695 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4696 }
4697 }
4698
4699 if (n_blocks != next_block_number)
4700 abort ();
4701
4702 return block_vector;
4703 }
4704
4705 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4706 and a revised instruction chain, rebuild the tree structure
4707 of BLOCK nodes to correspond to the new order of RTL.
4708 The new block tree is inserted below BLOCK.
4709 Returns the current top-level block. */
4710
4711 tree
4712 reorder_blocks (block_vector, block, insns)
4713 tree *block_vector;
4714 tree block;
4715 rtx insns;
4716 {
4717 tree current_block = block;
4718 rtx insn;
4719
4720 if (block_vector == 0)
4721 return block;
4722
4723 /* Prune the old trees away, so that they don't get in the way. */
4724 BLOCK_SUBBLOCKS (current_block) = 0;
4725 BLOCK_CHAIN (current_block) = 0;
4726
4727 for (insn = insns; insn; insn = NEXT_INSN (insn))
4728 if (GET_CODE (insn) == NOTE)
4729 {
4730 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4731 {
4732 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4733 /* If we have seen this block before, copy it. */
4734 if (TREE_ASM_WRITTEN (block))
4735 block = copy_node (block);
4736 BLOCK_SUBBLOCKS (block) = 0;
4737 TREE_ASM_WRITTEN (block) = 1;
4738 BLOCK_SUPERCONTEXT (block) = current_block;
4739 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4740 BLOCK_SUBBLOCKS (current_block) = block;
4741 current_block = block;
4742 NOTE_SOURCE_FILE (insn) = 0;
4743 }
4744 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4745 {
4746 BLOCK_SUBBLOCKS (current_block)
4747 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4748 current_block = BLOCK_SUPERCONTEXT (current_block);
4749 NOTE_SOURCE_FILE (insn) = 0;
4750 }
4751 }
4752
4753 BLOCK_SUBBLOCKS (current_block)
4754 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4755 return current_block;
4756 }
4757
4758 /* Reverse the order of elements in the chain T of blocks,
4759 and return the new head of the chain (old last element). */
4760
4761 static tree
4762 blocks_nreverse (t)
4763 tree t;
4764 {
4765 register tree prev = 0, decl, next;
4766 for (decl = t; decl; decl = next)
4767 {
4768 next = BLOCK_CHAIN (decl);
4769 BLOCK_CHAIN (decl) = prev;
4770 prev = decl;
4771 }
4772 return prev;
4773 }
4774
4775 /* Count the subblocks of the list starting with BLOCK, and list them
4776 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
4777 blocks. */
4778
4779 static int
4780 all_blocks (block, vector)
4781 tree block;
4782 tree *vector;
4783 {
4784 int n_blocks = 0;
4785
4786 while (block)
4787 {
4788 TREE_ASM_WRITTEN (block) = 0;
4789
4790 /* Record this block. */
4791 if (vector)
4792 vector[n_blocks] = block;
4793
4794 ++n_blocks;
4795
4796 /* Record the subblocks, and their subblocks... */
4797 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4798 vector ? vector + n_blocks : 0);
4799 block = BLOCK_CHAIN (block);
4800 }
4801
4802 return n_blocks;
4803 }
4804 \f
4805 /* Build bytecode call descriptor for function SUBR. */
4806
4807 rtx
4808 bc_build_calldesc (subr)
4809 tree subr;
4810 {
4811 tree calldesc = 0, arg;
4812 int nargs = 0;
4813
4814 /* Build the argument description vector in reverse order. */
4815 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4816 nargs = 0;
4817
4818 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4819 {
4820 ++nargs;
4821
4822 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4823 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4824 }
4825
4826 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4827
4828 /* Prepend the function's return type. */
4829 calldesc = tree_cons ((tree) 0,
4830 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4831 calldesc);
4832
4833 calldesc = tree_cons ((tree) 0,
4834 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4835 calldesc);
4836
4837 /* Prepend the arg count. */
4838 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
4839
4840 /* Output the call description vector and get its address. */
4841 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4842 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4843 build_index_type (build_int_2 (nargs * 2, 0)));
4844
4845 return output_constant_def (calldesc);
4846 }
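/* The vector built above therefore reads, in order: the argument
   count, the return type's runtime code and size, then a (runtime
   type code, size) pair for each argument in declaration order.
   E.g. (a sketch with hypothetical sizes), `int f (char, double)'
   yields roughly { 2, code (int), 4, code (char), 1, code (double),
   8 }.  */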
4847
4848
4849 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4850 and initialize static variables for generating RTL for the statements
4851 of the function. */
4852
4853 void
4854 init_function_start (subr, filename, line)
4855 tree subr;
4856 char *filename;
4857 int line;
4858 {
4859 char *junk;
4860
4861 if (output_bytecode)
4862 {
4863 this_function_decl = subr;
4864 this_function_calldesc = bc_build_calldesc (subr);
4865 local_vars_size = 0;
4866 stack_depth = 0;
4867 max_stack_depth = 0;
4868 stmt_expr_depth = 0;
4869 return;
4870 }
4871
4872 init_stmt_for_function ();
4873
4874 cse_not_expected = ! optimize;
4875
4876 /* Caller save not needed yet. */
4877 caller_save_needed = 0;
4878
4879 /* No stack slots have been made yet. */
4880 stack_slot_list = 0;
4881
4882 /* There is no stack slot for handling nonlocal gotos. */
4883 nonlocal_goto_handler_slot = 0;
4884 nonlocal_goto_stack_level = 0;
4885
4886 /* No labels have been declared for nonlocal use. */
4887 nonlocal_labels = 0;
4888
4889 /* No function calls so far in this function. */
4890 function_call_count = 0;
4891
4892 /* No parm regs have been allocated.
4893 (This is important for output_inline_function.) */
4894 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4895
4896 /* Initialize the RTL mechanism. */
4897 init_emit ();
4898
4899 /* Initialize the queue of pending postincrement and postdecrements,
4900 and some other info in expr.c. */
4901 init_expr ();
4902
4903 /* We haven't done register allocation yet. */
4904 reg_renumber = 0;
4905
4906 init_const_rtx_hash_table ();
4907
4908 current_function_name = (*decl_printable_name) (subr, &junk);
4909
4910 /* Nonzero if this is a nested function that uses a static chain. */
4911
4912 current_function_needs_context
4913 = (decl_function_context (current_function_decl) != 0
4914 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
4915
4916 /* Set if a call to setjmp is seen. */
4917 current_function_calls_setjmp = 0;
4918
4919 /* Set if a call to longjmp is seen. */
4920 current_function_calls_longjmp = 0;
4921
4922 current_function_calls_alloca = 0;
4923 current_function_has_nonlocal_label = 0;
4924 current_function_has_nonlocal_goto = 0;
4925 current_function_contains_functions = 0;
4926
4927 current_function_returns_pcc_struct = 0;
4928 current_function_returns_struct = 0;
4929 current_function_epilogue_delay_list = 0;
4930 current_function_uses_const_pool = 0;
4931 current_function_uses_pic_offset_table = 0;
4932
4933 /* We have not yet needed to make a label to jump to for tail-recursion. */
4934 tail_recursion_label = 0;
4935
4936 /* We haven't had a need to make a save area for ap yet. */
4937
4938 arg_pointer_save_area = 0;
4939
4940 /* No stack slots allocated yet. */
4941 frame_offset = 0;
4942
4943 /* No SAVE_EXPRs in this function yet. */
4944 save_expr_regs = 0;
4945
4946 /* No RTL_EXPRs in this function yet. */
4947 rtl_expr_chain = 0;
4948
4949 /* Set up to allocate temporaries. */
4950 init_temp_slots ();
4951
4952 /* Within the function body, compute a type's size as soon as it is laid out. */
4953 immediate_size_expand++;
4954
4955 /* We haven't made any trampolines for this function yet. */
4956 trampoline_list = 0;
4957
4958 init_pending_stack_adjust ();
4959 inhibit_defer_pop = 0;
4960
4961 current_function_outgoing_args_size = 0;
4962
4963 /* Prevent ever trying to delete the first instruction of a function.
4964 Also tell final how to output a linenum before the function prologue. */
4965 emit_line_note (filename, line);
4966
4967 /* Make sure first insn is a note even if we don't want linenums.
4968 This makes sure the first insn will never be deleted.
4969 Also, final expects a note to appear there. */
4970 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4971
4972 /* Set flags used by final.c. */
4973 if (aggregate_value_p (DECL_RESULT (subr)))
4974 {
4975 #ifdef PCC_STATIC_STRUCT_RETURN
4976 current_function_returns_pcc_struct = 1;
4977 #endif
4978 current_function_returns_struct = 1;
4979 }
4980
4981 /* Warn if this value is an aggregate type,
4982 regardless of which calling convention we are using for it. */
4983 if (warn_aggregate_return
4984 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4985 warning ("function returns an aggregate");
4986
4987 current_function_returns_pointer
4988 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
4989
4990 /* Indicate that we need to distinguish between the return value of the
4991 present function and the return value of a function being called. */
4992 rtx_equal_function_value_matters = 1;
4993
4994 /* Indicate that we have not instantiated virtual registers yet. */
4995 virtuals_instantiated = 0;
4996
4997 /* Indicate we have no need of a frame pointer yet. */
4998 frame_pointer_needed = 0;
4999
5000 /* By default assume not varargs or stdarg. */
5001 current_function_varargs = 0;
5002 current_function_stdarg = 0;
5003 }
5004
5005 /* Indicate that the current function uses extra args
5006 not explicitly mentioned in the argument list in any fashion. */
5007
5008 void
5009 mark_varargs ()
5010 {
5011 current_function_varargs = 1;
5012 }
5013
5014 /* Expand a call to __main at the beginning of a possible main function. */
5015
5016 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5017 #undef HAS_INIT_SECTION
5018 #define HAS_INIT_SECTION
5019 #endif
5020
5021 void
5022 expand_main_function ()
5023 {
5024 if (!output_bytecode)
5025 {
5026 /* The zero below avoids a possible parse error */
5027 0;
5028 #if !defined (HAS_INIT_SECTION)
5029 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
5030 VOIDmode, 0);
5031 #endif /* not HAS_INIT_SECTION */
5032 }
5033 }
5034 \f
5035 extern struct obstack permanent_obstack;
5036
5037 /* Expand start of bytecode function. See comment at
5038 expand_function_start below for details. */
5039
5040 void
5041 bc_expand_function_start (subr, parms_have_cleanups)
5042 tree subr;
5043 int parms_have_cleanups;
5044 {
5045 char label[20], *name;
5046 static int nlab;
5047 tree thisarg;
5048 int argsz;
5049
5050 if (TREE_PUBLIC (subr))
5051 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
5052
5053 #ifdef DEBUG_PRINT_CODE
5054 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
5055 #endif
5056
5057 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
5058 {
5059 if (DECL_RTL (thisarg))
5060 abort (); /* Should be NULL here I think. */
5061 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
5062 {
5063 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5064 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
5065 }
5066 else
5067 {
5068 /* Variable-sized objects are pointers to their storage. */
5069 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5070 argsz += POINTER_SIZE;
5071 }
5072 }
5073
5074 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
5075
5076 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
5077
5078 ++nlab;
5079 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
5080 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
5081 this_function_bytecode =
5082 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
5083 }
5084
5085
5086 /* Expand end of bytecode function. See details in the comment of
5087 expand_function_end(), below. */
5088
5089 void
5090 bc_expand_function_end ()
5091 {
5092 char *ptrconsts;
5093
5094 expand_null_return ();
5095
5096 /* Emit any fixup code. This must be done before the call to
5097 BC_END_FUNCTION (), since that will cause the bytecode
5098 segment to be finished off and closed. */
5099
5100 expand_fixups (NULL_RTX);
5101
5102 ptrconsts = bc_end_function ();
5103
5104 bc_align_const (2 /* INT_ALIGN */);
5105
5106 /* If this changes, also make sure to change bc-interp.h! */
5107
5108 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
5109 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
5110 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
5111 bc_emit_const_labelref (this_function_bytecode, 0);
5112 bc_emit_const_labelref (ptrconsts, 0);
5113 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
5114 }
5115
5116
5117 /* Start the RTL for a new function, and set variables used for
5118 emitting RTL.
5119 SUBR is the FUNCTION_DECL node.
5120 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5121 the function's parameters, which must be run at any return statement. */
5122
5123 void
5124 expand_function_start (subr, parms_have_cleanups)
5125 tree subr;
5126 int parms_have_cleanups;
5127 {
5128 register int i;
5129 tree tem;
5130 rtx last_ptr;
5131
5132 if (output_bytecode)
5133 {
5134 bc_expand_function_start (subr, parms_have_cleanups);
5135 return;
5136 }
5137
5138 /* Make sure volatile mem refs aren't considered
5139 valid operands of arithmetic insns. */
5140 init_recog_no_volatile ();
5141
5142 /* If function gets a static chain arg, store it in the stack frame.
5143 Do this first, so it gets the first stack slot offset. */
5144 if (current_function_needs_context)
5145 {
5146 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5147
5148 #ifdef SMALL_REGISTER_CLASSES
5149 /* Delay copying the static chain if it is not in a register, to
5150 avoid conflicts with regs used for parameters. */
5151 if (GET_CODE (static_chain_incoming_rtx) == REG)
5152 #endif
5153 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5154 }
5155
5156 /* If the parameters of this function need cleaning up, get a label
5157 for the beginning of the code which executes those cleanups. This must
5158 be done before doing anything with return_label. */
5159 if (parms_have_cleanups)
5160 cleanup_label = gen_label_rtx ();
5161 else
5162 cleanup_label = 0;
5163
5164 /* Make the label for return statements to jump to, if this machine
5165 does not have a one-instruction return and uses an epilogue,
5166 or if it returns a structure, or if it has parm cleanups. */
5167 #ifdef HAVE_return
5168 if (cleanup_label == 0 && HAVE_return
5169 && ! current_function_returns_pcc_struct
5170 && ! (current_function_returns_struct && ! optimize))
5171 return_label = 0;
5172 else
5173 return_label = gen_label_rtx ();
5174 #else
5175 return_label = gen_label_rtx ();
5176 #endif
5177
5178 /* Initialize rtx used to return the value. */
5179 /* Do this before assign_parms so that we copy the struct value address
5180 before any library calls that assign parms might generate. */
5181
5182 /* Decide whether to return the value in memory or in a register. */
5183 if (aggregate_value_p (DECL_RESULT (subr)))
5184 {
5185 /* Returning something that won't go in a register. */
5186 register rtx value_address = 0;
5187
5188 #ifdef PCC_STATIC_STRUCT_RETURN
5189 if (current_function_returns_pcc_struct)
5190 {
5191 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5192 value_address = assemble_static_space (size);
5193 }
5194 else
5195 #endif
5196 {
5197 /* Expect to be passed the address of a place to store the value.
5198 If it is passed as an argument, assign_parms will take care of
5199 it. */
5200 if (struct_value_incoming_rtx)
5201 {
5202 value_address = gen_reg_rtx (Pmode);
5203 emit_move_insn (value_address, struct_value_incoming_rtx);
5204 }
5205 }
5206 if (value_address)
5207 {
5208 DECL_RTL (DECL_RESULT (subr))
5209 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
5210 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5211 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5212 }
5213 }
5214 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5215 /* If return mode is void, this decl rtl should not be used. */
5216 DECL_RTL (DECL_RESULT (subr)) = 0;
5217 else if (parms_have_cleanups)
5218 {
5219 /* If function will end with cleanup code for parms,
5220 compute the return values into a pseudo reg,
5221 which we will copy into the true return register
5222 after the cleanups are done. */
5223
5224 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5225
5226 #ifdef PROMOTE_FUNCTION_RETURN
5227 tree type = TREE_TYPE (DECL_RESULT (subr));
5228 int unsignedp = TREE_UNSIGNED (type);
5229
5230 mode = promote_mode (type, mode, &unsignedp, 1);
5231 #endif
5232
5233 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5234 }
5235 else
5236 /* Scalar, returned in a register. */
5237 {
5238 #ifdef FUNCTION_OUTGOING_VALUE
5239 DECL_RTL (DECL_RESULT (subr))
5240 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5241 #else
5242 DECL_RTL (DECL_RESULT (subr))
5243 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5244 #endif
5245
5246 /* Mark this reg as the function's return value. */
5247 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5248 {
5249 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5250 /* Needed because we may need to move this to memory
5251 in case it's a named return value whose address is taken. */
5252 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5253 }
5254 }
5255
5256 /* Initialize rtx for parameters and local variables.
5257 In some cases this requires emitting insns. */
5258
5259 assign_parms (subr, 0);
5260
5261 #ifdef SMALL_REGISTER_CLASSES
5262 /* Copy the static chain now if it wasn't in a register. The delay
5263 avoids conflicts with the parameter-passing registers. */
5264
5265 if (current_function_needs_context)
5266 if (GET_CODE (static_chain_incoming_rtx) != REG)
5267 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5268 #endif
5269
5270 /* The following was moved from init_function_start.
5271 The move is supposed to make sdb output more accurate. */
5272 /* Indicate the beginning of the function body,
5273 as opposed to parm setup. */
5274 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5275
5276 /* If doing stupid allocation, mark parms as born here. */
5277
5278 if (GET_CODE (get_last_insn ()) != NOTE)
5279 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5280 parm_birth_insn = get_last_insn ();
5281
5282 if (obey_regdecls)
5283 {
5284 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5285 use_variable (regno_reg_rtx[i]);
5286
5287 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5288 use_variable (current_function_internal_arg_pointer);
5289 }
5290
5291 context_display = 0;
5292 if (current_function_needs_context)
5293 {
5294 /* Fetch static chain values for containing functions. */
5295 tem = decl_function_context (current_function_decl);
5296 /* If not doing stupid register allocation, copy the static chain
5297 pointer into a pseudo. If we have small register classes, copy
5298 the value from memory if static_chain_incoming_rtx is a REG. If
5299 we do stupid register allocation, we use the stack address
5300 generated above. */
5301 if (tem && ! obey_regdecls)
5302 {
5303 #ifdef SMALL_REGISTER_CLASSES
5304 /* If the static chain originally came in a register, put it back
5305 there, then move it out in the next insn. The reason for
5306 this peculiar code is to satisfy function integration. */
5307 if (GET_CODE (static_chain_incoming_rtx) == REG)
5308 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5309 #endif
5310
5311 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5312 }
5313
5314 while (tem)
5315 {
5316 tree rtlexp = make_node (RTL_EXPR);
5317
5318 RTL_EXPR_RTL (rtlexp) = last_ptr;
5319 context_display = tree_cons (tem, rtlexp, context_display);
5320 tem = decl_function_context (tem);
5321 if (tem == 0)
5322 break;
5323 /* Chain through stack frames, assuming the pointer to the next
5324 lexical frame is found at the place we always store it. */
5325 #ifdef FRAME_GROWS_DOWNWARD
5326 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5327 #endif
5328 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
5329 memory_address (Pmode, last_ptr)));
5330
5331 /* If we are not optimizing, ensure that we know that this
5332 piece of context is live over the entire function. */
5333 if (! optimize)
5334 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
5335 save_expr_regs);
5336 }
5337 }
5338
5339 /* After the display initializations is where the tail-recursion label
5340 should go, if we end up needing one. Ensure we have a NOTE here,
5341 since some things (like trampolines) get placed before this. */
5342 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5343
5344 /* Evaluate now the sizes of any types declared among the arguments. */
5345 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5346 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
5347
5348 /* Make sure there is a line number after the function entry setup code. */
5349 force_next_line_note ();
5350 }
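
/* An illustration, disabled with `#if 0', of why the display walk
   above chains through stack frames: with GNU C nested functions, each
   additional lexical level costs one load of the enclosing frame's
   static chain. The functions here are invented for the example.  */
#if 0
int
outer (x)
     int x;
{
  int middle ()
    {
      /* INNER needs OUTER's frame; reaching X from INNER takes two
	 hops through the static chain set up above.  */
      int inner () { return x; }

      return inner ();
    }

  return middle ();
}
#endif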
5351 \f
5352 /* Generate RTL for the end of the current function.
5353 FILENAME and LINE are the current position in the source file.
5354
5355 It is up to language-specific callers to do cleanups for parameters;
5356 alternatively, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5357
5358 void
5359 expand_function_end (filename, line, end_bindings)
5360 char *filename;
5361 int line;
5362 int end_bindings;
5363 {
5364 register int i;
5365 tree link;
5366
5367 #ifdef TRAMPOLINE_TEMPLATE
5368 static rtx initial_trampoline;
5369 #endif
5370
5371 if (output_bytecode)
5372 {
5373 bc_expand_function_end ();
5374 return;
5375 }
5376
5377 #ifdef NON_SAVING_SETJMP
5378 /* Don't put any variables in registers if we call setjmp
5379 on a machine that fails to restore the registers. */
5380 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5381 {
5382 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5383 setjmp_protect (DECL_INITIAL (current_function_decl));
5384
5385 setjmp_protect_args ();
5386 }
5387 #endif
5388
5389 /* Save the argument pointer if a save area was made for it. */
5390 if (arg_pointer_save_area)
5391 {
5392 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5393 emit_insn_before (x, tail_recursion_reentry);
5394 }
5395
5396 /* Initialize any trampolines required by this function. */
5397 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5398 {
5399 tree function = TREE_PURPOSE (link);
5400 rtx context = lookup_static_chain (function);
5401 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5402 rtx blktramp;
5403 rtx seq;
5404
5405 #ifdef TRAMPOLINE_TEMPLATE
5406 /* First make sure this compilation has a template for
5407 initializing trampolines. */
5408 if (initial_trampoline == 0)
5409 {
5410 end_temporary_allocation ();
5411 initial_trampoline
5412 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
5413 resume_temporary_allocation ();
5414 }
5415 #endif
5416
5417 /* Generate insns to initialize the trampoline. */
5418 start_sequence ();
5419 tramp = round_trampoline_addr (XEXP (tramp, 0));
5420 #ifdef TRAMPOLINE_TEMPLATE
5421 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5422 emit_block_move (blktramp, initial_trampoline,
5423 GEN_INT (TRAMPOLINE_SIZE),
5424 FUNCTION_BOUNDARY / BITS_PER_UNIT);
5425 #endif
5426 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5427 seq = get_insns ();
5428 end_sequence ();
5429
5430 /* Put those insns at entry to the containing function (this one). */
5431 emit_insns_before (seq, tail_recursion_reentry);
5432 }
5433
5434 /* Warn about unused parms if extra warnings were specified. */
5435 if (warn_unused && extra_warnings)
5436 {
5437 tree decl;
5438
5439 for (decl = DECL_ARGUMENTS (current_function_decl);
5440 decl; decl = TREE_CHAIN (decl))
5441 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5442 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5443 warning_with_decl (decl, "unused parameter `%s'");
5444 }
5445
5446 /* Delete handlers for nonlocal gotos if nothing uses them. */
5447 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5448 delete_handlers ();
5449
5450 /* End any sequences that failed to be closed due to syntax errors. */
5451 while (in_sequence_p ())
5452 end_sequence ();
5453
5454 /* Outside a function body, we can't compute a type's actual size
5455 until the next function's body starts. */
5456 immediate_size_expand--;
5457
5458 /* If doing stupid register allocation,
5459 mark register parms as dying here. */
5460
5461 if (obey_regdecls)
5462 {
5463 rtx tem;
5464 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5465 use_variable (regno_reg_rtx[i]);
5466
5467 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5468
5469 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5470 {
5471 use_variable (XEXP (tem, 0));
5472 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5473 }
5474
5475 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5476 use_variable (current_function_internal_arg_pointer);
5477 }
5478
5479 clear_pending_stack_adjust ();
5480 do_pending_stack_adjust ();
5481
5482 /* Mark the end of the function body.
5483 If control reaches this insn, the function can drop through
5484 without returning a value. */
5485 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5486
5487 /* Output a line number for the end of the function.
5488 SDB depends on this. */
5489 emit_line_note_force (filename, line);
5490
5491 /* Output the label for the actual return from the function,
5492 if one is expected. This happens either because a function epilogue
5493 is used instead of a return instruction, or because a return was done
5494 with a goto in order to run local cleanups, or because of pcc-style
5495 structure returning. */
5496
5497 if (return_label)
5498 emit_label (return_label);
5499
5500 /* C++ uses this. */
5501 if (end_bindings)
5502 expand_end_bindings (0, 0, 0);
5503
5504 /* If we had calls to alloca, and this machine needs
5505 an accurate stack pointer to exit the function,
5506 insert some code to save and restore the stack pointer. */
5507 #ifdef EXIT_IGNORE_STACK
5508 if (! EXIT_IGNORE_STACK)
5509 #endif
5510 if (current_function_calls_alloca)
5511 {
5512 rtx tem = 0;
5513
5514 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5515 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5516 }
5517
5518 /* If scalar return value was computed in a pseudo-reg,
5519 copy that to the hard return register. */
5520 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5521 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5522 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5523 >= FIRST_PSEUDO_REGISTER))
5524 {
5525 rtx real_decl_result;
5526
5527 #ifdef FUNCTION_OUTGOING_VALUE
5528 real_decl_result
5529 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5530 current_function_decl);
5531 #else
5532 real_decl_result
5533 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5534 current_function_decl);
5535 #endif
5536 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5537 emit_move_insn (real_decl_result,
5538 DECL_RTL (DECL_RESULT (current_function_decl)));
5539 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
5540 }
5541
5542 /* If returning a structure, arrange to return the address of the value
5543 in a place where debuggers expect to find it.
5544
5545 If returning a structure PCC style,
5546 the caller also depends on this value.
5547 And current_function_returns_pcc_struct is not necessarily set. */
5548 if (current_function_returns_struct
5549 || current_function_returns_pcc_struct)
5550 {
5551 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5552 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5553 #ifdef FUNCTION_OUTGOING_VALUE
5554 rtx outgoing
5555 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5556 current_function_decl);
5557 #else
5558 rtx outgoing
5559 = FUNCTION_VALUE (build_pointer_type (type),
5560 current_function_decl);
5561 #endif
5562
5563 /* Mark this as a function return value so integrate will delete the
5564 assignment and USE below when inlining this function. */
5565 REG_FUNCTION_VALUE_P (outgoing) = 1;
5566
5567 emit_move_insn (outgoing, value_address);
5568 use_variable (outgoing);
5569 }
5570
5571 /* Output a return insn if we are using one.
5572 Otherwise, let the rtl chain end here, to drop through
5573 into the epilogue. */
5574
5575 #ifdef HAVE_return
5576 if (HAVE_return)
5577 {
5578 emit_jump_insn (gen_return ());
5579 emit_barrier ();
5580 }
5581 #endif
5582
5583 /* Fix up any gotos that jumped out to the outermost
5584 binding level of the function.
5585 Must follow emitting RETURN_LABEL. */
5586
5587 /* If you have any cleanups to do at this point,
5588 and they need to create temporary variables,
5589 then you will lose. */
5590 expand_fixups (get_insns ());
5591 }
5592 \f
5593 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5594
5595 static int *prologue;
5596 static int *epilogue;
5597
5598 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5599 or a single insn). The array is terminated with a zero UID. */
5600
5601 static int *
5602 record_insns (insns)
5603 rtx insns;
5604 {
5605 int *vec;
5606
5607 if (GET_CODE (insns) == SEQUENCE)
5608 {
5609 int len = XVECLEN (insns, 0);
5610 vec = (int *) oballoc ((len + 1) * sizeof (int));
5611 vec[len] = 0;
5612 while (--len >= 0)
5613 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5614 }
5615 else
5616 {
5617 vec = (int *) oballoc (2 * sizeof (int));
5618 vec[0] = INSN_UID (insns);
5619 vec[1] = 0;
5620 }
5621 return vec;
5622 }
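
/* Illustrative use, disabled with `#if 0': the vector is terminated
   with a zero UID, so callers recover the count by scanning, exactly
   as reposition_prologue_and_epilogue_notes does below. SEQ stands for
   any insn or sequence previously passed to record_insns.  */
#if 0
int *vec = record_insns (seq);
int len;

for (len = 0; vec[len]; len++)
  ;
/* LEN is now the number of insns whose UIDs were recorded.  */
#endif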
5623
5624 /* Determine how many of the INSN_UIDs in VEC occur within INSN (which may be a SEQUENCE). */
5625
5626 static int
5627 contains (insn, vec)
5628 rtx insn;
5629 int *vec;
5630 {
5631 register int i, j;
5632
5633 if (GET_CODE (insn) == INSN
5634 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5635 {
5636 int count = 0;
5637 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5638 for (j = 0; vec[j]; j++)
5639 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
5640 count++;
5641 return count;
5642 }
5643 else
5644 {
5645 for (j = 0; vec[j]; j++)
5646 if (INSN_UID (insn) == vec[j])
5647 return 1;
5648 }
5649 return 0;
5650 }
5651
5652 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5653 this into place with notes indicating where the prologue ends and where
5654 the epilogue begins. Update the basic block information when possible. */
5655
5656 void
5657 thread_prologue_and_epilogue_insns (f)
5658 rtx f;
5659 {
5660 #ifdef HAVE_prologue
5661 if (HAVE_prologue)
5662 {
5663 rtx head, seq;
5664
5665 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5666 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5667 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
5668 seq = gen_prologue ();
5669 head = emit_insn_after (seq, f);
5670
5671 /* Include the new prologue insns in the first block. Ignore them
5672 if they form a basic block unto themselves. */
5673 if (basic_block_head && n_basic_blocks
5674 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
5675 basic_block_head[0] = NEXT_INSN (f);
5676
5677 /* Retain a map of the prologue insns. */
5678 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
5679 }
5680 else
5681 #endif
5682 prologue = 0;
5683
5684 #ifdef HAVE_epilogue
5685 if (HAVE_epilogue)
5686 {
5687 rtx insn = get_last_insn ();
5688 rtx prev = prev_nonnote_insn (insn);
5689
5690 /* If we end with a BARRIER, we don't need an epilogue. */
5691 if (! (prev && GET_CODE (prev) == BARRIER))
5692 {
5693 rtx tail, seq, tem;
5694 rtx first_use = 0;
5695 rtx last_use = 0;
5696
5697 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5698 epilogue insns, the USE insns at the end of a function,
5699 the jump insn that returns, and then a BARRIER. */
5700
5701 /* Move the USE insns at the end of a function onto a list. */
5702 while (prev
5703 && GET_CODE (prev) == INSN
5704 && GET_CODE (PATTERN (prev)) == USE)
5705 {
5706 tem = prev;
5707 prev = prev_nonnote_insn (prev);
5708
5709 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
5710 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
5711 if (first_use)
5712 {
5713 NEXT_INSN (tem) = first_use;
5714 PREV_INSN (first_use) = tem;
5715 }
5716 first_use = tem;
5717 if (!last_use)
5718 last_use = tem;
5719 }
5720
5721 emit_barrier_after (insn);
5722
5723 seq = gen_epilogue ();
5724 tail = emit_jump_insn_after (seq, insn);
5725
5726 /* Insert the USE insns immediately before the return insn, which
5727 must be the first instruction before the final barrier. */
5728 if (first_use)
5729 {
5730 tem = prev_nonnote_insn (get_last_insn ());
5731 NEXT_INSN (PREV_INSN (tem)) = first_use;
5732 PREV_INSN (first_use) = PREV_INSN (tem);
5733 PREV_INSN (tem) = last_use;
5734 NEXT_INSN (last_use) = tem;
5735 }
5736
5737 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
5738
5739 /* Include the new epilogue insns in the last block. Ignore
5740 them if they form a basic block unto themselves. */
5741 if (basic_block_end && n_basic_blocks
5742 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
5743 basic_block_end[n_basic_blocks - 1] = tail;
5744
5745 /* Retain a map of the epilogue insns. */
5746 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
5747 return;
5748 }
5749 }
5750 #endif
5751 epilogue = 0;
5752 }
5753
5754 /* Reposition the prologue-end and epilogue-begin notes after instruction
5755 scheduling and delayed branch scheduling. */
5756
5757 void
5758 reposition_prologue_and_epilogue_notes (f)
5759 rtx f;
5760 {
5761 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5762 /* Reposition the prologue and epilogue notes. */
5763 if (n_basic_blocks)
5764 {
5765 rtx next, prev;
5766 int len;
5767
5768 if (prologue)
5769 {
5770 register rtx insn, note = 0;
5771
5772 /* Scan from the beginning until we reach the last prologue insn.
5773 We apparently can't depend on basic_block_{head,end} after
5774 reorg has run. */
5775 for (len = 0; prologue[len]; len++)
5776 ;
5777 for (insn = f; len && insn; insn = NEXT_INSN (insn))
5778 {
5779 if (GET_CODE (insn) == NOTE)
5780 {
5781 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5782 note = insn;
5783 }
5784 else if ((len -= contains (insn, prologue)) == 0)
5785 {
5786 /* Find the prologue-end note if we haven't already, and
5787 move it to just after the last prologue insn. */
5788 if (note == 0)
5789 {
5790 for (note = insn; (note = NEXT_INSN (note)); )
5791 if (GET_CODE (note) == NOTE
5792 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5793 break;
5794 }
5795 next = NEXT_INSN (note);
5796 prev = PREV_INSN (note);
5797 if (prev)
5798 NEXT_INSN (prev) = next;
5799 if (next)
5800 PREV_INSN (next) = prev;
5801 add_insn_after (note, insn);
5802 }
5803 }
5804 }
5805
5806 if (epilogue)
5807 {
5808 register rtx insn, note = 0;
5809
5810 /* Scan from the end until we reach the first epilogue insn.
5811 We apparently can't depend on basic_block_{head,end} after
5812 reorg has run. */
5813 for (len = 0; epilogue[len]; len++)
5814 ;
5815 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
5816 {
5817 if (GET_CODE (insn) == NOTE)
5818 {
5819 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5820 note = insn;
5821 }
5822 else if ((len -= contains (insn, epilogue)) == 0)
5823 {
5824 /* Find the epilogue-begin note if we haven't already, and
5825 move it to just before the first epilogue insn. */
5826 if (note == 0)
5827 {
5828 for (note = insn; (note = PREV_INSN (note)); )
5829 if (GET_CODE (note) == NOTE
5830 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5831 break;
5832 }
5833 next = NEXT_INSN (note);
5834 prev = PREV_INSN (note);
5835 if (prev)
5836 NEXT_INSN (prev) = next;
5837 if (next)
5838 PREV_INSN (next) = prev;
5839 add_insn_after (note, PREV_INSN (insn));
5840 }
5841 }
5842 }
5843 }
5844 #endif /* HAVE_prologue or HAVE_epilogue */
5845 }