/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "bytecode.h"
#include "bc-emit.h"

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next highest integer that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
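
/* Illustrative only, not part of the original source: with ALIGN == 8,
   FLOOR_ROUND (-9, 8) == -16 and CEIL_ROUND (-9, 8) == -8.  Both expand
   to pure bit operations, so no implementation-defined division on a
   negative VALUE is involved.  */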

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for that location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  int base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  int full_size;
};
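
/* Illustrative sketch of the intended protocol, not part of the original
   source (the functions named here are defined later in this file):

     push_temp_slots ();
     temp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
     ... emit code that uses TEMP ...
     pop_temp_slots ();

   pop_temp_slots frees TEMP unless it was preserved or kept;
   preserve_temp_slots pretends a slot was allocated one level up,
   which is how a ({...}) result outlives the statement computing it.  */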

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.
   Not an rtx.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
static tree round_down PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;
  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;
  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
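
/* Illustrative only, not part of the original source: a caller needing one
   word of frame space for an SImode scratch might write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   ALIGN == 0 requests the natural alignment of SImode, and the returned
   MEM addresses the slot via virtual_stack_vars_rtx until virtual
   registers have been instantiated.  */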

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx (MEM, BLKmode,
                                 plus_constant (XEXP (best_p->slot, 0),
                                                rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
                                         stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }
  return p->slot;
}
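
/* Illustrative only, not part of the original source: code expanding a
   statement that needs scratch memory, e.g. for a function returning a
   structure, might write

     rtx t = assign_stack_temp (BLKmode, int_size_in_bytes (type), 0);

   and rely on free_temp_slots () at the end of the statement to make
   the slot available for reuse.  */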
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      int size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
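
/* Illustrative only, not part of the original source: a caller that must
   have an addressable object, e.g. to pass by reference, might write

     rtx t = assign_temp (type, 0, 1, 0);

   MEMORY_REQUIRED == 1 forces a stack slot even when TYPE_MODE (type)
   would fit in a register.  */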
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}
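
/* Illustrative only, not part of the original source: two free BLKmode
   slots with base_offset 16/full_size 8 and base_offset 24/full_size 8
   are adjacent, so the loop above merges them into one slot with
   base_offset 16 and full_size 16, which a later assign_stack_temp can
   hand out as a single larger temporary.  */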
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}
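
/* Illustrative only, not part of the original source: besides exact
   matches on the slot's own address or a recorded alias, the PLUS case
   above lets an address of the form

     (plus (reg virtual_stack_vars) (const_int N))

   match any slot whose [base_offset, base_offset + full_size) range
   contains N, so pointers into a slot's alignment padding are found
   too.  */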

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
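
/* Illustrative only, not part of the original source: for a statement
   expression such as

     int x = ({ struct S s = f (); s.a; });

   the value of the last statement may live in a temporary slot, so the
   expander calls preserve_temp_slots on that result before the slots
   made by the inner statements are freed.  */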

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
                        promoted_mode, decl_mode, TREE_SIDE_EFFECTS (decl));
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
}
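
/* Illustrative only, not part of the original source: a fragment like

     int i;
     int *p = &i;

   can trigger this path; I may first live in a pseudo register, and
   taking its address retroactively moves it into a frame slot so that
   &i denotes a real stack address.  */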

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
{
  rtx new = 0;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
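
/* Illustrative only, not part of the original source: if an insn's
   pattern mentions the same MEM twice because of a MATCH_DUP, both
   occurrences must be rewritten to the same pseudo.  Looking X up in
   REPLACEMENTS before allocating a new entry is what guarantees that.  */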
1492
1493 /* Scan the insn-chain starting with INSN for refs to VAR
1494 and fix them up. TOPLEVEL is nonzero if this chain is the
1495 main chain of insns for the current function. */
1496
1497 static void
1498 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1499 rtx var;
1500 enum machine_mode promoted_mode;
1501 int unsignedp;
1502 rtx insn;
1503 int toplevel;
1504 {
1505 rtx call_dest = 0;
1506
1507 while (insn)
1508 {
1509 rtx next = NEXT_INSN (insn);
1510 rtx note;
1511 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1512 {
1513 /* If this is a CLOBBER of VAR, delete it.
1514
1515 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1516 and REG_RETVAL notes too. */
1517 if (GET_CODE (PATTERN (insn)) == CLOBBER
1518 && XEXP (PATTERN (insn), 0) == var)
1519 {
1520 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1521 /* The REG_LIBCALL note will go away since we are going to
1522 turn INSN into a NOTE, so just delete the
1523 corresponding REG_RETVAL note. */
1524 remove_note (XEXP (note, 0),
1525 find_reg_note (XEXP (note, 0), REG_RETVAL,
1526 NULL_RTX));
1527
1528 /* In unoptimized compilation, we shouldn't call delete_insn
1529 except in jump.c doing warnings. */
1530 PUT_CODE (insn, NOTE);
1531 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1532 NOTE_SOURCE_FILE (insn) = 0;
1533 }
1534
1535 /* The insn to load VAR from a home in the arglist
1536 is now a no-op. When we see it, just delete it. */
1537 else if (toplevel
1538 && GET_CODE (PATTERN (insn)) == SET
1539 && SET_DEST (PATTERN (insn)) == var
1540 /* If this represents the result of an insn group,
1541 don't delete the insn. */
1542 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1543 && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
1544 {
1545 /* In unoptimized compilation, we shouldn't call delete_insn
1546 except in jump.c doing warnings. */
1547 PUT_CODE (insn, NOTE);
1548 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1549 NOTE_SOURCE_FILE (insn) = 0;
1550 if (insn == last_parm_insn)
1551 last_parm_insn = PREV_INSN (next);
1552 }
1553 else
1554 {
1555 struct fixup_replacement *replacements = 0;
1556 rtx next_insn = NEXT_INSN (insn);
1557
1558 #ifdef SMALL_REGISTER_CLASSES
1559 /* If the insn that copies the results of a CALL_INSN
1560 into a pseudo now references VAR, we have to use an
1561 intermediate pseudo since we want the life of the
1562 return value register to be only a single insn.
1563
1564 If we don't use an intermediate pseudo, such things as
1565 address computations to make the address of VAR valid
1566 if it is not can be placed between the CALL_INSN and INSN.
1567
1568 To make sure this doesn't happen, we record the destination
1569 of the CALL_INSN and see if the next insn uses both that
1570 and VAR. */
1571
1572 if (call_dest != 0 && GET_CODE (insn) == INSN
1573 && reg_mentioned_p (var, PATTERN (insn))
1574 && reg_mentioned_p (call_dest, PATTERN (insn)))
1575 {
1576 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1577
1578 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1579
1580 PATTERN (insn) = replace_rtx (PATTERN (insn),
1581 call_dest, temp);
1582 }
1583
1584 if (GET_CODE (insn) == CALL_INSN
1585 && GET_CODE (PATTERN (insn)) == SET)
1586 call_dest = SET_DEST (PATTERN (insn));
1587 else if (GET_CODE (insn) == CALL_INSN
1588 && GET_CODE (PATTERN (insn)) == PARALLEL
1589 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1590 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1591 else
1592 call_dest = 0;
1593 #endif
1594
1595 /* See if we have to do anything to INSN now that VAR is in
1596 memory. If it needs to be loaded into a pseudo, use a single
1597 pseudo for the entire insn in case there is a MATCH_DUP
1598 between two operands. We pass a pointer to the head of
1599 a list of struct fixup_replacements. If fixup_var_refs_1
1600 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1601 it will record them in this list.
1602
1603 If it allocated a pseudo for any replacement, we copy into
1604 it here. */
1605
1606 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1607 &replacements);
1608
1609 /* If this is last_parm_insn, and any instructions were output
1610 after it to fix it up, then we must set last_parm_insn to
1611 the last such instruction emitted. */
1612 if (insn == last_parm_insn)
1613 last_parm_insn = PREV_INSN (next_insn);
1614
1615 while (replacements)
1616 {
1617 if (GET_CODE (replacements->new) == REG)
1618 {
1619 rtx insert_before;
1620 rtx seq;
1621
1622 /* OLD might be a (subreg (mem)). */
1623 if (GET_CODE (replacements->old) == SUBREG)
1624 replacements->old
1625 = fixup_memory_subreg (replacements->old, insn, 0);
1626 else
1627 replacements->old
1628 = fixup_stack_1 (replacements->old, insn);
1629
1630 insert_before = insn;
1631
1632 /* If we are changing the mode, do a conversion.
1633 This might be wasteful, but combine.c will
1634 eliminate much of the waste. */
1635
1636 if (GET_MODE (replacements->new)
1637 != GET_MODE (replacements->old))
1638 {
1639 start_sequence ();
1640 convert_move (replacements->new,
1641 replacements->old, unsignedp);
1642 seq = gen_sequence ();
1643 end_sequence ();
1644 }
1645 else
1646 seq = gen_move_insn (replacements->new,
1647 replacements->old);
1648
1649 emit_insn_before (seq, insert_before);
1650 }
1651
1652 replacements = replacements->next;
1653 }
1654 }
1655
1656 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1657 But don't touch other insns referred to by reg-notes;
1658 we will get them elsewhere. */
1659 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1660 if (GET_CODE (note) != INSN_LIST)
1661 XEXP (note, 0)
1662 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1663 }
1664 insn = next;
1665 }
1666 }
1667 \f
1668 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1669 See if the rtx expression at *LOC in INSN needs to be changed.
1670
1671 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1672 contain a list of original rtx's and replacements. If we find that we need
1673 to modify this insn by replacing a memory reference with a pseudo or by
1674 making a new MEM to implement a SUBREG, we consult that list to see if
1675 we have already chosen a replacement. If none has already been allocated,
1676 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1677 or the SUBREG, as appropriate, to the pseudo. */
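
/* An illustrative sketch (not part of the interface contract above): if
   VAR is the slot (mem:SI (plus:SI (reg fp) (const_int -4))) and INSN is
   (set (reg:SI 100) (plus:SI VAR VAR)), both occurrences of VAR must be
   rewritten to the same new pseudo, say (reg:SI 101), because a
   MATCH_DUP in the insn pattern may require the two operands to be
   identical; the REPLACEMENTS list is what enforces that sharing.  */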
1678
1679 static void
1680 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1681 register rtx var;
1682 enum machine_mode promoted_mode;
1683 register rtx *loc;
1684 rtx insn;
1685 struct fixup_replacement **replacements;
1686 {
1687 register int i;
1688 register rtx x = *loc;
1689 RTX_CODE code = GET_CODE (x);
1690 register char *fmt;
1691 register rtx tem, tem1;
1692 struct fixup_replacement *replacement;
1693
1694 switch (code)
1695 {
1696 case MEM:
1697 if (var == x)
1698 {
1699 /* If we already have a replacement, use it. Otherwise,
1700 try to fix up this address in case it is invalid. */
1701
1702 replacement = find_fixup_replacement (replacements, var);
1703 if (replacement->new)
1704 {
1705 *loc = replacement->new;
1706 return;
1707 }
1708
1709 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1710
1711 /* Unless we are forcing memory to register or we changed the mode,
1712 we can leave things the way they are if the insn is valid. */
1713
1714 INSN_CODE (insn) = -1;
1715 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1716 && recog_memoized (insn) >= 0)
1717 return;
1718
1719 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1720 return;
1721 }
1722
1723 /* If X contains VAR, we need to unshare it here so that we update
1724 each occurrence separately. But all identical MEMs in one insn
1725 must be replaced with the same rtx because of the possibility of
1726 MATCH_DUPs. */
1727
1728 if (reg_mentioned_p (var, x))
1729 {
1730 replacement = find_fixup_replacement (replacements, x);
1731 if (replacement->new == 0)
1732 replacement->new = copy_most_rtx (x, var);
1733
1734 *loc = x = replacement->new;
1735 }
1736 break;
1737
1738 case REG:
1739 case CC0:
1740 case PC:
1741 case CONST_INT:
1742 case CONST:
1743 case SYMBOL_REF:
1744 case LABEL_REF:
1745 case CONST_DOUBLE:
1746 return;
1747
1748 case SIGN_EXTRACT:
1749 case ZERO_EXTRACT:
1750 /* Note that in some cases those types of expressions are altered
1751 by optimize_bit_field, and do not survive to get here. */
1752 if (XEXP (x, 0) == var
1753 || (GET_CODE (XEXP (x, 0)) == SUBREG
1754 && SUBREG_REG (XEXP (x, 0)) == var))
1755 {
1756 /* Get TEM as a valid MEM in the mode presently in the insn.
1757
1758 We don't worry about the possibility of MATCH_DUP here; it
1759 is highly unlikely and would be tricky to handle. */
1760
1761 tem = XEXP (x, 0);
1762 if (GET_CODE (tem) == SUBREG)
1763 {
1764 if (GET_MODE_BITSIZE (GET_MODE (tem))
1765 > GET_MODE_BITSIZE (GET_MODE (var)))
1766 {
1767 replacement = find_fixup_replacement (replacements, var);
1768 if (replacement->new == 0)
1769 replacement->new = gen_reg_rtx (GET_MODE (var));
1770 SUBREG_REG (tem) = replacement->new;
1771 }
1772
1773 tem = fixup_memory_subreg (tem, insn, 0);
1774 }
1775 else
1776 tem = fixup_stack_1 (tem, insn);
1777
1778 /* Unless we want to load from memory, get TEM into the proper mode
1779 for an extract from memory. This can only be done if the
1780 extract is at a constant position and length. */
1781
1782 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1783 && GET_CODE (XEXP (x, 2)) == CONST_INT
1784 && ! mode_dependent_address_p (XEXP (tem, 0))
1785 && ! MEM_VOLATILE_P (tem))
1786 {
1787 enum machine_mode wanted_mode = VOIDmode;
1788 enum machine_mode is_mode = GET_MODE (tem);
1789 int width = INTVAL (XEXP (x, 1));
1790 int pos = INTVAL (XEXP (x, 2));
1791
1792 #ifdef HAVE_extzv
1793 if (GET_CODE (x) == ZERO_EXTRACT)
1794 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1795 #endif
1796 #ifdef HAVE_extv
1797 if (GET_CODE (x) == SIGN_EXTRACT)
1798 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1799 #endif
1800 /* If we have a narrower mode, we can do something. */
1801 if (wanted_mode != VOIDmode
1802 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1803 {
1804 int offset = pos / BITS_PER_UNIT;
1805 rtx old_pos = XEXP (x, 2);
1806 rtx newmem;
1807
1808 /* If the bytes and bits are counted differently, we
1809 must adjust the offset. */
1810 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1811 offset = (GET_MODE_SIZE (is_mode)
1812 - GET_MODE_SIZE (wanted_mode) - offset);
1813
1814 pos %= GET_MODE_BITSIZE (wanted_mode);
1815
1816 newmem = gen_rtx (MEM, wanted_mode,
1817 plus_constant (XEXP (tem, 0), offset));
1818 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1819 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1820 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1821
1822 /* Make the change and see if the insn remains valid. */
1823 INSN_CODE (insn) = -1;
1824 XEXP (x, 0) = newmem;
1825 XEXP (x, 2) = GEN_INT (pos);
1826
1827 if (recog_memoized (insn) >= 0)
1828 return;
1829
1830 /* Otherwise, restore old position. XEXP (x, 0) will be
1831 restored later. */
1832 XEXP (x, 2) = old_pos;
1833 }
1834 }
1835
1836 /* If we get here, the bitfield extract insn can't accept a memory
1837 reference. Copy the input into a register. */
1838
1839 tem1 = gen_reg_rtx (GET_MODE (tem));
1840 emit_insn_before (gen_move_insn (tem1, tem), insn);
1841 XEXP (x, 0) = tem1;
1842 return;
1843 }
1844 break;
1845
1846 case SUBREG:
1847 if (SUBREG_REG (x) == var)
1848 {
1849 /* If this is a special SUBREG made because VAR was promoted
1850 from a wider mode, replace it with VAR and call ourself
1851 recursively, this time saying that the object previously
1852 had its current mode (by virtue of the SUBREG). */
1853
1854 if (SUBREG_PROMOTED_VAR_P (x))
1855 {
1856 *loc = var;
1857 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1858 return;
1859 }
1860
1861 /* If this SUBREG makes VAR wider, it has become a paradoxical
1862 SUBREG with VAR in memory, but these aren't allowed at this
1863 stage of the compilation. So load VAR into a pseudo and take
1864 a SUBREG of that pseudo. */
1865 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1866 {
1867 replacement = find_fixup_replacement (replacements, var);
1868 if (replacement->new == 0)
1869 replacement->new = gen_reg_rtx (GET_MODE (var));
1870 SUBREG_REG (x) = replacement->new;
1871 return;
1872 }
1873
1874 /* See if we have already found a replacement for this SUBREG.
1875 If so, use it. Otherwise, make a MEM and see if the insn
1876 is recognized. If not, or if we should force MEM into a register,
1877 make a pseudo for this SUBREG. */
1878 replacement = find_fixup_replacement (replacements, x);
1879 if (replacement->new)
1880 {
1881 *loc = replacement->new;
1882 return;
1883 }
1884
1885 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1886
1887 INSN_CODE (insn) = -1;
1888 if (! flag_force_mem && recog_memoized (insn) >= 0)
1889 return;
1890
1891 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1892 return;
1893 }
1894 break;
1895
1896 case SET:
1897 /* First do special simplification of bit-field references. */
1898 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1899 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1900 optimize_bit_field (x, insn, 0);
1901 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1902 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1903 optimize_bit_field (x, insn, NULL_PTR);
1904
1905 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1906 into a register and then store it back out. */
1907 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1908 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
1909 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
1910 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
1911 > GET_MODE_SIZE (GET_MODE (var))))
1912 {
1913 replacement = find_fixup_replacement (replacements, var);
1914 if (replacement->new == 0)
1915 replacement->new = gen_reg_rtx (GET_MODE (var));
1916
1917 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
1918 emit_insn_after (gen_move_insn (var, replacement->new), insn);
1919 }
1920
1921 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1922 insn into a pseudo and store the low part of the pseudo into VAR. */
1923 if (GET_CODE (SET_DEST (x)) == SUBREG
1924 && SUBREG_REG (SET_DEST (x)) == var
1925 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1926 > GET_MODE_SIZE (GET_MODE (var))))
1927 {
1928 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1929 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1930 tem)),
1931 insn);
1932 break;
1933 }
1934
1935 {
1936 rtx dest = SET_DEST (x);
1937 rtx src = SET_SRC (x);
1938 rtx outerdest = dest;
1939
1940 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1941 || GET_CODE (dest) == SIGN_EXTRACT
1942 || GET_CODE (dest) == ZERO_EXTRACT)
1943 dest = XEXP (dest, 0);
1944
1945 if (GET_CODE (src) == SUBREG)
1946 src = XEXP (src, 0);
1947
1948 /* If VAR does not appear at the top level of the SET,
1949 just scan the lower levels of the tree. */
1950
1951 if (src != var && dest != var)
1952 break;
1953
1954 /* We will need to rerecognize this insn. */
1955 INSN_CODE (insn) = -1;
1956
1957 #ifdef HAVE_insv
1958 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1959 {
1960 /* Since this case will return, ensure we fix up all the
1961 operands here. */
1962 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1963 insn, replacements);
1964 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1965 insn, replacements);
1966 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1967 insn, replacements);
1968
1969 tem = XEXP (outerdest, 0);
1970
1971 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1972 that may appear inside a ZERO_EXTRACT.
1973 This was legitimate when the MEM was a REG. */
1974 if (GET_CODE (tem) == SUBREG
1975 && SUBREG_REG (tem) == var)
1976 tem = fixup_memory_subreg (tem, insn, 0);
1977 else
1978 tem = fixup_stack_1 (tem, insn);
1979
1980 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1981 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1982 && ! mode_dependent_address_p (XEXP (tem, 0))
1983 && ! MEM_VOLATILE_P (tem))
1984 {
1985 enum machine_mode wanted_mode
1986 = insn_operand_mode[(int) CODE_FOR_insv][0];
1987 enum machine_mode is_mode = GET_MODE (tem);
1988 int width = INTVAL (XEXP (outerdest, 1));
1989 int pos = INTVAL (XEXP (outerdest, 2));
1990
1991 /* If we have a narrower mode, we can do something. */
1992 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1993 {
1994 int offset = pos / BITS_PER_UNIT;
1995 rtx old_pos = XEXP (outerdest, 2);
1996 rtx newmem;
1997
1998 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1999 offset = (GET_MODE_SIZE (is_mode)
2000 - GET_MODE_SIZE (wanted_mode) - offset);
2001
2002 pos %= GET_MODE_BITSIZE (wanted_mode);
2003
2004 newmem = gen_rtx (MEM, wanted_mode,
2005 plus_constant (XEXP (tem, 0), offset));
2006 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2007 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2008 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2009
2010 /* Make the change and see if the insn remains valid. */
2011 INSN_CODE (insn) = -1;
2012 XEXP (outerdest, 0) = newmem;
2013 XEXP (outerdest, 2) = GEN_INT (pos);
2014
2015 if (recog_memoized (insn) >= 0)
2016 return;
2017
2018 /* Otherwise, restore old position. XEXP (outerdest, 0) will be
2019 restored later. */
2020 XEXP (outerdest, 2) = old_pos;
2021 }
2022 }
2023
2024 /* If we get here, the bit-field store doesn't allow memory
2025 or isn't located at a constant position. Load the value into
2026 a register, do the store, and put it back into memory. */
2027
2028 tem1 = gen_reg_rtx (GET_MODE (tem));
2029 emit_insn_before (gen_move_insn (tem1, tem), insn);
2030 emit_insn_after (gen_move_insn (tem, tem1), insn);
2031 XEXP (outerdest, 0) = tem1;
2032 return;
2033 }
2034 #endif
2035
2036 /* STRICT_LOW_PART is a no-op on memory references
2037 and it can cause combinations to be unrecognizable,
2038 so eliminate it. */
2039
2040 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2041 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2042
2043 /* A valid insn to copy VAR into or out of a register
2044 must be left alone, to avoid an infinite loop here.
2045 If the reference to VAR is by a subreg, fix that up,
2046 since SUBREG is not valid for a memref.
2047 Also fix up the address of the stack slot.
2048
2049 Note that we must not try to recognize the insn until
2050 after we know that we have valid addresses and no
2051 (subreg (mem ...) ...) constructs, since these interfere
2052 with determining the validity of the insn. */
2053
2054 if ((SET_SRC (x) == var
2055 || (GET_CODE (SET_SRC (x)) == SUBREG
2056 && SUBREG_REG (SET_SRC (x)) == var))
2057 && (GET_CODE (SET_DEST (x)) == REG
2058 || (GET_CODE (SET_DEST (x)) == SUBREG
2059 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2060 && GET_MODE (var) == promoted_mode
2061 && x == single_set (insn))
2062 {
2063 rtx pat;
2064
2065 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2066 if (replacement->new)
2067 SET_SRC (x) = replacement->new;
2068 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2069 SET_SRC (x) = replacement->new
2070 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2071 else
2072 SET_SRC (x) = replacement->new
2073 = fixup_stack_1 (SET_SRC (x), insn);
2074
2075 if (recog_memoized (insn) >= 0)
2076 return;
2077
2078 /* INSN is not valid, but we know that we want to
2079 copy SET_SRC (x) to SET_DEST (x) in some way. So
2080 we generate the move and see whether it requires more
2081 than one insn. If it does, we emit those insns and
2082 delete INSN. Otherwise, we can just replace the pattern
2083 of INSN; we have already verified above that INSN has
2084 no other function than to do X. */
2085
2086 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2087 if (GET_CODE (pat) == SEQUENCE)
2088 {
2089 emit_insn_after (pat, insn);
2090 PUT_CODE (insn, NOTE);
2091 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2092 NOTE_SOURCE_FILE (insn) = 0;
2093 }
2094 else
2095 PATTERN (insn) = pat;
2096
2097 return;
2098 }
2099
2100 if ((SET_DEST (x) == var
2101 || (GET_CODE (SET_DEST (x)) == SUBREG
2102 && SUBREG_REG (SET_DEST (x)) == var))
2103 && (GET_CODE (SET_SRC (x)) == REG
2104 || (GET_CODE (SET_SRC (x)) == SUBREG
2105 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2106 && GET_MODE (var) == promoted_mode
2107 && x == single_set (insn))
2108 {
2109 rtx pat;
2110
2111 if (GET_CODE (SET_DEST (x)) == SUBREG)
2112 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2113 else
2114 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2115
2116 if (recog_memoized (insn) >= 0)
2117 return;
2118
2119 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2120 if (GET_CODE (pat) == SEQUENCE)
2121 {
2122 emit_insn_after (pat, insn);
2123 PUT_CODE (insn, NOTE);
2124 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2125 NOTE_SOURCE_FILE (insn) = 0;
2126 }
2127 else
2128 PATTERN (insn) = pat;
2129
2130 return;
2131 }
2132
2133 /* Otherwise, storing into VAR must be handled specially
2134 by storing into a temporary and copying that into VAR
2135 with a new insn after this one. Note that this case
2136 will be used when storing into a promoted scalar since
2137 the insn will now have different modes on the input
2138 and output and hence will be invalid (except for the case
2139 of setting it to a constant, which does not need any
2140 change if it is valid). We generate extra code in that case,
2141 but combine.c will eliminate it. */
2142
2143 if (dest == var)
2144 {
2145 rtx temp;
2146 rtx fixeddest = SET_DEST (x);
2147
2148 /* A STRICT_LOW_PART around a MEM can be discarded. */
2149 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2150 fixeddest = XEXP (fixeddest, 0);
2151 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2152 if (GET_CODE (fixeddest) == SUBREG)
2153 {
2154 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2155 promoted_mode = GET_MODE (fixeddest);
2156 }
2157 else
2158 fixeddest = fixup_stack_1 (fixeddest, insn);
2159
2160 temp = gen_reg_rtx (promoted_mode);
2161
2162 emit_insn_after (gen_move_insn (fixeddest,
2163 gen_lowpart (GET_MODE (fixeddest),
2164 temp)),
2165 insn);
2166
2167 SET_DEST (x) = temp;
2168 }
2169 }
2170 }
2171
2172 /* Nothing special about this RTX; fix its operands. */
2173
2174 fmt = GET_RTX_FORMAT (code);
2175 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2176 {
2177 if (fmt[i] == 'e')
2178 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2179 if (fmt[i] == 'E')
2180 {
2181 register int j;
2182 for (j = 0; j < XVECLEN (x, i); j++)
2183 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2184 insn, replacements);
2185 }
2186 }
2187 }
2188 \f
2189 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2190 return an rtx (MEM:m1 newaddr) which is equivalent.
2191 If any insns must be emitted to compute NEWADDR, put them before INSN.
2192
2193 UNCRITICAL nonzero means accept paradoxical subregs.
2194 This is used for subregs found inside REG_NOTES. */
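
/* An illustrative sketch (hypothetical target parameters): assuming
   4-byte words on a little-endian machine, (subreg:SI (mem:DI addr) 1)
   denotes word 1 of the DImode memory, so the offset is
   SUBREG_WORD * UNITS_PER_WORD = 4 and the result is
   (mem:SI (plus addr (const_int 4))).  For modes narrower than a word
   on a big-endian machine, a correction is added below so the narrow
   reference still names the intended bytes.  */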
2195
2196 static rtx
2197 fixup_memory_subreg (x, insn, uncritical)
2198 rtx x;
2199 rtx insn;
2200 int uncritical;
2201 {
2202 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2203 rtx addr = XEXP (SUBREG_REG (x), 0);
2204 enum machine_mode mode = GET_MODE (x);
2205 rtx saved, result;
2206
2207 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2208 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2209 && ! uncritical)
2210 abort ();
2211
2212 if (BYTES_BIG_ENDIAN)
2213 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2214 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2215 addr = plus_constant (addr, offset);
2216 if (!flag_force_addr && memory_address_p (mode, addr))
2217 /* Shortcut if no insns need be emitted. */
2218 return change_address (SUBREG_REG (x), mode, addr);
2219 start_sequence ();
2220 result = change_address (SUBREG_REG (x), mode, addr);
2221 emit_insn_before (gen_sequence (), insn);
2222 end_sequence ();
2223 return result;
2224 }
2225
2226 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2227 Replace subexpressions of X in place.
2228 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2229 Otherwise return X, with its contents possibly altered.
2230
2231 If any insns must be emitted to compute NEWADDR, put them before INSN.
2232
2233 UNCRITICAL is as in fixup_memory_subreg. */
2234
2235 static rtx
2236 walk_fixup_memory_subreg (x, insn, uncritical)
2237 register rtx x;
2238 rtx insn;
2239 int uncritical;
2240 {
2241 register enum rtx_code code;
2242 register char *fmt;
2243 register int i;
2244
2245 if (x == 0)
2246 return 0;
2247
2248 code = GET_CODE (x);
2249
2250 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2251 return fixup_memory_subreg (x, insn, uncritical);
2252
2253 /* Nothing special about this RTX; fix its operands. */
2254
2255 fmt = GET_RTX_FORMAT (code);
2256 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2257 {
2258 if (fmt[i] == 'e')
2259 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2260 if (fmt[i] == 'E')
2261 {
2262 register int j;
2263 for (j = 0; j < XVECLEN (x, i); j++)
2264 XVECEXP (x, i, j)
2265 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2266 }
2267 }
2268 return x;
2269 }
2270 \f
2271 /* For each memory ref within X, if it refers to a stack slot
2272 with an out-of-range displacement, put the address in a temp register
2273 (emitting new insns before INSN to load these registers)
2274 and alter the memory ref to use that register.
2275 Replace each such MEM rtx with a copy, to avoid clobberage. */
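
/* A sketch of the case handled below (hypothetical numbers): on a
   machine whose addressing modes allow only small displacements, a
   reference such as (mem:SI (plus (reg virtual-stack-vars)
   (const_int 40000))) may not be a valid address; in that case we emit,
   before INSN, insns that copy the PLUS into a fresh pseudo R and
   rewrite the reference as (mem:SI R).  */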
2276
2277 static rtx
2278 fixup_stack_1 (x, insn)
2279 rtx x;
2280 rtx insn;
2281 {
2282 register int i;
2283 register RTX_CODE code = GET_CODE (x);
2284 register char *fmt;
2285
2286 if (code == MEM)
2287 {
2288 register rtx ad = XEXP (x, 0);
2289 /* If we have address of a stack slot but it's not valid
2290 (displacement is too large), compute the sum in a register. */
2291 if (GET_CODE (ad) == PLUS
2292 && GET_CODE (XEXP (ad, 0)) == REG
2293 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2294 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2295 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2296 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2297 {
2298 rtx temp, seq;
2299 if (memory_address_p (GET_MODE (x), ad))
2300 return x;
2301
2302 start_sequence ();
2303 temp = copy_to_reg (ad);
2304 seq = gen_sequence ();
2305 end_sequence ();
2306 emit_insn_before (seq, insn);
2307 return change_address (x, VOIDmode, temp);
2308 }
2309 return x;
2310 }
2311
2312 fmt = GET_RTX_FORMAT (code);
2313 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2314 {
2315 if (fmt[i] == 'e')
2316 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2317 if (fmt[i] == 'E')
2318 {
2319 register int j;
2320 for (j = 0; j < XVECLEN (x, i); j++)
2321 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2322 }
2323 }
2324 return x;
2325 }
2326 \f
2327 /* Optimization: a bit-field instruction whose field
2328 happens to be a byte or halfword in memory
2329 can be changed to a move instruction.
2330
2331 We call here when INSN is an insn to examine or store into a bit-field.
2332 BODY is the SET-rtx to be altered.
2333
2334 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2335 (Currently this is called only from function.c, and EQUIV_MEM
2336 is always 0.) */
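
/* An illustrative sketch (assuming 8-bit units and that bits and bytes
   are numbered the same way): the store
   (set (zero_extract:SI (mem:SI ADDR) (const_int 8) (const_int 8))
        (reg:SI N))
   names a byte-sized, byte-aligned field, so it can become the plain
   byte store (set (mem:QI (plus ADDR (const_int 1))) ...), with the
   source converted to QImode via gen_lowpart.  */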
2337
2338 static void
2339 optimize_bit_field (body, insn, equiv_mem)
2340 rtx body;
2341 rtx insn;
2342 rtx *equiv_mem;
2343 {
2344 register rtx bitfield;
2345 int destflag;
2346 rtx seq = 0;
2347 enum machine_mode mode;
2348
2349 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2350 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2351 bitfield = SET_DEST (body), destflag = 1;
2352 else
2353 bitfield = SET_SRC (body), destflag = 0;
2354
2355 /* First check that the field being stored has constant size and position
2356 and is in fact a byte or halfword suitably aligned. */
2357
2358 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2359 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2360 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2361 != BLKmode)
2362 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2363 {
2364 register rtx memref = 0;
2365
2366 /* Now check that the containing word is memory, not a register,
2367 and that it is safe to change the machine mode. */
2368
2369 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2370 memref = XEXP (bitfield, 0);
2371 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2372 && equiv_mem != 0)
2373 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2374 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2375 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2376 memref = SUBREG_REG (XEXP (bitfield, 0));
2377 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2378 && equiv_mem != 0
2379 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2380 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2381
2382 if (memref
2383 && ! mode_dependent_address_p (XEXP (memref, 0))
2384 && ! MEM_VOLATILE_P (memref))
2385 {
2386 /* Now adjust the address, first for any subreg'ing
2387 that we are now getting rid of,
2388 and then for which byte of the word is wanted. */
2389
2390 register int offset = INTVAL (XEXP (bitfield, 2));
2391 rtx insns;
2392
2393 /* Adjust OFFSET to count bits from low-address byte. */
2394 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2395 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2396 - offset - INTVAL (XEXP (bitfield, 1)));
2397
2398 /* Adjust OFFSET to count bytes from low-address byte. */
2399 offset /= BITS_PER_UNIT;
2400 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2401 {
2402 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2403 if (BYTES_BIG_ENDIAN)
2404 offset -= (MIN (UNITS_PER_WORD,
2405 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2406 - MIN (UNITS_PER_WORD,
2407 GET_MODE_SIZE (GET_MODE (memref))));
2408 }
2409
2410 start_sequence ();
2411 memref = change_address (memref, mode,
2412 plus_constant (XEXP (memref, 0), offset));
2413 insns = get_insns ();
2414 end_sequence ();
2415 emit_insns_before (insns, insn);
2416
2417 /* Store this memory reference where
2418 we found the bit field reference. */
2419
2420 if (destflag)
2421 {
2422 validate_change (insn, &SET_DEST (body), memref, 1);
2423 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2424 {
2425 rtx src = SET_SRC (body);
2426 while (GET_CODE (src) == SUBREG
2427 && SUBREG_WORD (src) == 0)
2428 src = SUBREG_REG (src);
2429 if (GET_MODE (src) != GET_MODE (memref))
2430 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2431 validate_change (insn, &SET_SRC (body), src, 1);
2432 }
2433 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2434 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2435 /* This shouldn't happen because anything that didn't have
2436 one of these modes should have been converted explicitly
2437 and then referenced through a subreg.
2438 This is so because the original bit-field was
2439 handled by agg_mode and so its tree structure had
2440 the same mode that memref now has. */
2441 abort ();
2442 }
2443 else
2444 {
2445 rtx dest = SET_DEST (body);
2446
2447 while (GET_CODE (dest) == SUBREG
2448 && SUBREG_WORD (dest) == 0
2449 && (GET_MODE_CLASS (GET_MODE (dest))
2450 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2451 dest = SUBREG_REG (dest);
2452
2453 validate_change (insn, &SET_DEST (body), dest, 1);
2454
2455 if (GET_MODE (dest) == GET_MODE (memref))
2456 validate_change (insn, &SET_SRC (body), memref, 1);
2457 else
2458 {
2459 /* Convert the mem ref to the destination mode. */
2460 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2461
2462 start_sequence ();
2463 convert_move (newreg, memref,
2464 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2465 seq = get_insns ();
2466 end_sequence ();
2467
2468 validate_change (insn, &SET_SRC (body), newreg, 1);
2469 }
2470 }
2471
2472 /* See if we can convert this extraction or insertion into
2473 a simple move insn. We might not be able to do so if this
2474 was, for example, part of a PARALLEL.
2475
2476 If we succeed, write out any needed conversions. If we fail,
2477 it is hard to guess why we failed, so don't do anything
2478 special; just let the optimization be suppressed. */
2479
2480 if (apply_change_group () && seq)
2481 emit_insns_before (seq, insn);
2482 }
2483 }
2484 }
2485 \f
2486 /* These routines are responsible for converting virtual register references
2487 to the actual hard register references once RTL generation is complete.
2488
2489 The following four variables are used for communication between the
2490 routines. They contain the offsets of the virtual registers from their
2491 respective hard registers. */
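
/* For example (a hypothetical layout): if FIRST_PARM_OFFSET is 8, then
   in_arg_offset is 8 and every use of virtual_incoming_args_rtx is
   rewritten below as the arg pointer plus 8.  */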
2492
2493 static int in_arg_offset;
2494 static int var_offset;
2495 static int dynamic_offset;
2496 static int out_arg_offset;
2497
2498 /* In most machines, the stack pointer register is equivalent to the bottom
2499 of the stack. */
2500
2501 #ifndef STACK_POINTER_OFFSET
2502 #define STACK_POINTER_OFFSET 0
2503 #endif
2504
2505 /* If not defined, pick an appropriate default for the offset of dynamically
2506 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2507 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2508
2509 #ifndef STACK_DYNAMIC_OFFSET
2510
2511 #ifdef ACCUMULATE_OUTGOING_ARGS
2512 /* The bottom of the stack points to the actual arguments. If
2513 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2514 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2515 stack space for register parameters is not pushed by the caller, but
2516 rather part of the fixed stack areas and hence not included in
2517 `current_function_outgoing_args_size'. Nevertheless, we must allow
2518 for it when allocating stack dynamic objects. */
2519
2520 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2521 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2522 (current_function_outgoing_args_size \
2523 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2524
2525 #else
2526 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2527 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2528 #endif
2529
2530 #else
2531 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2532 #endif
2533 #endif
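
/* E.g. (an illustrative configuration): with ACCUMULATE_OUTGOING_ARGS,
   a REG_PARM_STACK_SPACE of 16, and 32 bytes of outgoing arguments,
   dynamically allocated objects start 48 bytes (plus any
   STACK_POINTER_OFFSET) above the stack pointer.  */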
2534
2535 /* Pass through the INSNS of function FNDECL and convert virtual register
2536 references to hard register references. */
2537
2538 void
2539 instantiate_virtual_regs (fndecl, insns)
2540 tree fndecl;
2541 rtx insns;
2542 {
2543 rtx insn;
2544
2545 /* Compute the offsets to use for this function. */
2546 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2547 var_offset = STARTING_FRAME_OFFSET;
2548 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2549 out_arg_offset = STACK_POINTER_OFFSET;
2550
2551 /* Scan all variables and parameters of this function. For each that is
2552 in memory, instantiate all virtual registers if the result is a valid
2553 address. If not, we do it later. That will handle most uses of virtual
2554 regs on many machines. */
2555 instantiate_decls (fndecl, 1);
2556
2557 /* Initialize recognition, indicating that volatile is OK. */
2558 init_recog ();
2559
2560 /* Scan through all the insns, instantiating every virtual register still
2561 present. */
2562 for (insn = insns; insn; insn = NEXT_INSN (insn))
2563 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2564 || GET_CODE (insn) == CALL_INSN)
2565 {
2566 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2567 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2568 }
2569
2570 /* Now instantiate the remaining register equivalences for debugging info.
2571 These will not be valid addresses. */
2572 instantiate_decls (fndecl, 0);
2573
2574 /* Indicate that, from now on, assign_stack_local should use
2575 frame_pointer_rtx. */
2576 virtuals_instantiated = 1;
2577 }
2578
2579 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2580 all virtual registers in their DECL_RTL's.
2581
2582 If VALID_ONLY, do this only if the resulting address is still valid.
2583 Otherwise, always do it. */
2584
2585 static void
2586 instantiate_decls (fndecl, valid_only)
2587 tree fndecl;
2588 int valid_only;
2589 {
2590 tree decl;
2591
2592 if (DECL_SAVED_INSNS (fndecl))
2593 /* When compiling an inline function, the obstack used for
2594 rtl allocation is the maybepermanent_obstack. Calling
2595 `resume_temporary_allocation' switches us back to that
2596 obstack while we process this function's parameters. */
2597 resume_temporary_allocation ();
2598
2599 /* Process all parameters of the function. */
2600 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2601 {
2602 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2603 valid_only);
2604 instantiate_decl (DECL_INCOMING_RTL (decl),
2605 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2606 }
2607
2608 /* Now process all variables defined in the function or its subblocks. */
2609 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2610
2611 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2612 {
2613 /* Save all rtl allocated for this function by raising the
2614 high-water mark on the maybepermanent_obstack. */
2615 preserve_data ();
2616 /* All further rtl allocation is now done in the current_obstack. */
2617 rtl_in_current_obstack ();
2618 }
2619 }
2620
2621 /* Subroutine of instantiate_decls: Process all decls in the given
2622 BLOCK node and all its subblocks. */
2623
2624 static void
2625 instantiate_decls_1 (let, valid_only)
2626 tree let;
2627 int valid_only;
2628 {
2629 tree t;
2630
2631 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2632 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2633 valid_only);
2634
2635 /* Process all subblocks. */
2636 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2637 instantiate_decls_1 (t, valid_only);
2638 }
2639
2640 /* Subroutine of the preceding procedures: Given RTL representing a
2641 decl and the size of the object, do any instantiation required.
2642
2643 If VALID_ONLY is non-zero, it means that the RTL should only be
2644 changed if the new address is valid. */
2645
2646 static void
2647 instantiate_decl (x, size, valid_only)
2648 rtx x;
2649 int size;
2650 int valid_only;
2651 {
2652 enum machine_mode mode;
2653 rtx addr;
2654
2655 /* If this is not a MEM, no need to do anything. Similarly if the
2656 address is a constant or a register that is not a virtual register. */
2657
2658 if (x == 0 || GET_CODE (x) != MEM)
2659 return;
2660
2661 addr = XEXP (x, 0);
2662 if (CONSTANT_P (addr)
2663 || (GET_CODE (addr) == REG
2664 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2665 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2666 return;
2667
2668 /* If we should only do this if the address is valid, copy the address.
2669 We need to do this so we can undo any changes that might make the
2670 address invalid. This copy is unfortunate, but probably can't be
2671 avoided. */
2672
2673 if (valid_only)
2674 addr = copy_rtx (addr);
2675
2676 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2677
2678 if (! valid_only)
2679 return;
2680
2681 /* Now verify that the resulting address is valid for every integer or
2682 floating-point mode up to and including SIZE bytes long. We do this
2683 since the object might be accessed in any mode and frame addresses
2684 are shared. */
2685
2686 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2687 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2688 mode = GET_MODE_WIDER_MODE (mode))
2689 if (! memory_address_p (mode, addr))
2690 return;
2691
2692 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2693 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2694 mode = GET_MODE_WIDER_MODE (mode))
2695 if (! memory_address_p (mode, addr))
2696 return;
2697
2698 /* Otherwise, put back the address, now that we have updated it and we
2699 know it is valid. */
2700
2701 XEXP (x, 0) = addr;
2702 }
2703 \f
2704 /* Given a pointer to a piece of rtx and an optional pointer to the
2705 containing object, instantiate any virtual registers present in it.
2706
2707 If EXTRA_INSNS, we always do the replacement and generate
2708 any extra insns before OBJECT. If it is zero, we do nothing if the replacement
2709 is not valid.
2710
2711 Return 1 if we either had nothing to do or if we were able to do the
2712 needed replacement. Return 0 otherwise; we only return zero if
2713 EXTRA_INSNS is zero.
2714
2715 We first try some simple transformations to avoid the creation of extra
2716 pseudos. */
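
/* A sketch of the common PLUS case below: given
   (plus (reg virtual-incoming-args) (const_int 4)), we substitute the
   hard register and fold the offset, yielding
   (plus (reg arg-pointer) (const_int 4 + in_arg_offset)).  If the
   combined constant makes the insn unrecognizable and EXTRA_INSNS is
   nonzero, the sum is instead computed into a new pseudo before
   OBJECT.  */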
2717
2718 static int
2719 instantiate_virtual_regs_1 (loc, object, extra_insns)
2720 rtx *loc;
2721 rtx object;
2722 int extra_insns;
2723 {
2724 rtx x;
2725 RTX_CODE code;
2726 rtx new = 0;
2727 int offset;
2728 rtx temp;
2729 rtx seq;
2730 int i, j;
2731 char *fmt;
2732
2733 /* Re-start here to avoid recursion in common cases. */
2734 restart:
2735
2736 x = *loc;
2737 if (x == 0)
2738 return 1;
2739
2740 code = GET_CODE (x);
2741
2742 /* Check for some special cases. */
2743 switch (code)
2744 {
2745 case CONST_INT:
2746 case CONST_DOUBLE:
2747 case CONST:
2748 case SYMBOL_REF:
2749 case CODE_LABEL:
2750 case PC:
2751 case CC0:
2752 case ASM_INPUT:
2753 case ADDR_VEC:
2754 case ADDR_DIFF_VEC:
2755 case RETURN:
2756 return 1;
2757
2758 case SET:
2759 /* We are allowed to set the virtual registers. This means
2760 that the actual register should receive the source minus the
2761 appropriate offset. This is used, for example, in the handling
2762 of non-local gotos. */
2763 if (SET_DEST (x) == virtual_incoming_args_rtx)
2764 new = arg_pointer_rtx, offset = - in_arg_offset;
2765 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2766 new = frame_pointer_rtx, offset = - var_offset;
2767 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2768 new = stack_pointer_rtx, offset = - dynamic_offset;
2769 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2770 new = stack_pointer_rtx, offset = - out_arg_offset;
2771
2772 if (new)
2773 {
2774 /* The only valid sources here are PLUS or REG. Just do
2775 the simplest possible thing to handle them. */
2776 if (GET_CODE (SET_SRC (x)) != REG
2777 && GET_CODE (SET_SRC (x)) != PLUS)
2778 abort ();
2779
2780 start_sequence ();
2781 if (GET_CODE (SET_SRC (x)) != REG)
2782 temp = force_operand (SET_SRC (x), NULL_RTX);
2783 else
2784 temp = SET_SRC (x);
2785 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2786 seq = get_insns ();
2787 end_sequence ();
2788
2789 emit_insns_before (seq, object);
2790 SET_DEST (x) = new;
2791
2792 if (!validate_change (object, &SET_SRC (x), temp, 0)
2793 || ! extra_insns)
2794 abort ();
2795
2796 return 1;
2797 }
2798
2799 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2800 loc = &SET_SRC (x);
2801 goto restart;
2802
2803 case PLUS:
2804 /* Handle special case of virtual register plus constant. */
2805 if (CONSTANT_P (XEXP (x, 1)))
2806 {
2807 rtx old, new_offset;
2808
2809 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2810 if (GET_CODE (XEXP (x, 0)) == PLUS)
2811 {
2812 rtx inner = XEXP (XEXP (x, 0), 0);
2813
2814 if (inner == virtual_incoming_args_rtx)
2815 new = arg_pointer_rtx, offset = in_arg_offset;
2816 else if (inner == virtual_stack_vars_rtx)
2817 new = frame_pointer_rtx, offset = var_offset;
2818 else if (inner == virtual_stack_dynamic_rtx)
2819 new = stack_pointer_rtx, offset = dynamic_offset;
2820 else if (inner == virtual_outgoing_args_rtx)
2821 new = stack_pointer_rtx, offset = out_arg_offset;
2822 else
2823 {
2824 loc = &XEXP (x, 0);
2825 goto restart;
2826 }
2827
2828 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2829 extra_insns);
2830 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2831 }
2832
2833 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2834 new = arg_pointer_rtx, offset = in_arg_offset;
2835 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2836 new = frame_pointer_rtx, offset = var_offset;
2837 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2838 new = stack_pointer_rtx, offset = dynamic_offset;
2839 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2840 new = stack_pointer_rtx, offset = out_arg_offset;
2841 else
2842 {
2843 /* We know the second operand is a constant. Unless the
2844 first operand is a REG (which has already been checked),
2845 it needs to be checked. */
2846 if (GET_CODE (XEXP (x, 0)) != REG)
2847 {
2848 loc = &XEXP (x, 0);
2849 goto restart;
2850 }
2851 return 1;
2852 }
2853
2854 new_offset = plus_constant (XEXP (x, 1), offset);
2855
2856 /* If the new constant is zero, try to replace the sum with just
2857 the register. */
2858 if (new_offset == const0_rtx
2859 && validate_change (object, loc, new, 0))
2860 return 1;
2861
2862 /* Next try to replace the register and new offset.
2863 There are two changes to validate here and we can't assume that
2864 in the case where the old offset equals the new, just changing the
2865 register will yield a valid insn. In the interests of a little
2866 efficiency, however, we only call validate_change once (we don't
2867 queue up the changes and then call apply_change_group). */
2868
2869 old = XEXP (x, 0);
2870 if (offset == 0
2871 ? ! validate_change (object, &XEXP (x, 0), new, 0)
2872 : (XEXP (x, 0) = new,
2873 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
2874 {
2875 if (! extra_insns)
2876 {
2877 XEXP (x, 0) = old;
2878 return 0;
2879 }
2880
2881 /* Otherwise copy the new constant into a register and replace
2882 the constant with that register. */
2883 temp = gen_reg_rtx (Pmode);
2884 XEXP (x, 0) = new;
2885 if (validate_change (object, &XEXP (x, 1), temp, 0))
2886 emit_insn_before (gen_move_insn (temp, new_offset), object);
2887 else
2888 {
2889 /* If that didn't work, replace this expression with a
2890 register containing the sum. */
2891
2892 XEXP (x, 0) = old;
2893 new = gen_rtx (PLUS, Pmode, new, new_offset);
2894
2895 start_sequence ();
2896 temp = force_operand (new, NULL_RTX);
2897 seq = get_insns ();
2898 end_sequence ();
2899
2900 emit_insns_before (seq, object);
2901 if (! validate_change (object, loc, temp, 0)
2902 && ! validate_replace_rtx (x, temp, object))
2903 abort ();
2904 }
2905 }
2906
2907 return 1;
2908 }
2909
2910 /* Fall through to generic two-operand expression case. */
2911 case EXPR_LIST:
2912 case CALL:
2913 case COMPARE:
2914 case MINUS:
2915 case MULT:
2916 case DIV: case UDIV:
2917 case MOD: case UMOD:
2918 case AND: case IOR: case XOR:
2919 case ROTATERT: case ROTATE:
2920 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2921 case NE: case EQ:
2922 case GE: case GT: case GEU: case GTU:
2923 case LE: case LT: case LEU: case LTU:
2924 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2925 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2926 loc = &XEXP (x, 0);
2927 goto restart;
2928
2929 case MEM:
2930 /* Most cases of MEM that convert to valid addresses have already been
2931 handled by our scan of regno_reg_rtx. The only special handling we
2932 need here is to make a copy of the rtx to ensure it isn't being
2933 shared if we have to change it to a pseudo.
2934
2935 If the rtx is a simple reference to an address via a virtual register,
2936 it can potentially be shared. In such cases, first try to make it
2937 a valid address, which can also be shared. Otherwise, copy it and
2938 proceed normally.
2939
2940 First check for common cases that need no processing. These are
2941 usually due to instantiation already being done on a previous instance
2942 of a shared rtx. */
2943
2944 temp = XEXP (x, 0);
2945 if (CONSTANT_ADDRESS_P (temp)
2946 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2947 || temp == arg_pointer_rtx
2948 #endif
2949 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2950 || temp == hard_frame_pointer_rtx
2951 #endif
2952 || temp == frame_pointer_rtx)
2953 return 1;
2954
2955 if (GET_CODE (temp) == PLUS
2956 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2957 && (XEXP (temp, 0) == frame_pointer_rtx
2958 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2959 || XEXP (temp, 0) == hard_frame_pointer_rtx
2960 #endif
2961 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2962 || XEXP (temp, 0) == arg_pointer_rtx
2963 #endif
2964 ))
2965 return 1;
2966
2967 if (temp == virtual_stack_vars_rtx
2968 || temp == virtual_incoming_args_rtx
2969 || (GET_CODE (temp) == PLUS
2970 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2971 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2972 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2973 {
2974 /* This MEM may be shared. If the substitution can be done without
2975 the need to generate new pseudos, we want to do it in place
2976 so all copies of the shared rtx benefit. The call below will
2977 only make substitutions if the resulting address is still
2978 valid.
2979
2980 Note that we cannot pass X as the object in the recursive call
2981 since the insn being processed may not allow all valid
2982 addresses. However, if we were not passed an object, we can
2983 only modify X without copying it if X will have a valid
2984 address.
2985
2986 ??? Also note that this can still lose if OBJECT is an insn that
2987 has fewer restrictions on an address than some other insn.
2988 In that case, we will modify the shared address. This case
2989 doesn't seem very likely, though. */
2990
2991 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2992 object ? object : x, 0))
2993 return 1;
2994
2995 /* Otherwise make a copy and process that copy. We copy the entire
2996 RTL expression since it might be a PLUS which could also be
2997 shared. */
2998 *loc = x = copy_rtx (x);
2999 }
3000
3001 /* Fall through to generic unary operation case. */
3002 case USE:
3003 case CLOBBER:
3004 case SUBREG:
3005 case STRICT_LOW_PART:
3006 case NEG: case NOT:
3007 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3008 case SIGN_EXTEND: case ZERO_EXTEND:
3009 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3010 case FLOAT: case FIX:
3011 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3012 case ABS:
3013 case SQRT:
3014 case FFS:
3015 /* These cases either have just one operand or we know that we need not
3016 check the rest of the operands. */
3017 loc = &XEXP (x, 0);
3018 goto restart;
3019
3020 case REG:
3021 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3022 in front of this insn and substitute the temporary. */
3023 if (x == virtual_incoming_args_rtx)
3024 new = arg_pointer_rtx, offset = in_arg_offset;
3025 else if (x == virtual_stack_vars_rtx)
3026 new = frame_pointer_rtx, offset = var_offset;
3027 else if (x == virtual_stack_dynamic_rtx)
3028 new = stack_pointer_rtx, offset = dynamic_offset;
3029 else if (x == virtual_outgoing_args_rtx)
3030 new = stack_pointer_rtx, offset = out_arg_offset;
3031
3032 if (new)
3033 {
3034 temp = plus_constant (new, offset);
3035 if (!validate_change (object, loc, temp, 0))
3036 {
3037 if (! extra_insns)
3038 return 0;
3039
3040 start_sequence ();
3041 temp = force_operand (temp, NULL_RTX);
3042 seq = get_insns ();
3043 end_sequence ();
3044
3045 emit_insns_before (seq, object);
3046 if (! validate_change (object, loc, temp, 0)
3047 && ! validate_replace_rtx (x, temp, object))
3048 abort ();
3049 }
3050 }
3051
3052 return 1;
3053 }
3054
3055 /* Scan all subexpressions. */
3056 fmt = GET_RTX_FORMAT (code);
3057 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3058 if (*fmt == 'e')
3059 {
3060 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3061 return 0;
3062 }
3063 else if (*fmt == 'E')
3064 for (j = 0; j < XVECLEN (x, i); j++)
3065 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3066 extra_insns))
3067 return 0;
3068
3069 return 1;
3070 }
3071 \f
3072 /* Optimization: assuming this function does not receive nonlocal gotos,
3073 delete the handlers for such, as well as the insns to establish
3074 and disestablish them. */
3075
3076 static void
3077 delete_handlers ()
3078 {
3079 rtx insn;
3080 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3081 {
3082 /* Delete the handler by turning off the flag that would
3083 prevent jump_optimize from deleting it.
3084 Also permit deletion of the nonlocal labels themselves
3085 if nothing local refers to them. */
3086 if (GET_CODE (insn) == CODE_LABEL)
3087 {
3088 tree t, last_t;
3089
3090 LABEL_PRESERVE_P (insn) = 0;
3091
3092 /* Remove it from the nonlocal_label list, to avoid confusing
3093 flow. */
3094 for (t = nonlocal_labels, last_t = 0; t;
3095 last_t = t, t = TREE_CHAIN (t))
3096 if (DECL_RTL (TREE_VALUE (t)) == insn)
3097 break;
3098 if (t)
3099 {
3100 if (! last_t)
3101 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3102 else
3103 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3104 }
3105 }
3106 if (GET_CODE (insn) == INSN
3107 && ((nonlocal_goto_handler_slot != 0
3108 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3109 || (nonlocal_goto_stack_level != 0
3110 && reg_mentioned_p (nonlocal_goto_stack_level,
3111 PATTERN (insn)))))
3112 delete_insn (insn);
3113 }
3114 }
3115
3116 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3117 of the current function. */
3118
3119 rtx
3120 nonlocal_label_rtx_list ()
3121 {
3122 tree t;
3123 rtx x = 0;
3124
3125 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3126 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3127
3128 return x;
3129 }
3130 \f
3131 /* Output a USE for any register use in RTL.
3132 This is used with -noreg to mark the extent of lifespan
3133 of any registers used in a user-visible variable's DECL_RTL. */
3134
3135 void
3136 use_variable (rtl)
3137 rtx rtl;
3138 {
3139 if (GET_CODE (rtl) == REG)
3140 /* This is a register variable. */
3141 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3142 else if (GET_CODE (rtl) == MEM
3143 && GET_CODE (XEXP (rtl, 0)) == REG
3144 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3145 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3146 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3147 /* This is a variable-sized structure. */
3148 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3149 }
3150
3151 /* Like use_variable except that it outputs the USEs after INSN
3152 instead of at the end of the insn-chain. */
3153
3154 void
3155 use_variable_after (rtl, insn)
3156 rtx rtl, insn;
3157 {
3158 if (GET_CODE (rtl) == REG)
3159 /* This is a register variable. */
3160 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3161 else if (GET_CODE (rtl) == MEM
3162 && GET_CODE (XEXP (rtl, 0)) == REG
3163 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3164 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3165 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3166 /* This is a variable-sized structure. */
3167 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3168 }
3169 \f
3170 int
3171 max_parm_reg_num ()
3172 {
3173 return max_parm_reg;
3174 }
3175
3176 /* Return the first insn following those generated by `assign_parms'. */
3177
3178 rtx
3179 get_first_nonparm_insn ()
3180 {
3181 if (last_parm_insn)
3182 return NEXT_INSN (last_parm_insn);
3183 return get_insns ();
3184 }
3185
3186 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3187 Crash if there is none. */
3188
3189 rtx
3190 get_first_block_beg ()
3191 {
3192 register rtx searcher;
3193 register rtx insn = get_first_nonparm_insn ();
3194
3195 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3196 if (GET_CODE (searcher) == NOTE
3197 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3198 return searcher;
3199
3200 abort (); /* Invalid call to this function. (See comments above.) */
3201 return NULL_RTX;
3202 }
3203
3204 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3205 This means a type for which function calls must pass an address to the
3206 function or get an address back from the function.
3207 EXP may be a type node or an expression (whose type is tested). */
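
/* For example, with -fpcc-struct-return every structure or union value
   is returned in memory; and on most targets RETURN_IN_MEMORY is true
   for a large aggregate such as struct S { char c[64]; }, so this
   function would return 1 for it.  */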
3208
3209 int
3210 aggregate_value_p (exp)
3211 tree exp;
3212 {
3213 int i, regno, nregs;
3214 rtx reg;
3215 tree type;
3216 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3217 type = exp;
3218 else
3219 type = TREE_TYPE (exp);
3220
3221 if (RETURN_IN_MEMORY (type))
3222 return 1;
3223 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3224 and thus can't be returned in registers. */
3225 if (TREE_ADDRESSABLE (type))
3226 return 1;
3227 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3228 return 1;
3229 /* Make sure we have suitable call-clobbered regs to return
3230 the value in; if not, we must return it in memory. */
3231 reg = hard_function_value (type, 0);
3232 regno = REGNO (reg);
3233 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3234 for (i = 0; i < nregs; i++)
3235 if (! call_used_regs[regno + i])
3236 return 1;
3237 return 0;
3238 }
3239 \f
3240 /* Assign RTL expressions to the function's parameters.
3241 This may involve copying them into registers and using
3242 those registers as the RTL for them.
3243
3244 If SECOND_TIME is non-zero it means that this function is being
3245 called a second time. This is done by integrate.c when a function's
3246 compilation is deferred. We need to come back here in case the
3247 FUNCTION_ARG macro computes items needed for the rest of the compilation
3248 (such as changing which registers are fixed or caller-saved). But suppress
3249 writing any insns or setting DECL_RTL of anything in this case. */
3250
3251 void
3252 assign_parms (fndecl, second_time)
3253 tree fndecl;
3254 int second_time;
3255 {
3256 register tree parm;
3257 register rtx entry_parm = 0;
3258 register rtx stack_parm = 0;
3259 CUMULATIVE_ARGS args_so_far;
3260 enum machine_mode promoted_mode, passed_mode;
3261 enum machine_mode nominal_mode, promoted_nominal_mode;
3262 int unsignedp;
3263 /* Total space needed so far for args on the stack,
3264 given as a constant and a tree-expression. */
3265 struct args_size stack_args_size;
3266 tree fntype = TREE_TYPE (fndecl);
3267 tree fnargs = DECL_ARGUMENTS (fndecl);
3268 /* This is used for the arg pointer when referring to stack args. */
3269 rtx internal_arg_pointer;
3270 /* This is a dummy PARM_DECL that we use for the function result if
3271 the function returns a structure. */
3272 tree function_result_decl = 0;
3273 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
3274 int varargs_setup = 0;
3275 rtx conversion_insns = 0;
3276
3277 /* Nonzero if the last arg is named `__builtin_va_alist',
3278 which is used on some machines for old-fashioned non-ANSI varargs.h;
3279 this should be stuck onto the stack as if it had arrived there. */
3280 int hide_last_arg
3281 = (current_function_varargs
3282 && fnargs
3283 && (parm = tree_last (fnargs)) != 0
3284 && DECL_NAME (parm)
3285 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3286 "__builtin_va_alist")));
3287
3288 /* Nonzero if function takes extra anonymous args.
3289 This means the last named arg must be on the stack
3290 right before the anonymous ones. */
3291 int stdarg
3292 = (TYPE_ARG_TYPES (fntype) != 0
3293 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3294 != void_type_node));
3295
3296 current_function_stdarg = stdarg;
3297
3298 /* If the reg that the virtual arg pointer will be translated into is
3299 not a fixed reg or is the stack pointer, make a copy of the virtual
3300 arg pointer, and address parms via the copy. The frame pointer is
3301 considered fixed even though it is not marked as such.
3302
3303 The second time through, simply use ap to avoid generating rtx. */
3304
3305 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3306 || ! (fixed_regs[ARG_POINTER_REGNUM]
3307 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3308 && ! second_time)
3309 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3310 else
3311 internal_arg_pointer = virtual_incoming_args_rtx;
3312 current_function_internal_arg_pointer = internal_arg_pointer;
3313
3314 stack_args_size.constant = 0;
3315 stack_args_size.var = 0;
3316
3317 /* If struct value address is treated as the first argument, make it so. */
3318 if (aggregate_value_p (DECL_RESULT (fndecl))
3319 && ! current_function_returns_pcc_struct
3320 && struct_value_incoming_rtx == 0)
3321 {
3322 tree type = build_pointer_type (TREE_TYPE (fntype));
3323
3324 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3325
3326 DECL_ARG_TYPE (function_result_decl) = type;
3327 TREE_CHAIN (function_result_decl) = fnargs;
3328 fnargs = function_result_decl;
3329 }
3330
3331 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
3332 bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));
3333
3334 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3335 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3336 #else
3337 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3338 #endif
3339
3340 /* We haven't yet found an argument that we must push and pretend the
3341 caller did. */
3342 current_function_pretend_args_size = 0;
3343
3344 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3345 {
3346 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3347 struct args_size stack_offset;
3348 struct args_size arg_size;
3349 int passed_pointer = 0;
3350 int did_conversion = 0;
3351 tree passed_type = DECL_ARG_TYPE (parm);
3352 tree nominal_type = TREE_TYPE (parm);
3353
3354 /* Set LAST_NAMED if this is the last named arg before some
3355 anonymous args. We treat it as if it were anonymous too. */
3356 int last_named = ((TREE_CHAIN (parm) == 0
3357 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3358 && (stdarg || current_function_varargs));
3359
3360 if (TREE_TYPE (parm) == error_mark_node
3361 /* This can happen after weird syntax errors
3362 or if an enum type is defined among the parms. */
3363 || TREE_CODE (parm) != PARM_DECL
3364 || passed_type == NULL)
3365 {
3366 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3367 const0_rtx);
3368 TREE_USED (parm) = 1;
3369 continue;
3370 }
3371
3372 /* For a varargs.h function, save info about regs and stack space
3373 used by the individual args, not including the va_alist arg. */
3374 if (hide_last_arg && last_named)
3375 current_function_args_info = args_so_far;
3376
3377 /* Find mode of arg as it is passed, and mode of arg
3378 as it should be during execution of this function. */
3379 passed_mode = TYPE_MODE (passed_type);
3380 nominal_mode = TYPE_MODE (nominal_type);
3381
3382 /* If the parm's mode is VOID, its value doesn't matter,
3383 so avoid the usual things like emit_move_insn, which could crash. */
3384 if (nominal_mode == VOIDmode)
3385 {
3386 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3387 continue;
3388 }
3389
3390 /* If the parm is to be passed as a transparent union, use the
3391 type of the first field for the tests below. We have already
3392 verified that the modes are the same. */
3393 if (DECL_TRANSPARENT_UNION (parm)
3394 || TYPE_TRANSPARENT_UNION (passed_type))
3395 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3396
3397 /* See if this arg was passed by invisible reference. It is if
3398 it is an object whose size depends on the contents of the
3399 object itself or if the machine requires these objects be passed
3400 that way. */
3401
3402 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3403 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3404 || TREE_ADDRESSABLE (passed_type)
3405 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3406 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3407 passed_type, ! last_named)
3408 #endif
3409 )
3410 {
3411 passed_type = nominal_type = build_pointer_type (passed_type);
3412 passed_pointer = 1;
3413 passed_mode = nominal_mode = Pmode;
3414 }
3415
3416 promoted_mode = passed_mode;
3417
3418 #ifdef PROMOTE_FUNCTION_ARGS
3419 /* Compute the mode to which the arg is actually extended. */
3420 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3421 #endif
3422
3423 /* Let machine desc say which reg (if any) the parm arrives in.
3424 0 means it arrives on the stack. */
3425 #ifdef FUNCTION_INCOMING_ARG
3426 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3427 passed_type, ! last_named);
3428 #else
3429 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3430 passed_type, ! last_named);
3431 #endif
3432
3433 if (entry_parm == 0)
3434 promoted_mode = passed_mode;
3435
3436 #ifdef SETUP_INCOMING_VARARGS
3437 /* If this is the last named parameter, do any required setup for
3438 varargs or stdargs. We need to know about the case of this being an
3439 addressable type, in which case we skip the registers it
3440 would have arrived in.
3441
3442 For stdargs, LAST_NAMED will be set for two parameters, the one that
3443 is actually the last named, and the dummy parameter. We only
3444 want to do this action once.
3445
3446 Also, indicate when RTL generation is to be suppressed. */
3447 if (last_named && !varargs_setup)
3448 {
3449 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3450 current_function_pretend_args_size,
3451 second_time);
3452 varargs_setup = 1;
3453 }
3454 #endif
3455
3456 /* Determine parm's home in the stack,
3457 in case it arrives in the stack or we should pretend it did.
3458
3459 Compute the stack position and rtx where the argument arrives
3460 and its size.
3461
3462 There is one complexity here: If this was a parameter that would
3463 have been passed in registers, but wasn't, only because it is
3464 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3465 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3466 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3467 0 as it was the previous time. */
3468
3469 locate_and_pad_parm (promoted_mode, passed_type,
3470 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3471 1,
3472 #else
3473 #ifdef FUNCTION_INCOMING_ARG
3474 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3475 passed_type,
3476 (! last_named
3477 || varargs_setup)) != 0,
3478 #else
3479 FUNCTION_ARG (args_so_far, promoted_mode,
3480 passed_type,
3481 ! last_named || varargs_setup) != 0,
3482 #endif
3483 #endif
3484 fndecl, &stack_args_size, &stack_offset, &arg_size);
3485
3486 if (! second_time)
3487 {
3488 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3489
3490 if (offset_rtx == const0_rtx)
3491 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3492 else
3493 stack_parm = gen_rtx (MEM, promoted_mode,
3494 gen_rtx (PLUS, Pmode,
3495 internal_arg_pointer, offset_rtx));
3496
3497 /* If this is a memory ref that contains aggregate components,
3498 mark it as such for cse and loop optimize. Likewise if it
3499 is readonly. */
3500 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3501 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3502 }
3503
3504 /* If this parameter was passed both in registers and in the stack,
3505 use the copy on the stack. */
3506 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3507 entry_parm = 0;
3508
3509 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3510 /* If this parm was passed part in regs and part in memory,
3511 pretend it arrived entirely in memory
3512 by pushing the register-part onto the stack.
3513
3514 In the special case of a DImode or DFmode that is split,
3515 we could put it together in a pseudoreg directly,
3516 but for now that's not worth bothering with. */
3517
3518 if (entry_parm)
3519 {
3520 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3521 passed_type, ! last_named);
3522
3523 if (nregs > 0)
3524 {
3525 current_function_pretend_args_size
3526 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3527 / (PARM_BOUNDARY / BITS_PER_UNIT)
3528 * (PARM_BOUNDARY / BITS_PER_UNIT));
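/* A worked illustration (target values hypothetical): with
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64, three partial regs
   occupy 3 * 4 = 12 bytes, and the expression above rounds that up
   to the next multiple of 64 / 8 = 8 bytes:
   ((12 + 8 - 1) / 8) * 8 == 16.  */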
3529
3530 if (! second_time)
3531 {
3532 /* Handle calls that pass values in multiple non-contiguous
3533 locations. The Irix 6 ABI has examples of this. */
3534 if (GET_CODE (entry_parm) == PARALLEL)
3535 emit_group_store (validize_mem (stack_parm),
3536 entry_parm);
3537 else
3538 move_block_from_reg (REGNO (entry_parm),
3539 validize_mem (stack_parm), nregs,
3540 int_size_in_bytes (TREE_TYPE (parm)));
3541 }
3542 entry_parm = stack_parm;
3543 }
3544 }
3545 #endif
3546
3547 /* If we didn't decide this parm came in a register,
3548 by default it came on the stack. */
3549 if (entry_parm == 0)
3550 entry_parm = stack_parm;
3551
3552 /* Record permanently how this parm was passed. */
3553 if (! second_time)
3554 DECL_INCOMING_RTL (parm) = entry_parm;
3555
3556 /* If there is actually space on the stack for this parm,
3557 count it in stack_args_size; otherwise set stack_parm to 0
3558 to indicate there is no preallocated stack slot for the parm. */
3559
3560 if (entry_parm == stack_parm
3561 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3562 /* On some machines, even if a parm value arrives in a register
3563 there is still an (uninitialized) stack slot allocated for it.
3564
3565 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3566 whether this parameter already has a stack slot allocated,
3567 because an arg block exists only if current_function_args_size
3568 is larger than some threshold, and we haven't calculated that
3569 yet. So, for now, we just assume that stack slots never exist
3570 in this case. */
3571 || REG_PARM_STACK_SPACE (fndecl) > 0
3572 #endif
3573 )
3574 {
3575 stack_args_size.constant += arg_size.constant;
3576 if (arg_size.var)
3577 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3578 }
3579 else
3580 /* No stack slot was pushed for this parm. */
3581 stack_parm = 0;
3582
3583 /* Update info on where next arg arrives in registers. */
3584
3585 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3586 passed_type, ! last_named);
3587
3588 /* If this is our second time through, we are done with this parm. */
3589 if (second_time)
3590 continue;
3591
3592 /* If we can't trust the parm stack slot to be aligned enough
3593 for its ultimate type, don't use that slot after entry.
3594 We'll make another stack slot, if we need one. */
3595 {
3596 int thisparm_boundary
3597 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3598
3599 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3600 stack_parm = 0;
3601 }
3602
3603 /* If parm was passed in memory, and we need to convert it on entry,
3604 don't store it back in that same slot. */
3605 if (entry_parm != 0
3606 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3607 stack_parm = 0;
3608
3609 #if 0
3610 /* Now adjust STACK_PARM to the mode and precise location
3611 where this parameter should live during execution,
3612 if we discover that it must live in the stack during execution.
3613 To make debuggers happier on big-endian machines, we store
3614 the value in the last bytes of the space available. */
3615
3616 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3617 && stack_parm != 0)
3618 {
3619 rtx offset_rtx;
3620
3621 if (BYTES_BIG_ENDIAN
3622 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3623 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3624 - GET_MODE_SIZE (nominal_mode));
3625
3626 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3627 if (offset_rtx == const0_rtx)
3628 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3629 else
3630 stack_parm = gen_rtx (MEM, nominal_mode,
3631 gen_rtx (PLUS, Pmode,
3632 internal_arg_pointer, offset_rtx));
3633
3634 /* If this is a memory ref that contains aggregate components,
3635 mark it as such for cse and loop optimize. */
3636 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3637 }
3638 #endif /* 0 */
3639
3640 #ifdef STACK_REGS
3641 /* We need this "use" info, because the gcc-register->stack-register
3642 converter in reg-stack.c needs to know which registers are active
3643 at the start of the function. The actual parameter loading
3644 instructions are not always available by then, since they might
3645 have been optimized away. */
3646
3647 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3648 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
3649 #endif
3650
3651 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3652 in the mode in which it arrives.
3653 STACK_PARM is an RTX for a stack slot where the parameter can live
3654 during the function (in case we want to put it there).
3655 STACK_PARM is 0 if no stack slot was pushed for it.
3656
3657 Now output code if necessary to convert ENTRY_PARM to
3658 the type in which this function declares it,
3659 and store that result in an appropriate place,
3660 which may be a pseudo reg, may be STACK_PARM,
3661 or may be a local stack slot if STACK_PARM is 0.
3662
3663 Set DECL_RTL to that place. */
3664
3665 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
3666 {
3667 /* If a BLKmode arrives in registers, copy it to a stack slot.
3668 Handle calls that pass values in multiple non-contiguous
3669 locations. The Irix 6 ABI has examples of this. */
3670 if (GET_CODE (entry_parm) == REG
3671 || GET_CODE (entry_parm) == PARALLEL)
3672 {
3673 int size_stored
3674 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3675 UNITS_PER_WORD);
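/* For instance (sizes hypothetical): a 10-byte BLKmode struct on a
   target with UNITS_PER_WORD == 4 gives
   CEIL_ROUND (10, 4) == (10 + 3) & ~3 == 12,
   so three full words are stored.  */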
3676
3677 /* Note that we will be storing an integral number of words.
3678 So we have to be careful to ensure that we allocate an
3679 integral number of words. We do this below in the
3680 assign_stack_local if space was not allocated in the argument
3681 list. If it was, this will not work if PARM_BOUNDARY is not
3682 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3683 if it becomes a problem. */
3684
3685 if (stack_parm == 0)
3686 {
3687 stack_parm
3688 = assign_stack_local (GET_MODE (entry_parm),
3689 size_stored, 0);
3690
3691 /* If this is a memory ref that contains aggregate
3692 components, mark it as such for cse and loop optimize. */
3693 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3694 }
3695
3696 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3697 abort ();
3698
3699 if (TREE_READONLY (parm))
3700 RTX_UNCHANGING_P (stack_parm) = 1;
3701
3702 /* Handle calls that pass values in multiple non-contiguous
3703 locations. The Irix 6 ABI has examples of this. */
3704 if (GET_CODE (entry_parm) == PARALLEL)
3705 emit_group_store (validize_mem (stack_parm), entry_parm);
3706 else
3707 move_block_from_reg (REGNO (entry_parm),
3708 validize_mem (stack_parm),
3709 size_stored / UNITS_PER_WORD,
3710 int_size_in_bytes (TREE_TYPE (parm)));
3711 }
3712 DECL_RTL (parm) = stack_parm;
3713 }
3714 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3715 && ! DECL_INLINE (fndecl))
3716 /* layout_decl may set this. */
3717 || TREE_ADDRESSABLE (parm)
3718 || TREE_SIDE_EFFECTS (parm)
3719 /* If -ffloat-store specified, don't put explicit
3720 float variables into registers. */
3721 || (flag_float_store
3722 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3723 /* Always assign pseudo to structure return or item passed
3724 by invisible reference. */
3725 || passed_pointer || parm == function_result_decl)
3726 {
3727 /* Store the parm in a pseudoregister during the function, but we
3728 may need to do it in a wider mode. */
3729
3730 register rtx parmreg;
3731 int regno, regnoi, regnor;
3732
3733 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3734
3735 promoted_nominal_mode
3736 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
3737
3738 parmreg = gen_reg_rtx (promoted_nominal_mode);
3739 REG_USERVAR_P (parmreg) = 1;
3740
3741 /* If this was an item that we received a pointer to, set DECL_RTL
3742 appropriately. */
3743 if (passed_pointer)
3744 {
3745 DECL_RTL (parm)
3746 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3747 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3748 }
3749 else
3750 DECL_RTL (parm) = parmreg;
3751
3752 /* Copy the value into the register. */
3753 if (nominal_mode != passed_mode
3754 || promoted_nominal_mode != promoted_mode)
3755 {
3756 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3757 mode, by the caller. We now have to convert it to
3758 NOMINAL_MODE, if different. However, PARMREG may be in
3759 a different mode from NOMINAL_MODE if it is being stored
3760 promoted.
3761
3762 If ENTRY_PARM is a hard register, it might be in a register
3763 not valid for operating in its mode (e.g., an odd-numbered
3764 register for a DFmode). In that case, moves are the only
3765 thing valid, so we can't do a convert from there. This
3766 occurs when the calling sequence allows such misaligned
3767 usage.
3768
3769 In addition, the conversion may involve a call, which could
3770 clobber parameters which haven't been copied to pseudo
3771 registers yet. Therefore, we must first copy the parm to
3772 a pseudo reg here, and save the conversion until after all
3773 parameters have been moved. */
3774
3775 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3776
3777 emit_move_insn (tempreg, validize_mem (entry_parm));
3778
3779 push_to_sequence (conversion_insns);
3780 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
3781
3782 expand_assignment (parm,
3783 make_tree (nominal_type, tempreg), 0, 0);
3784 conversion_insns = get_insns ();
3785 did_conversion = 1;
3786 end_sequence ();
3787 }
3788 else
3789 emit_move_insn (parmreg, validize_mem (entry_parm));
3790
3791 /* If we were passed a pointer but the actual value
3792 can safely live in a register, put it in one. */
3793 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3794 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3795 && ! DECL_INLINE (fndecl))
3796 /* layout_decl may set this. */
3797 || TREE_ADDRESSABLE (parm)
3798 || TREE_SIDE_EFFECTS (parm)
3799 /* If -ffloat-store specified, don't put explicit
3800 float variables into registers. */
3801 || (flag_float_store
3802 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3803 {
3804 /* We can't use nominal_mode, because it will have been set to
3805 Pmode above. We must use the actual mode of the parm. */
3806 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3807 REG_USERVAR_P (parmreg) = 1;
3808 emit_move_insn (parmreg, DECL_RTL (parm));
3809 DECL_RTL (parm) = parmreg;
3810 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3811 now the parm. */
3812 stack_parm = 0;
3813 }
3814 #ifdef FUNCTION_ARG_CALLEE_COPIES
3815 /* If we are passed an arg by reference and it is our responsibility
3816 to make a copy, do it now.
3817 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3818 original argument, so we must recreate them in the call to
3819 FUNCTION_ARG_CALLEE_COPIES. */
3820 /* ??? Later, add code to detect when the argument isn't modified
3821 and skip the copy in that case. */
3822
3823 else if (passed_pointer
3824 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3825 TYPE_MODE (DECL_ARG_TYPE (parm)),
3826 DECL_ARG_TYPE (parm),
3827 ! last_named)
3828 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
3829 {
3830 rtx copy;
3831 tree type = DECL_ARG_TYPE (parm);
3832
3833 /* This sequence may involve a library call perhaps clobbering
3834 registers that haven't been copied to pseudos yet. */
3835
3836 push_to_sequence (conversion_insns);
3837
3838 if (TYPE_SIZE (type) == 0
3839 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3840 /* This is a variable sized object. */
3841 copy = gen_rtx (MEM, BLKmode,
3842 allocate_dynamic_stack_space
3843 (expr_size (parm), NULL_RTX,
3844 TYPE_ALIGN (type)));
3845 else
3846 copy = assign_stack_temp (TYPE_MODE (type),
3847 int_size_in_bytes (type), 1);
3848 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
3849
3850 store_expr (parm, copy, 0);
3851 emit_move_insn (parmreg, XEXP (copy, 0));
3852 conversion_insns = get_insns ();
3853 did_conversion = 1;
3854 end_sequence ();
3855 }
3856 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3857
3858 /* In any case, record the parm's desired stack location
3859 in case we later discover it must live in the stack.
3860
3861 If it is a COMPLEX value, store the stack location for both
3862 halves. */
3863
3864 if (GET_CODE (parmreg) == CONCAT)
3865 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
3866 else
3867 regno = REGNO (parmreg);
3868
3869 if (regno >= nparmregs)
3870 {
3871 rtx *new;
3872 int old_nparmregs = nparmregs;
3873
3874 nparmregs = regno + 5;
3875 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3876 bcopy ((char *) parm_reg_stack_loc, (char *) new,
3877 old_nparmregs * sizeof (rtx));
3878 bzero ((char *) (new + old_nparmregs),
3879 (nparmregs - old_nparmregs) * sizeof (rtx));
3880 parm_reg_stack_loc = new;
3881 }
3882
3883 if (GET_CODE (parmreg) == CONCAT)
3884 {
3885 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
3886
3887 regnor = REGNO (gen_realpart (submode, parmreg));
3888 regnoi = REGNO (gen_imagpart (submode, parmreg));
3889
3890 if (stack_parm != 0)
3891 {
3892 parm_reg_stack_loc[regnor]
3893 = gen_realpart (submode, stack_parm);
3894 parm_reg_stack_loc[regnoi]
3895 = gen_imagpart (submode, stack_parm);
3896 }
3897 else
3898 {
3899 parm_reg_stack_loc[regnor] = 0;
3900 parm_reg_stack_loc[regnoi] = 0;
3901 }
3902 }
3903 else
3904 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3905
3906 /* Mark the register as eliminable if we did no conversion
3907 and it was copied from memory at a fixed offset,
3908 and the arg pointer was not copied to a pseudo-reg.
3909 If the arg pointer is a pseudo reg or the offset formed
3910 an invalid address, such memory-equivalences
3911 as we make here would screw up life analysis for it. */
3912 if (nominal_mode == passed_mode
3913 && ! did_conversion
3914 && GET_CODE (entry_parm) == MEM
3915 && entry_parm == stack_parm
3916 && stack_offset.var == 0
3917 && reg_mentioned_p (virtual_incoming_args_rtx,
3918 XEXP (entry_parm, 0)))
3919 {
3920 rtx linsn = get_last_insn ();
3921 rtx sinsn, set;
3922
3923 /* Mark complex types separately. */
3924 if (GET_CODE (parmreg) == CONCAT)
3925 /* Scan backwards for the insns that set the real and
3926 imaginary parts. */
3927 for (sinsn = linsn; sinsn != 0;
3928 sinsn = prev_nonnote_insn (sinsn))
3929 {
3930 set = single_set (sinsn);
3931 if (set != 0
3932 && SET_DEST (set) == regno_reg_rtx [regnoi])
3933 REG_NOTES (sinsn)
3934 = gen_rtx (EXPR_LIST, REG_EQUIV,
3935 parm_reg_stack_loc[regnoi],
3936 REG_NOTES (sinsn));
3937 else if (set != 0
3938 && SET_DEST (set) == regno_reg_rtx [regnor])
3939 REG_NOTES (sinsn)
3940 = gen_rtx (EXPR_LIST, REG_EQUIV,
3941 parm_reg_stack_loc[regnor],
3942 REG_NOTES (sinsn));
3943 }
3944 else if ((set = single_set (linsn)) != 0
3945 && SET_DEST (set) == parmreg)
3946 REG_NOTES (linsn)
3947 = gen_rtx (EXPR_LIST, REG_EQUIV,
3948 entry_parm, REG_NOTES (linsn));
3949 }
3950
3951 /* For pointer data type, suggest pointer register. */
3952 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3953 mark_reg_pointer (parmreg,
3954 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
3955 / BITS_PER_UNIT));
3956 }
3957 else
3958 {
3959 /* Value must be stored in the stack slot STACK_PARM
3960 during function execution. */
3961
3962 if (promoted_mode != nominal_mode)
3963 {
3964 /* Conversion is required. */
3965 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3966
3967 emit_move_insn (tempreg, validize_mem (entry_parm));
3968
3969 push_to_sequence (conversion_insns);
3970 entry_parm = convert_to_mode (nominal_mode, tempreg,
3971 TREE_UNSIGNED (TREE_TYPE (parm)));
3972 conversion_insns = get_insns ();
3973 did_conversion = 1;
3974 end_sequence ();
3975 }
3976
3977 if (entry_parm != stack_parm)
3978 {
3979 if (stack_parm == 0)
3980 {
3981 stack_parm
3982 = assign_stack_local (GET_MODE (entry_parm),
3983 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3984 /* If this is a memory ref that contains aggregate components,
3985 mark it as such for cse and loop optimize. */
3986 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3987 }
3988
3989 if (promoted_mode != nominal_mode)
3990 {
3991 push_to_sequence (conversion_insns);
3992 emit_move_insn (validize_mem (stack_parm),
3993 validize_mem (entry_parm));
3994 conversion_insns = get_insns ();
3995 end_sequence ();
3996 }
3997 else
3998 emit_move_insn (validize_mem (stack_parm),
3999 validize_mem (entry_parm));
4000 }
4001
4002 DECL_RTL (parm) = stack_parm;
4003 }
4004
4005 /* If this "parameter" was the place where we are receiving the
4006 function's incoming structure pointer, set up the result. */
4007 if (parm == function_result_decl)
4008 {
4009 tree result = DECL_RESULT (fndecl);
4010 tree restype = TREE_TYPE (result);
4011
4012 DECL_RTL (result)
4013 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
4014
4015 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4016 }
4017
4018 if (TREE_THIS_VOLATILE (parm))
4019 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4020 if (TREE_READONLY (parm))
4021 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4022 }
4023
4024 /* Output all parameter conversion instructions (possibly including calls)
4025 now that all parameters have been copied out of hard registers. */
4026 emit_insns (conversion_insns);
4027
4028 max_parm_reg = max_reg_num ();
4029 last_parm_insn = get_last_insn ();
4030
4031 current_function_args_size = stack_args_size.constant;
4032
4033 /* Adjust function incoming argument size for alignment and
4034 minimum length. */
4035
4036 #ifdef REG_PARM_STACK_SPACE
4037 #ifndef MAYBE_REG_PARM_STACK_SPACE
4038 current_function_args_size = MAX (current_function_args_size,
4039 REG_PARM_STACK_SPACE (fndecl));
4040 #endif
4041 #endif
4042
4043 #ifdef STACK_BOUNDARY
4044 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4045
4046 current_function_args_size
4047 = ((current_function_args_size + STACK_BYTES - 1)
4048 / STACK_BYTES) * STACK_BYTES;
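/* E.g. (values hypothetical): with STACK_BOUNDARY == 64, STACK_BYTES
   is 8, and an args size of 20 bytes rounds up to
   ((20 + 8 - 1) / 8) * 8 == 24.  */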
4049 #endif
4050
4051 #ifdef ARGS_GROW_DOWNWARD
4052 current_function_arg_offset_rtx
4053 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4054 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4055 size_int (-stack_args_size.constant)),
4056 NULL_RTX, VOIDmode, 0));
4057 #else
4058 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4059 #endif
4060
4061 /* See how many bytes, if any, of its args a function should try to pop
4062 on return. */
4063
4064 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4065 current_function_args_size);
4066
4067 /* For a stdarg.h function, save info about
4068 regs and stack space used by the named args. */
4069
4070 if (!hide_last_arg)
4071 current_function_args_info = args_so_far;
4072
4073 /* Set the rtx used for the function return value. Put this in its
4074 own variable so any optimizers that need this information don't have
4075 to include tree.h. Do this here so it gets done when an inlined
4076 function gets output. */
4077
4078 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4079 }
4080 \f
4081 /* Indicate whether REGNO is an incoming argument to the current function
4082 that was promoted to a wider mode. If so, return the RTX for the
4083 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4084 that REGNO is promoted from and whether the promotion was signed or
4085 unsigned. */
4086
4087 #ifdef PROMOTE_FUNCTION_ARGS
4088
4089 rtx
4090 promoted_input_arg (regno, pmode, punsignedp)
4091 int regno;
4092 enum machine_mode *pmode;
4093 int *punsignedp;
4094 {
4095 tree arg;
4096
4097 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4098 arg = TREE_CHAIN (arg))
4099 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4100 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4101 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4102 {
4103 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4104 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4105
4106 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4107 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4108 && mode != DECL_MODE (arg))
4109 {
4110 *pmode = DECL_MODE (arg);
4111 *punsignedp = unsignedp;
4112 return DECL_INCOMING_RTL (arg);
4113 }
4114 }
4115
4116 return 0;
4117 }
4118
4119 #endif
4120 \f
4121 /* Compute the size and offset from the start of the stacked arguments for a
4122 parm passed in mode PASSED_MODE and with type TYPE.
4123
4124 INITIAL_OFFSET_PTR points to the current offset into the stacked
4125 arguments.
4126
4127 The starting offset and size for this parm are returned in *OFFSET_PTR
4128 and *ARG_SIZE_PTR, respectively.
4129
4130 IN_REGS is non-zero if the argument will be passed in registers. It will
4131 never be set if REG_PARM_STACK_SPACE is not defined.
4132
4133 FNDECL is the function in which the argument was defined.
4134
4135 There are two types of rounding that are done. The first, controlled by
4136 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4137 list to be aligned to the specific boundary (in bits). This rounding
4138 affects the initial and starting offsets, but not the argument size.
4139
4140 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4141 optionally rounds the size of the parm to PARM_BOUNDARY. The
4142 initial offset is not affected by this rounding, while the size always
4143 is and the starting offset may be. */
4144
4145 /* In the ARGS_GROW_DOWNWARD case, offset_ptr will be negative;
4146 initial_offset_ptr is positive because locate_and_pad_parm's
4147 callers pass in the total size of args so far as
4148 initial_offset_ptr. arg_size_ptr is always positive. */
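/* A sketch of the two roundings, with hypothetical values, assuming
   upward padding, args growing upward, and no PUSH_ROUNDING: for a
   2-byte parm with FUNCTION_ARG_BOUNDARY == 64 bits and
   PARM_BOUNDARY == 32 bits, an initial offset of 10 is first rounded
   up to 16 (the 8-byte boundary), and the size is then rounded up
   from 2 to 4 bytes, so *OFFSET_PTR becomes 16 and *ARG_SIZE_PTR 4.  */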
4149
4150 void
4151 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4152 initial_offset_ptr, offset_ptr, arg_size_ptr)
4153 enum machine_mode passed_mode;
4154 tree type;
4155 int in_regs;
4156 tree fndecl;
4157 struct args_size *initial_offset_ptr;
4158 struct args_size *offset_ptr;
4159 struct args_size *arg_size_ptr;
4160 {
4161 tree sizetree
4162 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4163 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4164 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4165 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4166 int reg_parm_stack_space = 0;
4167
4168 #ifdef REG_PARM_STACK_SPACE
4169 /* If we have found a stack parm before we reach the end of the
4170 area reserved for registers, skip that area. */
4171 if (! in_regs)
4172 {
4173 #ifdef MAYBE_REG_PARM_STACK_SPACE
4174 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4175 #else
4176 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4177 #endif
4178 if (reg_parm_stack_space > 0)
4179 {
4180 if (initial_offset_ptr->var)
4181 {
4182 initial_offset_ptr->var
4183 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4184 size_int (reg_parm_stack_space));
4185 initial_offset_ptr->constant = 0;
4186 }
4187 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4188 initial_offset_ptr->constant = reg_parm_stack_space;
4189 }
4190 }
4191 #endif /* REG_PARM_STACK_SPACE */
4192
4193 arg_size_ptr->var = 0;
4194 arg_size_ptr->constant = 0;
4195
4196 #ifdef ARGS_GROW_DOWNWARD
4197 if (initial_offset_ptr->var)
4198 {
4199 offset_ptr->constant = 0;
4200 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4201 initial_offset_ptr->var);
4202 }
4203 else
4204 {
4205 offset_ptr->constant = - initial_offset_ptr->constant;
4206 offset_ptr->var = 0;
4207 }
4208 if (where_pad != none
4209 && (TREE_CODE (sizetree) != INTEGER_CST
4210 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4211 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4212 SUB_PARM_SIZE (*offset_ptr, sizetree);
4213 if (where_pad != downward)
4214 pad_to_arg_alignment (offset_ptr, boundary);
4215 if (initial_offset_ptr->var)
4216 {
4217 arg_size_ptr->var = size_binop (MINUS_EXPR,
4218 size_binop (MINUS_EXPR,
4219 integer_zero_node,
4220 initial_offset_ptr->var),
4221 offset_ptr->var);
4222 }
4223 else
4224 {
4225 arg_size_ptr->constant = (- initial_offset_ptr->constant -
4226 offset_ptr->constant);
4227 }
4228 #else /* !ARGS_GROW_DOWNWARD */
4229 pad_to_arg_alignment (initial_offset_ptr, boundary);
4230 *offset_ptr = *initial_offset_ptr;
4231
4232 #ifdef PUSH_ROUNDING
4233 if (passed_mode != BLKmode)
4234 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4235 #endif
4236
4237 /* Pad_below needs the pre-rounded size to know how much to pad below
4238 so this must be done before rounding up. */
4239 if (where_pad == downward
4240 /* However, BLKmode args passed in regs have their padding done elsewhere.
4241 The stack slot must be able to hold the entire register. */
4242 && !(in_regs && passed_mode == BLKmode))
4243 pad_below (offset_ptr, passed_mode, sizetree);
4244
4245 if (where_pad != none
4246 && (TREE_CODE (sizetree) != INTEGER_CST
4247 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4248 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4249
4250 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4251 #endif /* ARGS_GROW_DOWNWARD */
4252 }
4253
4254 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4255 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
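/* E.g. (hypothetical): with BOUNDARY == 32 bits, boundary_in_bytes is
   4, so a constant offset of 6 becomes CEIL_ROUND (6, 4) == 8 when
   args grow upward, or FLOOR_ROUND (-6, 4) == -8 when they grow
   downward.  */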
4256
4257 static void
4258 pad_to_arg_alignment (offset_ptr, boundary)
4259 struct args_size *offset_ptr;
4260 int boundary;
4261 {
4262 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4263
4264 if (boundary > BITS_PER_UNIT)
4265 {
4266 if (offset_ptr->var)
4267 {
4268 offset_ptr->var =
4269 #ifdef ARGS_GROW_DOWNWARD
4270 round_down
4271 #else
4272 round_up
4273 #endif
4274 (ARGS_SIZE_TREE (*offset_ptr),
4275 boundary / BITS_PER_UNIT);
4276 offset_ptr->constant = 0; /*?*/
4277 }
4278 else
4279 offset_ptr->constant =
4280 #ifdef ARGS_GROW_DOWNWARD
4281 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4282 #else
4283 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4284 #endif
4285 }
4286 }
4287
4288 static void
4289 pad_below (offset_ptr, passed_mode, sizetree)
4290 struct args_size *offset_ptr;
4291 enum machine_mode passed_mode;
4292 tree sizetree;
4293 {
4294 if (passed_mode != BLKmode)
4295 {
4296 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4297 offset_ptr->constant
4298 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4299 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4300 - GET_MODE_SIZE (passed_mode));
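/* E.g. (hypothetical): a 16-bit HImode parm with PARM_BOUNDARY == 32
   leaves 32 / 8 - 2 == 2 bytes of padding below, so the offset is
   advanced by 2.  */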
4301 }
4302 else
4303 {
4304 if (TREE_CODE (sizetree) != INTEGER_CST
4305 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4306 {
4307 /* Round the size up to a multiple of PARM_BOUNDARY bits. */
4308 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4309 /* Add it in. */
4310 ADD_PARM_SIZE (*offset_ptr, s2);
4311 SUB_PARM_SIZE (*offset_ptr, sizetree);
4312 }
4313 }
4314 }
4315
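/* Round the tree VALUE down to a multiple of DIVISOR, yielding a tree.
   E.g. a VALUE of 10 with DIVISOR 4 gives (10 / 4) * 4 == 8.  */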
4316 static tree
4317 round_down (value, divisor)
4318 tree value;
4319 int divisor;
4320 {
4321 return size_binop (MULT_EXPR,
4322 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4323 size_int (divisor));
4324 }
4325 \f
4326 /* Walk the tree of blocks describing the binding levels within a function
4327 and warn about uninitialized variables.
4328 This is done after calling flow_analysis and before global_alloc
4329 clobbers the pseudo-regs to hard regs. */
4330
4331 void
4332 uninitialized_vars_warning (block)
4333 tree block;
4334 {
4335 register tree decl, sub;
4336 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4337 {
4338 if (TREE_CODE (decl) == VAR_DECL
4339 /* These warnings are unreliable for aggregates
4340 because assigning the fields one by one can fail to convince
4341 flow.c that the entire aggregate was initialized.
4342 Unions are troublesome because members may be shorter. */
4343 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4344 && DECL_RTL (decl) != 0
4345 && GET_CODE (DECL_RTL (decl)) == REG
4346 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4347 warning_with_decl (decl,
4348 "`%s' might be used uninitialized in this function");
4349 if (TREE_CODE (decl) == VAR_DECL
4350 && DECL_RTL (decl) != 0
4351 && GET_CODE (DECL_RTL (decl)) == REG
4352 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4353 warning_with_decl (decl,
4354 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4355 }
4356 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4357 uninitialized_vars_warning (sub);
4358 }
4359
4360 /* Do the appropriate part of uninitialized_vars_warning
4361 but for arguments instead of local variables. */
4362
4363 void
4364 setjmp_args_warning ()
4365 {
4366 register tree decl;
4367 for (decl = DECL_ARGUMENTS (current_function_decl);
4368 decl; decl = TREE_CHAIN (decl))
4369 if (DECL_RTL (decl) != 0
4370 && GET_CODE (DECL_RTL (decl)) == REG
4371 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4372 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4373 }
4374
4375 /* If this function calls setjmp, put all vars into the stack
4376 unless they were declared `register'. */
4377
4378 void
4379 setjmp_protect (block)
4380 tree block;
4381 {
4382 register tree decl, sub;
4383 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4384 if ((TREE_CODE (decl) == VAR_DECL
4385 || TREE_CODE (decl) == PARM_DECL)
4386 && DECL_RTL (decl) != 0
4387 && GET_CODE (DECL_RTL (decl)) == REG
4388 /* If this variable came from an inline function, it must be
4389 that its life doesn't overlap the setjmp. If there was a
4390 setjmp in the function, it would already be in memory. We
4391 must exclude such variables because their DECL_RTL might be
4392 set to strange things such as virtual_stack_vars_rtx. */
4393 && ! DECL_FROM_INLINE (decl)
4394 && (
4395 #ifdef NON_SAVING_SETJMP
4396 /* If longjmp doesn't restore the registers,
4397 don't put anything in them. */
4398 NON_SAVING_SETJMP
4399 ||
4400 #endif
4401 ! DECL_REGISTER (decl)))
4402 put_var_into_stack (decl);
4403 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4404 setjmp_protect (sub);
4405 }
4406 \f
4407 /* Like the previous function, but for args instead of local variables. */
4408
4409 void
4410 setjmp_protect_args ()
4411 {
4412 register tree decl, sub;
4413 for (decl = DECL_ARGUMENTS (current_function_decl);
4414 decl; decl = TREE_CHAIN (decl))
4415 if ((TREE_CODE (decl) == VAR_DECL
4416 || TREE_CODE (decl) == PARM_DECL)
4417 && DECL_RTL (decl) != 0
4418 && GET_CODE (DECL_RTL (decl)) == REG
4419 && (
4420 /* If longjmp doesn't restore the registers,
4421 don't put anything in them. */
4422 #ifdef NON_SAVING_SETJMP
4423 NON_SAVING_SETJMP
4424 ||
4425 #endif
4426 ! DECL_REGISTER (decl)))
4427 put_var_into_stack (decl);
4428 }
4429 \f
4430 /* Return the context-pointer register corresponding to DECL,
4431 or 0 if it does not need one. */
4432
4433 rtx
4434 lookup_static_chain (decl)
4435 tree decl;
4436 {
4437 tree context = decl_function_context (decl);
4438 tree link;
4439
4440 if (context == 0
4441 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4442 return 0;
4443
4444 /* We treat inline_function_decl as an alias for the current function
4445 because that is the inline function whose vars, types, etc.
4446 are being merged into the current function.
4447 See expand_inline_function. */
4448 if (context == current_function_decl || context == inline_function_decl)
4449 return virtual_stack_vars_rtx;
4450
4451 for (link = context_display; link; link = TREE_CHAIN (link))
4452 if (TREE_PURPOSE (link) == context)
4453 return RTL_EXPR_RTL (TREE_VALUE (link));
4454
4455 abort ();
4456 }
4457 \f
4458 /* Convert a stack slot address ADDR for variable VAR
4459 (from a containing function)
4460 into an address valid in this function (using a static chain). */
4461
4462 rtx
4463 fix_lexical_addr (addr, var)
4464 rtx addr;
4465 tree var;
4466 {
4467 rtx basereg;
4468 int displacement;
4469 tree context = decl_function_context (var);
4470 struct function *fp;
4471 rtx base = 0;
4472
4473 /* If this is the present function, we need not do anything. */
4474 if (context == current_function_decl || context == inline_function_decl)
4475 return addr;
4476
4477 for (fp = outer_function_chain; fp; fp = fp->next)
4478 if (fp->decl == context)
4479 break;
4480
4481 if (fp == 0)
4482 abort ();
4483
4484 /* Decode given address as base reg plus displacement. */
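/* E.g. an ADDR of (reg FP) decodes as basereg FP with displacement 0,
   and (plus (reg FP) (const_int 8)) as basereg FP with displacement 8;
   any other form is rejected below.  */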
4485 if (GET_CODE (addr) == REG)
4486 basereg = addr, displacement = 0;
4487 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4488 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4489 else
4490 abort ();
4491
4492 /* We accept vars reached via the containing function's
4493 incoming arg pointer and via its stack variables pointer. */
4494 if (basereg == fp->internal_arg_pointer)
4495 {
4496 /* If reached via arg pointer, get the arg pointer value
4497 out of that function's stack frame.
4498
4499 There are two cases: If a separate ap is needed, allocate a
4500 slot in the outer function for it and dereference it that way.
4501 This is correct even if the real ap is actually a pseudo.
4502 Otherwise, just adjust the offset from the frame pointer to
4503 compensate. */
4504
4505 #ifdef NEED_SEPARATE_AP
4506 rtx addr;
4507
4508 if (fp->arg_pointer_save_area == 0)
4509 fp->arg_pointer_save_area
4510 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4511
4512 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4513 addr = memory_address (Pmode, addr);
4514
4515 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4516 #else
4517 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4518 base = lookup_static_chain (var);
4519 #endif
4520 }
4521
4522 else if (basereg == virtual_stack_vars_rtx)
4523 {
4524 /* This is the same code as lookup_static_chain, duplicated here to
4525 avoid an extra call to decl_function_context. */
4526 tree link;
4527
4528 for (link = context_display; link; link = TREE_CHAIN (link))
4529 if (TREE_PURPOSE (link) == context)
4530 {
4531 base = RTL_EXPR_RTL (TREE_VALUE (link));
4532 break;
4533 }
4534 }
4535
4536 if (base == 0)
4537 abort ();
4538
4539 /* Use same offset, relative to appropriate static chain or argument
4540 pointer. */
4541 return plus_constant (base, displacement);
4542 }
4543 \f
4544 /* Return the address of the trampoline for entering nested fn FUNCTION.
4545 If necessary, allocate a trampoline (in the stack frame)
4546 and emit rtl to initialize its contents (at entry to this function). */
4547
4548 rtx
4549 trampoline_address (function)
4550 tree function;
4551 {
4552 tree link;
4553 tree rtlexp;
4554 rtx tramp;
4555 struct function *fp;
4556 tree fn_context;
4557
4558 /* Find an existing trampoline and return it. */
4559 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4560 if (TREE_PURPOSE (link) == function)
4561 return
4562 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4563
4564 for (fp = outer_function_chain; fp; fp = fp->next)
4565 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4566 if (TREE_PURPOSE (link) == function)
4567 {
4568 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4569 function);
4570 return round_trampoline_addr (tramp);
4571 }
4572
4573 /* None exists; we must make one. */
4574
4575 /* Find the `struct function' for the function containing FUNCTION. */
4576 fp = 0;
4577 fn_context = decl_function_context (function);
4578 if (fn_context != current_function_decl)
4579 for (fp = outer_function_chain; fp; fp = fp->next)
4580 if (fp->decl == fn_context)
4581 break;
4582
4583 /* Allocate run-time space for this trampoline
4584 (usually in the defining function's stack frame). */
4585 #ifdef ALLOCATE_TRAMPOLINE
4586 tramp = ALLOCATE_TRAMPOLINE (fp);
4587 #else
4588 /* If rounding needed, allocate extra space
4589 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4590 #ifdef TRAMPOLINE_ALIGNMENT
4591 #define TRAMPOLINE_REAL_SIZE \
4592 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4593 #else
4594 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4595 #endif
4596 if (fp != 0)
4597 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4598 else
4599 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4600 #endif
4601
4602 /* Record the trampoline for reuse and note it for later initialization
4603 by expand_function_end. */
4604 if (fp != 0)
4605 {
4606 push_obstacks (fp->function_maybepermanent_obstack,
4607 fp->function_maybepermanent_obstack);
4608 rtlexp = make_node (RTL_EXPR);
4609 RTL_EXPR_RTL (rtlexp) = tramp;
4610 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4611 pop_obstacks ();
4612 }
4613 else
4614 {
4615 /* Make the RTL_EXPR node temporary, not momentary, so that the
4616 trampoline_list doesn't become garbage. */
4617 int momentary = suspend_momentary ();
4618 rtlexp = make_node (RTL_EXPR);
4619 resume_momentary (momentary);
4620
4621 RTL_EXPR_RTL (rtlexp) = tramp;
4622 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4623 }
4624
4625 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4626 return round_trampoline_addr (tramp);
4627 }
4628
4629 /* Given a trampoline address,
4630 round it up to a multiple of TRAMPOLINE_ALIGNMENT. */
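/* The rounding is the usual add-and-mask idiom: with a hypothetical
   TRAMPOLINE_ALIGNMENT of 32 bits (4 bytes), an address is rounded
   via (addr + 3) & -4, so e.g. 0x1005 becomes 0x1008.  */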
4631
4632 static rtx
4633 round_trampoline_addr (tramp)
4634 rtx tramp;
4635 {
4636 #ifdef TRAMPOLINE_ALIGNMENT
4637 /* Round address up to desired boundary. */
4638 rtx temp = gen_reg_rtx (Pmode);
4639 temp = expand_binop (Pmode, add_optab, tramp,
4640 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4641 temp, 0, OPTAB_LIB_WIDEN);
4642 tramp = expand_binop (Pmode, and_optab, temp,
4643 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4644 temp, 0, OPTAB_LIB_WIDEN);
4645 #endif
4646 return tramp;
4647 }
4648 \f
4649 /* The functions identify_blocks and reorder_blocks provide a way to
4650 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4651 duplicate portions of the RTL code. Call identify_blocks before
4652 changing the RTL, and call reorder_blocks after. */
4653
4654 /* Put all this function's BLOCK nodes, including those that are chained
4655 onto the first block, into a vector, and return it.
4656 Also store in each NOTE for the beginning or end of a block
4657 the index of that block in the vector.
4658 The arguments are BLOCK, the chain of top-level blocks of the function,
4659 and INSNS, the insn chain of the function. */
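/* For example, with blocks nested as

	{ B1  { B2 }  { B3 } }

   the three BLOCK_BEG notes are numbered 1, 2 and 3 in insn order,
   while each BLOCK_END note records the number of the block that
   becomes current again after it (B2's and B3's end notes both
   record 1).  */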
4660
4661 tree *
4662 identify_blocks (block, insns)
4663 tree block;
4664 rtx insns;
4665 {
4666 int n_blocks;
4667 tree *block_vector;
4668 int *block_stack;
4669 int depth = 0;
4670 int next_block_number = 1;
4671 int current_block_number = 1;
4672 rtx insn;
4673
4674 if (block == 0)
4675 return 0;
4676
4677 n_blocks = all_blocks (block, 0);
4678 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4679 block_stack = (int *) alloca (n_blocks * sizeof (int));
4680
4681 all_blocks (block, block_vector);
4682
4683 for (insn = insns; insn; insn = NEXT_INSN (insn))
4684 if (GET_CODE (insn) == NOTE)
4685 {
4686 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4687 {
4688 block_stack[depth++] = current_block_number;
4689 current_block_number = next_block_number;
4690 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4691 }
4692 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4693 {
4694 current_block_number = block_stack[--depth];
4695 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4696 }
4697 }
4698
4699 if (n_blocks != next_block_number)
4700 abort ();
4701
4702 return block_vector;
4703 }
4704
4705 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4706 and a revised instruction chain, rebuild the tree structure
4707 of BLOCK nodes to correspond to the new order of RTL.
4708 The new block tree is inserted below BLOCK.
4709 Returns the current top-level block. */
4710
4711 tree
4712 reorder_blocks (block_vector, block, insns)
4713 tree *block_vector;
4714 tree block;
4715 rtx insns;
4716 {
4717 tree current_block = block;
4718 rtx insn;
4719
4720 if (block_vector == 0)
4721 return block;
4722
4723 /* Prune the old trees away, so that they don't get in the way. */
4724 BLOCK_SUBBLOCKS (current_block) = 0;
4725 BLOCK_CHAIN (current_block) = 0;
4726
4727 for (insn = insns; insn; insn = NEXT_INSN (insn))
4728 if (GET_CODE (insn) == NOTE)
4729 {
4730 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4731 {
4732 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4733 /* If we have seen this block before, copy it. */
4734 if (TREE_ASM_WRITTEN (block))
4735 block = copy_node (block);
4736 BLOCK_SUBBLOCKS (block) = 0;
4737 TREE_ASM_WRITTEN (block) = 1;
4738 BLOCK_SUPERCONTEXT (block) = current_block;
4739 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4740 BLOCK_SUBBLOCKS (current_block) = block;
4741 current_block = block;
4742 NOTE_SOURCE_FILE (insn) = 0;
4743 }
4744 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4745 {
4746 BLOCK_SUBBLOCKS (current_block)
4747 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4748 current_block = BLOCK_SUPERCONTEXT (current_block);
4749 NOTE_SOURCE_FILE (insn) = 0;
4750 }
4751 }
4752
4753 BLOCK_SUBBLOCKS (current_block)
4754 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4755 return current_block;
4756 }
4757
4758 /* Reverse the order of elements in the chain T of blocks,
4759 and return the new head of the chain (old last element). */
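/* E.g. a chain B1 -> B2 -> B3 comes back as B3 -> B2 -> B1, each
   BLOCK_CHAIN pointer being redirected in place.  */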
4760
4761 static tree
4762 blocks_nreverse (t)
4763 tree t;
4764 {
4765 register tree prev = 0, decl, next;
4766 for (decl = t; decl; decl = next)
4767 {
4768 next = BLOCK_CHAIN (decl);
4769 BLOCK_CHAIN (decl) = prev;
4770 prev = decl;
4771 }
4772 return prev;
4773 }
4774
4775 /* Count the subblocks of the list starting with BLOCK, and list them
4776 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
4777 blocks. */
4778
4779 static int
4780 all_blocks (block, vector)
4781 tree block;
4782 tree *vector;
4783 {
4784 int n_blocks = 0;
4785
4786 while (block)
4787 {
4788 TREE_ASM_WRITTEN (block) = 0;
4789
4790 /* Record this block. */
4791 if (vector)
4792 vector[n_blocks] = block;
4793
4794 ++n_blocks;
4795
4796 /* Record the subblocks, and their subblocks... */
4797 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4798 vector ? vector + n_blocks : 0);
4799 block = BLOCK_CHAIN (block);
4800 }
4801
4802 return n_blocks;
4803 }
4804 \f
4805 /* Build bytecode call descriptor for function SUBR. */
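/* The descriptor is a constant integer vector laid out as

	nargs, return type code, return size,
	arg 1 type code, arg 1 size, ..., arg N type code, arg N size

   with the per-argument pairs in declaration order.  */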
4806
4807 rtx
4808 bc_build_calldesc (subr)
4809 tree subr;
4810 {
4811 tree calldesc = 0, arg;
4812 int nargs = 0;
4813
4814 /* Build the argument description vector in reverse order. */
4815 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4816 nargs = 0;
4817
4818 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4819 {
4820 ++nargs;
4821
4822 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4823 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4824 }
4825
4826 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4827
4828 /* Prepend the function's return type. */
4829 calldesc = tree_cons ((tree) 0,
4830 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4831 calldesc);
4832
4833 calldesc = tree_cons ((tree) 0,
4834 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4835 calldesc);
4836
4837 /* Prepend the arg count. */
4838 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
4839
4840 /* Output the call description vector and get its address. */
4841 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4842 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4843 build_index_type (build_int_2 (nargs * 2, 0)));
4844
4845 return output_constant_def (calldesc);
4846 }
4847
4848
4849 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4850 and initialize static variables for generating RTL for the statements
4851 of the function. */
4852
4853 void
4854 init_function_start (subr, filename, line)
4855 tree subr;
4856 char *filename;
4857 int line;
4858 {
4859 char *junk;
4860
4861 if (output_bytecode)
4862 {
4863 this_function_decl = subr;
4864 this_function_calldesc = bc_build_calldesc (subr);
4865 local_vars_size = 0;
4866 stack_depth = 0;
4867 max_stack_depth = 0;
4868 stmt_expr_depth = 0;
4869 return;
4870 }
4871
4872 init_stmt_for_function ();
4873
4874 cse_not_expected = ! optimize;
4875
4876 /* Caller save not needed yet. */
4877 caller_save_needed = 0;
4878
4879 /* No stack slots have been made yet. */
4880 stack_slot_list = 0;
4881
4882 /* There is no stack slot for handling nonlocal gotos. */
4883 nonlocal_goto_handler_slot = 0;
4884 nonlocal_goto_stack_level = 0;
4885
4886 /* No labels have been declared for nonlocal use. */
4887 nonlocal_labels = 0;
4888
4889 /* No function calls so far in this function. */
4890 function_call_count = 0;
4891
4892 /* No parm regs have been allocated.
4893 (This is important for output_inline_function.) */
4894 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4895
4896 /* Initialize the RTL mechanism. */
4897 init_emit ();
4898
4899 /* Initialize the queue of pending postincrement and postdecrements,
4900 and some other info in expr.c. */
4901 init_expr ();
4902
4903 /* We haven't done register allocation yet. */
4904 reg_renumber = 0;
4905
4906 init_const_rtx_hash_table ();
4907
4908 current_function_name = (*decl_printable_name) (subr, &junk);
4909
4910 /* Nonzero if this is a nested function that uses a static chain. */
4911
4912 current_function_needs_context
4913 = (decl_function_context (current_function_decl) != 0
4914 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
4915
4916 /* Set if a call to setjmp is seen. */
4917 current_function_calls_setjmp = 0;
4918
4919 /* Set if a call to longjmp is seen. */
4920 current_function_calls_longjmp = 0;
4921
4922 current_function_calls_alloca = 0;
4923 current_function_has_nonlocal_label = 0;
4924 current_function_has_nonlocal_goto = 0;
4925 current_function_contains_functions = 0;
4926
4927 current_function_returns_pcc_struct = 0;
4928 current_function_returns_struct = 0;
4929 current_function_epilogue_delay_list = 0;
4930 current_function_uses_const_pool = 0;
4931 current_function_uses_pic_offset_table = 0;
4932
4933 /* We have not yet needed to make a label to jump to for tail-recursion. */
4934 tail_recursion_label = 0;
4935
4936 /* We haven't had a need to make a save area for ap yet. */
4937
4938 arg_pointer_save_area = 0;
4939
4940 /* No stack slots allocated yet. */
4941 frame_offset = 0;
4942
4943 /* No SAVE_EXPRs in this function yet. */
4944 save_expr_regs = 0;
4945
4946 /* No RTL_EXPRs in this function yet. */
4947 rtl_expr_chain = 0;
4948
4949 /* Set up to allocate temporaries. */
4950 init_temp_slots ();
4951
4952 /* Within the function body, compute a type's size as soon as it is laid out. */
4953 immediate_size_expand++;
4954
4955 /* We haven't made any trampolines for this function yet. */
4956 trampoline_list = 0;
4957
4958 init_pending_stack_adjust ();
4959 inhibit_defer_pop = 0;
4960
4961 current_function_outgoing_args_size = 0;
4962
4963 /* Prevent ever trying to delete the first instruction of a function.
4964 Also tell final how to output a linenum before the function prologue. */
4965 emit_line_note (filename, line);
4966
4967 /* Make sure first insn is a note even if we don't want linenums.
4968 This makes sure the first insn will never be deleted.
4969 Also, final expects a note to appear there. */
4970 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4971
4972 /* Set flags used by final.c. */
4973 if (aggregate_value_p (DECL_RESULT (subr)))
4974 {
4975 #ifdef PCC_STATIC_STRUCT_RETURN
4976 current_function_returns_pcc_struct = 1;
4977 #endif
4978 current_function_returns_struct = 1;
4979 }
4980
4981 /* Warn if this value is an aggregate type,
4982 regardless of which calling convention we are using for it. */
4983 if (warn_aggregate_return
4984 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4985 warning ("function returns an aggregate");
4986
4987 current_function_returns_pointer
4988 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
4989
4990 /* Indicate that we need to distinguish between the return value of the
4991 present function and the return value of a function being called. */
4992 rtx_equal_function_value_matters = 1;
4993
4994 /* Indicate that we have not instantiated virtual registers yet. */
4995 virtuals_instantiated = 0;
4996
4997 /* Indicate we have no need of a frame pointer yet. */
4998 frame_pointer_needed = 0;
4999
5000 /* By default assume not varargs or stdarg. */
5001 current_function_varargs = 0;
5002 current_function_stdarg = 0;
5003 }
5004
5005 /* Indicate that the current function uses extra args
5006 not explicitly mentioned in the argument list in any fashion. */
5007
5008 void
5009 mark_varargs ()
5010 {
5011 current_function_varargs = 1;
5012 }
5013
5014 /* Expand a call to __main at the beginning of a possible main function. */
5015
5016 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5017 #undef HAS_INIT_SECTION
5018 #define HAS_INIT_SECTION
5019 #endif
5020
5021 void
5022 expand_main_function ()
5023 {
5024 if (!output_bytecode)
5025 {
5026 /* The bare `0;' below keeps this block non-empty when the call is conditioned out. */
5027 0;
5028 #if !defined (HAS_INIT_SECTION)
5029 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
5030 VOIDmode, 0);
5031 #endif /* not HAS_INIT_SECTION */
5032 }
5033 }
5034 \f
5035 extern struct obstack permanent_obstack;
5036
5037 /* Expand start of bytecode function. See comment at
5038 expand_function_start below for details. */
5039
5040 void
5041 bc_expand_function_start (subr, parms_have_cleanups)
5042 tree subr;
5043 int parms_have_cleanups;
5044 {
5045 char label[20], *name;
5046 static int nlab;
5047 tree thisarg;
5048 int argsz;
5049
5050 if (TREE_PUBLIC (subr))
5051 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
5052
5053 #ifdef DEBUG_PRINT_CODE
5054 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
5055 #endif
5056
5057 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
5058 {
5059 if (DECL_RTL (thisarg))
5060 abort (); /* DECL_RTL should still be NULL at this point. */
5061 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
5062 {
5063 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5064 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
5065 }
5066 else
5067 {
5068 /* Variable-sized objects are passed as pointers to their storage. */
5069 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5070 argsz += POINTER_SIZE;
5071 }
5072 }
5073
5074 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
5075
5076 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
5077
5078 ++nlab;
5079 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
5080 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
5081 this_function_bytecode =
5082 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
5083 }
5084
5085
5086 /* Expand the end of a bytecode function. See the comment at
5087 expand_function_end (), below, for details. */
5088
5089 void
5090 bc_expand_function_end ()
5091 {
5092 char *ptrconsts;
5093
5094 expand_null_return ();
5095
5096 /* Emit any fixup code. This must be done before the call to
5097 BC_END_FUNCTION (), since that will cause the bytecode
5098 segment to be finished off and closed. */
5099
5100 expand_fixups (NULL_RTX);
5101
5102 ptrconsts = bc_end_function ();
5103
5104 bc_align_const (2 /* INT_ALIGN */);
5105
5106 /* If this changes, also make sure to change bc-interp.h! */
5107
5108 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
5109 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
5110 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
5111 bc_emit_const_labelref (this_function_bytecode, 0);
5112 bc_emit_const_labelref (ptrconsts, 0);
5113 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
5114 }
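/* For illustration only: the constants emitted above form a call-info
   record that the interpreter reads.  A hedged sketch of its layout
   (the struct name is hypothetical; the authoritative layout lives in
   bc-interp.h):

	struct callinfo_sketch
	{
	  int max_stack_depth;	/* deepest interpreter stack used */
	  int local_vars_size;	/* size of the local variables */
	  char *bytecode;	/* label of the function's trampoline */
	  char *ptrconsts;	/* label of the pointer-constant segment */
	  char *calldesc;	/* label of the call description vector */
	};

   If the emission order above changes, bc-interp.h must change to
   match.  */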
5115
5116
5117 /* Start the RTL for a new function, and set variables used for
5118 emitting RTL.
5119 SUBR is the FUNCTION_DECL node.
5120 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5121 the function's parameters, which must be run at any return statement. */
5122
5123 void
5124 expand_function_start (subr, parms_have_cleanups)
5125 tree subr;
5126 int parms_have_cleanups;
5127 {
5128 register int i;
5129 tree tem;
5130 rtx last_ptr;
5131
5132 if (output_bytecode)
5133 {
5134 bc_expand_function_start (subr, parms_have_cleanups);
5135 return;
5136 }
5137
5138 /* Make sure volatile mem refs aren't considered
5139 valid operands of arithmetic insns. */
5140 init_recog_no_volatile ();
5141
5142 /* If function gets a static chain arg, store it in the stack frame.
5143 Do this first, so it gets the first stack slot offset. */
5144 if (current_function_needs_context)
5145 {
5146 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5147
5148 #ifdef SMALL_REGISTER_CLASSES
5149 /* Delay copying static chain if it is not a register to avoid
5150 conflicts with regs used for parameters. */
5151 if (GET_CODE (static_chain_incoming_rtx) == REG)
5152 #endif
5153 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5154 }
5155
5156 /* If the parameters of this function need cleaning up, get a label
5157 for the beginning of the code which executes those cleanups. This must
5158 be done before doing anything with return_label. */
5159 if (parms_have_cleanups)
5160 cleanup_label = gen_label_rtx ();
5161 else
5162 cleanup_label = 0;
5163
5164 /* Make the label for return statements to jump to, if this machine
5165 does not have a one-instruction return and uses an epilogue,
5166 or if it returns a structure, or if it has parm cleanups. */
5167 #ifdef HAVE_return
5168 if (cleanup_label == 0 && HAVE_return
5169 && ! current_function_returns_pcc_struct
5170 && ! (current_function_returns_struct && ! optimize))
5171 return_label = 0;
5172 else
5173 return_label = gen_label_rtx ();
5174 #else
5175 return_label = gen_label_rtx ();
5176 #endif
5177
5178 /* Initialize rtx used to return the value. */
5179 /* Do this before assign_parms so that we copy the struct value address
5180 before any library calls that assign parms might generate. */
5181
5182 /* Decide whether to return the value in memory or in a register. */
5183 if (aggregate_value_p (DECL_RESULT (subr)))
5184 {
5185 /* Returning something that won't go in a register. */
5186 register rtx value_address = 0;
5187
5188 #ifdef PCC_STATIC_STRUCT_RETURN
5189 if (current_function_returns_pcc_struct)
5190 {
5191 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5192 value_address = assemble_static_space (size);
5193 }
5194 else
5195 #endif
5196 {
5197 /* Expect to be passed the address of a place to store the value.
5198 If it is passed as an argument, assign_parms will take care of
5199 it. */
5200 if (struct_value_incoming_rtx)
5201 {
5202 value_address = gen_reg_rtx (Pmode);
5203 emit_move_insn (value_address, struct_value_incoming_rtx);
5204 }
5205 }
5206 if (value_address)
5207 {
5208 DECL_RTL (DECL_RESULT (subr))
5209 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
5210 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5211 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5212 }
5213 }
5214 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5215 /* If return mode is void, this decl rtl should not be used. */
5216 DECL_RTL (DECL_RESULT (subr)) = 0;
5217 else if (parms_have_cleanups)
5218 {
5219 /* If function will end with cleanup code for parms,
5220 compute the return value into a pseudo reg,
5221 which we will copy into the true return register
5222 after the cleanups are done. */
5223
5224 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5225
5226 #ifdef PROMOTE_FUNCTION_RETURN
5227 tree type = TREE_TYPE (DECL_RESULT (subr));
5228 int unsignedp = TREE_UNSIGNED (type);
5229
5230 mode = promote_mode (type, mode, &unsignedp, 1);
5231 #endif
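      /* For example, on a typical target that defines
	 PROMOTE_FUNCTION_RETURN and returns integers in word-sized
	 registers, a function declared to return `short' gets its
	 pseudo in SImode here, sign- or zero-extended according to
	 TREE_UNSIGNED.  */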
5232
5233 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5234 }
5235 else
5236 /* Scalar, returned in a register. */
5237 {
5238 #ifdef FUNCTION_OUTGOING_VALUE
5239 DECL_RTL (DECL_RESULT (subr))
5240 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5241 #else
5242 DECL_RTL (DECL_RESULT (subr))
5243 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5244 #endif
5245
5246 /* Mark this reg as the function's return value. */
5247 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5248 {
5249 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5250 /* Needed because we may need to move this to memory
5251 in case it's a named return value whose address is taken. */
5252 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5253 }
5254 }
5255
5256 /* Initialize rtx for parameters and local variables.
5257 In some cases this requires emitting insns. */
5258
5259 assign_parms (subr, 0);
5260
5261 #ifdef SMALL_REGISTER_CLASSES
5262 /* Copy the static chain now if it wasn't a register. The delay is to
5263 avoid conflicts with the parameter passing registers. */
5264
5265 if (current_function_needs_context)
5266 if (GET_CODE (static_chain_incoming_rtx) != REG)
5267 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5268 #endif
5269
5270 /* The following was moved from init_function_start.
5271 The move is supposed to make sdb output more accurate. */
5272 /* Indicate the beginning of the function body,
5273 as opposed to parm setup. */
5274 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5275
5276 /* If doing stupid allocation, mark parms as born here. */
5277
5278 if (GET_CODE (get_last_insn ()) != NOTE)
5279 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5280 parm_birth_insn = get_last_insn ();
5281
5282 if (obey_regdecls)
5283 {
5284 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5285 use_variable (regno_reg_rtx[i]);
5286
5287 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5288 use_variable (current_function_internal_arg_pointer);
5289 }
5290
5291 context_display = 0;
5292 if (current_function_needs_context)
5293 {
5294 /* Fetch static chain values for containing functions. */
5295 tem = decl_function_context (current_function_decl);
5296 /* If not doing stupid register allocation, copy the static chain
5297 pointer into a pseudo.  If we have small register classes, copy
5298 the value from memory if static_chain_incoming_rtx is a REG.  If
5299 we do stupid register allocation, we use the stack address
5300 generated above. */
5301 if (tem && ! obey_regdecls)
5302 {
5303 #ifdef SMALL_REGISTER_CLASSES
5304 /* If the static chain originally came in a register, put it back
5305 there, then move it out in the next insn. The reason for
5306 this peculiar code is to satisfy function integration. */
5307 if (GET_CODE (static_chain_incoming_rtx) == REG)
5308 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5309 #endif
5310
5311 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5312 }
5313
5314 while (tem)
5315 {
5316 tree rtlexp = make_node (RTL_EXPR);
5317
5318 RTL_EXPR_RTL (rtlexp) = last_ptr;
5319 context_display = tree_cons (tem, rtlexp, context_display);
5320 tem = decl_function_context (tem);
5321 if (tem == 0)
5322 break;
5323 /* Chain thru stack frames, assuming pointer to next lexical frame
5324 is found at the place we always store it. */
5325 #ifdef FRAME_GROWS_DOWNWARD
5326 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5327 #endif
5328 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
5329 memory_address (Pmode, last_ptr)));
5330
5331 /* If we are not optimizing, ensure that we know that this
5332 piece of context is live over the entire function. */
5333 if (! optimize)
5334 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
5335 save_expr_regs);
5336 }
5337 }
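  /* For illustration only: given nested functions

	f () { int x; g () { h () { ... uses x ... } } }

     the loop above, run while compiling h, records in context_display a
     pointer to each containing function's frame.  last_ptr starts as
     h's incoming static chain (g's frame); each further level is
     reached by loading the chain pointer saved at the fixed slot in
     that frame, i.e. roughly last_ptr = *last_ptr once per lexical
     level.  */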
5338
5339 /* The tail-recursion label, if we end up needing one, goes right
5340 after the display initializations.  Ensure we have a NOTE here,
5341 since some things (like trampolines) get placed before this point. */
5342 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5343
5344 /* Evaluate now the sizes of any types declared among the arguments. */
5345 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5346 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
5347
5348 /* Make sure there is a line number after the function entry setup code. */
5349 force_next_line_note ();
5350 }
5351 \f
5352 /* Generate RTL for the end of the current function.
5353 FILENAME and LINE are the current position in the source file.
5354
5355 It is up to language-specific callers to do cleanups for parameters--
5356 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5357
5358 void
5359 expand_function_end (filename, line, end_bindings)
5360 char *filename;
5361 int line;
5362 int end_bindings;
5363 {
5364 register int i;
5365 tree link;
5366
5367 #ifdef TRAMPOLINE_TEMPLATE
5368 static rtx initial_trampoline;
5369 #endif
5370
5371 if (output_bytecode)
5372 {
5373 bc_expand_function_end ();
5374 return;
5375 }
5376
5377 #ifdef NON_SAVING_SETJMP
5378 /* Don't put any variables in registers if we call setjmp
5379 on a machine that fails to restore the registers. */
5380 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5381 {
5382 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5383 setjmp_protect (DECL_INITIAL (current_function_decl));
5384
5385 setjmp_protect_args ();
5386 }
5387 #endif
5388
5389 /* Save the argument pointer if a save area was made for it. */
5390 if (arg_pointer_save_area)
5391 {
5392 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5393 emit_insn_before (x, tail_recursion_reentry);
5394 }
5395
5396 /* Initialize any trampolines required by this function. */
5397 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5398 {
5399 tree function = TREE_PURPOSE (link);
5400 rtx context = lookup_static_chain (function);
5401 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5402 rtx blktramp;
5403 rtx seq;
5404
5405 #ifdef TRAMPOLINE_TEMPLATE
5406 /* First make sure this compilation has a template for
5407 initializing trampolines. */
5408 if (initial_trampoline == 0)
5409 {
5410 end_temporary_allocation ();
5411 initial_trampoline
5412 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
5413 resume_temporary_allocation ();
5414 }
5415 #endif
5416
5417 /* Generate insns to initialize the trampoline. */
5418 start_sequence ();
5419 tramp = round_trampoline_addr (XEXP (tramp, 0));
5420 #ifdef TRAMPOLINE_TEMPLATE
5421 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5422 emit_block_move (blktramp, initial_trampoline,
5423 GEN_INT (TRAMPOLINE_SIZE),
5424 FUNCTION_BOUNDARY / BITS_PER_UNIT);
5425 #endif
5426 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5427 seq = get_insns ();
5428 end_sequence ();
5429
5430 /* Put those insns at entry to the containing function (this one). */
5431 emit_insns_before (seq, tail_recursion_reentry);
5432 }
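  /* For illustration only, a hedged sketch of what the loop above emits
     for each nested function FN whose address was taken:

	tramp = round_trampoline_addr (&FN_tramp_block);
	copy TRAMPOLINE_SIZE bytes of the template to tramp (if any);
	INITIALIZE_TRAMPOLINE patches in &FN and FN's static chain;

     the insns run at entry to this (the containing) function, so a call
     through the trampoline loads the static chain and enters FN.  */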
5433
5434 /* Warn about unused parms if extra warnings were specified. */
5435 if (warn_unused && extra_warnings)
5436 {
5437 tree decl;
5438
5439 for (decl = DECL_ARGUMENTS (current_function_decl);
5440 decl; decl = TREE_CHAIN (decl))
5441 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5442 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5443 warning_with_decl (decl, "unused parameter `%s'");
5444 }
5445
5446 /* Delete handlers for nonlocal gotos if nothing uses them. */
5447 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5448 delete_handlers ();
5449
5450 /* End any sequences that failed to be closed due to syntax errors. */
5451 while (in_sequence_p ())
5452 end_sequence ();
5453
5454 /* Outside function body, can't compute type's actual size
5455 until next function's body starts. */
5456 immediate_size_expand--;
5457
5458 /* If doing stupid register allocation,
5459 mark register parms as dying here. */
5460
5461 if (obey_regdecls)
5462 {
5463 rtx tem;
5464 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5465 use_variable (regno_reg_rtx[i]);
5466
5467 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5468
5469 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5470 {
5471 use_variable (XEXP (tem, 0));
5472 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5473 }
5474
5475 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5476 use_variable (current_function_internal_arg_pointer);
5477 }
5478
5479 clear_pending_stack_adjust ();
5480 do_pending_stack_adjust ();
5481
5482 /* Mark the end of the function body.
5483 If control reaches this insn, the function can drop through
5484 without returning a value. */
5485 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5486
5487 /* Output a line number for the end of the function.
5488 SDB depends on this. */
5489 emit_line_note_force (filename, line);
5490
5491 /* Output the label for the actual return from the function,
5492 if one is expected. This happens either because a function epilogue
5493 is used instead of a return instruction, or because a return was done
5494 with a goto in order to run local cleanups, or because of pcc-style
5495 structure returning. */
5496
5497 if (return_label)
5498 emit_label (return_label);
5499
5500 /* C++ uses this. */
5501 if (end_bindings)
5502 expand_end_bindings (0, 0, 0);
5503
5504 /* If we had calls to alloca, and this machine needs
5505 an accurate stack pointer to exit the function,
5506 insert some code to save and restore the stack pointer. */
5507 #ifdef EXIT_IGNORE_STACK
5508 if (! EXIT_IGNORE_STACK)
5509 #endif
5510 if (current_function_calls_alloca)
5511 {
5512 rtx tem = 0;
5513
5514 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5515 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5516 }
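  /* For illustration only: the net effect is to bracket the body with

	saved_sp = stack_pointer;	(just after parm setup)
	... function body, possibly calling alloca ...
	stack_pointer = saved_sp;	(here, before any return insn)

     so the function exits with the same stack pointer it started
     with.  */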
5517
5518 /* If scalar return value was computed in a pseudo-reg,
5519 copy that to the hard return register. */
5520 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5521 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5522 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5523 >= FIRST_PSEUDO_REGISTER))
5524 {
5525 rtx real_decl_result;
5526
5527 #ifdef FUNCTION_OUTGOING_VALUE
5528 real_decl_result
5529 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5530 current_function_decl);
5531 #else
5532 real_decl_result
5533 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5534 current_function_decl);
5535 #endif
5536 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5537 emit_move_insn (real_decl_result,
5538 DECL_RTL (DECL_RESULT (current_function_decl)));
5539 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
5540 }
5541
5542 /* If returning a structure, arrange to return the address of the value
5543 in a place where debuggers expect to find it.
5544
5545 If returning a structure PCC style,
5546 the caller also depends on this value.
5547 And current_function_returns_pcc_struct is not necessarily set. */
5548 if (current_function_returns_struct
5549 || current_function_returns_pcc_struct)
5550 {
5551 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5552 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5553 #ifdef FUNCTION_OUTGOING_VALUE
5554 rtx outgoing
5555 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5556 current_function_decl);
5557 #else
5558 rtx outgoing
5559 = FUNCTION_VALUE (build_pointer_type (type),
5560 current_function_decl);
5561 #endif
5562
5563 /* Mark this as a function return value so integrate will delete the
5564 assignment and USE below when inlining this function. */
5565 REG_FUNCTION_VALUE_P (outgoing) = 1;
5566
5567 emit_move_insn (outgoing, value_address);
5568 use_variable (outgoing);
5569 }
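  /* For illustration only: for

	struct big f () { struct big v; ... return v; }

     the code above copies the address where v's value was built into
     the register in which f would return a pointer, so debuggers (and,
     for pcc-style returns, the caller) can find the value there.  */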
5570
5571 /* Output a return insn if we are using one.
5572 Otherwise, let the rtl chain end here, to drop through
5573 into the epilogue. */
5574
5575 #ifdef HAVE_return
5576 if (HAVE_return)
5577 {
5578 emit_jump_insn (gen_return ());
5579 emit_barrier ();
5580 }
5581 #endif
5582
5583 /* Fix up any gotos that jumped out to the outermost
5584 binding level of the function.
5585 Must follow emitting RETURN_LABEL. */
5586
5587 /* If you have any cleanups to do at this point,
5588 and they need to create temporary variables,
5589 then you will lose. */
5590 expand_fixups (get_insns ());
5591 }
5592 \f
5593 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5594
5595 static int *prologue;
5596 static int *epilogue;
5597
5598 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5599 or a single insn). */
5600
5601 static int *
5602 record_insns (insns)
5603 rtx insns;
5604 {
5605 int *vec;
5606
5607 if (GET_CODE (insns) == SEQUENCE)
5608 {
5609 int len = XVECLEN (insns, 0);
5610 vec = (int *) oballoc ((len + 1) * sizeof (int));
5611 vec[len] = 0;
5612 while (--len >= 0)
5613 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5614 }
5615 else
5616 {
5617 vec = (int *) oballoc (2 * sizeof (int));
5618 vec[0] = INSN_UID (insns);
5619 vec[1] = 0;
5620 }
5621 return vec;
5622 }
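/* For illustration only: called on a three-insn SEQUENCE whose members
   have UIDs 7, 8 and 9, record_insns returns the zero-terminated vector
   {7, 8, 9, 0}; contains, below, counts matches against such a
   vector.  */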
5623
5624 /* Count how many insns within INSN have an INSN_UID listed in VEC. */
5625
5626 static int
5627 contains (insn, vec)
5628 rtx insn;
5629 int *vec;
5630 {
5631 register int i, j;
5632
5633 if (GET_CODE (insn) == INSN
5634 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5635 {
5636 int count = 0;
5637 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5638 for (j = 0; vec[j]; j++)
5639 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
5640 count++;
5641 return count;
5642 }
5643 else
5644 {
5645 for (j = 0; vec[j]; j++)
5646 if (INSN_UID (insn) == vec[j])
5647 return 1;
5648 }
5649 return 0;
5650 }
5651
5652 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5653 this into place with notes indicating where the prologue ends and where
5654 the epilogue begins. Update the basic block information when possible. */
5655
5656 void
5657 thread_prologue_and_epilogue_insns (f)
5658 rtx f;
5659 {
5660 #ifdef HAVE_prologue
5661 if (HAVE_prologue)
5662 {
5663 rtx head, seq, insn;
5664
5665 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5666 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5667 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
5668 seq = gen_prologue ();
5669 head = emit_insn_after (seq, f);
5670
5671 /* Include the new prologue insns in the first block. Ignore them
5672 if they form a basic block unto themselves. */
5673 if (basic_block_head && n_basic_blocks
5674 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
5675 basic_block_head[0] = NEXT_INSN (f);
5676
5677 /* Retain a map of the prologue insns. */
5678 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
5679 }
5680 else
5681 #endif
5682 prologue = 0;
5683
5684 #ifdef HAVE_epilogue
5685 if (HAVE_epilogue)
5686 {
5687 rtx insn = get_last_insn ();
5688 rtx prev = prev_nonnote_insn (insn);
5689
5690 /* If we end with a BARRIER, we don't need an epilogue. */
5691 if (! (prev && GET_CODE (prev) == BARRIER))
5692 {
5693 rtx tail, seq, tem;
5694 rtx first_use = 0;
5695 rtx last_use = 0;
5696
5697 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5698 epilogue insns, the USE insns at the end of a function,
5699 the jump insn that returns, and then a BARRIER. */
5700
5701 /* Move the USE insns at the end of a function onto a list. */
5702 while (prev
5703 && GET_CODE (prev) == INSN
5704 && GET_CODE (PATTERN (prev)) == USE)
5705 {
5706 tem = prev;
5707 prev = prev_nonnote_insn (prev);
5708
5709 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
5710 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
5711 if (first_use)
5712 {
5713 NEXT_INSN (tem) = first_use;
5714 PREV_INSN (first_use) = tem;
5715 }
5716 first_use = tem;
5717 if (!last_use)
5718 last_use = tem;
5719 }
5720
5721 emit_barrier_after (insn);
5722
5723 seq = gen_epilogue ();
5724 tail = emit_jump_insn_after (seq, insn);
5725
5726 /* Insert the USE insns immediately before the return insn, which
5727 must be the first instruction before the final barrier. */
5728 if (first_use)
5729 {
5730 tem = prev_nonnote_insn (get_last_insn ());
5731 NEXT_INSN (PREV_INSN (tem)) = first_use;
5732 PREV_INSN (first_use) = PREV_INSN (tem);
5733 PREV_INSN (tem) = last_use;
5734 NEXT_INSN (last_use) = tem;
5735 }
5736
5737 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
5738
5739 /* Include the new epilogue insns in the last block. Ignore
5740 them if they form a basic block unto themselves. */
5741 if (basic_block_end && n_basic_blocks
5742 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
5743 basic_block_end[n_basic_blocks - 1] = tail;
5744
5745 /* Retain a map of the epilogue insns. */
5746 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
5747 return;
5748 }
5749 }
5750 #endif
5751 epilogue = 0;
5752 }
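/* For illustration only: when both prologue and epilogue are threaded,
   the resulting insn stream looks roughly like

	first insn (F)
	prologue insns
	NOTE_INSN_PROLOGUE_END
	... function body ...
	NOTE_INSN_EPILOGUE_BEG
	epilogue insns
	USE insns for hard regs live at return
	the jump insn that returns
	BARRIER

   with the prologue and epilogue UID maps recorded for the
   repositioning pass below.  */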
5753
5754 /* Reposition the prologue-end and epilogue-begin notes after instruction
5755 scheduling and delayed branch scheduling. */
5756
5757 void
5758 reposition_prologue_and_epilogue_notes (f)
5759 rtx f;
5760 {
5761 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5762 /* Reposition the prologue and epilogue notes. */
5763 if (n_basic_blocks)
5764 {
5765 rtx next, prev;
5766 int len;
5767
5768 if (prologue)
5769 {
5770 register rtx insn, note = 0;
5771
5772 /* Scan from the beginning until we reach the last prologue insn.
5773 We apparently can't depend on basic_block_{head,end} after
5774 reorg has run. */
5775 for (len = 0; prologue[len]; len++)
5776 ;
5777 for (insn = f; len && insn; insn = NEXT_INSN (insn))
5778 {
5779 if (GET_CODE (insn) == NOTE)
5780 {
5781 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5782 note = insn;
5783 }
5784 else if ((len -= contains (insn, prologue)) == 0)
5785 {
5786 /* Find the prologue-end note if we haven't already, and
5787 move it to just after the last prologue insn. */
5788 if (note == 0)
5789 {
5790 for (note = insn; (note = NEXT_INSN (note)) != 0;)
5791 if (GET_CODE (note) == NOTE
5792 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5793 break;
5794 }
5795 next = NEXT_INSN (note);
5796 prev = PREV_INSN (note);
5797 if (prev)
5798 NEXT_INSN (prev) = next;
5799 if (next)
5800 PREV_INSN (next) = prev;
5801 add_insn_after (note, insn);
5802 }
5803 }
5804 }
5805
5806 if (epilogue)
5807 {
5808 register rtx insn, note = 0;
5809
5810 /* Scan from the end until we reach the first epilogue insn.
5811 We apparently can't depend on basic_block_{head,end} after
5812 reorg has run. */
5813 for (len = 0; epilogue[len]; len++)
5814 ;
5815 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
5816 {
5817 if (GET_CODE (insn) == NOTE)
5818 {
5819 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5820 note = insn;
5821 }
5822 else if ((len -= contains (insn, epilogue)) == 0)
5823 {
5824 /* Find the epilogue-begin note if we haven't already, and
5825 move it to just before the first epilogue insn. */
5826 if (note == 0)
5827 {
5828 for (note = insn; (note = PREV_INSN (note)) != 0;)
5829 if (GET_CODE (note) == NOTE
5830 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5831 break;
5832 }
5833 next = NEXT_INSN (note);
5834 prev = PREV_INSN (note);
5835 if (prev)
5836 NEXT_INSN (prev) = next;
5837 if (next)
5838 PREV_INSN (next) = prev;
5839 add_insn_after (note, PREV_INSN (insn));
5840 }
5841 }
5842 }
5843 }
5844 #endif /* HAVE_prologue or HAVE_epilogue */
5845 }