1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-95, 1996 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg
39 then scans all the RTL instructions so far generated to correct them. */
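/* An illustrative sketch (not part of the original source) of how a
   front end typically drives these entry points, assuming a
   hypothetical FNDECL for the function being compiled; the real call
   sites live in the language-specific front ends:

	expand_function_start (fndecl, 0);
	... expand the body, using assign_stack_local for locals
	    and put_var_into_stack when a pseudo's address is taken ...
	expand_function_end (input_filename, lineno, 0);  */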
40
41 #include "config.h"
42
43 #include <stdio.h>
44
45 #include "rtl.h"
46 #include "tree.h"
47 #include "flags.h"
48 #include "function.h"
49 #include "insn-flags.h"
50 #include "expr.h"
51 #include "insn-codes.h"
52 #include "regs.h"
53 #include "hard-reg-set.h"
54 #include "insn-config.h"
55 #include "recog.h"
56 #include "output.h"
57 #include "basic-block.h"
58 #include "obstack.h"
59 #include "bytecode.h"
60
61 /* Some systems use __main in a way incompatible with its use in gcc; in these
62 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
63 give the same symbol without quotes for an alternative entry point. You
64 must define both, or neither. */
65 #ifndef NAME__MAIN
66 #define NAME__MAIN "__main"
67 #define SYMBOL__MAIN __main
68 #endif
69
70 /* Round a value down to the largest multiple of the required alignment
71 that does not exceed it. Avoid using division in case the value is
72 negative. Assume the alignment is a power of two. */
73 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
74
75 /* Similar, but round to the next highest integer that meets the
76 alignment. */
77 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
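78
/* Worked examples (not in the original source), assuming two's
   complement arithmetic and a power-of-two ALIGN:

	FLOOR_ROUND (13, 8) == 8	FLOOR_ROUND (-5, 4) == -8
	CEIL_ROUND (13, 8) == 16	CEIL_ROUND (-5, 4) == -4

   Division would be wrong here because -5 / 4 may truncate toward
   zero, which would round -5 up instead of down. */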
78
79 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
80 during rtl generation. If they are different register numbers, this is
81 always true. It may also be true if
82 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
83 generation. See fix_lexical_addr for details. */
84
85 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
86 #define NEED_SEPARATE_AP
87 #endif
88
89 /* Number of bytes of args popped by function being compiled on its return.
90 Zero if no bytes are to be popped.
91 May affect compilation of return insn or of function epilogue. */
92
93 int current_function_pops_args;
94
95 /* Nonzero if function being compiled needs to be given an address
96 where the value should be stored. */
97
98 int current_function_returns_struct;
99
100 /* Nonzero if function being compiled needs to
101 return the address of where it has put a structure value. */
102
103 int current_function_returns_pcc_struct;
104
105 /* Nonzero if function being compiled needs to be passed a static chain. */
106
107 int current_function_needs_context;
108
109 /* Nonzero if function being compiled can call setjmp. */
110
111 int current_function_calls_setjmp;
112
113 /* Nonzero if function being compiled can call longjmp. */
114
115 int current_function_calls_longjmp;
116
117 /* Nonzero if function being compiled receives nonlocal gotos
118 from nested functions. */
119
120 int current_function_has_nonlocal_label;
121
122 /* Nonzero if function being compiled has nonlocal gotos to parent
123 function. */
124
125 int current_function_has_nonlocal_goto;
126
127 /* Nonzero if function being compiled contains nested functions. */
128
129 int current_function_contains_functions;
130
131 /* Nonzero if function being compiled can call alloca,
132 either as a subroutine or builtin. */
133
134 int current_function_calls_alloca;
135
136 /* Nonzero if the current function returns a pointer type. */
137
138 int current_function_returns_pointer;
139
140 /* If some insns can be deferred to the delay slots of the epilogue, the
141 delay list for them is recorded here. */
142
143 rtx current_function_epilogue_delay_list;
144
145 /* If function's args have a fixed size, this is that size, in bytes.
146 Otherwise, it is -1.
147 May affect compilation of return insn or of function epilogue. */
148
149 int current_function_args_size;
150
151 /* # bytes the prologue should push and pretend that the caller pushed them.
152 The prologue must do this, but only if parms can be passed in registers. */
153
154 int current_function_pretend_args_size;
155
156 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
157 defined, the needed space is pushed by the prologue. */
158
159 int current_function_outgoing_args_size;
160
161 /* This is the offset from the arg pointer to the place where the first
162 anonymous arg can be found, if there is one. */
163
164 rtx current_function_arg_offset_rtx;
165
166 /* Nonzero if current function uses varargs.h or equivalent.
167 Zero for functions that use stdarg.h. */
168
169 int current_function_varargs;
170
171 /* Nonzero if current function uses stdarg.h or equivalent.
172 Zero for functions that use varargs.h. */
173
174 int current_function_stdarg;
175
176 /* Quantities of various kinds of registers
177 used for the current function's args. */
178
179 CUMULATIVE_ARGS current_function_args_info;
180
181 /* Name of function now being compiled. */
182
183 char *current_function_name;
184
185 /* If non-zero, an RTL expression for the location at which the current
186 function returns its result. Always equal to
187 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
188 independently of the tree structures. */
189
190 rtx current_function_return_rtx;
191
192 /* Nonzero if the current function uses the constant pool. */
193
194 int current_function_uses_const_pool;
195
196 /* Nonzero if the current function uses pic_offset_table_rtx. */
197 int current_function_uses_pic_offset_table;
198
199 /* The arg pointer hard register, or the pseudo into which it was copied. */
200 rtx current_function_internal_arg_pointer;
201
202 /* The FUNCTION_DECL for an inline function currently being expanded. */
203 tree inline_function_decl;
204
205 /* Number of function calls seen so far in current function. */
206
207 int function_call_count;
208
209 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
210 (labels to which there can be nonlocal gotos from nested functions)
211 in this function. */
212
213 tree nonlocal_labels;
214
215 /* RTX for stack slot that holds the current handler for nonlocal gotos.
216 Zero when function does not have nonlocal labels. */
217
218 rtx nonlocal_goto_handler_slot;
219
220 /* RTX for stack slot that holds the stack pointer value to restore
221 for a nonlocal goto.
222 Zero when function does not have nonlocal labels. */
223
224 rtx nonlocal_goto_stack_level;
225
226 /* Label that will go on parm cleanup code, if any.
227 Jumping to this label runs cleanup code for parameters, if
228 such code must be run. Following this code is the logical return label. */
229
230 rtx cleanup_label;
231
232 /* Label that will go on function epilogue.
233 Jumping to this label serves as a "return" instruction
234 on machines which require execution of the epilogue on all returns. */
235
236 rtx return_label;
237
238 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
239 So we can mark them all live at the end of the function, if nonopt. */
240 rtx save_expr_regs;
241
242 /* List (chain of EXPR_LISTs) of all stack slots in this function.
243 Made for the sake of unshare_all_rtl. */
244 rtx stack_slot_list;
245
246 /* Chain of all RTL_EXPRs that have insns in them. */
247 tree rtl_expr_chain;
248
249 /* Label to jump back to for tail recursion, or 0 if we have
250 not yet needed one for this function. */
251 rtx tail_recursion_label;
252
253 /* Place after which to insert the tail_recursion_label if we need one. */
254 rtx tail_recursion_reentry;
255
256 /* Location at which to save the argument pointer if it will need to be
257 referenced. There are two cases where this is done: if nonlocal gotos
258 exist, or if vars stored at an offset from the argument pointer will be
259 needed by inner routines. */
260
261 rtx arg_pointer_save_area;
262
263 /* Offset to end of allocated area of stack frame.
264 If stack grows down, this is the address of the last stack slot allocated.
265 If stack grows up, this is the address for the next slot. */
266 int frame_offset;
267
268 /* List (chain of TREE_LISTs) of static chains for containing functions.
269 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
270 in an RTL_EXPR in the TREE_VALUE. */
271 static tree context_display;
272
273 /* List (chain of TREE_LISTs) of trampolines for nested functions.
274 The trampoline sets up the static chain and jumps to the function.
275 We supply the trampoline's address when the function's address is requested.
276
277 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
278 in an RTL_EXPR in the TREE_VALUE. */
279 static tree trampoline_list;
280
281 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
282 static rtx parm_birth_insn;
283
284 #if 0
285 /* Nonzero if a stack slot has been generated whose address is not
286 actually valid. It means that the generated rtl must all be scanned
287 to detect and correct the invalid addresses where they occur. */
288 static int invalid_stack_slot;
289 #endif
290
291 /* Last insn of those whose job was to put parms into their nominal homes. */
292 static rtx last_parm_insn;
293
294 /* 1 + last pseudo register number used for loading a copy
295 of a parameter of this function. */
296 static int max_parm_reg;
297
298 /* Vector indexed by REGNO, containing location on stack in which
299 to put the parm which is nominally in pseudo register REGNO,
300 if we discover that that parm must go in the stack. */
301 static rtx *parm_reg_stack_loc;
302
303 #if 0 /* Turned off because 0 seems to work just as well. */
304 /* Cleanup lists are required for binding levels regardless of whether
305 that binding level has cleanups or not. This node serves as the
306 cleanup list whenever an empty list is required. */
307 static tree empty_cleanup_list;
308 #endif
309
310 /* Nonzero once virtual register instantiation has been done.
311 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
312 static int virtuals_instantiated;
313
314 /* These variables hold pointers to functions to
315 save and restore machine-specific data,
316 in push_function_context and pop_function_context. */
317 void (*save_machine_status) ();
318 void (*restore_machine_status) ();
319
320 /* Nonzero if we need to distinguish between the return value of this function
321 and the return value of a function called by this function. This helps
322 integrate.c. */
323
324 extern int rtx_equal_function_value_matters;
325 extern tree sequence_rtl_expr;
326 extern tree bc_runtime_type_code ();
327 extern rtx bc_build_calldesc ();
328 extern char *bc_emit_trampoline ();
329 extern char *bc_end_function ();
330 \f
331 /* In order to evaluate some expressions, such as function calls returning
332 structures in memory, we need to temporarily allocate stack locations.
333 We record each allocated temporary in the following structure.
334
335 Associated with each temporary slot is a nesting level. When we pop up
336 one level, all temporaries associated with the previous level are freed.
337 Normally, all temporaries are freed after the execution of the statement
338 in which they were created. However, if we are inside a ({...}) grouping,
339 the result may be in a temporary and hence must be preserved. If the
340 result could be in a temporary, we preserve it if we can determine which
341 one it is in. If we cannot determine which temporary may contain the
342 result, all temporaries are preserved. A temporary is preserved by
343 pretending it was allocated at the previous nesting level.
344
345 Automatic variables are also assigned temporary slots, at the nesting
346 level where they are defined. They are marked as "kept" so that
347 free_temp_slots will not free them. */
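348
/* A minimal usage sketch (not in the original source) of the nesting
   discipline described above, using only entry points defined later
   in this file:

	push_temp_slots ();
	slot = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
	... emit insns that compute into SLOT ...
	preserve_temp_slots (result);
	pop_temp_slots ();

   preserve_temp_slots moves RESULT's slot (or, failing a match, all
   non-kept slots) up one level so pop_temp_slots does not free it;
   free_temp_slots is the per-statement variant. */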
348
349 struct temp_slot
350 {
351 /* Points to next temporary slot. */
352 struct temp_slot *next;
353 /* The rtx used to reference the slot. */
354 rtx slot;
355 /* The rtx used to represent the address if not the address of the
356 slot above. May be an EXPR_LIST if multiple addresses exist. */
357 rtx address;
358 /* The size, in units, of the slot. */
359 int size;
360 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
361 tree rtl_expr;
362 /* Non-zero if this temporary is currently in use. */
363 char in_use;
364 /* Non-zero if this temporary has its address taken. */
365 char addr_taken;
366 /* Nesting level at which this slot is being used. */
367 int level;
368 /* Non-zero if this should survive a call to free_temp_slots. */
369 int keep;
370 /* The offset of the slot from the frame_pointer, including extra space
371 for alignment. This info is for combine_temp_slots. */
372 int base_offset;
373 /* The size of the slot, including extra space for alignment. This
374 info is for combine_temp_slots. */
375 int full_size;
376 };
377
378 /* List of all temporaries allocated, both available and in use. */
379
380 struct temp_slot *temp_slots;
381
382 /* Current nesting level for temporaries. */
383
384 int temp_slot_level;
385 \f
386 /* The FUNCTION_DECL node for the current function. */
387 static tree this_function_decl;
388
389 /* Callinfo pointer for the current function. */
390 static rtx this_function_callinfo;
391
392 /* The label in the bytecode file of this function's actual bytecode.
393 Not an rtx. */
394 static char *this_function_bytecode;
395
396 /* The call description vector for the current function. */
397 static rtx this_function_calldesc;
398
399 /* Size of the local variables allocated for the current function. */
400 int local_vars_size;
401
402 /* Current depth of the bytecode evaluation stack. */
403 int stack_depth;
404
405 /* Maximum depth of the evaluation stack in this function. */
406 int max_stack_depth;
407
408 /* Current depth in statement expressions. */
409 static int stmt_expr_depth;
410
411 /* This structure is used to record MEMs or pseudos used to replace VAR, any
412 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
413 maintain this list in case two operands of an insn were required to match;
414 in that case we must ensure we use the same replacement. */
415
416 struct fixup_replacement
417 {
418 rtx old;
419 rtx new;
420 struct fixup_replacement *next;
421 };
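422
/* For example (illustrative, not from the original source): if VAR
   appears twice in an insn whose pattern uses MATCH_DUP, say a
   two-address add where operand 0 must match operand 1, both
   occurrences must be rewritten to the single pseudo recorded in one
   struct fixup_replacement, or the operands would no longer match. */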
422
423 /* Forward declarations. */
424
425 static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
426 static void put_reg_into_stack PROTO((struct function *, rtx, tree,
427 enum machine_mode, enum machine_mode,
428 int));
429 static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
430 static struct fixup_replacement
431 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
432 static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
433 rtx, int));
434 static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
435 struct fixup_replacement **));
436 static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
437 static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
438 static rtx fixup_stack_1 PROTO((rtx, rtx));
439 static void optimize_bit_field PROTO((rtx, rtx, rtx *));
440 static void instantiate_decls PROTO((tree, int));
441 static void instantiate_decls_1 PROTO((tree, int));
442 static void instantiate_decl PROTO((rtx, int, int));
443 static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
444 static void delete_handlers PROTO((void));
445 static void pad_to_arg_alignment PROTO((struct args_size *, int));
446 static void pad_below PROTO((struct args_size *, enum machine_mode,
447 tree));
448 static tree round_down PROTO((tree, int));
449 static rtx round_trampoline_addr PROTO((rtx));
450 static tree blocks_nreverse PROTO((tree));
451 static int all_blocks PROTO((tree, tree *));
452 static int *record_insns PROTO((rtx));
453 static int contains PROTO((rtx, int *));
454 \f
455 /* Pointer to chain of `struct function' for containing functions. */
456 struct function *outer_function_chain;
457
458 /* Given a function decl for a containing function,
459 return the `struct function' for it. */
460
461 struct function *
462 find_function_data (decl)
463 tree decl;
464 {
465 struct function *p;
466 for (p = outer_function_chain; p; p = p->next)
467 if (p->decl == decl)
468 return p;
469 abort ();
470 }
471
472 /* Save the current context for compilation of a nested function.
473 This is called from language-specific code.
474 The caller is responsible for saving any language-specific status,
475 since this function knows only about language-independent variables. */
476
477 void
478 push_function_context_to (context)
479 tree context;
480 {
481 struct function *p = (struct function *) xmalloc (sizeof (struct function));
482
483 p->next = outer_function_chain;
484 outer_function_chain = p;
485
486 p->name = current_function_name;
487 p->decl = current_function_decl;
488 p->pops_args = current_function_pops_args;
489 p->returns_struct = current_function_returns_struct;
490 p->returns_pcc_struct = current_function_returns_pcc_struct;
491 p->needs_context = current_function_needs_context;
492 p->calls_setjmp = current_function_calls_setjmp;
493 p->calls_longjmp = current_function_calls_longjmp;
494 p->calls_alloca = current_function_calls_alloca;
495 p->has_nonlocal_label = current_function_has_nonlocal_label;
496 p->has_nonlocal_goto = current_function_has_nonlocal_goto;
497 p->contains_functions = current_function_contains_functions;
498 p->args_size = current_function_args_size;
499 p->pretend_args_size = current_function_pretend_args_size;
500 p->arg_offset_rtx = current_function_arg_offset_rtx;
501 p->varargs = current_function_varargs;
502 p->stdarg = current_function_stdarg;
503 p->uses_const_pool = current_function_uses_const_pool;
504 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
505 p->internal_arg_pointer = current_function_internal_arg_pointer;
506 p->max_parm_reg = max_parm_reg;
507 p->parm_reg_stack_loc = parm_reg_stack_loc;
508 p->outgoing_args_size = current_function_outgoing_args_size;
509 p->return_rtx = current_function_return_rtx;
510 p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
511 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
512 p->nonlocal_labels = nonlocal_labels;
513 p->cleanup_label = cleanup_label;
514 p->return_label = return_label;
515 p->save_expr_regs = save_expr_regs;
516 p->stack_slot_list = stack_slot_list;
517 p->parm_birth_insn = parm_birth_insn;
518 p->frame_offset = frame_offset;
519 p->tail_recursion_label = tail_recursion_label;
520 p->tail_recursion_reentry = tail_recursion_reentry;
521 p->arg_pointer_save_area = arg_pointer_save_area;
522 p->rtl_expr_chain = rtl_expr_chain;
523 p->last_parm_insn = last_parm_insn;
524 p->context_display = context_display;
525 p->trampoline_list = trampoline_list;
526 p->function_call_count = function_call_count;
527 p->temp_slots = temp_slots;
528 p->temp_slot_level = temp_slot_level;
529 p->fixup_var_refs_queue = 0;
530 p->epilogue_delay_list = current_function_epilogue_delay_list;
531
532 save_tree_status (p, context);
533 save_storage_status (p);
534 save_emit_status (p);
535 init_emit ();
536 save_expr_status (p);
537 save_stmt_status (p);
538 save_varasm_status (p);
539
540 if (save_machine_status)
541 (*save_machine_status) (p);
542 }
543
544 void
545 push_function_context ()
546 {
547 push_function_context_to (current_function_decl);
548 }
549
550 /* Restore the last saved context, at the end of a nested function.
551 This function is called from language-specific code. */
552
553 void
554 pop_function_context_from (context)
555 tree context;
556 {
557 struct function *p = outer_function_chain;
558
559 outer_function_chain = p->next;
560
561 current_function_contains_functions
562 = p->contains_functions || p->inline_obstacks
563 || context == current_function_decl;
564 current_function_name = p->name;
565 current_function_decl = p->decl;
566 current_function_pops_args = p->pops_args;
567 current_function_returns_struct = p->returns_struct;
568 current_function_returns_pcc_struct = p->returns_pcc_struct;
569 current_function_needs_context = p->needs_context;
570 current_function_calls_setjmp = p->calls_setjmp;
571 current_function_calls_longjmp = p->calls_longjmp;
572 current_function_calls_alloca = p->calls_alloca;
573 current_function_has_nonlocal_label = p->has_nonlocal_label;
574 current_function_has_nonlocal_goto = p->has_nonlocal_goto;
575 current_function_args_size = p->args_size;
576 current_function_pretend_args_size = p->pretend_args_size;
577 current_function_arg_offset_rtx = p->arg_offset_rtx;
578 current_function_varargs = p->varargs;
579 current_function_stdarg = p->stdarg;
580 current_function_uses_const_pool = p->uses_const_pool;
581 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
582 current_function_internal_arg_pointer = p->internal_arg_pointer;
583 max_parm_reg = p->max_parm_reg;
584 parm_reg_stack_loc = p->parm_reg_stack_loc;
585 current_function_outgoing_args_size = p->outgoing_args_size;
586 current_function_return_rtx = p->return_rtx;
587 nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
588 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
589 nonlocal_labels = p->nonlocal_labels;
590 cleanup_label = p->cleanup_label;
591 return_label = p->return_label;
592 save_expr_regs = p->save_expr_regs;
593 stack_slot_list = p->stack_slot_list;
594 parm_birth_insn = p->parm_birth_insn;
595 frame_offset = p->frame_offset;
596 tail_recursion_label = p->tail_recursion_label;
597 tail_recursion_reentry = p->tail_recursion_reentry;
598 arg_pointer_save_area = p->arg_pointer_save_area;
599 rtl_expr_chain = p->rtl_expr_chain;
600 last_parm_insn = p->last_parm_insn;
601 context_display = p->context_display;
602 trampoline_list = p->trampoline_list;
603 function_call_count = p->function_call_count;
604 temp_slots = p->temp_slots;
605 temp_slot_level = p->temp_slot_level;
606 current_function_epilogue_delay_list = p->epilogue_delay_list;
607 reg_renumber = 0;
608
609 restore_tree_status (p);
610 restore_storage_status (p);
611 restore_expr_status (p);
612 restore_emit_status (p);
613 restore_stmt_status (p);
614 restore_varasm_status (p);
615
616 if (restore_machine_status)
617 (*restore_machine_status) (p);
618
619 /* Finish doing put_var_into_stack for any of our variables
620 which became addressable during the nested function. */
621 {
622 struct var_refs_queue *queue = p->fixup_var_refs_queue;
623 for (; queue; queue = queue->next)
624 fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
625 }
626
627 free (p);
628
629 /* Reset variables that have known state during rtx generation. */
630 rtx_equal_function_value_matters = 1;
631 virtuals_instantiated = 0;
632 }
633
634 void pop_function_context ()
635 {
636 pop_function_context_from (current_function_decl);
637 }
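638
/* Illustrative use from a front end (not part of the original
   source): on meeting a nested function, a language front end might
   do roughly

	push_function_context ();
	... init_function_start, expand the nested body,
	    rest_of_compilation ...
	pop_function_context ();

   saving and restoring any language-specific state itself, as the
   comments above require. */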
638 \f
639 /* Allocate fixed slots in the stack frame of the current function. */
640
641 /* Return size needed for stack frame based on slots so far allocated.
642 This size counts from zero. It is not rounded to STACK_BOUNDARY;
643 the caller may have to do that. */
644
645 int
646 get_frame_size ()
647 {
648 #ifdef FRAME_GROWS_DOWNWARD
649 return -frame_offset;
650 #else
651 return frame_offset;
652 #endif
653 }
654
655 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
656 with machine mode MODE.
657
658 ALIGN controls the amount of alignment for the address of the slot:
659 0 means according to MODE,
660 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
661 positive specifies alignment boundary in bits.
662
663 We do not round to stack_boundary here. */
664
665 rtx
666 assign_stack_local (mode, size, align)
667 enum machine_mode mode;
668 int size;
669 int align;
670 {
671 register rtx x, addr;
672 int bigend_correction = 0;
673 int alignment;
674
675 if (align == 0)
676 {
677 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
678 if (mode == BLKmode)
679 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
680 }
681 else if (align == -1)
682 {
683 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
684 size = CEIL_ROUND (size, alignment);
685 }
686 else
687 alignment = align / BITS_PER_UNIT;
688
689 /* Round frame offset to that alignment.
690 We must be careful here, since FRAME_OFFSET might be negative and
691 division with a negative dividend isn't as well defined as we might
692 like. So we instead assume that ALIGNMENT is a power of two and
693 use logical operations which are unambiguous. */
694 #ifdef FRAME_GROWS_DOWNWARD
695 frame_offset = FLOOR_ROUND (frame_offset, alignment);
696 #else
697 frame_offset = CEIL_ROUND (frame_offset, alignment);
698 #endif
699
700 /* On a big-endian machine, if we are allocating more space than we will use,
701 use the least significant bytes of those that are allocated. */
702 if (BYTES_BIG_ENDIAN && mode != BLKmode)
703 bigend_correction = size - GET_MODE_SIZE (mode);
704
705 #ifdef FRAME_GROWS_DOWNWARD
706 frame_offset -= size;
707 #endif
708
709 /* If we have already instantiated virtual registers, return the actual
710 address relative to the frame pointer. */
711 if (virtuals_instantiated)
712 addr = plus_constant (frame_pointer_rtx,
713 (frame_offset + bigend_correction
714 + STARTING_FRAME_OFFSET));
715 else
716 addr = plus_constant (virtual_stack_vars_rtx,
717 frame_offset + bigend_correction);
718
719 #ifndef FRAME_GROWS_DOWNWARD
720 frame_offset += size;
721 #endif
722
723 x = gen_rtx (MEM, mode, addr);
724
725 stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);
726
727 return x;
728 }
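729
/* Usage examples (illustrative, not from the original source):

	assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
	    ... one word, aligned according to SImode ...
	assign_stack_local (BLKmode, 32, -1);
	    ... 32 bytes, rounded and aligned to BIGGEST_ALIGNMENT ...
	assign_stack_local (BLKmode, len, BITS_PER_WORD);
	    ... LEN bytes (LEN hypothetical), word-aligned ...

   Each returns a MEM addressed relative to virtual_stack_vars_rtx,
   or to the frame pointer once virtuals are instantiated. */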
729
730 /* Assign a stack slot in a containing function.
731 First three arguments are same as in preceding function.
732 The last argument specifies the function to allocate in. */
733
734 rtx
735 assign_outer_stack_local (mode, size, align, function)
736 enum machine_mode mode;
737 int size;
738 int align;
739 struct function *function;
740 {
741 register rtx x, addr;
742 int bigend_correction = 0;
743 int alignment;
744
745 /* Allocate in the memory associated with the function in whose frame
746 we are assigning. */
747 push_obstacks (function->function_obstack,
748 function->function_maybepermanent_obstack);
749
750 if (align == 0)
751 {
752 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
753 if (mode == BLKmode)
754 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
755 }
756 else if (align == -1)
757 {
758 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
759 size = CEIL_ROUND (size, alignment);
760 }
761 else
762 alignment = align / BITS_PER_UNIT;
763
764 /* Round frame offset to that alignment. */
765 #ifdef FRAME_GROWS_DOWNWARD
766 function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
767 #else
768 function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
769 #endif
770
771 /* On a big-endian machine, if we are allocating more space than we will use,
772 use the least significant bytes of those that are allocated. */
773 if (BYTES_BIG_ENDIAN && mode != BLKmode)
774 bigend_correction = size - GET_MODE_SIZE (mode);
775
776 #ifdef FRAME_GROWS_DOWNWARD
777 function->frame_offset -= size;
778 #endif
779 addr = plus_constant (virtual_stack_vars_rtx,
780 function->frame_offset + bigend_correction);
781 #ifndef FRAME_GROWS_DOWNWARD
782 function->frame_offset += size;
783 #endif
784
785 x = gen_rtx (MEM, mode, addr);
786
787 function->stack_slot_list
788 = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
789
790 pop_obstacks ();
791
792 return x;
793 }
794 \f
795 /* Allocate a temporary stack slot and record it for possible later
796 reuse.
797
798 MODE is the machine mode to be given to the returned rtx.
799
800 SIZE is the size in units of the space required. We do no rounding here
801 since assign_stack_local will do any required rounding.
802
803 KEEP is 1 if this slot is to be retained after a call to
804 free_temp_slots. Automatic variables for a block are allocated
805 with this flag. KEEP is 2, if we allocate a longer term temporary,
806 whose lifetime is controlled by CLEANUP_POINT_EXPRs. */
807
808 rtx
809 assign_stack_temp (mode, size, keep)
810 enum machine_mode mode;
811 int size;
812 int keep;
813 {
814 struct temp_slot *p, *best_p = 0;
815
816 /* If SIZE is -1 it means that somebody tried to allocate a temporary
817 of a variable size. */
818 if (size == -1)
819 abort ();
820
821 /* First try to find an available, already-allocated temporary that is the
822 exact size we require. */
823 for (p = temp_slots; p; p = p->next)
824 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
825 break;
826
827 /* If we didn't find one, try one that is larger than what we want. We
828 find the smallest such. */
829 if (p == 0)
830 for (p = temp_slots; p; p = p->next)
831 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
832 && (best_p == 0 || best_p->size > p->size))
833 best_p = p;
834
835 /* Make our best, if any, the one to use. */
836 if (best_p)
837 {
838 /* If there are enough aligned bytes left over, make them into a new
839 temp_slot so that the extra bytes don't get wasted. Do this only
840 for BLKmode slots, so that we can be sure of the alignment. */
841 if (GET_MODE (best_p->slot) == BLKmode)
842 {
843 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
844 int rounded_size = CEIL_ROUND (size, alignment);
845
846 if (best_p->size - rounded_size >= alignment)
847 {
848 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
849 p->in_use = p->addr_taken = 0;
850 p->size = best_p->size - rounded_size;
851 p->base_offset = best_p->base_offset + rounded_size;
852 p->full_size = best_p->full_size - rounded_size;
853 p->slot = gen_rtx (MEM, BLKmode,
854 plus_constant (XEXP (best_p->slot, 0),
855 rounded_size));
856 p->address = 0;
857 p->rtl_expr = 0;
858 p->next = temp_slots;
859 temp_slots = p;
860
861 stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
862 stack_slot_list);
863
864 best_p->size = rounded_size;
865 best_p->full_size = rounded_size;
866 }
867 }
868
869 p = best_p;
870 }
871
872 /* If we still didn't find one, make a new temporary. */
873 if (p == 0)
874 {
875 int frame_offset_old = frame_offset;
876 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
877 /* If the temp slot mode doesn't indicate the alignment,
878 use the largest possible, so no one will be disappointed. */
879 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
880 /* The following slot size computation is necessary because we don't
881 know the actual size of the temporary slot until assign_stack_local
882 has performed all the frame alignment and size rounding for the
883 requested temporary. Note that extra space added for alignment
884 can be either above or below this stack slot depending on which
885 way the frame grows. We include the extra space if and only if it
886 is above this slot. */
887 #ifdef FRAME_GROWS_DOWNWARD
888 p->size = frame_offset_old - frame_offset;
889 #else
890 p->size = size;
891 #endif
892 /* Now define the fields used by combine_temp_slots. */
893 #ifdef FRAME_GROWS_DOWNWARD
894 p->base_offset = frame_offset;
895 p->full_size = frame_offset_old - frame_offset;
896 #else
897 p->base_offset = frame_offset_old;
898 p->full_size = frame_offset - frame_offset_old;
899 #endif
900 p->address = 0;
901 p->next = temp_slots;
902 temp_slots = p;
903 }
904
905 p->in_use = 1;
906 p->addr_taken = 0;
907 p->rtl_expr = sequence_rtl_expr;
908
909 if (keep == 2)
910 {
911 p->level = target_temp_slot_level;
912 p->keep = 0;
913 }
914 else
915 {
916 p->level = temp_slot_level;
917 p->keep = keep;
918 }
919 return p->slot;
920 }
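921
/* A sketch of the KEEP argument (illustrative, not from the original
   source):

	t = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);
	free_temp_slots ();	... T's slot is now reusable ...

	t = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 1);
	free_temp_slots ();	... T survives; pop_temp_slots frees it ...

   KEEP == 2 instead places the slot at target_temp_slot_level, so
   its lifetime is bounded by CLEANUP_POINT_EXPRs. */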
921 \f
922 /* Assign a temporary of given TYPE.
923 KEEP is as for assign_stack_temp.
924 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
925 it is 0 if a register is OK.
926 DONT_PROMOTE is 1 if we should not promote values in register
927 to wider modes. */
928
929 rtx
930 assign_temp (type, keep, memory_required, dont_promote)
931 tree type;
932 int keep;
933 int memory_required;
934 int dont_promote;
935 {
936 enum machine_mode mode = TYPE_MODE (type);
937 int unsignedp = TREE_UNSIGNED (type);
938
939 if (mode == BLKmode || memory_required)
940 {
941 int size = int_size_in_bytes (type);
942 rtx tmp;
943
944 /* Unfortunately, we don't yet know how to allocate variable-sized
945 temporaries. However, sometimes we have a fixed upper limit on
946 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
947 instead. This is the case for Chill variable-sized strings. */
948 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
949 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
950 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
951 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
952
953 tmp = assign_stack_temp (mode, size, keep);
954 MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
955 return tmp;
956 }
957
958 #ifndef PROMOTE_FOR_CALL_ONLY
959 if (! dont_promote)
960 mode = promote_mode (type, mode, &unsignedp, 0);
961 #endif
962
963 return gen_reg_rtx (mode);
964 }
965 \f
966 /* Combine temporary stack slots which are adjacent on the stack.
967
968 This allows for better use of already allocated stack space. This is only
969 done for BLKmode slots because we can be sure that we won't have alignment
970 problems in this case. */
971
972 void
973 combine_temp_slots ()
974 {
975 struct temp_slot *p, *q;
976 struct temp_slot *prev_p, *prev_q;
977 /* Determine where to free back to after this function. */
978 rtx free_pointer = rtx_alloc (CONST_INT);
979
980 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
981 {
982 int delete_p = 0;
983 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
984 for (q = p->next, prev_q = p; q; q = prev_q->next)
985 {
986 int delete_q = 0;
987 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
988 {
989 if (p->base_offset + p->full_size == q->base_offset)
990 {
991 /* Q comes after P; combine Q into P. */
992 p->size += q->size;
993 p->full_size += q->full_size;
994 delete_q = 1;
995 }
996 else if (q->base_offset + q->full_size == p->base_offset)
997 {
998 /* P comes after Q; combine P into Q. */
999 q->size += p->size;
1000 q->full_size += p->full_size;
1001 delete_p = 1;
1002 break;
1003 }
1004 }
1005 /* Either delete Q or advance past it. */
1006 if (delete_q)
1007 prev_q->next = q->next;
1008 else
1009 prev_q = q;
1010 }
1011 /* Either delete P or advance past it. */
1012 if (delete_p)
1013 {
1014 if (prev_p)
1015 prev_p->next = p->next;
1016 else
1017 temp_slots = p->next;
1018 }
1019 else
1020 prev_p = p;
1021 }
1022
1023 /* Free all the RTL made by plus_constant. */
1024 rtx_free (free_pointer);
1025 }
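1026
/* Worked example (not in the original source): given two free
   BLKmode slots with

	p->base_offset == 16, p->full_size == 8
	q->base_offset == 24, q->full_size == 8

   we have p->base_offset + p->full_size == q->base_offset, so Q is
   merged into P, leaving a single free 16-byte slot at offset 16
   that a later assign_stack_temp can hand out. */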
1026 \f
1027 /* Find the temp slot corresponding to the object at address X. */
1028
1029 static struct temp_slot *
1030 find_temp_slot_from_address (x)
1031 rtx x;
1032 {
1033 struct temp_slot *p;
1034 rtx next;
1035
1036 for (p = temp_slots; p; p = p->next)
1037 {
1038 if (! p->in_use)
1039 continue;
1040 else if (XEXP (p->slot, 0) == x
1041 || p->address == x)
1042 return p;
1043
1044 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1045 for (next = p->address; next; next = XEXP (next, 1))
1046 if (XEXP (next, 0) == x)
1047 return p;
1048 }
1049
1050 return 0;
1051 }
1052
1053 /* Indicate that NEW is an alternate way of referring to the temp slot
1054 that was previously known by OLD. */
1055
1056 void
1057 update_temp_slot_address (old, new)
1058 rtx old, new;
1059 {
1060 struct temp_slot *p = find_temp_slot_from_address (old);
1061
1062 /* If none, return. Else add NEW as an alias. */
1063 if (p == 0)
1064 return;
1065 else if (p->address == 0)
1066 p->address = new;
1067 else
1068 {
1069 if (GET_CODE (p->address) != EXPR_LIST)
1070 p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);
1071
1072 p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
1073 }
1074 }
1075
1076 /* If X could be a reference to a temporary slot, mark the fact that its
1077 address was taken. */
1078
1079 void
1080 mark_temp_addr_taken (x)
1081 rtx x;
1082 {
1083 struct temp_slot *p;
1084
1085 if (x == 0)
1086 return;
1087
1088 /* If X is not in memory or is at a constant address, it cannot be in
1089 a temporary slot. */
1090 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1091 return;
1092
1093 p = find_temp_slot_from_address (XEXP (x, 0));
1094 if (p != 0)
1095 p->addr_taken = 1;
1096 }
1097
1098 /* If X could be a reference to a temporary slot, mark that slot as belonging
1099 to the level one higher. If X matched one of our slots, just mark that
1100 one. Otherwise, we can't easily predict which it is, so upgrade all of
1101 them. Kept slots need not be touched.
1102
1103 This is called when an ({...}) construct occurs and a statement
1104 returns a value in memory. */
1105
1106 void
1107 preserve_temp_slots (x)
1108 rtx x;
1109 {
1110 struct temp_slot *p = 0;
1111
1112 /* If there is no result, we still might have some objects whose addresses
1113 were taken, so we need to make sure they stay around. */
1114 if (x == 0)
1115 {
1116 for (p = temp_slots; p; p = p->next)
1117 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1118 p->level--;
1119
1120 return;
1121 }
1122
1123 /* If X is a register that is being used as a pointer, see if we have
1124 a temporary slot we know it points to. To be consistent with
1125 the code below, we really should preserve all non-kept slots
1126 if we can't find a match, but that seems to be much too costly. */
1127 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1128 p = find_temp_slot_from_address (x);
1129
1130 /* If X is not in memory or is at a constant address, it cannot be in
1131 a temporary slot, but it can contain something whose address was
1132 taken. */
1133 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1134 {
1135 for (p = temp_slots; p; p = p->next)
1136 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1137 p->level--;
1138
1139 return;
1140 }
1141
1142 /* First see if we can find a match. */
1143 if (p == 0)
1144 p = find_temp_slot_from_address (XEXP (x, 0));
1145
1146 if (p != 0)
1147 {
1148 /* Move everything at our level whose address was taken to our new
1149 level in case we used its address. */
1150 struct temp_slot *q;
1151
1152 for (q = temp_slots; q; q = q->next)
1153 if (q != p && q->addr_taken && q->level == p->level)
1154 q->level--;
1155
1156 p->level--;
1157 p->addr_taken = 0;
1158 return;
1159 }
1160
1161 /* Otherwise, preserve all non-kept slots at this level. */
1162 for (p = temp_slots; p; p = p->next)
1163 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1164 p->level--;
1165 }
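1166
/* For example (illustrative, not from the original source), in the
   GNU C statement expression

	x = ({ struct s tmp = f (); tmp; });

   the value of the braced group may live in a temporary allocated
   while expanding it; preserve_temp_slots is called on that rtx so
   the slot survives until the enclosing statement is finished. */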
1166
1167 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1168 with that RTL_EXPR, promote it into a temporary slot at the present
1169 level so it will not be freed when we free slots made in the
1170 RTL_EXPR. */
1171
1172 void
1173 preserve_rtl_expr_result (x)
1174 rtx x;
1175 {
1176 struct temp_slot *p;
1177
1178 /* If X is not in memory or is at a constant address, it cannot be in
1179 a temporary slot. */
1180 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1181 return;
1182
1183 /* If we can find a match, move it to our level unless it is already at
1184 an upper level. */
1185 p = find_temp_slot_from_address (XEXP (x, 0));
1186 if (p != 0)
1187 {
1188 p->level = MIN (p->level, temp_slot_level);
1189 p->rtl_expr = 0;
1190 }
1191
1192 return;
1193 }
1194
1195 /* Free all temporaries used so far. This is normally called at the end
1196 of generating code for a statement. Don't free any temporaries
1197 currently in use for an RTL_EXPR that hasn't yet been emitted.
1198 We could eventually do better than this since it can be reused while
1199 generating the same RTL_EXPR, but this is complex and probably not
1200 worthwhile. */
1201
1202 void
1203 free_temp_slots ()
1204 {
1205 struct temp_slot *p;
1206
1207 for (p = temp_slots; p; p = p->next)
1208 if (p->in_use && p->level == temp_slot_level && ! p->keep
1209 && p->rtl_expr == 0)
1210 p->in_use = 0;
1211
1212 combine_temp_slots ();
1213 }
1214
1215 /* Free all temporary slots used in T, an RTL_EXPR node. */
1216
1217 void
1218 free_temps_for_rtl_expr (t)
1219 tree t;
1220 {
1221 struct temp_slot *p;
1222
1223 for (p = temp_slots; p; p = p->next)
1224 if (p->rtl_expr == t)
1225 p->in_use = 0;
1226
1227 combine_temp_slots ();
1228 }
1229
1230 /* Push deeper into the nesting level for stack temporaries. */
1231
1232 void
1233 push_temp_slots ()
1234 {
1235 temp_slot_level++;
1236 }
1237
1238 /* Pop a temporary nesting level. All slots in use in the current level
1239 are freed. */
1240
1241 void
1242 pop_temp_slots ()
1243 {
1244 struct temp_slot *p;
1245
1246 for (p = temp_slots; p; p = p->next)
1247 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1248 p->in_use = 0;
1249
1250 combine_temp_slots ();
1251
1252 temp_slot_level--;
1253 }
1254
1255 /* Initialize temporary slots. */
1256
1257 void
1258 init_temp_slots ()
1259 {
1260 /* We have not allocated any temporaries yet. */
1261 temp_slots = 0;
1262 temp_slot_level = 0;
1263 target_temp_slot_level = 0;
1264 }
1265 \f
1266 /* Retroactively move an auto variable from a register to a stack slot.
1267 This is done when an address-reference to the variable is seen. */
1268
1269 void
1270 put_var_into_stack (decl)
1271 tree decl;
1272 {
1273 register rtx reg;
1274 enum machine_mode promoted_mode, decl_mode;
1275 struct function *function = 0;
1276 tree context;
1277
1278 if (output_bytecode)
1279 return;
1280
1281 context = decl_function_context (decl);
1282
1283 /* Get the current rtl used for this object and its original mode. */
1284 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1285
1286 /* No need to do anything if decl has no rtx yet
1287 since in that case caller is setting TREE_ADDRESSABLE
1288 and a stack slot will be assigned when the rtl is made. */
1289 if (reg == 0)
1290 return;
1291
1292 /* Get the declared mode for this object. */
1293 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1294 : DECL_MODE (decl));
1295 /* Get the mode it's actually stored in. */
1296 promoted_mode = GET_MODE (reg);
1297
1298 /* If this variable comes from an outer function,
1299 find that function's saved context. */
1300 if (context != current_function_decl)
1301 for (function = outer_function_chain; function; function = function->next)
1302 if (function->decl == context)
1303 break;
1304
1305 /* If this is a variable-size object with a pseudo to address it,
1306 put that pseudo into the stack, if the var is nonlocal. */
1307 if (DECL_NONLOCAL (decl)
1308 && GET_CODE (reg) == MEM
1309 && GET_CODE (XEXP (reg, 0)) == REG
1310 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1311 {
1312 reg = XEXP (reg, 0);
1313 decl_mode = promoted_mode = GET_MODE (reg);
1314 }
1315
1316 /* Now we should have a value that resides in one or more pseudo regs. */
1317
1318 if (GET_CODE (reg) == REG)
1319 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1320 promoted_mode, decl_mode, TREE_SIDE_EFFECTS (decl));
1321 else if (GET_CODE (reg) == CONCAT)
1322 {
1323 /* A CONCAT contains two pseudos; put them both in the stack.
1324 We do it so they end up consecutive. */
1325 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1326 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1327 #ifdef FRAME_GROWS_DOWNWARD
1328 /* Since part 0 should have a lower address, do it second. */
1329 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1330 part_mode, TREE_SIDE_EFFECTS (decl));
1331 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1332 part_mode, TREE_SIDE_EFFECTS (decl));
1333 #else
1334 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1335 part_mode, TREE_SIDE_EFFECTS (decl));
1336 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1337 part_mode, TREE_SIDE_EFFECTS (decl));
1338 #endif
1339
1340 /* Change the CONCAT into a combined MEM for both parts. */
1341 PUT_CODE (reg, MEM);
1342 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1343
1344 /* The two parts are in memory order already.
1345 Use the lower part's address as ours. */
1346 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1347 /* Prevent sharing of rtl that might lose. */
1348 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1349 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1350 }
1351 }
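1352
/* Illustrative trigger (not part of the original source): for

	int i;
	int *p = &i;

   I may initially live in a pseudo register; on seeing &I the front
   end marks I addressable, put_var_into_stack rewrites its DECL_RTL
   into a MEM on the frame, and the insns already emitted for I are
   fixed up via fixup_var_refs. */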
1352
1353 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1354 into the stack frame of FUNCTION (0 means the current function).
1355 DECL_MODE is the machine mode of the user-level data type.
1356 PROMOTED_MODE is the machine mode of the register.
1357 VOLATILE_P is nonzero if this is for a "volatile" decl. */
1358
1359 static void
1360 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p)
1361 struct function *function;
1362 rtx reg;
1363 tree type;
1364 enum machine_mode promoted_mode, decl_mode;
1365 int volatile_p;
1366 {
1367 rtx new = 0;
1368
1369 if (function)
1370 {
1371 if (REGNO (reg) < function->max_parm_reg)
1372 new = function->parm_reg_stack_loc[REGNO (reg)];
1373 if (new == 0)
1374 new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
1375 0, function);
1376 }
1377 else
1378 {
1379 if (REGNO (reg) < max_parm_reg)
1380 new = parm_reg_stack_loc[REGNO (reg)];
1381 if (new == 0)
1382 new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
1383 }
1384
1385 PUT_MODE (reg, decl_mode);
1386 XEXP (reg, 0) = XEXP (new, 0);
1387 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1388 MEM_VOLATILE_P (reg) = volatile_p;
1389 PUT_CODE (reg, MEM);
1390
1391 /* If this is a memory ref that contains aggregate components,
1392 mark it as such for cse and loop optimize. */
1393 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
1394
1395 /* Now make sure that all refs to the variable, previously made
1396 when it was a register, are fixed up to be valid again. */
1397 if (function)
1398 {
1399 struct var_refs_queue *temp;
1400
1401 /* Variable is inherited; fix it up when we get back to its function. */
1402 push_obstacks (function->function_obstack,
1403 function->function_maybepermanent_obstack);
1404
1405 /* See comment in restore_tree_status in tree.c for why this needs to be
1406 on saveable obstack. */
1407 temp
1408 = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
1409 temp->modified = reg;
1410 temp->promoted_mode = promoted_mode;
1411 temp->unsignedp = TREE_UNSIGNED (type);
1412 temp->next = function->fixup_var_refs_queue;
1413 function->fixup_var_refs_queue = temp;
1414 pop_obstacks ();
1415 }
1416 else
1417 /* Variable is local; fix it up now. */
1418 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
1419 }
1420 \f
1421 static void
1422 fixup_var_refs (var, promoted_mode, unsignedp)
1423 rtx var;
1424 enum machine_mode promoted_mode;
1425 int unsignedp;
1426 {
1427 tree pending;
1428 rtx first_insn = get_insns ();
1429 struct sequence_stack *stack = sequence_stack;
1430 tree rtl_exps = rtl_expr_chain;
1431
1432 /* Must scan all insns for stack-refs that exceed the limit. */
1433 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
1434
1435 /* Scan all pending sequences too. */
1436 for (; stack; stack = stack->next)
1437 {
1438 push_to_sequence (stack->first);
1439 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1440 stack->first, stack->next != 0);
1441 /* Update remembered end of sequence
1442 in case we added an insn at the end. */
1443 stack->last = get_last_insn ();
1444 end_sequence ();
1445 }
1446
1447 /* Scan all waiting RTL_EXPRs too. */
1448 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1449 {
1450 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1451 if (seq != const0_rtx && seq != 0)
1452 {
1453 push_to_sequence (seq);
1454 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
1455 end_sequence ();
1456 }
1457 }
1458 }
1459 \f
1460 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries,
1461 and X is some part of an insn. Return a struct fixup_replacement whose OLD
1462 value is equal to X. Allocate a new structure if no such entry exists. */
1463
1464 static struct fixup_replacement *
1465 find_fixup_replacement (replacements, x)
1466 struct fixup_replacement **replacements;
1467 rtx x;
1468 {
1469 struct fixup_replacement *p;
1470
1471 /* See if we have already replaced this. */
1472 for (p = *replacements; p && p->old != x; p = p->next)
1473 ;
1474
1475 if (p == 0)
1476 {
1477 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1478 p->old = x;
1479 p->new = 0;
1480 p->next = *replacements;
1481 *replacements = p;
1482 }
1483
1484 return p;
1485 }
1486
1487 /* Scan the insn-chain starting with INSN for refs to VAR
1488 and fix them up. TOPLEVEL is nonzero if this chain is the
1489 main chain of insns for the current function. */
1490
1491 static void
1492 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1493 rtx var;
1494 enum machine_mode promoted_mode;
1495 int unsignedp;
1496 rtx insn;
1497 int toplevel;
1498 {
1499 rtx call_dest = 0;
1500
1501 while (insn)
1502 {
1503 rtx next = NEXT_INSN (insn);
1504 rtx note;
1505 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1506 {
1507 /* If this is a CLOBBER of VAR, delete it.
1508
1509 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1510 and REG_RETVAL notes too. */
1511 if (GET_CODE (PATTERN (insn)) == CLOBBER
1512 && XEXP (PATTERN (insn), 0) == var)
1513 {
1514 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1515 /* The REG_LIBCALL note will go away since we are going to
1516 turn INSN into a NOTE, so just delete the
1517 corresponding REG_RETVAL note. */
1518 remove_note (XEXP (note, 0),
1519 find_reg_note (XEXP (note, 0), REG_RETVAL,
1520 NULL_RTX));
1521
1522 /* In unoptimized compilation, we shouldn't call delete_insn
1523 except in jump.c doing warnings. */
1524 PUT_CODE (insn, NOTE);
1525 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1526 NOTE_SOURCE_FILE (insn) = 0;
1527 }
1528
1529 /* The insn to load VAR from a home in the arglist
1530 is now a no-op. When we see it, just delete it. */
1531 else if (toplevel
1532 && GET_CODE (PATTERN (insn)) == SET
1533 && SET_DEST (PATTERN (insn)) == var
1534 /* If this represents the result of an insn group,
1535 don't delete the insn. */
1536 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1537 && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
1538 {
1539 /* In unoptimized compilation, we shouldn't call delete_insn
1540 except in jump.c doing warnings. */
1541 PUT_CODE (insn, NOTE);
1542 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1543 NOTE_SOURCE_FILE (insn) = 0;
1544 if (insn == last_parm_insn)
1545 last_parm_insn = PREV_INSN (next);
1546 }
1547 else
1548 {
1549 struct fixup_replacement *replacements = 0;
1550 rtx next_insn = NEXT_INSN (insn);
1551
1552 #ifdef SMALL_REGISTER_CLASSES
1553 /* If the insn that copies the results of a CALL_INSN
1554 into a pseudo now references VAR, we have to use an
1555 intermediate pseudo since we want the life of the
1556 return value register to be only a single insn.
1557
1558 If we don't use an intermediate pseudo, such things as
1559 address computations to make the address of VAR valid
1560 if it is not can be placed between the CALL_INSN and INSN.
1561
1562 To make sure this doesn't happen, we record the destination
1563 of the CALL_INSN and see if the next insn uses both that
1564 and VAR. */
1565
1566 if (call_dest != 0 && GET_CODE (insn) == INSN
1567 && reg_mentioned_p (var, PATTERN (insn))
1568 && reg_mentioned_p (call_dest, PATTERN (insn)))
1569 {
1570 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1571
1572 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1573
1574 PATTERN (insn) = replace_rtx (PATTERN (insn),
1575 call_dest, temp);
1576 }
1577
1578 if (GET_CODE (insn) == CALL_INSN
1579 && GET_CODE (PATTERN (insn)) == SET)
1580 call_dest = SET_DEST (PATTERN (insn));
1581 else if (GET_CODE (insn) == CALL_INSN
1582 && GET_CODE (PATTERN (insn)) == PARALLEL
1583 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1584 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1585 else
1586 call_dest = 0;
1587 #endif
1588
1589 /* See if we have to do anything to INSN now that VAR is in
1590 memory. If it needs to be loaded into a pseudo, use a single
1591 pseudo for the entire insn in case there is a MATCH_DUP
1592 between two operands. We pass a pointer to the head of
1593 a list of struct fixup_replacements. If fixup_var_refs_1
1594 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1595 it will record them in this list.
1596
1597 If it allocated a pseudo for any replacement, we copy into
1598 it here. */
1599
1600 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1601 &replacements);
1602
1603 /* If this is last_parm_insn, and any instructions were output
1604 after it to fix it up, then we must set last_parm_insn to
1605 the last such instruction emitted. */
1606 if (insn == last_parm_insn)
1607 last_parm_insn = PREV_INSN (next_insn);
1608
1609 while (replacements)
1610 {
1611 if (GET_CODE (replacements->new) == REG)
1612 {
1613 rtx insert_before;
1614 rtx seq;
1615
1616 /* OLD might be a (subreg (mem)). */
1617 if (GET_CODE (replacements->old) == SUBREG)
1618 replacements->old
1619 = fixup_memory_subreg (replacements->old, insn, 0);
1620 else
1621 replacements->old
1622 = fixup_stack_1 (replacements->old, insn);
1623
1624 insert_before = insn;
1625
1626 /* If we are changing the mode, do a conversion.
1627 This might be wasteful, but combine.c will
1628 eliminate much of the waste. */
1629
1630 if (GET_MODE (replacements->new)
1631 != GET_MODE (replacements->old))
1632 {
1633 start_sequence ();
1634 convert_move (replacements->new,
1635 replacements->old, unsignedp);
1636 seq = gen_sequence ();
1637 end_sequence ();
1638 }
1639 else
1640 seq = gen_move_insn (replacements->new,
1641 replacements->old);
1642
1643 emit_insn_before (seq, insert_before);
1644 }
1645
1646 replacements = replacements->next;
1647 }
1648 }
1649
1650 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1651 But don't touch other insns referred to by reg-notes;
1652 we will get them elsewhere. */
1653 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1654 if (GET_CODE (note) != INSN_LIST)
1655 XEXP (note, 0)
1656 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1657 }
1658 insn = next;
1659 }
1660 }
1661 \f
1662 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1663 See if the rtx expression at *LOC in INSN needs to be changed.
1664
1665 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1666 contain a list of original rtx's and replacements. If we find that we need
1667 to modify this insn by replacing a memory reference with a pseudo or by
1668 making a new MEM to implement a SUBREG, we consult that list to see if
1669 we have already chosen a replacement. If none has already been allocated,
1670 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1671 or the SUBREG, as appropriate, to the pseudo. */
1672
1673 static void
1674 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1675 register rtx var;
1676 enum machine_mode promoted_mode;
1677 register rtx *loc;
1678 rtx insn;
1679 struct fixup_replacement **replacements;
1680 {
1681 register int i;
1682 register rtx x = *loc;
1683 RTX_CODE code = GET_CODE (x);
1684 register char *fmt;
1685 register rtx tem, tem1;
1686 struct fixup_replacement *replacement;
1687
1688 switch (code)
1689 {
1690 case MEM:
1691 if (var == x)
1692 {
1693 /* If we already have a replacement, use it. Otherwise,
1694 try to fix up this address in case it is invalid. */
1695
1696 replacement = find_fixup_replacement (replacements, var);
1697 if (replacement->new)
1698 {
1699 *loc = replacement->new;
1700 return;
1701 }
1702
1703 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1704
1705 /* Unless we are forcing memory to register or we changed the mode,
1706 we can leave things the way they are if the insn is valid. */
1707
1708 INSN_CODE (insn) = -1;
1709 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1710 && recog_memoized (insn) >= 0)
1711 return;
1712
1713 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1714 return;
1715 }
1716
1717 /* If X contains VAR, we need to unshare it here so that we update
1718 each occurrence separately. But all identical MEMs in one insn
1719 must be replaced with the same rtx because of the possibility of
1720 MATCH_DUPs. */
1721
1722 if (reg_mentioned_p (var, x))
1723 {
1724 replacement = find_fixup_replacement (replacements, x);
1725 if (replacement->new == 0)
1726 replacement->new = copy_most_rtx (x, var);
1727
1728 *loc = x = replacement->new;
1729 }
1730 break;
1731
1732 case REG:
1733 case CC0:
1734 case PC:
1735 case CONST_INT:
1736 case CONST:
1737 case SYMBOL_REF:
1738 case LABEL_REF:
1739 case CONST_DOUBLE:
1740 return;
1741
1742 case SIGN_EXTRACT:
1743 case ZERO_EXTRACT:
1744 /* Note that in some cases those types of expressions are altered
1745 by optimize_bit_field, and do not survive to get here. */
1746 if (XEXP (x, 0) == var
1747 || (GET_CODE (XEXP (x, 0)) == SUBREG
1748 && SUBREG_REG (XEXP (x, 0)) == var))
1749 {
1750 /* Get TEM as a valid MEM in the mode presently in the insn.
1751
1752 We don't worry about the possibility of MATCH_DUP here; it
1753 is highly unlikely and would be tricky to handle. */
1754
1755 tem = XEXP (x, 0);
1756 if (GET_CODE (tem) == SUBREG)
1757 tem = fixup_memory_subreg (tem, insn, 1);
1758 tem = fixup_stack_1 (tem, insn);
1759
1760 /* Unless we want to load from memory, get TEM into the proper mode
1761 for an extract from memory. This can only be done if the
1762 extract is at a constant position and length. */
1763
1764 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1765 && GET_CODE (XEXP (x, 2)) == CONST_INT
1766 && ! mode_dependent_address_p (XEXP (tem, 0))
1767 && ! MEM_VOLATILE_P (tem))
1768 {
1769 enum machine_mode wanted_mode = VOIDmode;
1770 enum machine_mode is_mode = GET_MODE (tem);
1771 int width = INTVAL (XEXP (x, 1));
1772 int pos = INTVAL (XEXP (x, 2));
1773
1774 #ifdef HAVE_extzv
1775 if (GET_CODE (x) == ZERO_EXTRACT)
1776 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1777 #endif
1778 #ifdef HAVE_extv
1779 if (GET_CODE (x) == SIGN_EXTRACT)
1780 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1781 #endif
1782 /* If we have a narrower mode, we can do something. */
1783 if (wanted_mode != VOIDmode
1784 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1785 {
1786 int offset = pos / BITS_PER_UNIT;
1787 rtx old_pos = XEXP (x, 2);
1788 rtx newmem;
1789
1790 /* If the bytes and bits are counted differently, we
1791 must adjust the offset. */
1792 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1793 offset = (GET_MODE_SIZE (is_mode)
1794 - GET_MODE_SIZE (wanted_mode) - offset);
1795
1796 pos %= GET_MODE_BITSIZE (wanted_mode);
1797
1798 newmem = gen_rtx (MEM, wanted_mode,
1799 plus_constant (XEXP (tem, 0), offset));
1800 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1801 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1802 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1803
1804 /* Make the change and see if the insn remains valid. */
1805 INSN_CODE (insn) = -1;
1806 XEXP (x, 0) = newmem;
1807 XEXP (x, 2) = GEN_INT (pos);
1808
1809 if (recog_memoized (insn) >= 0)
1810 return;
1811
1812 /* Otherwise, restore the old position. XEXP (x, 0) will be
1813 restored later. */
1814 XEXP (x, 2) = old_pos;
1815 }
1816 }
1817
1818 /* If we get here, the bitfield extract insn can't accept a memory
1819 reference. Copy the input into a register. */
1820
1821 tem1 = gen_reg_rtx (GET_MODE (tem));
1822 emit_insn_before (gen_move_insn (tem1, tem), insn);
1823 XEXP (x, 0) = tem1;
1824 return;
1825 }
1826 break;
1827
1828 case SUBREG:
1829 if (SUBREG_REG (x) == var)
1830 {
1831 /* If this is a special SUBREG made because VAR was promoted
1832 from a wider mode, replace it with VAR and call ourselves
1833 recursively, this time saying that the object previously
1834 had its current mode (by virtue of the SUBREG). */
1835
1836 if (SUBREG_PROMOTED_VAR_P (x))
1837 {
1838 *loc = var;
1839 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1840 return;
1841 }
1842
1843 /* If this SUBREG makes VAR wider, it has become a paradoxical
1844 SUBREG with VAR in memory, but these aren't allowed at this
1845 stage of the compilation. So load VAR into a pseudo and take
1846 a SUBREG of that pseudo. */
1847 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1848 {
1849 replacement = find_fixup_replacement (replacements, var);
1850 if (replacement->new == 0)
1851 replacement->new = gen_reg_rtx (GET_MODE (var));
1852 SUBREG_REG (x) = replacement->new;
1853 return;
1854 }
1855
1856 /* See if we have already found a replacement for this SUBREG.
1857 If so, use it. Otherwise, make a MEM and see if the insn
1858 is recognized. If not, or if we should force MEM into a register,
1859 make a pseudo for this SUBREG. */
1860 replacement = find_fixup_replacement (replacements, x);
1861 if (replacement->new)
1862 {
1863 *loc = replacement->new;
1864 return;
1865 }
1866
1867 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1868
1869 INSN_CODE (insn) = -1;
1870 if (! flag_force_mem && recog_memoized (insn) >= 0)
1871 return;
1872
1873 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1874 return;
1875 }
1876 break;
1877
1878 case SET:
1879 /* First do special simplification of bit-field references. */
1880 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1881 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1882 optimize_bit_field (x, insn, 0);
1883 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1884 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1885 optimize_bit_field (x, insn, NULL_PTR);
1886
1887 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1888 insn into a pseudo and store the low part of the pseudo into VAR. */
1889 if (GET_CODE (SET_DEST (x)) == SUBREG
1890 && SUBREG_REG (SET_DEST (x)) == var
1891 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1892 > GET_MODE_SIZE (GET_MODE (var))))
1893 {
1894 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1895 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1896 tem)),
1897 insn);
1898 break;
1899 }
1900
1901 {
1902 rtx dest = SET_DEST (x);
1903 rtx src = SET_SRC (x);
1904 rtx outerdest = dest;
1905
1906 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1907 || GET_CODE (dest) == SIGN_EXTRACT
1908 || GET_CODE (dest) == ZERO_EXTRACT)
1909 dest = XEXP (dest, 0);
1910
1911 if (GET_CODE (src) == SUBREG)
1912 src = XEXP (src, 0);
1913
1914 /* If VAR does not appear at the top level of the SET,
1915 just scan the lower levels of the tree. */
1916
1917 if (src != var && dest != var)
1918 break;
1919
1920 /* We will need to rerecognize this insn. */
1921 INSN_CODE (insn) = -1;
1922
1923 #ifdef HAVE_insv
1924 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1925 {
1926 /* Since this case will return, ensure we fix up all the
1927 operands here. */
1928 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1929 insn, replacements);
1930 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1931 insn, replacements);
1932 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1933 insn, replacements);
1934
1935 tem = XEXP (outerdest, 0);
1936
1937 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1938 that may appear inside a ZERO_EXTRACT.
1939 This was legitimate when the MEM was a REG. */
1940 if (GET_CODE (tem) == SUBREG
1941 && SUBREG_REG (tem) == var)
1942 tem = fixup_memory_subreg (tem, insn, 1);
1943 else
1944 tem = fixup_stack_1 (tem, insn);
1945
1946 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1947 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1948 && ! mode_dependent_address_p (XEXP (tem, 0))
1949 && ! MEM_VOLATILE_P (tem))
1950 {
1951 enum machine_mode wanted_mode
1952 = insn_operand_mode[(int) CODE_FOR_insv][0];
1953 enum machine_mode is_mode = GET_MODE (tem);
1954 int width = INTVAL (XEXP (outerdest, 1));
1955 int pos = INTVAL (XEXP (outerdest, 2));
1956
1957 /* If we have a narrower mode, we can do something. */
1958 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1959 {
1960 int offset = pos / BITS_PER_UNIT;
1961 rtx old_pos = XEXP (outerdest, 2);
1962 rtx newmem;
1963
1964 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1965 offset = (GET_MODE_SIZE (is_mode)
1966 - GET_MODE_SIZE (wanted_mode) - offset);
1967
1968 pos %= GET_MODE_BITSIZE (wanted_mode);
1969
1970 newmem = gen_rtx (MEM, wanted_mode,
1971 plus_constant (XEXP (tem, 0), offset));
1972 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1973 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1974 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1975
1976 /* Make the change and see if the insn remains valid. */
1977 INSN_CODE (insn) = -1;
1978 XEXP (outerdest, 0) = newmem;
1979 XEXP (outerdest, 2) = GEN_INT (pos);
1980
1981 if (recog_memoized (insn) >= 0)
1982 return;
1983
1984 /* Otherwise, restore the old position. XEXP (outerdest, 0) will be
1985 restored later. */
1986 XEXP (outerdest, 2) = old_pos;
1987 }
1988 }
1989
1990 /* If we get here, the bit-field store doesn't allow memory
1991 or isn't located at a constant position. Load the value into
1992 a register, do the store, and put it back into memory. */
1993
1994 tem1 = gen_reg_rtx (GET_MODE (tem));
1995 emit_insn_before (gen_move_insn (tem1, tem), insn);
1996 emit_insn_after (gen_move_insn (tem, tem1), insn);
1997 XEXP (outerdest, 0) = tem1;
1998 return;
1999 }
2000 #endif
2001
2002 /* STRICT_LOW_PART is a no-op on memory references
2003 and it can cause combinations to be unrecognizable,
2004 so eliminate it. */
2005
2006 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2007 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2008
2009 /* A valid insn to copy VAR into or out of a register
2010 must be left alone, to avoid an infinite loop here.
2011 If the reference to VAR is by a subreg, fix that up,
2012 since SUBREG is not valid for a memref.
2013 Also fix up the address of the stack slot.
2014
2015 Note that we must not try to recognize the insn until
2016 after we know that we have valid addresses and no
2017 (subreg (mem ...) ...) constructs, since these interfere
2018 with determining the validity of the insn. */
2019
2020 if ((SET_SRC (x) == var
2021 || (GET_CODE (SET_SRC (x)) == SUBREG
2022 && SUBREG_REG (SET_SRC (x)) == var))
2023 && (GET_CODE (SET_DEST (x)) == REG
2024 || (GET_CODE (SET_DEST (x)) == SUBREG
2025 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2026 && GET_MODE (var) == promoted_mode
2027 && x == single_set (insn))
2028 {
2029 rtx pat;
2030
2031 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2032 if (replacement->new)
2033 SET_SRC (x) = replacement->new;
2034 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2035 SET_SRC (x) = replacement->new
2036 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2037 else
2038 SET_SRC (x) = replacement->new
2039 = fixup_stack_1 (SET_SRC (x), insn);
2040
2041 if (recog_memoized (insn) >= 0)
2042 return;
2043
2044 /* INSN is not valid, but we know that we want to
2045 copy SET_SRC (x) to SET_DEST (x) in some way. So
2046 we generate the move and see whether it requires more
2047 than one insn. If it does, we emit those insns and
2048 delete INSN. Otherwise, we can just replace the pattern
2049 of INSN; we have already verified above that INSN has
2050 no function other than to do X. */
2051
2052 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2053 if (GET_CODE (pat) == SEQUENCE)
2054 {
2055 emit_insn_after (pat, insn);
2056 PUT_CODE (insn, NOTE);
2057 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2058 NOTE_SOURCE_FILE (insn) = 0;
2059 }
2060 else
2061 PATTERN (insn) = pat;
2062
2063 return;
2064 }
2065
2066 if ((SET_DEST (x) == var
2067 || (GET_CODE (SET_DEST (x)) == SUBREG
2068 && SUBREG_REG (SET_DEST (x)) == var))
2069 && (GET_CODE (SET_SRC (x)) == REG
2070 || (GET_CODE (SET_SRC (x)) == SUBREG
2071 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2072 && GET_MODE (var) == promoted_mode
2073 && x == single_set (insn))
2074 {
2075 rtx pat;
2076
2077 if (GET_CODE (SET_DEST (x)) == SUBREG)
2078 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2079 else
2080 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2081
2082 if (recog_memoized (insn) >= 0)
2083 return;
2084
2085 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2086 if (GET_CODE (pat) == SEQUENCE)
2087 {
2088 emit_insn_after (pat, insn);
2089 PUT_CODE (insn, NOTE);
2090 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2091 NOTE_SOURCE_FILE (insn) = 0;
2092 }
2093 else
2094 PATTERN (insn) = pat;
2095
2096 return;
2097 }
2098
2099 /* Otherwise, storing into VAR must be handled specially
2100 by storing into a temporary and copying that into VAR
2101 with a new insn after this one. Note that this case
2102 will be used when storing into a promoted scalar since
2103 the insn will now have different modes on the input
2104 and output and hence will be invalid (except for the case
2105 of setting it to a constant, which does not need any
2106 change if it is valid). We generate extra code in that case,
2107 but combine.c will eliminate it. */
2108
2109 if (dest == var)
2110 {
2111 rtx temp;
2112 rtx fixeddest = SET_DEST (x);
2113
2114 /* STRICT_LOW_PART can be discarded around a MEM. */
2115 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2116 fixeddest = XEXP (fixeddest, 0);
2117 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2118 if (GET_CODE (fixeddest) == SUBREG)
2119 {
2120 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2121 promoted_mode = GET_MODE (fixeddest);
2122 }
2123 else
2124 fixeddest = fixup_stack_1 (fixeddest, insn);
2125
2126 temp = gen_reg_rtx (promoted_mode);
2127
2128 emit_insn_after (gen_move_insn (fixeddest,
2129 gen_lowpart (GET_MODE (fixeddest),
2130 temp)),
2131 insn);
2132
2133 SET_DEST (x) = temp;
2134 }
2135 }
2136 }
2137
2138 /* Nothing special about this RTX; fix its operands. */
2139
2140 fmt = GET_RTX_FORMAT (code);
2141 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2142 {
2143 if (fmt[i] == 'e')
2144 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2145 if (fmt[i] == 'E')
2146 {
2147 register int j;
2148 for (j = 0; j < XVECLEN (x, i); j++)
2149 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2150 insn, replacements);
2151 }
2152 }
2153 }
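
/* Editor's illustration of the MATCH_DUP rule mentioned above (the
   insn is hypothetical).  If VAR, now a MEM, appears twice in
       (set (mem:SI A) (plus:SI (mem:SI A) (const_int 1)))
   and the target's add pattern ties the two operands with a MATCH_DUP,
   both occurrences must become the SAME pseudo.  find_fixup_replacement
   makes this happen: the first lookup allocates the replacement, and
   every later lookup of VAR within this insn returns that same rtx.  */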
2154 \f
2155 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2156 return an rtx (MEM:m1 newaddr) which is equivalent.
2157 If any insns must be emitted to compute NEWADDR, put them before INSN.
2158
2159 UNCRITICAL nonzero means accept paradoxical subregs.
2160 This is used for subregs found inside of ZERO_EXTRACTs and in REG_NOTES. */
2161
2162 static rtx
2163 fixup_memory_subreg (x, insn, uncritical)
2164 rtx x;
2165 rtx insn;
2166 int uncritical;
2167 {
2168 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2169 rtx addr = XEXP (SUBREG_REG (x), 0);
2170 enum machine_mode mode = GET_MODE (x);
2171 rtx result;
2172
2173 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2174 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2175 && ! uncritical)
2176 abort ();
2177
2178 if (BYTES_BIG_ENDIAN)
2179 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2180 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2181 addr = plus_constant (addr, offset);
2182 if (!flag_force_addr && memory_address_p (mode, addr))
2183 /* Shortcut if no insns need be emitted. */
2184 return change_address (SUBREG_REG (x), mode, addr);
2185 start_sequence ();
2186 result = change_address (SUBREG_REG (x), mode, addr);
2187 emit_insn_before (gen_sequence (), insn);
2188 end_sequence ();
2189 return result;
2190 }
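
/* A worked example (editor's note; the 4-byte word size is an
   assumption, not taken from any particular target).  Given
       (subreg:QI (mem:SI (reg:SI 100)) 0)
   SUBREG_WORD is 0, so OFFSET starts at 0.  On a little-endian machine
   the result is simply
       (mem:QI (reg:SI 100))
   while on a big-endian machine the adjustment above adds
   MIN (4, 4) - MIN (4, 1) == 3, giving
       (mem:QI (plus:SI (reg:SI 100) (const_int 3)))
   since the low-order byte lives at the highest address there.  */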
2191
2192 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2193 Replace subexpressions of X in place.
2194 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2195 Otherwise return X, with its contents possibly altered.
2196
2197 If any insns must be emitted to compute NEWADDR, put them before INSN.
2198
2199 UNCRITICAL is as in fixup_memory_subreg. */
2200
2201 static rtx
2202 walk_fixup_memory_subreg (x, insn, uncritical)
2203 register rtx x;
2204 rtx insn;
2205 int uncritical;
2206 {
2207 register enum rtx_code code;
2208 register char *fmt;
2209 register int i;
2210
2211 if (x == 0)
2212 return 0;
2213
2214 code = GET_CODE (x);
2215
2216 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2217 return fixup_memory_subreg (x, insn, uncritical);
2218
2219 /* Nothing special about this RTX; fix its operands. */
2220
2221 fmt = GET_RTX_FORMAT (code);
2222 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2223 {
2224 if (fmt[i] == 'e')
2225 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2226 if (fmt[i] == 'E')
2227 {
2228 register int j;
2229 for (j = 0; j < XVECLEN (x, i); j++)
2230 XVECEXP (x, i, j)
2231 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2232 }
2233 }
2234 return x;
2235 }
2236 \f
2237 /* For each memory ref within X, if it refers to a stack slot
2238 with an out-of-range displacement, put the address in a temp register
2239 (emitting new insns before INSN to load these registers)
2240 and alter the memory ref to use that register.
2241 Replace each such MEM rtx with a copy, to avoid clobberage. */
2242
2243 static rtx
2244 fixup_stack_1 (x, insn)
2245 rtx x;
2246 rtx insn;
2247 {
2248 register int i;
2249 register RTX_CODE code = GET_CODE (x);
2250 register char *fmt;
2251
2252 if (code == MEM)
2253 {
2254 register rtx ad = XEXP (x, 0);
2255 /* If we have address of a stack slot but it's not valid
2256 (displacement is too large), compute the sum in a register. */
2257 if (GET_CODE (ad) == PLUS
2258 && GET_CODE (XEXP (ad, 0)) == REG
2259 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2260 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2261 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2262 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2263 {
2264 rtx temp, seq;
2265 if (memory_address_p (GET_MODE (x), ad))
2266 return x;
2267
2268 start_sequence ();
2269 temp = copy_to_reg (ad);
2270 seq = gen_sequence ();
2271 end_sequence ();
2272 emit_insn_before (seq, insn);
2273 return change_address (x, VOIDmode, temp);
2274 }
2275 return x;
2276 }
2277
2278 fmt = GET_RTX_FORMAT (code);
2279 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2280 {
2281 if (fmt[i] == 'e')
2282 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2283 if (fmt[i] == 'E')
2284 {
2285 register int j;
2286 for (j = 0; j < XVECLEN (x, i); j++)
2287 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2288 }
2289 }
2290 return x;
2291 }
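
/* Editor's sketch of the rewrite above (the 4100 displacement and the
   register numbers are hypothetical; the legal displacement range is
   machine-dependent).  If
       (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 4100)))
   is not a valid address, copy_to_reg computes the sum first,
       (set (reg:SI 70) (plus:SI (reg:SI virtual-stack-vars) (const_int 4100)))
   and the reference becomes (mem:SI (reg:SI 70)).  */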
2292 \f
2293 /* Optimization: a bit-field instruction whose field
2294 happens to be a byte or halfword in memory
2295 can be changed to a move instruction.
2296
2297 We call here when INSN is an insn to examine or store into a bit-field.
2298 BODY is the SET-rtx to be altered.
2299
2300 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2301 (Currently this is called only from function.c, and EQUIV_MEM
2302 is always 0.) */
2303
2304 static void
2305 optimize_bit_field (body, insn, equiv_mem)
2306 rtx body;
2307 rtx insn;
2308 rtx *equiv_mem;
2309 {
2310 register rtx bitfield;
2311 int destflag;
2312 rtx seq = 0;
2313 enum machine_mode mode;
2314
2315 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2316 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2317 bitfield = SET_DEST (body), destflag = 1;
2318 else
2319 bitfield = SET_SRC (body), destflag = 0;
2320
2321 /* First check that the field being stored has constant size and position
2322 and is in fact a byte or halfword suitably aligned. */
2323
2324 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2325 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2326 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2327 != BLKmode)
2328 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2329 {
2330 register rtx memref = 0;
2331
2332 /* Now check that the containing word is memory, not a register,
2333 and that it is safe to change the machine mode. */
2334
2335 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2336 memref = XEXP (bitfield, 0);
2337 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2338 && equiv_mem != 0)
2339 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2340 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2341 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2342 memref = SUBREG_REG (XEXP (bitfield, 0));
2343 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2344 && equiv_mem != 0
2345 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2346 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2347
2348 if (memref
2349 && ! mode_dependent_address_p (XEXP (memref, 0))
2350 && ! MEM_VOLATILE_P (memref))
2351 {
2352 /* Now adjust the address, first for any subreg'ing
2353 that we are now getting rid of,
2354 and then for which byte of the word is wanted. */
2355
2356 register int offset = INTVAL (XEXP (bitfield, 2));
2357 rtx insns;
2358
2359 /* Adjust OFFSET to count bits from low-address byte. */
2360 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2361 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2362 - offset - INTVAL (XEXP (bitfield, 1)));
2363
2364 /* Adjust OFFSET to count bytes from low-address byte. */
2365 offset /= BITS_PER_UNIT;
2366 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2367 {
2368 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2369 if (BYTES_BIG_ENDIAN)
2370 offset -= (MIN (UNITS_PER_WORD,
2371 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2372 - MIN (UNITS_PER_WORD,
2373 GET_MODE_SIZE (GET_MODE (memref))));
2374 }
2375
2376 start_sequence ();
2377 memref = change_address (memref, mode,
2378 plus_constant (XEXP (memref, 0), offset));
2379 insns = get_insns ();
2380 end_sequence ();
2381 emit_insns_before (insns, insn);
2382
2383 /* Store this memory reference where
2384 we found the bit field reference. */
2385
2386 if (destflag)
2387 {
2388 validate_change (insn, &SET_DEST (body), memref, 1);
2389 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2390 {
2391 rtx src = SET_SRC (body);
2392 while (GET_CODE (src) == SUBREG
2393 && SUBREG_WORD (src) == 0)
2394 src = SUBREG_REG (src);
2395 if (GET_MODE (src) != GET_MODE (memref))
2396 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2397 validate_change (insn, &SET_SRC (body), src, 1);
2398 }
2399 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2400 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2401 /* This shouldn't happen because anything that didn't have
2402 one of these modes should have been converted explicitly
2403 and then referenced through a subreg.
2404 This is so because the original bit-field was
2405 handled by agg_mode and so its tree structure had
2406 the same mode that memref now has. */
2407 abort ();
2408 }
2409 else
2410 {
2411 rtx dest = SET_DEST (body);
2412
2413 while (GET_CODE (dest) == SUBREG
2414 && SUBREG_WORD (dest) == 0
2415 && (GET_MODE_CLASS (GET_MODE (dest))
2416 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2417 dest = SUBREG_REG (dest);
2418
2419 validate_change (insn, &SET_DEST (body), dest, 1);
2420
2421 if (GET_MODE (dest) == GET_MODE (memref))
2422 validate_change (insn, &SET_SRC (body), memref, 1);
2423 else
2424 {
2425 /* Convert the mem ref to the destination mode. */
2426 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2427
2428 start_sequence ();
2429 convert_move (newreg, memref,
2430 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2431 seq = get_insns ();
2432 end_sequence ();
2433
2434 validate_change (insn, &SET_SRC (body), newreg, 1);
2435 }
2436 }
2437
2438 /* See if we can convert this extraction or insertion into
2439 a simple move insn. We might not be able to do so if this
2440 was, for example, part of a PARALLEL.
2441
2442 If we succeed, write out any needed conversions. If we fail,
2443 it is hard to guess why we failed, so don't do anything
2444 special; just let the optimization be suppressed. */
2445
2446 if (apply_change_group () && seq)
2447 emit_insns_before (seq, insn);
2448 }
2449 }
2450 }
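
/* A worked example (editor's addition; the modes, register numbers and
   the matching bit/byte endianness are assumptions).  The extraction
       (set (reg:SI 80)
            (zero_extract:SI (mem:SI (reg:SI 90)) (const_int 8) (const_int 8)))
   has constant size and position, the position is a multiple of the
   size, and mode_for_size (8, MODE_INT, 1) yields QImode; OFFSET
   becomes 8 / BITS_PER_UNIT == 1, so MEMREF is rewritten as
       (mem:QI (plus:SI (reg:SI 90) (const_int 1)))
   and the byte is zero-extended into a new register, which then
   replaces the extraction.  */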
2451 \f
2452 /* These routines are responsible for converting virtual register references
2453 to the actual hard register references once RTL generation is complete.
2454
2455 The following four variables are used for communication between the
2456 routines. They contain the offsets of the virtual registers from their
2457 respective hard registers. */
2458
2459 static int in_arg_offset;
2460 static int var_offset;
2461 static int dynamic_offset;
2462 static int out_arg_offset;
2463
2464 /* In most machines, the stack pointer register is equivalent to the bottom
2465 of the stack. */
2466
2467 #ifndef STACK_POINTER_OFFSET
2468 #define STACK_POINTER_OFFSET 0
2469 #endif
2470
2471 /* If not defined, pick an appropriate default for the offset of dynamically
2472 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2473 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2474
2475 #ifndef STACK_DYNAMIC_OFFSET
2476
2477 #ifdef ACCUMULATE_OUTGOING_ARGS
2478 /* The bottom of the stack points to the actual arguments. If
2479 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2480 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2481 stack space for register parameters is not pushed by the caller, but
2482 rather part of the fixed stack areas and hence not included in
2483 `current_function_outgoing_args_size'. Nevertheless, we must allow
2484 for it when allocating stack dynamic objects. */
2485
2486 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2487 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2488 (current_function_outgoing_args_size \
2489 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2490
2491 #else
2492 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2493 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2494 #endif
2495
2496 #else
2497 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2498 #endif
2499 #endif
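
/* For example (editor's note; the figures are hypothetical): with
   ACCUMULATE_OUTGOING_ARGS, 16 bytes of outgoing arguments, 8 bytes of
   REG_PARM_STACK_SPACE and a zero STACK_POINTER_OFFSET, dynamically
   allocated objects start 24 bytes above the stack pointer.  */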
2500
2501 /* Pass through the INSNS of function FNDECL and convert virtual register
2502 references to hard register references. */
2503
2504 void
2505 instantiate_virtual_regs (fndecl, insns)
2506 tree fndecl;
2507 rtx insns;
2508 {
2509 rtx insn;
2510
2511 /* Compute the offsets to use for this function. */
2512 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2513 var_offset = STARTING_FRAME_OFFSET;
2514 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2515 out_arg_offset = STACK_POINTER_OFFSET;
2516
2517 /* Scan all variables and parameters of this function. For each that is
2518 in memory, instantiate all virtual registers if the result is a valid
2519 address. If not, we do it later. That will handle most uses of virtual
2520 regs on many machines. */
2521 instantiate_decls (fndecl, 1);
2522
2523 /* Initialize recognition, indicating that volatile is OK. */
2524 init_recog ();
2525
2526 /* Scan through all the insns, instantiating every virtual register still
2527 present. */
2528 for (insn = insns; insn; insn = NEXT_INSN (insn))
2529 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2530 || GET_CODE (insn) == CALL_INSN)
2531 {
2532 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2533 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2534 }
2535
2536 /* Now instantiate the remaining register equivalences for debugging info.
2537 These will not be valid addresses. */
2538 instantiate_decls (fndecl, 0);
2539
2540 /* Indicate that, from now on, assign_stack_local should use
2541 frame_pointer_rtx. */
2542 virtuals_instantiated = 1;
2543 }
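
/* Editor's sketch of the pass's net effect (the register number and
   the -16 frame offset are hypothetical).  Before instantiation a
   local's slot is addressed through a virtual register:
       (mem:SI (plus:SI (reg:SI virtual-stack-vars) (const_int 8)))
   Afterwards, with var_offset folded in (say STARTING_FRAME_OFFSET is
   -16), the same slot reads
       (mem:SI (plus:SI (reg:SI fp) (const_int -8)))  */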
2544
2545 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2546 all virtual registers in their DECL_RTL's.
2547
2548 If VALID_ONLY, do this only if the resulting address is still valid.
2549 Otherwise, always do it. */
2550
2551 static void
2552 instantiate_decls (fndecl, valid_only)
2553 tree fndecl;
2554 int valid_only;
2555 {
2556 tree decl;
2557
2558 if (DECL_SAVED_INSNS (fndecl))
2559 /* When compiling an inline function, the obstack used for
2560 rtl allocation is the maybepermanent_obstack. Calling
2561 `resume_temporary_allocation' switches us back to that
2562 obstack while we process this function's parameters. */
2563 resume_temporary_allocation ();
2564
2565 /* Process all parameters of the function. */
2566 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2567 {
2568 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2569 valid_only);
2570 instantiate_decl (DECL_INCOMING_RTL (decl),
2571 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2572 }
2573
2574 /* Now process all variables defined in the function or its subblocks. */
2575 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2576
2577 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2578 {
2579 /* Save all rtl allocated for this function by raising the
2580 high-water mark on the maybepermanent_obstack. */
2581 preserve_data ();
2582 /* All further rtl allocation is now done in the current_obstack. */
2583 rtl_in_current_obstack ();
2584 }
2585 }
2586
2587 /* Subroutine of instantiate_decls: Process all decls in the given
2588 BLOCK node and all its subblocks. */
2589
2590 static void
2591 instantiate_decls_1 (let, valid_only)
2592 tree let;
2593 int valid_only;
2594 {
2595 tree t;
2596
2597 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2598 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2599 valid_only);
2600
2601 /* Process all subblocks. */
2602 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2603 instantiate_decls_1 (t, valid_only);
2604 }
2605
2606 /* Subroutine of the preceding procedures: Given RTL representing a
2607 decl and the size of the object, do any instantiation required.
2608
2609 If VALID_ONLY is non-zero, it means that the RTL should only be
2610 changed if the new address is valid. */
2611
2612 static void
2613 instantiate_decl (x, size, valid_only)
2614 rtx x;
2615 int size;
2616 int valid_only;
2617 {
2618 enum machine_mode mode;
2619 rtx addr;
2620
2621 /* If this is not a MEM, no need to do anything. Similarly if the
2622 address is a constant or a register that is not a virtual register. */
2623
2624 if (x == 0 || GET_CODE (x) != MEM)
2625 return;
2626
2627 addr = XEXP (x, 0);
2628 if (CONSTANT_P (addr)
2629 || (GET_CODE (addr) == REG
2630 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2631 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2632 return;
2633
2634 /* If we should do this only when the address is valid, copy the address.
2635 We need to do this so we can undo any changes that might make the
2636 address invalid. This copy is unfortunate, but probably can't be
2637 avoided. */
2638
2639 if (valid_only)
2640 addr = copy_rtx (addr);
2641
2642 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2643
2644 if (! valid_only)
2645 return;
2646
2647 /* Now verify that the resulting address is valid for every integer or
2648 floating-point mode up to and including SIZE bytes long. We do this
2649 since the object might be accessed in any mode and frame addresses
2650 are shared. */
2651
2652 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2653 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2654 mode = GET_MODE_WIDER_MODE (mode))
2655 if (! memory_address_p (mode, addr))
2656 return;
2657
2658 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2659 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2660 mode = GET_MODE_WIDER_MODE (mode))
2661 if (! memory_address_p (mode, addr))
2662 return;
2663
2664 /* Otherwise, put back the address, now that we have updated it and we
2665 know it is valid. */
2666
2667 XEXP (x, 0) = addr;
2668 }
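
/* For instance (editor's note, assuming a typical 32-bit target): for
   an 8-byte decl the loops above try the address in QImode, HImode,
   SImode and DImode, and then in SFmode and DFmode, since a shared
   frame address may later be used in any of those modes.  */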
2669 \f
2670 /* Given a pointer to a piece of rtx and an optional pointer to the
2671 containing object, instantiate any virtual registers present in it.
2672
2673 If EXTRA_INSNS, we always do the replacement and generate
2674 any extra insns before OBJECT. If it is zero, we do nothing if the
2675 replacement is not valid.
2676
2677 Return 1 if we either had nothing to do or if we were able to do the
2678 needed replacement. Return 0 otherwise; we only return zero if
2679 EXTRA_INSNS is zero.
2680
2681 We first try some simple transformations to avoid the creation of extra
2682 pseudos. */
2683
2684 static int
2685 instantiate_virtual_regs_1 (loc, object, extra_insns)
2686 rtx *loc;
2687 rtx object;
2688 int extra_insns;
2689 {
2690 rtx x;
2691 RTX_CODE code;
2692 rtx new = 0;
2693 int offset;
2694 rtx temp;
2695 rtx seq;
2696 int i, j;
2697 char *fmt;
2698
2699 /* Re-start here to avoid recursion in common cases. */
2700 restart:
2701
2702 x = *loc;
2703 if (x == 0)
2704 return 1;
2705
2706 code = GET_CODE (x);
2707
2708 /* Check for some special cases. */
2709 switch (code)
2710 {
2711 case CONST_INT:
2712 case CONST_DOUBLE:
2713 case CONST:
2714 case SYMBOL_REF:
2715 case CODE_LABEL:
2716 case PC:
2717 case CC0:
2718 case ASM_INPUT:
2719 case ADDR_VEC:
2720 case ADDR_DIFF_VEC:
2721 case RETURN:
2722 return 1;
2723
2724 case SET:
2725 /* We are allowed to set the virtual registers. This means that
2726 the actual register should receive the source minus the
2727 appropriate offset. This is used, for example, in the handling
2728 of non-local gotos. */
2729 if (SET_DEST (x) == virtual_incoming_args_rtx)
2730 new = arg_pointer_rtx, offset = - in_arg_offset;
2731 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2732 new = frame_pointer_rtx, offset = - var_offset;
2733 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2734 new = stack_pointer_rtx, offset = - dynamic_offset;
2735 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2736 new = stack_pointer_rtx, offset = - out_arg_offset;
2737
2738 if (new)
2739 {
2740 /* The only valid sources here are PLUS or REG. Just do
2741 the simplest possible thing to handle them. */
2742 if (GET_CODE (SET_SRC (x)) != REG
2743 && GET_CODE (SET_SRC (x)) != PLUS)
2744 abort ();
2745
2746 start_sequence ();
2747 if (GET_CODE (SET_SRC (x)) != REG)
2748 temp = force_operand (SET_SRC (x), NULL_RTX);
2749 else
2750 temp = SET_SRC (x);
2751 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2752 seq = get_insns ();
2753 end_sequence ();
2754
2755 emit_insns_before (seq, object);
2756 SET_DEST (x) = new;
2757
2758 if (!validate_change (object, &SET_SRC (x), temp, 0)
2759 || ! extra_insns)
2760 abort ();
2761
2762 return 1;
2763 }
2764
2765 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2766 loc = &SET_SRC (x);
2767 goto restart;
2768
2769 case PLUS:
2770 /* Handle special case of virtual register plus constant. */
2771 if (CONSTANT_P (XEXP (x, 1)))
2772 {
2773 rtx old, new_offset;
2774
2775 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2776 if (GET_CODE (XEXP (x, 0)) == PLUS)
2777 {
2778 rtx inner = XEXP (XEXP (x, 0), 0);
2779
2780 if (inner == virtual_incoming_args_rtx)
2781 new = arg_pointer_rtx, offset = in_arg_offset;
2782 else if (inner == virtual_stack_vars_rtx)
2783 new = frame_pointer_rtx, offset = var_offset;
2784 else if (inner == virtual_stack_dynamic_rtx)
2785 new = stack_pointer_rtx, offset = dynamic_offset;
2786 else if (inner == virtual_outgoing_args_rtx)
2787 new = stack_pointer_rtx, offset = out_arg_offset;
2788 else
2789 {
2790 loc = &XEXP (x, 0);
2791 goto restart;
2792 }
2793
2794 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2795 extra_insns);
2796 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2797 }
2798
2799 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2800 new = arg_pointer_rtx, offset = in_arg_offset;
2801 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2802 new = frame_pointer_rtx, offset = var_offset;
2803 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2804 new = stack_pointer_rtx, offset = dynamic_offset;
2805 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2806 new = stack_pointer_rtx, offset = out_arg_offset;
2807 else
2808 {
2809 /* We know the second operand is a constant. Unless the
2810 first operand is a REG (which has already been checked),
2811 it needs to be checked. */
2812 if (GET_CODE (XEXP (x, 0)) != REG)
2813 {
2814 loc = &XEXP (x, 0);
2815 goto restart;
2816 }
2817 return 1;
2818 }
2819
2820 new_offset = plus_constant (XEXP (x, 1), offset);
2821
2822 /* If the new constant is zero, try to replace the sum with just
2823 the register. */
2824 if (new_offset == const0_rtx
2825 && validate_change (object, loc, new, 0))
2826 return 1;
2827
2828 /* Next try to replace the register and new offset.
2829 There are two changes to validate here, and we can't assume that
2830 when the old offset equals the new one, just changing the register
2831 will yield a valid insn. In the interests of a little efficiency,
2832 however, we only call validate_change once (we don't queue up the
2833 changes and then call apply_change_group). */
2834
2835 old = XEXP (x, 0);
2836 if (offset == 0
2837 ? ! validate_change (object, &XEXP (x, 0), new, 0)
2838 : (XEXP (x, 0) = new,
2839 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
2840 {
2841 if (! extra_insns)
2842 {
2843 XEXP (x, 0) = old;
2844 return 0;
2845 }
2846
2847 /* Otherwise copy the new constant into a register and replace
2848 the constant with that register. */
2849 temp = gen_reg_rtx (Pmode);
2850 XEXP (x, 0) = new;
2851 if (validate_change (object, &XEXP (x, 1), temp, 0))
2852 emit_insn_before (gen_move_insn (temp, new_offset), object);
2853 else
2854 {
2855 /* If that didn't work, replace this expression with a
2856 register containing the sum. */
2857
2858 XEXP (x, 0) = old;
2859 new = gen_rtx (PLUS, Pmode, new, new_offset);
2860
2861 start_sequence ();
2862 temp = force_operand (new, NULL_RTX);
2863 seq = get_insns ();
2864 end_sequence ();
2865
2866 emit_insns_before (seq, object);
2867 if (! validate_change (object, loc, temp, 0)
2868 && ! validate_replace_rtx (x, temp, object))
2869 abort ();
2870 }
2871 }
2872
2873 return 1;
2874 }
2875
2876 /* Fall through to generic two-operand expression case. */
2877 case EXPR_LIST:
2878 case CALL:
2879 case COMPARE:
2880 case MINUS:
2881 case MULT:
2882 case DIV: case UDIV:
2883 case MOD: case UMOD:
2884 case AND: case IOR: case XOR:
2885 case ROTATERT: case ROTATE:
2886 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2887 case NE: case EQ:
2888 case GE: case GT: case GEU: case GTU:
2889 case LE: case LT: case LEU: case LTU:
2890 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2891 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2892 loc = &XEXP (x, 0);
2893 goto restart;
2894
2895 case MEM:
2896 /* Most cases of MEM that convert to valid addresses have already been
2897 handled by our scan of regno_reg_rtx. The only special handling we
2898 need here is to make a copy of the rtx to ensure it isn't being
2899 shared if we have to change it to a pseudo.
2900
2901 If the rtx is a simple reference to an address via a virtual register,
2902 it can potentially be shared. In such cases, first try to make it
2903 a valid address, which can also be shared. Otherwise, copy it and
2904 proceed normally.
2905
2906 First check for common cases that need no processing. These are
2907 usually due to instantiation already being done on a previous instance
2908 of a shared rtx. */
2909
2910 temp = XEXP (x, 0);
2911 if (CONSTANT_ADDRESS_P (temp)
2912 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2913 || temp == arg_pointer_rtx
2914 #endif
2915 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2916 || temp == hard_frame_pointer_rtx
2917 #endif
2918 || temp == frame_pointer_rtx)
2919 return 1;
2920
2921 if (GET_CODE (temp) == PLUS
2922 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2923 && (XEXP (temp, 0) == frame_pointer_rtx
2924 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2925 || XEXP (temp, 0) == hard_frame_pointer_rtx
2926 #endif
2927 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2928 || XEXP (temp, 0) == arg_pointer_rtx
2929 #endif
2930 ))
2931 return 1;
2932
2933 if (temp == virtual_stack_vars_rtx
2934 || temp == virtual_incoming_args_rtx
2935 || (GET_CODE (temp) == PLUS
2936 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2937 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2938 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2939 {
2940 /* This MEM may be shared. If the substitution can be done without
2941 the need to generate new pseudos, we want to do it in place
2942 so all copies of the shared rtx benefit. The call below will
2943 only make substitutions if the resulting address is still
2944 valid.
2945
2946 Note that we cannot pass X as the object in the recursive call
2947 since the insn being processed may not allow all valid
2948 addresses. However, if we were not passed an object, we can
2949 only modify X without copying it if X will have a valid
2950 address.
2951
2952 ??? Also note that this can still lose if OBJECT is an insn that
2953 has fewer restrictions on an address than some other insn.
2954 In that case, we will modify the shared address. This case
2955 doesn't seem very likely, though. */
2956
2957 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
2958 object ? object : x, 0))
2959 return 1;
2960
2961 /* Otherwise make a copy and process that copy. We copy the entire
2962 RTL expression since it might be a PLUS which could also be
2963 shared. */
2964 *loc = x = copy_rtx (x);
2965 }
2966
2967 /* Fall through to generic unary operation case. */
2968 case USE:
2969 case CLOBBER:
2970 case SUBREG:
2971 case STRICT_LOW_PART:
2972 case NEG: case NOT:
2973 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
2974 case SIGN_EXTEND: case ZERO_EXTEND:
2975 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2976 case FLOAT: case FIX:
2977 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2978 case ABS:
2979 case SQRT:
2980 case FFS:
2981 /* These cases either have just one operand or we know that we need not
2982 check the rest of the operands. */
2983 loc = &XEXP (x, 0);
2984 goto restart;
2985
2986 case REG:
2987 /* Try to replace with a PLUS. If that doesn't work, compute the sum
2988 in front of this insn and substitute the temporary. */
2989 if (x == virtual_incoming_args_rtx)
2990 new = arg_pointer_rtx, offset = in_arg_offset;
2991 else if (x == virtual_stack_vars_rtx)
2992 new = frame_pointer_rtx, offset = var_offset;
2993 else if (x == virtual_stack_dynamic_rtx)
2994 new = stack_pointer_rtx, offset = dynamic_offset;
2995 else if (x == virtual_outgoing_args_rtx)
2996 new = stack_pointer_rtx, offset = out_arg_offset;
2997
2998 if (new)
2999 {
3000 temp = plus_constant (new, offset);
3001 if (!validate_change (object, loc, temp, 0))
3002 {
3003 if (! extra_insns)
3004 return 0;
3005
3006 start_sequence ();
3007 temp = force_operand (temp, NULL_RTX);
3008 seq = get_insns ();
3009 end_sequence ();
3010
3011 emit_insns_before (seq, object);
3012 if (! validate_change (object, loc, temp, 0)
3013 && ! validate_replace_rtx (x, temp, object))
3014 abort ();
3015 }
3016 }
3017
3018 return 1;
3019 }
3020
3021 /* Scan all subexpressions. */
3022 fmt = GET_RTX_FORMAT (code);
3023 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3024 if (*fmt == 'e')
3025 {
3026 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3027 return 0;
3028 }
3029 else if (*fmt == 'E')
3030 for (j = 0; j < XVECLEN (x, i); j++)
3031 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3032 extra_insns))
3033 return 0;
3034
3035 return 1;
3036 }
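
/* A worked example of the PLUS case above (editor's sketch; the
   offsets are hypothetical).  With in_arg_offset == 8,
       (plus:SI (reg:SI virtual-incoming-args) (const_int 4))
   is first tried as (plus:SI (reg:SI ap) (const_int 12)); if the insn
   rejects that and EXTRA_INSNS is nonzero, the sum is computed by
   insns emitted before OBJECT and a pseudo replaces the PLUS.  */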
3037 \f
3038 /* Optimization: assuming this function does not receive nonlocal gotos,
3039 delete the handlers for such, as well as the insns to establish
3040 and disestablish them. */
3041
3042 static void
3043 delete_handlers ()
3044 {
3045 rtx insn;
3046 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3047 {
3048 /* Delete the handler by turning off the flag that would
3049 prevent jump_optimize from deleting it.
3050 Also permit deletion of the nonlocal labels themselves
3051 if nothing local refers to them. */
3052 if (GET_CODE (insn) == CODE_LABEL)
3053 {
3054 tree t, last_t;
3055
3056 LABEL_PRESERVE_P (insn) = 0;
3057
3058 /* Remove it from the nonlocal_label list, to avoid confusing
3059 flow. */
3060 for (t = nonlocal_labels, last_t = 0; t;
3061 last_t = t, t = TREE_CHAIN (t))
3062 if (DECL_RTL (TREE_VALUE (t)) == insn)
3063 break;
3064 if (t)
3065 {
3066 if (! last_t)
3067 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3068 else
3069 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3070 }
3071 }
3072 if (GET_CODE (insn) == INSN
3073 && ((nonlocal_goto_handler_slot != 0
3074 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3075 || (nonlocal_goto_stack_level != 0
3076 && reg_mentioned_p (nonlocal_goto_stack_level,
3077 PATTERN (insn)))))
3078 delete_insn (insn);
3079 }
3080 }
3081
3082 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3083 of the current function. */
3084
3085 rtx
3086 nonlocal_label_rtx_list ()
3087 {
3088 tree t;
3089 rtx x = 0;
3090
3091 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3092 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3093
3094 return x;
3095 }
3096 \f
3097 /* Output a USE for any register use in RTL.
3098 This is used with -noreg to mark the extent of the lifespan
3099 of any registers used in a user-visible variable's DECL_RTL. */
3100
3101 void
3102 use_variable (rtl)
3103 rtx rtl;
3104 {
3105 if (GET_CODE (rtl) == REG)
3106 /* This is a register variable. */
3107 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3108 else if (GET_CODE (rtl) == MEM
3109 && GET_CODE (XEXP (rtl, 0)) == REG
3110 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3111 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3112 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3113 /* This is a variable-sized structure. */
3114 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3115 }
3116
3117 /* Like use_variable except that it outputs the USEs after INSN
3118 instead of at the end of the insn-chain. */
3119
3120 void
3121 use_variable_after (rtl, insn)
3122 rtx rtl, insn;
3123 {
3124 if (GET_CODE (rtl) == REG)
3125 /* This is a register variable. */
3126 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3127 else if (GET_CODE (rtl) == MEM
3128 && GET_CODE (XEXP (rtl, 0)) == REG
3129 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3130 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3131 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3132 /* This is a variable-sized structure. */
3133 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3134 }
3135 \f
3136 int
3137 max_parm_reg_num ()
3138 {
3139 return max_parm_reg;
3140 }
3141
3142 /* Return the first insn following those generated by `assign_parms'. */
3143
3144 rtx
3145 get_first_nonparm_insn ()
3146 {
3147 if (last_parm_insn)
3148 return NEXT_INSN (last_parm_insn);
3149 return get_insns ();
3150 }
3151
3152 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3153 Crash if there is none. */
3154
3155 rtx
3156 get_first_block_beg ()
3157 {
3158 register rtx searcher;
3159 register rtx insn = get_first_nonparm_insn ();
3160
3161 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3162 if (GET_CODE (searcher) == NOTE
3163 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3164 return searcher;
3165
3166 abort (); /* Invalid call to this function. (See comments above.) */
3167 return NULL_RTX;
3168 }
3169
3170 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3171 This means a type for which function calls must pass an address to the
3172 function or get an address back from the function.
3173 EXP may be a type node or an expression (whose type is tested). */
3174
3175 int
3176 aggregate_value_p (exp)
3177 tree exp;
3178 {
3179 int i, regno, nregs;
3180 rtx reg;
3181 tree type;
3182 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3183 type = exp;
3184 else
3185 type = TREE_TYPE (exp);
3186
3187 if (RETURN_IN_MEMORY (type))
3188 return 1;
3189 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3190 return 1;
3191 /* Make sure we have suitable call-clobbered regs to return
3192 the value in; if not, we must return it in memory. */
3193 reg = hard_function_value (type, 0);
3194 regno = REGNO (reg);
3195 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3196 for (i = 0; i < nregs; i++)
3197 if (! call_used_regs[regno + i])
3198 return 1;
3199 return 0;
3200 }
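
/* Editor's example: with -fpcc-struct-return, or whenever
   RETURN_IN_MEMORY says so, a C function returning
   "struct { char buf[64]; }" is given a return slot in memory, so
   aggregate_value_p returns 1; a plain "int" normally comes back in a
   call-clobbered register and yields 0.  */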
3201 \f
3202 /* Assign RTL expressions to the function's parameters.
3203 This may involve copying them into registers and using
3204 those registers as the RTL for them.
3205
3206 If SECOND_TIME is non-zero it means that this function is being
3207 called a second time. This is done by integrate.c when a function's
3208 compilation is deferred. We need to come back here in case the
3209 FUNCTION_ARG macro computes items needed for the rest of the compilation
3210 (such as changing which registers are fixed or caller-saved). But suppress
3211 writing any insns or setting DECL_RTL of anything in this case. */
3212
3213 void
3214 assign_parms (fndecl, second_time)
3215 tree fndecl;
3216 int second_time;
3217 {
3218 register tree parm;
3219 register rtx entry_parm = 0;
3220 register rtx stack_parm = 0;
3221 CUMULATIVE_ARGS args_so_far;
3222 enum machine_mode promoted_mode, passed_mode;
3223 enum machine_mode nominal_mode, promoted_nominal_mode;
3224 int unsignedp;
3225 /* Total space needed so far for args on the stack,
3226 given as a constant and a tree-expression. */
3227 struct args_size stack_args_size;
3228 tree fntype = TREE_TYPE (fndecl);
3229 tree fnargs = DECL_ARGUMENTS (fndecl);
3230 /* This is used for the arg pointer when referring to stack args. */
3231 rtx internal_arg_pointer;
3232 /* This is a dummy PARM_DECL that we used for the function result if
3233 the function returns a structure. */
3234 tree function_result_decl = 0;
3235 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
3236 int varargs_setup = 0;
3237 rtx conversion_insns = 0;
3238 /* FUNCTION_ARG may look at this variable. Since this is not
3239 expanding a call it will always be zero in this function. */
3240 int current_call_is_indirect = 0;
3241
3242 /* Nonzero if the last arg is named `__builtin_va_alist',
3243 which is used on some machines for old-fashioned non-ANSI varargs.h;
3244 this should be stuck onto the stack as if it had arrived there. */
3245 int hide_last_arg
3246 = (current_function_varargs
3247 && fnargs
3248 && (parm = tree_last (fnargs)) != 0
3249 && DECL_NAME (parm)
3250 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3251 "__builtin_va_alist")));
3252
3253 /* Nonzero if function takes extra anonymous args.
3254 This means the last named arg must be on the stack
3255 right before the anonymous ones. */
3256 int stdarg
3257 = (TYPE_ARG_TYPES (fntype) != 0
3258 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3259 != void_type_node));
3260
3261 current_function_stdarg = stdarg;
3262
3263 /* If the reg that the virtual arg pointer will be translated into is
3264 not a fixed reg or is the stack pointer, make a copy of the virtual
3265 arg pointer, and address parms via the copy. The frame pointer is
3266 considered fixed even though it is not marked as such.
3267
3268 The second time through, simply use ap to avoid generating rtx. */
3269
3270 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3271 || ! (fixed_regs[ARG_POINTER_REGNUM]
3272 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3273 && ! second_time)
3274 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3275 else
3276 internal_arg_pointer = virtual_incoming_args_rtx;
3277 current_function_internal_arg_pointer = internal_arg_pointer;
3278
3279 stack_args_size.constant = 0;
3280 stack_args_size.var = 0;
3281
3282 /* If struct value address is treated as the first argument, make it so. */
3283 if (aggregate_value_p (DECL_RESULT (fndecl))
3284 && ! current_function_returns_pcc_struct
3285 && struct_value_incoming_rtx == 0)
3286 {
3287 tree type = build_pointer_type (TREE_TYPE (fntype));
3288
3289 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3290
3291 DECL_ARG_TYPE (function_result_decl) = type;
3292 TREE_CHAIN (function_result_decl) = fnargs;
3293 fnargs = function_result_decl;
3294 }
3295
3296 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
3297 bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));
3298
3299 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3300 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3301 #else
3302 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX);
3303 #endif
3304
3305 /* We haven't yet found an argument that we must push and pretend the
3306 caller did. */
3307 current_function_pretend_args_size = 0;
3308
3309 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3310 {
3311 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3312 struct args_size stack_offset;
3313 struct args_size arg_size;
3314 int passed_pointer = 0;
3315 int did_conversion = 0;
3316 tree passed_type = DECL_ARG_TYPE (parm);
3317 tree nominal_type = TREE_TYPE (parm);
3318
3319 /* Set LAST_NAMED if this is the last named arg before some
3320 anonymous args. We treat it as if it were anonymous too. */
3321 int last_named = ((TREE_CHAIN (parm) == 0
3322 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3323 && (stdarg || current_function_varargs));
3324
3325 if (TREE_TYPE (parm) == error_mark_node
3326 /* This can happen after weird syntax errors
3327 or if an enum type is defined among the parms. */
3328 || TREE_CODE (parm) != PARM_DECL
3329 || passed_type == NULL)
3330 {
3331 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3332 const0_rtx);
3333 TREE_USED (parm) = 1;
3334 continue;
3335 }
3336
3337 /* For a varargs.h function, save info about regs and stack space
3338 used by the individual args, not including the va_alist arg. */
3339 if (hide_last_arg && last_named)
3340 current_function_args_info = args_so_far;
3341
3342 /* Find mode of arg as it is passed, and mode of arg
3343 as it should be during execution of this function. */
3344 passed_mode = TYPE_MODE (passed_type);
3345 nominal_mode = TYPE_MODE (nominal_type);
3346
3347 /* If the parm's mode is VOID, its value doesn't matter,
3348 so avoid the usual things like emit_move_insn, which could crash. */
3349 if (nominal_mode == VOIDmode)
3350 {
3351 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3352 continue;
3353 }
3354
3355 /* If the parm is to be passed as a transparent union, use the
3356 type of the first field for the tests below. We have already
3357 verified that the modes are the same. */
3358 if (DECL_TRANSPARENT_UNION (parm)
3359 || TYPE_TRANSPARENT_UNION (passed_type))
3360 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3361
3362 /* See if this arg was passed by invisible reference. It is if
3363 it is an object whose size depends on the contents of the
3364 object itself or if the machine requires these objects be passed
3365 that way. */
3366
3367 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3368 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3369 || TREE_ADDRESSABLE (passed_type)
3370 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3371 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3372 passed_type, ! last_named)
3373 #endif
3374 )
3375 {
3376 passed_type = nominal_type = build_pointer_type (passed_type);
3377 passed_pointer = 1;
3378 passed_mode = nominal_mode = Pmode;
3379 }
3380
3381 promoted_mode = passed_mode;
3382
3383 #ifdef PROMOTE_FUNCTION_ARGS
3384 /* Compute the mode to which the arg is actually extended. */
3385 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3386 #endif
3387
3388 /* Let machine desc say which reg (if any) the parm arrives in.
3389 0 means it arrives on the stack. */
3390 #ifdef FUNCTION_INCOMING_ARG
3391 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3392 passed_type, ! last_named);
3393 #else
3394 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3395 passed_type, ! last_named);
3396 #endif
3397
3398 if (entry_parm == 0)
3399 promoted_mode = passed_mode;
3400
3401 #ifdef SETUP_INCOMING_VARARGS
3402 /* If this is the last named parameter, do any required setup for
3403 varargs or stdargs. We need to know about the case of this being an
3404 addressable type, in which case we skip the registers it
3405 would have arrived in.
3406
3407 For stdargs, LAST_NAMED will be set for two parameters, the one that
3408 is actually the last named, and the dummy parameter. We only
3409 want to do this action once.
3410
3411 Also, indicate when RTL generation is to be suppressed. */
3412 if (last_named && !varargs_setup)
3413 {
3414 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3415 current_function_pretend_args_size,
3416 second_time);
3417 varargs_setup = 1;
3418 }
3419 #endif
3420
3421 /* Determine parm's home in the stack,
3422 in case it arrives in the stack or we should pretend it did.
3423
3424 Compute the stack position and rtx where the argument arrives
3425 and its size.
3426
3427 There is one complexity here: If this was a parameter that would
3428 have been passed in registers, but wasn't only because it is
3429 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3430 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3431 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3432 0 as it was the previous time. */
3433
3434 locate_and_pad_parm (promoted_mode, passed_type,
3435 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3436 1,
3437 #else
3438 #ifdef FUNCTION_INCOMING_ARG
3439 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3440 passed_type,
3441 (! last_named
3442 || varargs_setup)) != 0,
3443 #else
3444 FUNCTION_ARG (args_so_far, promoted_mode,
3445 passed_type,
3446 ! last_named || varargs_setup) != 0,
3447 #endif
3448 #endif
3449 fndecl, &stack_args_size, &stack_offset, &arg_size);
3450
3451 if (! second_time)
3452 {
3453 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3454
3455 if (offset_rtx == const0_rtx)
3456 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3457 else
3458 stack_parm = gen_rtx (MEM, promoted_mode,
3459 gen_rtx (PLUS, Pmode,
3460 internal_arg_pointer, offset_rtx));
3461
3462 /* If this is a memory ref that contains aggregate components,
3463 mark it as such for cse and loop optimize. */
3464 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3465 }
3466
3467 /* If this parameter was passed both in registers and in the stack,
3468 use the copy on the stack. */
3469 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3470 entry_parm = 0;
3471
3472 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3473 /* If this parm was passed part in regs and part in memory,
3474 pretend it arrived entirely in memory
3475 by pushing the register-part onto the stack.
3476
3477 In the special case of a DImode or DFmode that is split,
3478 we could put it together in a pseudoreg directly,
3479 but for now that's not worth bothering with. */
3480
3481 if (entry_parm)
3482 {
3483 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3484 passed_type, ! last_named);
3485
3486 if (nregs > 0)
3487 {
3488 current_function_pretend_args_size
3489 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3490 / (PARM_BOUNDARY / BITS_PER_UNIT)
3491 * (PARM_BOUNDARY / BITS_PER_UNIT));
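	      /* Worked example of the rounding above, with hypothetical
		 values NREGS == 3, UNITS_PER_WORD == 4, PARM_BOUNDARY == 64:
		 the register part occupies 3 * 4 == 12 bytes, and

		     ((12 + 8 - 1) / 8) * 8 == 16

		 so the pretend arg area keeps the parm boundary.  */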
3492
3493 if (! second_time)
3494 move_block_from_reg (REGNO (entry_parm),
3495 validize_mem (stack_parm), nregs,
3496 int_size_in_bytes (TREE_TYPE (parm)));
3497 entry_parm = stack_parm;
3498 }
3499 }
3500 #endif
3501
3502 /* If we didn't decide this parm came in a register,
3503 by default it came on the stack. */
3504 if (entry_parm == 0)
3505 entry_parm = stack_parm;
3506
3507 /* Record permanently how this parm was passed. */
3508 if (! second_time)
3509 DECL_INCOMING_RTL (parm) = entry_parm;
3510
3511 /* If there is actually space on the stack for this parm,
3512 count it in stack_args_size; otherwise set stack_parm to 0
3513 to indicate there is no preallocated stack slot for the parm. */
3514
3515 if (entry_parm == stack_parm
3516 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3517 /* On some machines, even if a parm value arrives in a register
3518 there is still an (uninitialized) stack slot allocated for it.
3519
3520 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3521 whether this parameter already has a stack slot allocated,
3522 because an arg block exists only if current_function_args_size
3523 is larger than some threshold, and we haven't calculated that
3524 yet. So, for now, we just assume that stack slots never exist
3525 in this case. */
3526 || REG_PARM_STACK_SPACE (fndecl) > 0
3527 #endif
3528 )
3529 {
3530 stack_args_size.constant += arg_size.constant;
3531 if (arg_size.var)
3532 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3533 }
3534 else
3535 /* No stack slot was pushed for this parm. */
3536 stack_parm = 0;
3537
3538 /* Update info on where next arg arrives in registers. */
3539
3540 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3541 passed_type, ! last_named);
3542
3543 /* If this is our second time through, we are done with this parm. */
3544 if (second_time)
3545 continue;
3546
3547 /* If we can't trust the parm stack slot to be aligned enough
3548 for its ultimate type, don't use that slot after entry.
3549 We'll make another stack slot, if we need one. */
3550 {
3551 int thisparm_boundary
3552 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3553
3554 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3555 stack_parm = 0;
3556 }
3557
3558 /* If parm was passed in memory, and we need to convert it on entry,
3559 don't store it back in that same slot. */
3560 if (entry_parm != 0
3561 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3562 stack_parm = 0;
3563
3564 #if 0
3565 /* Now adjust STACK_PARM to the mode and precise location
3566 where this parameter should live during execution,
3567 if we discover that it must live in the stack during execution.
3568 To make debuggers happier on big-endian machines, we store
3569 the value in the last bytes of the space available. */
3570
3571 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3572 && stack_parm != 0)
3573 {
3574 rtx offset_rtx;
3575
3576 if (BYTES_BIG_ENDIAN
3577 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3578 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3579 - GET_MODE_SIZE (nominal_mode));
3580
3581 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3582 if (offset_rtx == const0_rtx)
3583 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3584 else
3585 stack_parm = gen_rtx (MEM, nominal_mode,
3586 gen_rtx (PLUS, Pmode,
3587 internal_arg_pointer, offset_rtx));
3588
3589 /* If this is a memory ref that contains aggregate components,
3590 mark it as such for cse and loop optimize. */
3591 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3592 }
3593 #endif /* 0 */
3594
3595 #ifdef STACK_REGS
3596 /* We need this "use" info, because the gcc-register->stack-register
3597 converter in reg-stack.c needs to know which registers are active
3598	 at the start of the function call.  The actual parameter loading
3599	 instructions may no longer be available at that point, since they
3600	 might have been optimized away.  */
3601
3602 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3603 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
3604 #endif
3605
3606 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3607 in the mode in which it arrives.
3608 STACK_PARM is an RTX for a stack slot where the parameter can live
3609 during the function (in case we want to put it there).
3610 STACK_PARM is 0 if no stack slot was pushed for it.
3611
3612 Now output code if necessary to convert ENTRY_PARM to
3613 the type in which this function declares it,
3614 and store that result in an appropriate place,
3615 which may be a pseudo reg, may be STACK_PARM,
3616 or may be a local stack slot if STACK_PARM is 0.
3617
3618 Set DECL_RTL to that place. */
3619
3620 if (nominal_mode == BLKmode)
3621 {
3622 /* If a BLKmode arrives in registers, copy it to a stack slot. */
3623 if (GET_CODE (entry_parm) == REG)
3624 {
3625 int size_stored
3626 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3627 UNITS_PER_WORD);
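	      /* A small sketch of CEIL_ROUND (defined near the top of this
		 file): a 10-byte BLKmode parm with UNITS_PER_WORD == 4 gives

		     CEIL_ROUND (10, 4) == (10 + 3) & ~3 == 12

		 i.e. three whole words, so move_block_from_reg below always
		 copies an integral number of words.  */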
3628
3629 /* Note that we will be storing an integral number of words.
3630 So we have to be careful to ensure that we allocate an
3631 integral number of words. We do this below in the
3632 assign_stack_local if space was not allocated in the argument
3633 list. If it was, this will not work if PARM_BOUNDARY is not
3634 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3635 if it becomes a problem. */
3636
3637 if (stack_parm == 0)
3638 {
3639 stack_parm
3640 = assign_stack_local (GET_MODE (entry_parm),
3641 size_stored, 0);
3642
3643 /* If this is a memory ref that contains aggregate
3644 components, mark it as such for cse and loop optimize. */
3645 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3646 }
3647
3648 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3649 abort ();
3650
3651 if (TREE_READONLY (parm))
3652 RTX_UNCHANGING_P (stack_parm) = 1;
3653
3654 move_block_from_reg (REGNO (entry_parm),
3655 validize_mem (stack_parm),
3656 size_stored / UNITS_PER_WORD,
3657 int_size_in_bytes (TREE_TYPE (parm)));
3658 }
3659 DECL_RTL (parm) = stack_parm;
3660 }
3661 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3662 && ! DECL_INLINE (fndecl))
3663 /* layout_decl may set this. */
3664 || TREE_ADDRESSABLE (parm)
3665 || TREE_SIDE_EFFECTS (parm)
3666 /* If -ffloat-store specified, don't put explicit
3667 float variables into registers. */
3668 || (flag_float_store
3669 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3670 /* Always assign pseudo to structure return or item passed
3671 by invisible reference. */
3672 || passed_pointer || parm == function_result_decl)
3673 {
3674 /* Store the parm in a pseudoregister during the function, but we
3675 may need to do it in a wider mode. */
3676
3677 register rtx parmreg;
3678 int regno, regnoi, regnor;
3679
3680 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3681
3682 promoted_nominal_mode
3683 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
3684
3685 parmreg = gen_reg_rtx (promoted_nominal_mode);
3686 REG_USERVAR_P (parmreg) = 1;
3687
3688 /* If this was an item that we received a pointer to, set DECL_RTL
3689 appropriately. */
3690 if (passed_pointer)
3691 {
3692 DECL_RTL (parm)
3693 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3694 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3695 }
3696 else
3697 DECL_RTL (parm) = parmreg;
3698
3699 /* Copy the value into the register. */
3700 if (nominal_mode != passed_mode
3701 || promoted_nominal_mode != promoted_mode)
3702 {
3703 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3704 mode, by the caller. We now have to convert it to
3705	       mode, by the caller.  We now have to convert it to
3706	       NOMINAL_MODE, if different.  However, PARMREG may be in
3707	       a different mode from NOMINAL_MODE if it is being stored
3708	       promoted.
3708
3709 If ENTRY_PARM is a hard register, it might be in a register
3710 not valid for operating in its mode (e.g., an odd-numbered
3711 register for a DFmode). In that case, moves are the only
3712	       thing valid, so we can't do a convert from there.  This
3713	       occurs when the calling sequence allows such misaligned
3714	       usage.
3715
3716 In addition, the conversion may involve a call, which could
3717 clobber parameters which haven't been copied to pseudo
3718 registers yet. Therefore, we must first copy the parm to
3719 a pseudo reg here, and save the conversion until after all
3720 parameters have been moved. */
3721
3722 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3723
3724 emit_move_insn (tempreg, validize_mem (entry_parm));
3725
3726 push_to_sequence (conversion_insns);
3727 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
3728
3729 expand_assignment (parm,
3730 make_tree (nominal_type, tempreg), 0, 0);
3731 conversion_insns = get_insns ();
3732 did_conversion = 1;
3733 end_sequence ();
3734 }
3735 else
3736 emit_move_insn (parmreg, validize_mem (entry_parm));
3737
3738 /* If we were passed a pointer but the actual value
3739 can safely live in a register, put it in one. */
3740 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3741 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3742 && ! DECL_INLINE (fndecl))
3743 /* layout_decl may set this. */
3744 || TREE_ADDRESSABLE (parm)
3745 || TREE_SIDE_EFFECTS (parm)
3746 /* If -ffloat-store specified, don't put explicit
3747 float variables into registers. */
3748 || (flag_float_store
3749 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3750 {
3751 /* We can't use nominal_mode, because it will have been set to
3752 Pmode above. We must use the actual mode of the parm. */
3753 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3754 REG_USERVAR_P (parmreg) = 1;
3755 emit_move_insn (parmreg, DECL_RTL (parm));
3756 DECL_RTL (parm) = parmreg;
3757 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3758 now the parm. */
3759 stack_parm = 0;
3760 }
3761 #ifdef FUNCTION_ARG_CALLEE_COPIES
3762 /* If we are passed an arg by reference and it is our responsibility
3763 to make a copy, do it now.
3764	 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3765 original argument, so we must recreate them in the call to
3766 FUNCTION_ARG_CALLEE_COPIES. */
3767      /* ??? Later add code to skip the copy when the argument isn't
3768	 modified. */
3769
3770 else if (passed_pointer
3771 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3772 TYPE_MODE (DECL_ARG_TYPE (parm)),
3773 DECL_ARG_TYPE (parm),
3774 ! last_named)
3775 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
3776 {
3777 rtx copy;
3778 tree type = DECL_ARG_TYPE (parm);
3779
3780 /* This sequence may involve a library call perhaps clobbering
3781 registers that haven't been copied to pseudos yet. */
3782
3783 push_to_sequence (conversion_insns);
3784
3785 if (TYPE_SIZE (type) == 0
3786 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3787 /* This is a variable sized object. */
3788 copy = gen_rtx (MEM, BLKmode,
3789 allocate_dynamic_stack_space
3790 (expr_size (parm), NULL_RTX,
3791 TYPE_ALIGN (type)));
3792 else
3793 copy = assign_stack_temp (TYPE_MODE (type),
3794 int_size_in_bytes (type), 1);
3795 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
3796
3797 store_expr (parm, copy, 0);
3798 emit_move_insn (parmreg, XEXP (copy, 0));
3799 conversion_insns = get_insns ();
3800 did_conversion = 1;
3801 end_sequence ();
3802 }
3803 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3804
3805 /* In any case, record the parm's desired stack location
3806 in case we later discover it must live in the stack.
3807
3808 If it is a COMPLEX value, store the stack location for both
3809 halves. */
3810
3811 if (GET_CODE (parmreg) == CONCAT)
3812 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
3813 else
3814 regno = REGNO (parmreg);
3815
3816 if (regno >= nparmregs)
3817 {
3818 rtx *new;
3819 int old_nparmregs = nparmregs;
3820
3821 nparmregs = regno + 5;
3822 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3823 bcopy ((char *) parm_reg_stack_loc, (char *) new,
3824 old_nparmregs * sizeof (rtx));
3825 bzero ((char *) (new + old_nparmregs),
3826 (nparmregs - old_nparmregs) * sizeof (rtx));
3827 parm_reg_stack_loc = new;
3828 }
3829
3830 if (GET_CODE (parmreg) == CONCAT)
3831 {
3832 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
3833
3834 regnor = REGNO (gen_realpart (submode, parmreg));
3835 regnoi = REGNO (gen_imagpart (submode, parmreg));
3836
3837 if (stack_parm != 0)
3838 {
3839 parm_reg_stack_loc[regnor]
3840 = gen_realpart (submode, stack_parm);
3841 parm_reg_stack_loc[regnoi]
3842 = gen_imagpart (submode, stack_parm);
3843 }
3844 else
3845 {
3846 parm_reg_stack_loc[regnor] = 0;
3847 parm_reg_stack_loc[regnoi] = 0;
3848 }
3849 }
3850 else
3851 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
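	  /* Sketch of the CONCAT case above, assuming a complex double
	     parm: PARMREG is a CONCAT of two DFmode pseudos, so the stack
	     home (when there is one) is split the same way with
	     gen_realpart/gen_imagpart and each half is recorded under the
	     pseudo number of that half.  */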
3852
3853 /* Mark the register as eliminable if we did no conversion
3854 and it was copied from memory at a fixed offset,
3855 and the arg pointer was not copied to a pseudo-reg.
3856 If the arg pointer is a pseudo reg or the offset formed
3857 an invalid address, such memory-equivalences
3858 as we make here would screw up life analysis for it. */
3859 if (nominal_mode == passed_mode
3860 && ! did_conversion
3861 && GET_CODE (entry_parm) == MEM
3862 && entry_parm == stack_parm
3863 && stack_offset.var == 0
3864 && reg_mentioned_p (virtual_incoming_args_rtx,
3865 XEXP (entry_parm, 0)))
3866 {
3867 rtx linsn = get_last_insn ();
3868
3869 /* Mark complex types separately. */
3870 if (GET_CODE (parmreg) == CONCAT)
3871 {
3872 REG_NOTES (linsn)
3873 = gen_rtx (EXPR_LIST, REG_EQUIV,
3874 parm_reg_stack_loc[regnoi], REG_NOTES (linsn));
3875
3876 /* Now search backward for where we set the real part. */
3877 for (; linsn != 0
3878 && ! reg_referenced_p (parm_reg_stack_loc[regnor],
3879 PATTERN (linsn));
3880 linsn = prev_nonnote_insn (linsn))
3881 ;
3882
3883 REG_NOTES (linsn)
3884 = gen_rtx (EXPR_LIST, REG_EQUIV,
3885 parm_reg_stack_loc[regnor], REG_NOTES (linsn));
3886 }
3887 else
3888 REG_NOTES (linsn)
3889 = gen_rtx (EXPR_LIST, REG_EQUIV,
3890 entry_parm, REG_NOTES (linsn));
3891 }
3892
3893 /* For pointer data type, suggest pointer register. */
3894 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3895 mark_reg_pointer (parmreg,
3896 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
3897 / BITS_PER_UNIT));
3898 }
3899 else
3900 {
3901 /* Value must be stored in the stack slot STACK_PARM
3902 during function execution. */
3903
3904 if (promoted_mode != nominal_mode)
3905 {
3906 /* Conversion is required. */
3907 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3908
3909 emit_move_insn (tempreg, validize_mem (entry_parm));
3910
3911 push_to_sequence (conversion_insns);
3912 entry_parm = convert_to_mode (nominal_mode, tempreg,
3913 TREE_UNSIGNED (TREE_TYPE (parm)));
3914 conversion_insns = get_insns ();
3915 did_conversion = 1;
3916 end_sequence ();
3917 }
3918
3919 if (entry_parm != stack_parm)
3920 {
3921 if (stack_parm == 0)
3922 {
3923 stack_parm
3924 = assign_stack_local (GET_MODE (entry_parm),
3925 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3926 /* If this is a memory ref that contains aggregate components,
3927 mark it as such for cse and loop optimize. */
3928 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3929 }
3930
3931 if (promoted_mode != nominal_mode)
3932 {
3933 push_to_sequence (conversion_insns);
3934 emit_move_insn (validize_mem (stack_parm),
3935 validize_mem (entry_parm));
3936 conversion_insns = get_insns ();
3937 end_sequence ();
3938 }
3939 else
3940 emit_move_insn (validize_mem (stack_parm),
3941 validize_mem (entry_parm));
3942 }
3943
3944 DECL_RTL (parm) = stack_parm;
3945 }
3946
3947 /* If this "parameter" was the place where we are receiving the
3948 function's incoming structure pointer, set up the result. */
3949 if (parm == function_result_decl)
3950 {
3951 tree result = DECL_RESULT (fndecl);
3952 tree restype = TREE_TYPE (result);
3953
3954 DECL_RTL (result)
3955 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
3956
3957 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
3958 }
3959
3960 if (TREE_THIS_VOLATILE (parm))
3961 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
3962 if (TREE_READONLY (parm))
3963 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
3964 }
3965
3966 /* Output all parameter conversion instructions (possibly including calls)
3967 now that all parameters have been copied out of hard registers. */
3968 emit_insns (conversion_insns);
3969
3970 max_parm_reg = max_reg_num ();
3971 last_parm_insn = get_last_insn ();
3972
3973 current_function_args_size = stack_args_size.constant;
3974
3975 /* Adjust function incoming argument size for alignment and
3976 minimum length. */
3977
3978 #ifdef REG_PARM_STACK_SPACE
3979 #ifndef MAYBE_REG_PARM_STACK_SPACE
3980 current_function_args_size = MAX (current_function_args_size,
3981 REG_PARM_STACK_SPACE (fndecl));
3982 #endif
3983 #endif
3984
3985 #ifdef STACK_BOUNDARY
3986 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
3987
3988 current_function_args_size
3989 = ((current_function_args_size + STACK_BYTES - 1)
3990 / STACK_BYTES) * STACK_BYTES;
3991 #endif
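  /* Worked example with hypothetical STACK_BOUNDARY == 64:
     STACK_BYTES is then 8, so an args size of 13 becomes

	 ((13 + 8 - 1) / 8) * 8 == 16

     keeping current_function_args_size stack-aligned.  */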
3992
3993 #ifdef ARGS_GROW_DOWNWARD
3994 current_function_arg_offset_rtx
3995 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
3996 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
3997 size_int (-stack_args_size.constant)),
3998 NULL_RTX, VOIDmode, 0));
3999 #else
4000 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4001 #endif
4002
4003 /* See how many bytes, if any, of its args a function should try to pop
4004 on return. */
4005
4006 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4007 current_function_args_size);
4008
4009   /* For a stdarg.h function, save info about
4010      regs and stack space used by the named args. */
4011
4012 if (!hide_last_arg)
4013 current_function_args_info = args_so_far;
4014
4015 /* Set the rtx used for the function return value. Put this in its
4016 own variable so any optimizers that need this information don't have
4017 to include tree.h. Do this here so it gets done when an inlined
4018 function gets output. */
4019
4020 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4021 }
4022 \f
4023 /* Indicate whether REGNO is an incoming argument to the current function
4024 that was promoted to a wider mode. If so, return the RTX for the
4025 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4026 that REGNO is promoted from and whether the promotion was signed or
4027 unsigned. */
4028
4029 #ifdef PROMOTE_FUNCTION_ARGS
4030
4031 rtx
4032 promoted_input_arg (regno, pmode, punsignedp)
4033 int regno;
4034 enum machine_mode *pmode;
4035 int *punsignedp;
4036 {
4037 tree arg;
4038
4039 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4040 arg = TREE_CHAIN (arg))
4041 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4042 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4043 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4044 {
4045 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4046 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4047
4048 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4049 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4050 && mode != DECL_MODE (arg))
4051 {
4052 *pmode = DECL_MODE (arg);
4053 *punsignedp = unsignedp;
4054 return DECL_INCOMING_RTL (arg);
4055 }
4056 }
4057
4058 return 0;
4059 }
4060
4061 #endif
4062 \f
4063 /* Compute the size and offset from the start of the stacked arguments for a
4064 parm passed in mode PASSED_MODE and with type TYPE.
4065
4066 INITIAL_OFFSET_PTR points to the current offset into the stacked
4067 arguments.
4068
4069 The starting offset and size for this parm are returned in *OFFSET_PTR
4070 and *ARG_SIZE_PTR, respectively.
4071
4072 IN_REGS is non-zero if the argument will be passed in registers. It will
4073 never be set if REG_PARM_STACK_SPACE is not defined.
4074
4075 FNDECL is the function in which the argument was defined.
4076
4077 There are two types of rounding that are done. The first, controlled by
4078 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4079 list to be aligned to the specific boundary (in bits). This rounding
4080 affects the initial and starting offsets, but not the argument size.
4081
4082 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4083 optionally rounds the size of the parm to PARM_BOUNDARY. The
4084 initial offset is not affected by this rounding, while the size always
4085 is and the starting offset may be. */
4086
4087 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4088    initial_offset_ptr is positive because locate_and_pad_parm's
4089    callers pass in the total size of args so far as
4090    initial_offset_ptr.  arg_size_ptr is always positive. */
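/* An illustrative trace of those conventions: if args grow downward
   and the args so far occupy 16 bytes, callers pass
   initial_offset_ptr->constant == 16; for an 8-byte parm this
   function then yields offset_ptr->constant == -24 and
   arg_size_ptr->constant == 8.  */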
4091
4092 void
4093 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4094 initial_offset_ptr, offset_ptr, arg_size_ptr)
4095 enum machine_mode passed_mode;
4096 tree type;
4097 int in_regs;
4098 tree fndecl;
4099 struct args_size *initial_offset_ptr;
4100 struct args_size *offset_ptr;
4101 struct args_size *arg_size_ptr;
4102 {
4103 tree sizetree
4104 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4105 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4106 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4108 int reg_parm_stack_space = 0;
4109
4110 #ifdef REG_PARM_STACK_SPACE
4111 /* If we have found a stack parm before we reach the end of the
4112 area reserved for registers, skip that area. */
4113 if (! in_regs)
4114 {
4115 #ifdef MAYBE_REG_PARM_STACK_SPACE
4116 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4117 #else
4118 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4119 #endif
4120 if (reg_parm_stack_space > 0)
4121 {
4122 if (initial_offset_ptr->var)
4123 {
4124 initial_offset_ptr->var
4125 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4126 size_int (reg_parm_stack_space));
4127 initial_offset_ptr->constant = 0;
4128 }
4129 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4130 initial_offset_ptr->constant = reg_parm_stack_space;
4131 }
4132 }
4133 #endif /* REG_PARM_STACK_SPACE */
4134
4135 arg_size_ptr->var = 0;
4136 arg_size_ptr->constant = 0;
4137
4138 #ifdef ARGS_GROW_DOWNWARD
4139 if (initial_offset_ptr->var)
4140 {
4141 offset_ptr->constant = 0;
4142 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4143 initial_offset_ptr->var);
4144 }
4145 else
4146 {
4147 offset_ptr->constant = - initial_offset_ptr->constant;
4148 offset_ptr->var = 0;
4149 }
4150 if (where_pad != none
4151 && (TREE_CODE (sizetree) != INTEGER_CST
4152 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4153 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4154 SUB_PARM_SIZE (*offset_ptr, sizetree);
4155 if (where_pad != downward)
4156 pad_to_arg_alignment (offset_ptr, boundary);
4157 if (initial_offset_ptr->var)
4158 {
4159 arg_size_ptr->var = size_binop (MINUS_EXPR,
4160 size_binop (MINUS_EXPR,
4161 integer_zero_node,
4162 initial_offset_ptr->var),
4163 offset_ptr->var);
4164 }
4165 else
4166 {
4167 arg_size_ptr->constant = (- initial_offset_ptr->constant -
4168 offset_ptr->constant);
4169 }
4170 #else /* !ARGS_GROW_DOWNWARD */
4171 pad_to_arg_alignment (initial_offset_ptr, boundary);
4172 *offset_ptr = *initial_offset_ptr;
4173
4174 #ifdef PUSH_ROUNDING
4175 if (passed_mode != BLKmode)
4176 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4177 #endif
4178
4179   /* Pad_below needs the pre-rounded size to know how much to pad
4180      below, so this must be done before rounding up. */
4181 if (where_pad == downward
4182 /* However, BLKmode args passed in regs have their padding done elsewhere.
4183 The stack slot must be able to hold the entire register. */
4184 && !(in_regs && passed_mode == BLKmode))
4185 pad_below (offset_ptr, passed_mode, sizetree);
4186
4187 if (where_pad != none
4188 && (TREE_CODE (sizetree) != INTEGER_CST
4189 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4190 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4191
4192 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4193 #endif /* ARGS_GROW_DOWNWARD */
4194 }
4195
4196 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4197 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4198
4199 static void
4200 pad_to_arg_alignment (offset_ptr, boundary)
4201 struct args_size *offset_ptr;
4202 int boundary;
4203 {
4204 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4205
4206 if (boundary > BITS_PER_UNIT)
4207 {
4208 if (offset_ptr->var)
4209 {
4210 offset_ptr->var =
4211 #ifdef ARGS_GROW_DOWNWARD
4212 round_down
4213 #else
4214 round_up
4215 #endif
4216	    (ARGS_SIZE_TREE (*offset_ptr),
4217	     boundary_in_bytes);
4218 offset_ptr->constant = 0; /*?*/
4219 }
4220 else
4221 offset_ptr->constant =
4222 #ifdef ARGS_GROW_DOWNWARD
4223 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4224 #else
4225 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4226 #endif
4227 }
4228 }
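/* Worked example for pad_to_arg_alignment, with BOUNDARY == 32 bits
   (4 bytes): a constant offset of 5 becomes

       CEIL_ROUND (5, 4) == (5 + 3) & ~3 == 8

   when args grow upward, or FLOOR_ROUND (-5, 4) == -8 in the
   downward case, matching the macros at the top of this file.  */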
4229
4230 static void
4231 pad_below (offset_ptr, passed_mode, sizetree)
4232 struct args_size *offset_ptr;
4233 enum machine_mode passed_mode;
4234 tree sizetree;
4235 {
4236 if (passed_mode != BLKmode)
4237 {
4238 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4239 offset_ptr->constant
4240 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4241 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4242 - GET_MODE_SIZE (passed_mode));
4243 }
4244 else
4245 {
4246 if (TREE_CODE (sizetree) != INTEGER_CST
4247 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4248 {
4249 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4250 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4251 /* Add it in. */
4252 ADD_PARM_SIZE (*offset_ptr, s2);
4253 SUB_PARM_SIZE (*offset_ptr, sizetree);
4254 }
4255 }
4256 }
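/* A small trace of pad_below, assuming PARM_BOUNDARY == 32: an
   HImode parm (16 bits, 2 bytes) gets

       (((16 + 31) / 32) * 32 / 8) - 2 == 4 - 2 == 2

   bytes of downward padding, so the value sits at the top of its
   4-byte slot.  */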
4257
4258 static tree
4259 round_down (value, divisor)
4260 tree value;
4261 int divisor;
4262 {
4263 return size_binop (MULT_EXPR,
4264 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4265 size_int (divisor));
4266 }
4267 \f
4268 /* Walk the tree of blocks describing the binding levels within a function
4269 and warn about uninitialized variables.
4270 This is done after calling flow_analysis and before global_alloc
4271 clobbers the pseudo-regs to hard regs. */
4272
4273 void
4274 uninitialized_vars_warning (block)
4275 tree block;
4276 {
4277 register tree decl, sub;
4278 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4279 {
4280 if (TREE_CODE (decl) == VAR_DECL
4281 	/* These warnings are unreliable for aggregates
4282 because assigning the fields one by one can fail to convince
4283 flow.c that the entire aggregate was initialized.
4284 Unions are troublesome because members may be shorter. */
4285 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4286 && DECL_RTL (decl) != 0
4287 && GET_CODE (DECL_RTL (decl)) == REG
4288 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4289 warning_with_decl (decl,
4290 "`%s' might be used uninitialized in this function");
4291 if (TREE_CODE (decl) == VAR_DECL
4292 && DECL_RTL (decl) != 0
4293 && GET_CODE (DECL_RTL (decl)) == REG
4294 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4295 warning_with_decl (decl,
4296 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4297 }
4298 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4299 uninitialized_vars_warning (sub);
4300 }
4301
4302 /* Do the appropriate part of uninitialized_vars_warning
4303 but for arguments instead of local variables. */
4304
4305 void
4306 setjmp_args_warning ()
4307 {
4308 register tree decl;
4309 for (decl = DECL_ARGUMENTS (current_function_decl);
4310 decl; decl = TREE_CHAIN (decl))
4311 if (DECL_RTL (decl) != 0
4312 && GET_CODE (DECL_RTL (decl)) == REG
4313 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4314 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4315 }
4316
4317 /* If this function calls setjmp, put all vars into the stack
4318 unless they were declared `register'. */
4319
4320 void
4321 setjmp_protect (block)
4322 tree block;
4323 {
4324 register tree decl, sub;
4325 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4326 if ((TREE_CODE (decl) == VAR_DECL
4327 || TREE_CODE (decl) == PARM_DECL)
4328 && DECL_RTL (decl) != 0
4329 && GET_CODE (DECL_RTL (decl)) == REG
4330 	/* If this variable came from an inline function, it must be
4331 	   that its life doesn't overlap the setjmp.  If there was a
4332 	   setjmp in the function, it would already be in memory.  We
4333 	   must exclude such variables because their DECL_RTL might be
4334 set to strange things such as virtual_stack_vars_rtx. */
4335 && ! DECL_FROM_INLINE (decl)
4336 && (
4337 #ifdef NON_SAVING_SETJMP
4338 /* If longjmp doesn't restore the registers,
4339 don't put anything in them. */
4340 NON_SAVING_SETJMP
4341 ||
4342 #endif
4343 ! DECL_REGISTER (decl)))
4344 put_var_into_stack (decl);
4345 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4346 setjmp_protect (sub);
4347 }
4348 \f
4349 /* Like the previous function, but for args instead of local variables. */
4350
4351 void
4352 setjmp_protect_args ()
4353 {
4354 register tree decl, sub;
4355 for (decl = DECL_ARGUMENTS (current_function_decl);
4356 decl; decl = TREE_CHAIN (decl))
4357 if ((TREE_CODE (decl) == VAR_DECL
4358 || TREE_CODE (decl) == PARM_DECL)
4359 && DECL_RTL (decl) != 0
4360 && GET_CODE (DECL_RTL (decl)) == REG
4361 && (
4362 /* If longjmp doesn't restore the registers,
4363 don't put anything in them. */
4364 #ifdef NON_SAVING_SETJMP
4365 NON_SAVING_SETJMP
4366 ||
4367 #endif
4368 ! DECL_REGISTER (decl)))
4369 put_var_into_stack (decl);
4370 }
4371 \f
4372 /* Return the context-pointer register corresponding to DECL,
4373 or 0 if it does not need one. */
4374
4375 rtx
4376 lookup_static_chain (decl)
4377 tree decl;
4378 {
4379 tree context = decl_function_context (decl);
4380 tree link;
4381
4382 if (context == 0
4383 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4384 return 0;
4385
4386 /* We treat inline_function_decl as an alias for the current function
4387 because that is the inline function whose vars, types, etc.
4388 are being merged into the current function.
4389 See expand_inline_function. */
4390 if (context == current_function_decl || context == inline_function_decl)
4391 return virtual_stack_vars_rtx;
4392
4393 for (link = context_display; link; link = TREE_CHAIN (link))
4394 if (TREE_PURPOSE (link) == context)
4395 return RTL_EXPR_RTL (TREE_VALUE (link));
4396
4397 abort ();
4398 }
4399 \f
4400 /* Convert a stack slot address ADDR for variable VAR
4401 (from a containing function)
4402 into an address valid in this function (using a static chain). */
4403
4404 rtx
4405 fix_lexical_addr (addr, var)
4406 rtx addr;
4407 tree var;
4408 {
4409 rtx basereg;
4410 int displacement;
4411 tree context = decl_function_context (var);
4412 struct function *fp;
4413 rtx base = 0;
4414
4415 /* If this is the present function, we need not do anything. */
4416 if (context == current_function_decl || context == inline_function_decl)
4417 return addr;
4418
4419 for (fp = outer_function_chain; fp; fp = fp->next)
4420 if (fp->decl == context)
4421 break;
4422
4423 if (fp == 0)
4424 abort ();
4425
4426 /* Decode given address as base reg plus displacement. */
4427 if (GET_CODE (addr) == REG)
4428 basereg = addr, displacement = 0;
4429 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4430 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4431 else
4432 abort ();
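  /* E.g. (illustrative): an address of the form

	 (plus (reg fp) (const_int 8))

     decodes to BASEREG == the frame pointer rtx with
     DISPLACEMENT == 8, and a bare (reg fp) decodes with
     DISPLACEMENT == 0; anything else is not a stack slot address we
     can handle, hence the abort above.  */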
4433
4434 /* We accept vars reached via the containing function's
4435 incoming arg pointer and via its stack variables pointer. */
4436 if (basereg == fp->internal_arg_pointer)
4437 {
4438 /* If reached via arg pointer, get the arg pointer value
4439 out of that function's stack frame.
4440
4441 There are two cases: If a separate ap is needed, allocate a
4442 slot in the outer function for it and dereference it that way.
4443 This is correct even if the real ap is actually a pseudo.
4444 Otherwise, just adjust the offset from the frame pointer to
4445 compensate. */
4446
4447 #ifdef NEED_SEPARATE_AP
4448 rtx addr;
4449
4450 if (fp->arg_pointer_save_area == 0)
4451 fp->arg_pointer_save_area
4452 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4453
4454 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4455 addr = memory_address (Pmode, addr);
4456
4457 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4458 #else
4459 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4460 base = lookup_static_chain (var);
4461 #endif
4462 }
4463
4464 else if (basereg == virtual_stack_vars_rtx)
4465 {
4466 /* This is the same code as lookup_static_chain, duplicated here to
4467 avoid an extra call to decl_function_context. */
4468 tree link;
4469
4470 for (link = context_display; link; link = TREE_CHAIN (link))
4471 if (TREE_PURPOSE (link) == context)
4472 {
4473 base = RTL_EXPR_RTL (TREE_VALUE (link));
4474 break;
4475 }
4476 }
4477
4478 if (base == 0)
4479 abort ();
4480
4481 /* Use same offset, relative to appropriate static chain or argument
4482 pointer. */
4483 return plus_constant (base, displacement);
4484 }
4485 \f
4486 /* Return the address of the trampoline for entering nested fn FUNCTION.
4487 If necessary, allocate a trampoline (in the stack frame)
4488 and emit rtl to initialize its contents (at entry to this function). */
4489
4490 rtx
4491 trampoline_address (function)
4492 tree function;
4493 {
4494 tree link;
4495 tree rtlexp;
4496 rtx tramp;
4497 struct function *fp;
4498 tree fn_context;
4499
4500 /* Find an existing trampoline and return it. */
4501 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4502 if (TREE_PURPOSE (link) == function)
4503 return
4504 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4505
4506 for (fp = outer_function_chain; fp; fp = fp->next)
4507 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4508 if (TREE_PURPOSE (link) == function)
4509 {
4510 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4511 function);
4512 return round_trampoline_addr (tramp);
4513 }
4514
4515 /* None exists; we must make one. */
4516
4517 /* Find the `struct function' for the function containing FUNCTION. */
4518 fp = 0;
4519 fn_context = decl_function_context (function);
4520 if (fn_context != current_function_decl)
4521 for (fp = outer_function_chain; fp; fp = fp->next)
4522 if (fp->decl == fn_context)
4523 break;
4524
4525 /* Allocate run-time space for this trampoline
4526 (usually in the defining function's stack frame). */
4527 #ifdef ALLOCATE_TRAMPOLINE
4528 tramp = ALLOCATE_TRAMPOLINE (fp);
4529 #else
4530 /* If rounding needed, allocate extra space
4531 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4532 #ifdef TRAMPOLINE_ALIGNMENT
4533 #define TRAMPOLINE_REAL_SIZE \
4534 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4535 #else
4536 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4537 #endif
4538 if (fp != 0)
4539 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4540 else
4541 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4542 #endif
4543
4544 /* Record the trampoline for reuse and note it for later initialization
4545 by expand_function_end. */
4546 if (fp != 0)
4547 {
4548 push_obstacks (fp->function_maybepermanent_obstack,
4549 fp->function_maybepermanent_obstack);
4550 rtlexp = make_node (RTL_EXPR);
4551 RTL_EXPR_RTL (rtlexp) = tramp;
4552 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4553 pop_obstacks ();
4554 }
4555 else
4556 {
4557 /* Make the RTL_EXPR node temporary, not momentary, so that the
4558 trampoline_list doesn't become garbage. */
4559 int momentary = suspend_momentary ();
4560 rtlexp = make_node (RTL_EXPR);
4561 resume_momentary (momentary);
4562
4563 RTL_EXPR_RTL (rtlexp) = tramp;
4564 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4565 }
4566
4567 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4568 return round_trampoline_addr (tramp);
4569 }
4570
4571 /* Given a trampoline address,
4572    round it to a multiple of TRAMPOLINE_ALIGNMENT. */
4573
4574 static rtx
4575 round_trampoline_addr (tramp)
4576 rtx tramp;
4577 {
4578 #ifdef TRAMPOLINE_ALIGNMENT
4579 /* Round address up to desired boundary. */
4580 rtx temp = gen_reg_rtx (Pmode);
4581 temp = expand_binop (Pmode, add_optab, tramp,
4582 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4583 temp, 0, OPTAB_LIB_WIDEN);
4584 tramp = expand_binop (Pmode, and_optab, temp,
4585 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4586 temp, 0, OPTAB_LIB_WIDEN);
4587 #endif
4588 return tramp;
4589 }
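/* Sketch of that rounding, assuming TRAMPOLINE_ALIGNMENT == 32
   (4 bytes): an address of 0x1001 becomes

       (0x1001 + 3) & -4 == 0x1004

   via the add_optab/and_optab pair, -4 being the mask ~3.  */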
4590 \f
4591 /* The functions identify_blocks and reorder_blocks provide a way to
4592 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4593 duplicate portions of the RTL code. Call identify_blocks before
4594 changing the RTL, and call reorder_blocks after. */
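/* A schematic example of this protocol (not from the original
   sources): for blocks B0 containing B1 and B2, identify_blocks
   returns the vector [B0, B1, B2] and stamps each
   NOTE_INSN_BLOCK_BEG/END with its index; after the RTL has been
   reshuffled, reorder_blocks walks the new insn chain and rebuilds
   the BLOCK tree to match, copying any block it meets twice.  */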
4595
4596 /* Put all this function's BLOCK nodes including those that are chained
4597 onto the first block into a vector, and return it.
4598 Also store in each NOTE for the beginning or end of a block
4599 the index of that block in the vector.
4600 The arguments are BLOCK, the chain of top-level blocks of the function,
4601 and INSNS, the insn chain of the function. */
4602
4603 tree *
4604 identify_blocks (block, insns)
4605 tree block;
4606 rtx insns;
4607 {
4608 int n_blocks;
4609 tree *block_vector;
4610 int *block_stack;
4611 int depth = 0;
4612 int next_block_number = 1;
4613 int current_block_number = 1;
4614 rtx insn;
4615
4616 if (block == 0)
4617 return 0;
4618
4619 n_blocks = all_blocks (block, 0);
4620 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4621 block_stack = (int *) alloca (n_blocks * sizeof (int));
4622
4623 all_blocks (block, block_vector);
4624
4625 for (insn = insns; insn; insn = NEXT_INSN (insn))
4626 if (GET_CODE (insn) == NOTE)
4627 {
4628 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4629 {
4630 block_stack[depth++] = current_block_number;
4631 current_block_number = next_block_number;
4632 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4633 }
4634 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4635 {
4636 current_block_number = block_stack[--depth];
4637 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4638 }
4639 }
4640
4641 if (n_blocks != next_block_number)
4642 abort ();
4643
4644 return block_vector;
4645 }
4646
4647 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4648 and a revised instruction chain, rebuild the tree structure
4649 of BLOCK nodes to correspond to the new order of RTL.
4650 The new block tree is inserted below TOP_BLOCK.
4651 Returns the current top-level block. */
4652
4653 tree
4654 reorder_blocks (block_vector, block, insns)
4655 tree *block_vector;
4656 tree block;
4657 rtx insns;
4658 {
4659 tree current_block = block;
4660 rtx insn;
4661
4662 if (block_vector == 0)
4663 return block;
4664
4665   /* Prune the old trees away, so that they don't get in the way. */
4666 BLOCK_SUBBLOCKS (current_block) = 0;
4667 BLOCK_CHAIN (current_block) = 0;
4668
4669 for (insn = insns; insn; insn = NEXT_INSN (insn))
4670 if (GET_CODE (insn) == NOTE)
4671 {
4672 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4673 {
4674 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4675 /* If we have seen this block before, copy it. */
4676 if (TREE_ASM_WRITTEN (block))
4677 block = copy_node (block);
4678 BLOCK_SUBBLOCKS (block) = 0;
4679 TREE_ASM_WRITTEN (block) = 1;
4680 BLOCK_SUPERCONTEXT (block) = current_block;
4681 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4682 BLOCK_SUBBLOCKS (current_block) = block;
4683 current_block = block;
4684 NOTE_SOURCE_FILE (insn) = 0;
4685 }
4686 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4687 {
4688 BLOCK_SUBBLOCKS (current_block)
4689 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4690 current_block = BLOCK_SUPERCONTEXT (current_block);
4691 NOTE_SOURCE_FILE (insn) = 0;
4692 }
4693 }
4694
4695 BLOCK_SUBBLOCKS (current_block)
4696 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4697 return current_block;
4698 }
4699
4700 /* Reverse the order of elements in the chain T of blocks,
4701 and return the new head of the chain (old last element). */
4702
4703 static tree
4704 blocks_nreverse (t)
4705 tree t;
4706 {
4707 register tree prev = 0, decl, next;
4708 for (decl = t; decl; decl = next)
4709 {
4710 next = BLOCK_CHAIN (decl);
4711 BLOCK_CHAIN (decl) = prev;
4712 prev = decl;
4713 }
4714 return prev;
4715 }
4716
4717 /* Count the subblocks of the list starting with BLOCK, and list them
4718 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
4719 blocks. */
4720
4721 static int
4722 all_blocks (block, vector)
4723 tree block;
4724 tree *vector;
4725 {
4726 int n_blocks = 0;
4727
4728 while (block)
4729 {
4730 TREE_ASM_WRITTEN (block) = 0;
4731
4732 /* Record this block. */
4733 if (vector)
4734 vector[n_blocks] = block;
4735
4736 ++n_blocks;
4737
4738 /* Record the subblocks, and their subblocks... */
4739 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4740 vector ? vector + n_blocks : 0);
4741 block = BLOCK_CHAIN (block);
4742 }
4743
4744 return n_blocks;
4745 }
4746 \f
4747 /* Build bytecode call descriptor for function SUBR. */
4748
4749 rtx
4750 bc_build_calldesc (subr)
4751 tree subr;
4752 {
4753 tree calldesc = 0, arg;
4754 int nargs = 0;
4755
4756 /* Build the argument description vector in reverse order. */
4757 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4758 nargs = 0;
4759
4760 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4761 {
4762 ++nargs;
4763
4764 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4765 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4766 }
4767
4768 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4769
4770 /* Prepend the function's return type. */
4771 calldesc = tree_cons ((tree) 0,
4772 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4773 calldesc);
4774
4775 calldesc = tree_cons ((tree) 0,
4776 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4777 calldesc);
4778
4779 /* Prepend the arg count. */
4780 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
4781
4782 /* Output the call description vector and get its address. */
4783 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4784 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4785 build_index_type (build_int_2 (nargs * 2, 0)));
4786
4787 return output_constant_def (calldesc);
4788 }
4789
4790
4791 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4792 and initialize static variables for generating RTL for the statements
4793 of the function. */
4794
4795 void
4796 init_function_start (subr, filename, line)
4797 tree subr;
4798 char *filename;
4799 int line;
4800 {
4801 char *junk;
4802
4803 if (output_bytecode)
4804 {
4805 this_function_decl = subr;
4806 this_function_calldesc = bc_build_calldesc (subr);
4807 local_vars_size = 0;
4808 stack_depth = 0;
4809 max_stack_depth = 0;
4810 stmt_expr_depth = 0;
4811 return;
4812 }
4813
4814 init_stmt_for_function ();
4815
4816 cse_not_expected = ! optimize;
4817
4818 /* Caller save not needed yet. */
4819 caller_save_needed = 0;
4820
4821 /* No stack slots have been made yet. */
4822 stack_slot_list = 0;
4823
4824 /* There is no stack slot for handling nonlocal gotos. */
4825 nonlocal_goto_handler_slot = 0;
4826 nonlocal_goto_stack_level = 0;
4827
4828 /* No labels have been declared for nonlocal use. */
4829 nonlocal_labels = 0;
4830
4831 /* No function calls so far in this function. */
4832 function_call_count = 0;
4833
4834 /* No parm regs have been allocated.
4835 (This is important for output_inline_function.) */
4836 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4837
4838 /* Initialize the RTL mechanism. */
4839 init_emit ();
4840
4841 /* Initialize the queue of pending postincrement and postdecrements,
4842 and some other info in expr.c. */
4843 init_expr ();
4844
4845 /* We haven't done register allocation yet. */
4846 reg_renumber = 0;
4847
4848 init_const_rtx_hash_table ();
4849
4850 current_function_name = (*decl_printable_name) (subr, &junk);
4851
4852 /* Nonzero if this is a nested function that uses a static chain. */
4853
4854 current_function_needs_context
4855 = (decl_function_context (current_function_decl) != 0
4856 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
4857
4858 /* Set if a call to setjmp is seen. */
4859 current_function_calls_setjmp = 0;
4860
4861 /* Set if a call to longjmp is seen. */
4862 current_function_calls_longjmp = 0;
4863
4864 current_function_calls_alloca = 0;
4865 current_function_has_nonlocal_label = 0;
4866 current_function_has_nonlocal_goto = 0;
4867 current_function_contains_functions = 0;
4868
4869 current_function_returns_pcc_struct = 0;
4870 current_function_returns_struct = 0;
4871 current_function_epilogue_delay_list = 0;
4872 current_function_uses_const_pool = 0;
4873 current_function_uses_pic_offset_table = 0;
4874
4875 /* We have not yet needed to make a label to jump to for tail-recursion. */
4876 tail_recursion_label = 0;
4877
4878 /* We haven't had a need to make a save area for ap yet. */
4879
4880 arg_pointer_save_area = 0;
4881
4882 /* No stack slots allocated yet. */
4883 frame_offset = 0;
4884
4885 /* No SAVE_EXPRs in this function yet. */
4886 save_expr_regs = 0;
4887
4888 /* No RTL_EXPRs in this function yet. */
4889 rtl_expr_chain = 0;
4890
4891 /* Set up to allocate temporaries. */
4892 init_temp_slots ();
4893
4894   /* Within function body, compute a type's size as soon as it is laid out. */
4895 immediate_size_expand++;
4896
4897 /* We haven't made any trampolines for this function yet. */
4898 trampoline_list = 0;
4899
4900 init_pending_stack_adjust ();
4901 inhibit_defer_pop = 0;
4902
4903 current_function_outgoing_args_size = 0;
4904
4905 /* Prevent ever trying to delete the first instruction of a function.
4906 Also tell final how to output a linenum before the function prologue. */
4907 emit_line_note (filename, line);
4908
4909 /* Make sure first insn is a note even if we don't want linenums.
4910 This makes sure the first insn will never be deleted.
4911 Also, final expects a note to appear there. */
4912 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4913
4914 /* Set flags used by final.c. */
4915 if (aggregate_value_p (DECL_RESULT (subr)))
4916 {
4917 #ifdef PCC_STATIC_STRUCT_RETURN
4918 current_function_returns_pcc_struct = 1;
4919 #endif
4920 current_function_returns_struct = 1;
4921 }
4922
4923 /* Warn if this value is an aggregate type,
4924 regardless of which calling convention we are using for it. */
4925 if (warn_aggregate_return
4926 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
4927 warning ("function returns an aggregate");
4928
4929 current_function_returns_pointer
4930 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
4931
4932 /* Indicate that we need to distinguish between the return value of the
4933 present function and the return value of a function being called. */
4934 rtx_equal_function_value_matters = 1;
4935
4936 /* Indicate that we have not instantiated virtual registers yet. */
4937 virtuals_instantiated = 0;
4938
4939 /* Indicate we have no need of a frame pointer yet. */
4940 frame_pointer_needed = 0;
4941
4942 /* By default assume not varargs or stdarg. */
4943 current_function_varargs = 0;
4944 current_function_stdarg = 0;
4945 }
4946
4947 /* Indicate that the current function uses extra args
4948 not explicitly mentioned in the argument list in any fashion. */
4949
4950 void
4951 mark_varargs ()
4952 {
4953 current_function_varargs = 1;
4954 }
4955
4956 /* Expand a call to __main at the beginning of a possible main function. */
4957
4958 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
4959 #undef HAS_INIT_SECTION
4960 #define HAS_INIT_SECTION
4961 #endif
4962
4963 void
4964 expand_main_function ()
4965 {
4966 if (!output_bytecode)
4967 {
4968 /* The zero below avoids a possible parse error */
4969 0;
4970 #if !defined (HAS_INIT_SECTION)
4971 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
4972 VOIDmode, 0);
4973 #endif /* not HAS_INIT_SECTION */
4974 }
4975 }
4976 \f
4977 extern struct obstack permanent_obstack;
4978
4979 /* Expand start of bytecode function. See comment at
4980 expand_function_start below for details. */
4981
4982 void
4983 bc_expand_function_start (subr, parms_have_cleanups)
4984 tree subr;
4985 int parms_have_cleanups;
4986 {
4987 char label[20], *name;
4988 static int nlab;
4989 tree thisarg;
4990 int argsz;
4991
4992 if (TREE_PUBLIC (subr))
4993 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
4994
4995 #ifdef DEBUG_PRINT_CODE
4996 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
4997 #endif
4998
4999 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
5000 {
5001 if (DECL_RTL (thisarg))
5002 abort (); /* Should be NULL here I think. */
5003 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
5004 {
5005 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5006 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
5007 }
5008 else
5009 {
5010 /* Variable-sized objects are pointers to their storage. */
5011 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5012 argsz += POINTER_SIZE;
5013 }
5014 }
5015
5016 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
5017
5018 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
5019
5020 ++nlab;
5021 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
5022 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
5023 this_function_bytecode =
5024 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
5025 }
5026
5027
5028 /* Expand end of bytecode function.  See the comment of
5029    expand_function_end (), below, for details. */
5030
5031 void
5032 bc_expand_function_end ()
5033 {
5034 char *ptrconsts;
5035
5036 expand_null_return ();
5037
5038   /* Emit any fixup code.  This must be done before the call
5039      to BC_END_FUNCTION (), since that will cause the bytecode
5040 segment to be finished off and closed. */
5041
5042 expand_fixups (NULL_RTX);
5043
5044 ptrconsts = bc_end_function ();
5045
5046 bc_align_const (2 /* INT_ALIGN */);
5047
5048   /* If this changes, also make sure to change bc-interp.h! */
5049
5050 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
5051 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
5052 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
5053 bc_emit_const_labelref (this_function_bytecode, 0);
5054 bc_emit_const_labelref (ptrconsts, 0);
5055 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
5056 }
5057
5058
5059 /* Start the RTL for a new function, and set variables used for
5060 emitting RTL.
5061 SUBR is the FUNCTION_DECL node.
5062 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5063 the function's parameters, which must be run at any return statement. */
5064
5065 void
5066 expand_function_start (subr, parms_have_cleanups)
5067 tree subr;
5068 int parms_have_cleanups;
5069 {
5070 register int i;
5071 tree tem;
5072 rtx last_ptr;
5073
5074 if (output_bytecode)
5075 {
5076 bc_expand_function_start (subr, parms_have_cleanups);
5077 return;
5078 }
5079
5080 /* Make sure volatile mem refs aren't considered
5081 valid operands of arithmetic insns. */
5082 init_recog_no_volatile ();
5083
5084 /* If function gets a static chain arg, store it in the stack frame.
5085 Do this first, so it gets the first stack slot offset. */
5086 if (current_function_needs_context)
5087 {
5088 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5089
5090 #ifdef SMALL_REGISTER_CLASSES
5091 /* Delay copying static chain if it is not a register to avoid
5092 conflicts with regs used for parameters. */
5093 if (GET_CODE (static_chain_incoming_rtx) == REG)
5094 #endif
5095 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5096 }
5097
5098 /* If the parameters of this function need cleaning up, get a label
5099 for the beginning of the code which executes those cleanups. This must
5100 be done before doing anything with return_label. */
5101 if (parms_have_cleanups)
5102 cleanup_label = gen_label_rtx ();
5103 else
5104 cleanup_label = 0;
5105
5106 /* Make the label for return statements to jump to, if this machine
5107 does not have a one-instruction return and uses an epilogue,
5108 or if it returns a structure, or if it has parm cleanups. */
5109 #ifdef HAVE_return
5110 if (cleanup_label == 0 && HAVE_return
5111 && ! current_function_returns_pcc_struct
5112 && ! (current_function_returns_struct && ! optimize))
5113 return_label = 0;
5114 else
5115 return_label = gen_label_rtx ();
5116 #else
5117 return_label = gen_label_rtx ();
5118 #endif
5119
5120 /* Initialize rtx used to return the value. */
5121 /* Do this before assign_parms so that we copy the struct value address
5122 before any library calls that assign parms might generate. */
5123
5124 /* Decide whether to return the value in memory or in a register. */
5125 if (aggregate_value_p (DECL_RESULT (subr)))
5126 {
5127 /* Returning something that won't go in a register. */
5128 register rtx value_address = 0;
5129
5130 #ifdef PCC_STATIC_STRUCT_RETURN
5131 if (current_function_returns_pcc_struct)
5132 {
5133 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5134 value_address = assemble_static_space (size);
5135 }
5136 else
5137 #endif
5138 {
5139 /* Expect to be passed the address of a place to store the value.
5140 If it is passed as an argument, assign_parms will take care of
5141 it. */
5142 if (struct_value_incoming_rtx)
5143 {
5144 value_address = gen_reg_rtx (Pmode);
5145 emit_move_insn (value_address, struct_value_incoming_rtx);
5146 }
5147 }
5148 if (value_address)
5149 {
5150 DECL_RTL (DECL_RESULT (subr))
5151 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
5152 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5153 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5154 }
5155 }
5156 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5157 /* If return mode is void, this decl rtl should not be used. */
5158 DECL_RTL (DECL_RESULT (subr)) = 0;
5159 else if (parms_have_cleanups)
5160 {
5161      /* If the function will end with cleanup code for parms,
5162         compute the return value into a pseudo reg,
5163         which we will copy into the true return register
5164         after the cleanups are done.  */
5165
5166 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5167
5168 #ifdef PROMOTE_FUNCTION_RETURN
5169 tree type = TREE_TYPE (DECL_RESULT (subr));
5170 int unsignedp = TREE_UNSIGNED (type);
5171
5172 mode = promote_mode (type, mode, &unsignedp, 1);
5173 #endif
5174
5175 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5176 }
5177 else
5178 /* Scalar, returned in a register. */
5179 {
5180 #ifdef FUNCTION_OUTGOING_VALUE
5181 DECL_RTL (DECL_RESULT (subr))
5182 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5183 #else
5184 DECL_RTL (DECL_RESULT (subr))
5185 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5186 #endif
5187
5188 /* Mark this reg as the function's return value. */
5189 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5190 {
5191 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5192 /* Needed because we may need to move this to memory
5193 in case it's a named return value whose address is taken. */
5194 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5195 }
5196 }
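
  /* To illustrate the four cases just handled (a sketch only; the
     hypothetical target here returns SImode values in hard reg 0,
     which is an assumption for the example, not something this file
     defines):

	struct s f ();	aggregate: DECL_RTL is a MEM at the address
			supplied by the caller, or at static space for
			PCC-style struct returns.
	void f ();	VOIDmode: DECL_RTL is 0 and must not be used.
	int f ();	with parm cleanups: DECL_RTL is a fresh pseudo,
			copied into the hard return register only after
			the cleanups run (possibly in a mode widened by
			PROMOTE_FUNCTION_RETURN).
	int f ();	otherwise: DECL_RTL is (reg:SI 0) itself, with
			REG_FUNCTION_VALUE_P set on it.  */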
5197
5198 /* Initialize rtx for parameters and local variables.
5199 In some cases this requires emitting insns. */
5200
5201 assign_parms (subr, 0);
5202
5203 #ifdef SMALL_REGISTER_CLASSES
5204 /* Copy the static chain now if it wasn't a register. The delay is to
5205 avoid conflicts with the parameter passing registers. */
5206
5207 if (current_function_needs_context)
5208 if (GET_CODE (static_chain_incoming_rtx) != REG)
5209 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5210 #endif
5211
5212 /* The following was moved from init_function_start.
5213 The move is supposed to make sdb output more accurate. */
5214 /* Indicate the beginning of the function body,
5215 as opposed to parm setup. */
5216 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5217
5218 /* If doing stupid allocation, mark parms as born here. */
5219
5220 if (GET_CODE (get_last_insn ()) != NOTE)
5221 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5222 parm_birth_insn = get_last_insn ();
5223
5224 if (obey_regdecls)
5225 {
5226 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5227 use_variable (regno_reg_rtx[i]);
5228
5229 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5230 use_variable (current_function_internal_arg_pointer);
5231 }
5232
5233 context_display = 0;
5234 if (current_function_needs_context)
5235 {
5236 /* Fetch static chain values for containing functions. */
5237 tem = decl_function_context (current_function_decl);
5238 /* If not doing stupid register allocation copy the static chain
5239 pointer into a pseudo. If we have small register classes, copy
5240 the value from memory if static_chain_incoming_rtx is a REG. If
5241 we do stupid register allocation, we use the stack address
5242 generated above. */
5243 if (tem && ! obey_regdecls)
5244 {
5245 #ifdef SMALL_REGISTER_CLASSES
5246 /* If the static chain originally came in a register, put it back
5247 there, then move it out in the next insn. The reason for
5248 this peculiar code is to satisfy function integration. */
5249 if (GET_CODE (static_chain_incoming_rtx) == REG)
5250 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5251 #endif
5252
5253 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5254 }
5255
5256 while (tem)
5257 {
5258 tree rtlexp = make_node (RTL_EXPR);
5259
5260 RTL_EXPR_RTL (rtlexp) = last_ptr;
5261 context_display = tree_cons (tem, rtlexp, context_display);
5262 tem = decl_function_context (tem);
5263 if (tem == 0)
5264 break;
5265 /* Chain thru stack frames, assuming pointer to next lexical frame
5266 is found at the place we always store it. */
5267 #ifdef FRAME_GROWS_DOWNWARD
5268 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5269 #endif
5270 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
5271 memory_address (Pmode, last_ptr)));
5272
5273 /* If we are not optimizing, ensure that we know that this
5274 piece of context is live over the entire function. */
5275 if (! optimize)
5276 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
5277 save_expr_regs);
5278 }
5279 }
5280
5281 /* After the display initializations is where the tail-recursion label
5282 should go, if we end up needing one. Ensure we have a NOTE here
5283 since some things (like trampolines) get placed before this. */
5284 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5285
5286 /* Evaluate now the sizes of any types declared among the arguments. */
5287 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5288 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
5289
5290 /* Make sure there is a line number after the function entry setup code. */
5291 force_next_line_note ();
5292 }
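
/* As an illustration of the static chain handling above, consider a
   GNU C nested function (this example is hypothetical, not part of
   the compiler):

	int
	outer (x)
	     int x;
	{
	  int inner () { return x; }
	  return inner ();
	}

   When `inner' is compiled, current_function_needs_context is set:
   the incoming static chain (a pointer into `outer's frame) is stored
   in the stack slot made at the top of this function, and
   context_display records, for each enclosing function, where its
   frame can be found, so that the reference to `x' can be expanded.  */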
5293 \f
5294 /* Generate RTL for the end of the current function.
5295 FILENAME and LINE are the current position in the source file.
5296
5297 It is up to language-specific callers to do cleanups for parameters--
5298 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5299
5300 void
5301 expand_function_end (filename, line, end_bindings)
5302 char *filename;
5303 int line;
5304 int end_bindings;
5305 {
5306 register int i;
5307 tree link;
5308
5309 static rtx initial_trampoline;
5310
5311 if (output_bytecode)
5312 {
5313 bc_expand_function_end ();
5314 return;
5315 }
5316
5317 #ifdef NON_SAVING_SETJMP
5318 /* Don't put any variables in registers if we call setjmp
5319 on a machine that fails to restore the registers. */
5320 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5321 {
5322 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5323 setjmp_protect (DECL_INITIAL (current_function_decl));
5324
5325 setjmp_protect_args ();
5326 }
5327 #endif
5328
5329 /* Save the argument pointer if a save area was made for it. */
5330 if (arg_pointer_save_area)
5331 {
5332 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5333 emit_insn_before (x, tail_recursion_reentry);
5334 }
5335
5336 /* Initialize any trampolines required by this function. */
5337 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5338 {
5339 tree function = TREE_PURPOSE (link);
5340 rtx context = lookup_static_chain (function);
5341 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5342 rtx seq;
5343
5344 /* First make sure this compilation has a template for
5345 initializing trampolines. */
5346 if (initial_trampoline == 0)
5347 {
5348 end_temporary_allocation ();
5349 initial_trampoline
5350 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
5351 resume_temporary_allocation ();
5352 }
5353
5354 /* Generate insns to initialize the trampoline. */
5355 start_sequence ();
5356 tramp = change_address (initial_trampoline, BLKmode,
5357 round_trampoline_addr (XEXP (tramp, 0)));
5358 emit_block_move (tramp, initial_trampoline, GEN_INT (TRAMPOLINE_SIZE),
5359 FUNCTION_BOUNDARY / BITS_PER_UNIT);
5360 INITIALIZE_TRAMPOLINE (XEXP (tramp, 0),
5361 XEXP (DECL_RTL (function), 0), context);
5362 seq = get_insns ();
5363 end_sequence ();
5364
5365 /* Put those insns at entry to the containing function (this one). */
5366 emit_insns_before (seq, tail_recursion_reentry);
5367 }
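
  /* What INITIALIZE_TRAMPOLINE arranges is entirely target-specific;
     as a rough, hypothetical sketch, the initialized trampoline for a
     nested function behaves like

	load  static-chain-register, <context>
	jump  <function>

     i.e. a small piece of stack-resident code that establishes the
     static chain and transfers to the real code, so that the nested
     function's address can be passed around as an ordinary function
     pointer.  */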
5368
5369 /* Warn about unused parms if extra warnings were specified. */
5370 if (warn_unused && extra_warnings)
5371 {
5372 tree decl;
5373
5374 for (decl = DECL_ARGUMENTS (current_function_decl);
5375 decl; decl = TREE_CHAIN (decl))
5376 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5377 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5378 warning_with_decl (decl, "unused parameter `%s'");
5379 }
5380
5381 /* Delete handlers for nonlocal gotos if nothing uses them. */
5382 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5383 delete_handlers ();
5384
5385 /* End any sequences that failed to be closed due to syntax errors. */
5386 while (in_sequence_p ())
5387 end_sequence ();
5388
5389 /* Outside function body, can't compute type's actual size
5390 until next function's body starts. */
5391 immediate_size_expand--;
5392
5393 /* If doing stupid register allocation,
5394 mark register parms as dying here. */
5395
5396 if (obey_regdecls)
5397 {
5398 rtx tem;
5399 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5400 use_variable (regno_reg_rtx[i]);
5401
5402 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5403
5404 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5405 {
5406 use_variable (XEXP (tem, 0));
5407 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5408 }
5409
5410 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5411 use_variable (current_function_internal_arg_pointer);
5412 }
5413
5414 clear_pending_stack_adjust ();
5415 do_pending_stack_adjust ();
5416
5417 /* Mark the end of the function body.
5418 If control reaches this insn, the function can drop through
5419 without returning a value. */
5420 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5421
5422 /* Output a linenumber for the end of the function.
5423 SDB depends on this. */
5424 emit_line_note_force (filename, line);
5425
5426 /* Output the label for the actual return from the function,
5427 if one is expected. This happens either because a function epilogue
5428 is used instead of a return instruction, or because a return was done
5429 with a goto in order to run local cleanups, or because of pcc-style
5430 structure returning. */
5431
5432 if (return_label)
5433 emit_label (return_label);
5434
5435 /* C++ uses this. */
5436 if (end_bindings)
5437 expand_end_bindings (0, 0, 0);
5438
5439 /* If we had calls to alloca, and this machine needs
5440 an accurate stack pointer to exit the function,
5441 insert some code to save and restore the stack pointer. */
5442 #ifdef EXIT_IGNORE_STACK
5443 if (! EXIT_IGNORE_STACK)
5444 #endif
5445 if (current_function_calls_alloca)
5446 {
5447 rtx tem = 0;
5448
5449 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5450 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5451 }
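
  /* For instance (a hypothetical user function, shown only to motivate
     the save/restore pair above):

	int
	f (n)
	     int n;
	{
	  char *p = (char *) alloca (n);
	  ...
	}

     The alloca call moves the stack pointer by a run-time amount; on a
     machine whose exit sequence needs an accurate stack pointer, the
     save emitted after parm_birth_insn and the restore emitted here
     undo that movement before the epilogue runs.  */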
5452
5453 /* If scalar return value was computed in a pseudo-reg,
5454 copy that to the hard return register. */
5455 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5456 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5457 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5458 >= FIRST_PSEUDO_REGISTER))
5459 {
5460 rtx real_decl_result;
5461
5462 #ifdef FUNCTION_OUTGOING_VALUE
5463 real_decl_result
5464 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5465 current_function_decl);
5466 #else
5467 real_decl_result
5468 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5469 current_function_decl);
5470 #endif
5471 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5472 emit_move_insn (real_decl_result,
5473 DECL_RTL (DECL_RESULT (current_function_decl)));
5474 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
5475 }
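
  /* On a hypothetical target whose SImode return register is hard
     reg 0, the code above turns a function whose value was computed
     into pseudo 64 into, roughly,

	(set (reg:SI 0) (reg:SI 64))
	(use (reg:SI 0))

     The USE keeps the hard return register live to the end of the
     function; the register numbers are made up for the example.  */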
5476
5477 /* If returning a structure, arrange to return the address of the value
5478 in a place where debuggers expect to find it.
5479
5480 If returning a structure PCC style,
5481 the caller also depends on this value.
5482 And current_function_returns_pcc_struct is not necessarily set. */
5483 if (current_function_returns_struct
5484 || current_function_returns_pcc_struct)
5485 {
5486 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5487 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5488 #ifdef FUNCTION_OUTGOING_VALUE
5489 rtx outgoing
5490 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5491 current_function_decl);
5492 #else
5493 rtx outgoing
5494 = FUNCTION_VALUE (build_pointer_type (type),
5495 current_function_decl);
5496 #endif
5497
5498 /* Mark this as a function return value so integrate will delete the
5499 assignment and USE below when inlining this function. */
5500 REG_FUNCTION_VALUE_P (outgoing) = 1;
5501
5502 emit_move_insn (outgoing, value_address);
5503 use_variable (outgoing);
5504 }
5505
5506 /* Output a return insn if we are using one.
5507 Otherwise, let the rtl chain end here, to drop through
5508 into the epilogue. */
5509
5510 #ifdef HAVE_return
5511 if (HAVE_return)
5512 {
5513 emit_jump_insn (gen_return ());
5514 emit_barrier ();
5515 }
5516 #endif
5517
5518 /* Fix up any gotos that jumped out to the outermost
5519 binding level of the function.
5520 Must follow emitting RETURN_LABEL. */
5521
5522 /* If you have any cleanups to do at this point,
5523 and they need to create temporary variables,
5524 then you will lose. */
5525 expand_fixups (get_insns ());
5526 }
5527 \f
5528 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5529
5530 static int *prologue;
5531 static int *epilogue;
5532
5533 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5534 or a single insn). */
5535
5536 static int *
5537 record_insns (insns)
5538 rtx insns;
5539 {
5540 int *vec;
5541
5542 if (GET_CODE (insns) == SEQUENCE)
5543 {
5544 int len = XVECLEN (insns, 0);
5545 vec = (int *) oballoc ((len + 1) * sizeof (int));
5546 vec[len] = 0;
5547 while (--len >= 0)
5548 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5549 }
5550 else
5551 {
5552 vec = (int *) oballoc (2 * sizeof (int));
5553 vec[0] = INSN_UID (insns);
5554 vec[1] = 0;
5555 }
5556 return vec;
5557 }
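
/* For example, recording a three-insn SEQUENCE whose insns have UIDs
   10, 11 and 12 (made-up values) yields the zero-terminated vector

	vec = { 10, 11, 12, 0 };

   Callers rely on the trailing zero as the end marker, so this code
   assumes insn UIDs are never zero.  */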
5558
5559 /* Determine how many INSN_UIDs in VEC are part of INSN. */
5560
5561 static int
5562 contains (insn, vec)
5563 rtx insn;
5564 int *vec;
5565 {
5566 register int i, j;
5567
5568 if (GET_CODE (insn) == INSN
5569 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5570 {
5571 int count = 0;
5572 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5573 for (j = 0; vec[j]; j++)
5574 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
5575 count++;
5576 return count;
5577 }
5578 else
5579 {
5580 for (j = 0; vec[j]; j++)
5581 if (INSN_UID (insn) == vec[j])
5582 return 1;
5583 }
5584 return 0;
5585 }
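
/* Note that for a delay-slot SEQUENCE this returns a count, not just
   a boolean: with vec = { 10, 11, 0 } (made-up UIDs), an insn whose
   SEQUENCE holds insns 10 and 11 yields 2.  The callers below subtract
   the result from a running length, so the count matters.  */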
5586
5587 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5588 this into place with notes indicating where the prologue ends and where
5589 the epilogue begins. Update the basic block information when possible. */
5590
5591 void
5592 thread_prologue_and_epilogue_insns (f)
5593 rtx f;
5594 {
5595 #ifdef HAVE_prologue
5596 if (HAVE_prologue)
5597 {
5598 rtx head, seq, insn;
5599
5600 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5601 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5602 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
5603 seq = gen_prologue ();
5604 head = emit_insn_after (seq, f);
5605
5606 /* Include the new prologue insns in the first block. Ignore them
5607 if they form a basic block unto themselves. */
5608 if (basic_block_head && n_basic_blocks
5609 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
5610 basic_block_head[0] = NEXT_INSN (f);
5611
5612 /* Retain a map of the prologue insns. */
5613 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
5614 }
5615 else
5616 #endif
5617 prologue = 0;
5618
5619 #ifdef HAVE_epilogue
5620 if (HAVE_epilogue)
5621 {
5622 rtx insn = get_last_insn ();
5623 rtx prev = prev_nonnote_insn (insn);
5624
5625 /* If we end with a BARRIER, we don't need an epilogue. */
5626 if (! (prev && GET_CODE (prev) == BARRIER))
5627 {
5628 rtx tail, seq, tem;
5629 rtx first_use = 0;
5630 rtx last_use = 0;
5631
5632 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5633 epilogue insns, the USE insns at the end of a function,
5634 the jump insn that returns, and then a BARRIER. */
5635
5636 /* Move the USE insns at the end of a function onto a list. */
5637 while (prev
5638 && GET_CODE (prev) == INSN
5639 && GET_CODE (PATTERN (prev)) == USE)
5640 {
5641 tem = prev;
5642 prev = prev_nonnote_insn (prev);
5643
5644 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
5645 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
5646 if (first_use)
5647 {
5648 NEXT_INSN (tem) = first_use;
5649 PREV_INSN (first_use) = tem;
5650 }
5651 first_use = tem;
5652 if (!last_use)
5653 last_use = tem;
5654 }
5655
5656 emit_barrier_after (insn);
5657
5658 seq = gen_epilogue ();
5659 tail = emit_jump_insn_after (seq, insn);
5660
5661 /* Insert the USE insns immediately before the return insn, which
5662 must be the first instruction before the final barrier. */
5663 if (first_use)
5664 {
5665 tem = prev_nonnote_insn (get_last_insn ());
5666 NEXT_INSN (PREV_INSN (tem)) = first_use;
5667 PREV_INSN (first_use) = PREV_INSN (tem);
5668 PREV_INSN (tem) = last_use;
5669 NEXT_INSN (last_use) = tem;
5670 }
5671
5672 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
5673
5674 /* Include the new epilogue insns in the last block. Ignore
5675 them if they form a basic block unto themselves. */
5676 if (basic_block_end && n_basic_blocks
5677 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
5678 basic_block_end[n_basic_blocks - 1] = tail;
5679
5680 /* Retain a map of the epilogue insns. */
5681 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
5682 return;
5683 }
5684 }
5685 #endif
5686 epilogue = 0;
5687 }
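
/* Pieced together from the comments above, the overall shape of the
   insn chain after this function runs, on a machine that has both
   patterns, is roughly:

	NOTE_INSN_DELETED		(the first insn, F)
	zero or more prologue insns
	NOTE_INSN_PROLOGUE_END
	  ... the function body ...
	NOTE_INSN_EPILOGUE_BEG
	the epilogue insns
	the USE insns at the end of a function
	the jump insn that returns
	BARRIER

   This is a sketch of the intended layout, not a guarantee; later
   passes may move the notes, which is why
   reposition_prologue_and_epilogue_notes below exists.  */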
5688
5689 /* Reposition the prologue-end and epilogue-begin notes after instruction
5690 scheduling and delayed branch scheduling. */
5691
5692 void
5693 reposition_prologue_and_epilogue_notes (f)
5694 rtx f;
5695 {
5696 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5697 /* Reposition the prologue and epilogue notes. */
5698 if (n_basic_blocks)
5699 {
5700 rtx next, prev;
5701 int len;
5702
5703 if (prologue)
5704 {
5705 register rtx insn, note = 0;
5706
5707 /* Scan from the beginning until we reach the last prologue insn.
5708 We apparently can't depend on basic_block_{head,end} after
5709 reorg has run. */
5710 for (len = 0; prologue[len]; len++)
5711 ;
5712 for (insn = f; len && insn; insn = NEXT_INSN (insn))
5713 {
5714 if (GET_CODE (insn) == NOTE)
5715 {
5716 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5717 note = insn;
5718 }
5719 else if ((len -= contains (insn, prologue)) == 0)
5720 {
5721 /* Find the prologue-end note if we haven't already, and
5722 move it to just after the last prologue insn. */
5723 if (note == 0)
5724 {
5725 for (note = insn; note = NEXT_INSN (note);)
5726 if (GET_CODE (note) == NOTE
5727 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5728 break;
5729 }
5730 next = NEXT_INSN (note);
5731 prev = PREV_INSN (note);
5732 if (prev)
5733 NEXT_INSN (prev) = next;
5734 if (next)
5735 PREV_INSN (next) = prev;
5736 add_insn_after (note, insn);
5737 }
5738 }
5739 }
5740
5741 if (epilogue)
5742 {
5743 register rtx insn, note = 0;
5744
5745 /* Scan from the end until we reach the first epilogue insn.
5746 We apparently can't depend on basic_block_{head,end} after
5747 reorg has run. */
5748 for (len = 0; epilogue[len]; len++)
5749 ;
5750 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
5751 {
5752 if (GET_CODE (insn) == NOTE)
5753 {
5754 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5755 note = insn;
5756 }
5757 else if ((len -= contains (insn, epilogue)) == 0)
5758 {
5759 /* Find the epilogue-begin note if we haven't already, and
5760 move it to just before the first epilogue insn. */
5761 if (note == 0)
5762 {
5763 for (note = insn; note = PREV_INSN (note);)
5764 if (GET_CODE (note) == NOTE
5765 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5766 break;
5767 }
5768 next = NEXT_INSN (note);
5769 prev = PREV_INSN (note);
5770 if (prev)
5771 NEXT_INSN (prev) = next;
5772 if (next)
5773 PREV_INSN (next) = prev;
5774 add_insn_after (note, PREV_INSN (insn));
5775 }
5776 }
5777 }
5778 }
5779 #endif /* HAVE_prologue or HAVE_epilogue */
5780 }