/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "bytecode.h"
#include "bc-emit.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
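
/* For example, with ALIGN == 8 these masks behave as follows (a worked
   illustration, not used by the compiler itself):

     FLOOR_ROUND (-5, 8) == ((-5) & ~7)      == -8
     CEIL_ROUND  ( 5, 8) == ((5 + 7) & ~7)   ==  8
     CEIL_ROUND  ( 8, 8) == ((8 + 7) & ~7)   ==  8

   Masking is used because -5 / 8 may round toward zero and give 0, not
   the -1 we would need, under pre-ANSI and C89 rules.  */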

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
int frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  int base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  int full_size;
};
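
/* A sketch of the intended lifecycle, assuming the caller brackets each
   statement or binding level with a temporary nesting level (illustrative
   only, not code from this file):  */
#if 0
{
  rtx tmp;

  push_temp_slots ();                       /* enter a new nesting level */
  tmp = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
  /* ... emit insns that store into and read from TMP ...  */
  free_temp_slots ();                       /* TMP becomes reusable */
  pop_temp_slots ();                        /* leave the nesting level */
}
#endif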

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.
   Not an rtx.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
static tree round_down PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
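
/* A sketch of how a language front end pairs these calls around the body
   of a nested function (illustrative only; the elided middle stands for
   whatever start-function/finish-function work the front end does):  */
#if 0
{
  push_function_context ();   /* save the enclosing function's state */
  /* ... expand the nested function's body here ...  */
  pop_function_context ();    /* restore the enclosing function's state */
}
#endif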
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

int
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
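
/* For instance, the three ALIGN conventions look like this at a call site
   (a hypothetical sketch of call sites, not code from this file):  */
#if 0
{
  /* ALIGN == 0: slot aligned as SImode requires.  */
  rtx a = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
  /* ALIGN == -1: 10-byte BLKmode slot, size and offset rounded up to
     BIGGEST_ALIGNMENT.  */
  rtx b = assign_stack_local (BLKmode, 10, -1);
  /* ALIGN > 0: slot address aligned to a 64-bit boundary.  */
  rtx c = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 64);
}
#endif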

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer-term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx (MEM, BLKmode,
                                 plus_constant (XEXP (best_p->slot, 0),
                                                rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
                                         stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }
  return p->slot;
}
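
/* To make the splitting above concrete: with BIGGEST_ALIGNMENT of 64 bits
   (8 bytes), asking for a 16-byte temporary when a free 64-byte BLKmode
   slot exists reuses its first CEIL_ROUND (16, 8) == 16 bytes and leaves
   a new free slot of 48 bytes at base_offset + 16.  (A worked example;
   the numbers are hypothetical.)  */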
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      int size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
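
/* Under these rules a fixed-size scalar normally comes back as a fresh
   pseudo register, while an aggregate comes back as an addressable stack
   temporary (a hypothetical sketch; STRUCT_TYPE stands for some
   fixed-size RECORD_TYPE node):  */
#if 0
{
  /* Scalar: promoted (unless PROMOTE_FOR_CALL_ONLY) and given a pseudo.  */
  rtx r = assign_temp (integer_type_node, 0, 0, 0);
  /* Aggregate: BLKmode forces a stack slot via assign_stack_temp.  */
  rtx m = assign_temp (STRUCT_TYPE, 1, 1, 0);
}
#endif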
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}
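
/* Adjacency here is purely arithmetic.  For example, two free BLKmode
   slots with (base_offset, full_size) of (32, 16) and (48, 16) satisfy
   32 + 16 == 48, so they merge into one slot of (32, 32); a slot at
   (72, 16) would be left alone.  (Hypothetical offsets, for illustration.)  */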
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
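
/* The classic trigger is a GNU C statement expression whose value lives
   in a temporary, e.g. (user-level illustration):

     struct S f ();
     struct S x = ({ struct S tmp = f (); tmp; });

   The slot holding the result is allocated while expanding the insides of
   the braces; moving it up one level keeps it alive until the enclosing
   statement has copied it into `x'.  */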

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
                        promoted_mode, decl_mode, TREE_SIDE_EFFECTS (decl));
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
}
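
/* A source-level illustration of what triggers the machinery above: once
   the address of `i' is taken, the pseudo register first chosen for `i'
   must be replaced by a stack slot, and all insns already emitted for the
   earlier statements must be rewritten to reference that slot.

     int f ()
     {
       int i = 42;
       int *p = &i;
       return *p;
     }
   */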

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
{
  rtx new = 0;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
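
/* Why the sharing matters: if an insn's pattern uses a MATCH_DUP so that
   two operands must be identical, say

     (set (mem:SI (reg:SI 64)) (mem:SI (reg:SI 64)))

   then both occurrences of the old rtx must be rewritten to the *same*
   replacement, or the fixed-up insn no longer matches its own pattern.
   Looking an rtx up here before allocating a new entry guarantees that.
   (The insn above is a made-up illustration.)  */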

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          else if (toplevel
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

#ifdef SMALL_REGISTER_CLASSES
              /* If the insn that copies the results of a CALL_INSN
                 into a pseudo now references VAR, we have to use an
                 intermediate pseudo since we want the life of the
                 return value register to be only a single insn.

                 If we don't use an intermediate pseudo, such things as
                 address computations to make the address of VAR valid
                 if it is not can be placed between the CALL_INSN and INSN.

                 To make sure this doesn't happen, we record the destination
                 of the CALL_INSN and see if the next insn uses both that
                 and VAR.  */

              if (call_dest != 0 && GET_CODE (insn) == INSN
                  && reg_mentioned_p (var, PATTERN (insn))
                  && reg_mentioned_p (call_dest, PATTERN (insn)))
                {
                  rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                  emit_insn_before (gen_move_insn (temp, call_dest), insn);

                  PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                call_dest, temp);
                }

              if (GET_CODE (insn) == CALL_INSN
                  && GET_CODE (PATTERN (insn)) == SET)
                call_dest = SET_DEST (PATTERN (insn));
              else if (GET_CODE (insn) == CALL_INSN
                       && GET_CODE (PATTERN (insn)) == PARALLEL
                       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
              else
                call_dest = 0;
#endif

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}
1676 \f
1677 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1678 See if the rtx expression at *LOC in INSN needs to be changed.
1679
1680 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1681 contain a list of original rtx's and replacements. If we find that we need
1682 to modify this insn by replacing a memory reference with a pseudo or by
1683 making a new MEM to implement a SUBREG, we consult that list to see if
1684 we have already chosen a replacement. If none has already been allocated,
1685 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1686 or the SUBREG, as appropriate, to the pseudo. */
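/* As a purely illustrative example: if VAR occurs twice in an insn
   such as

	(set (reg:SI 100) (plus:SI VAR VAR))

   whose pattern uses a MATCH_DUP for the second operand, both
   occurrences must become the same pseudo, which is why one
   replacement is recorded and reused for the whole insn.  */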
1687
1688 static void
1689 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1690 register rtx var;
1691 enum machine_mode promoted_mode;
1692 register rtx *loc;
1693 rtx insn;
1694 struct fixup_replacement **replacements;
1695 {
1696 register int i;
1697 register rtx x = *loc;
1698 RTX_CODE code = GET_CODE (x);
1699 register char *fmt;
1700 register rtx tem, tem1;
1701 struct fixup_replacement *replacement;
1702
1703 switch (code)
1704 {
1705 case MEM:
1706 if (var == x)
1707 {
1708 /* If we already have a replacement, use it. Otherwise,
1709 try to fix up this address in case it is invalid. */
1710
1711 replacement = find_fixup_replacement (replacements, var);
1712 if (replacement->new)
1713 {
1714 *loc = replacement->new;
1715 return;
1716 }
1717
1718 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1719
1720 /* Unless we are forcing memory to register or we changed the mode,
1721 we can leave things the way they are if the insn is valid. */
1722
1723 INSN_CODE (insn) = -1;
1724 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1725 && recog_memoized (insn) >= 0)
1726 return;
1727
1728 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1729 return;
1730 }
1731
1732 /* If X contains VAR, we need to unshare it here so that we update
1733 each occurrence separately. But all identical MEMs in one insn
1734 must be replaced with the same rtx because of the possibility of
1735 MATCH_DUPs. */
1736
1737 if (reg_mentioned_p (var, x))
1738 {
1739 replacement = find_fixup_replacement (replacements, x);
1740 if (replacement->new == 0)
1741 replacement->new = copy_most_rtx (x, var);
1742
1743 *loc = x = replacement->new;
1744 }
1745 break;
1746
1747 case REG:
1748 case CC0:
1749 case PC:
1750 case CONST_INT:
1751 case CONST:
1752 case SYMBOL_REF:
1753 case LABEL_REF:
1754 case CONST_DOUBLE:
1755 return;
1756
1757 case SIGN_EXTRACT:
1758 case ZERO_EXTRACT:
1759 /* Note that in some cases those types of expressions are altered
1760 by optimize_bit_field, and do not survive to get here. */
1761 if (XEXP (x, 0) == var
1762 || (GET_CODE (XEXP (x, 0)) == SUBREG
1763 && SUBREG_REG (XEXP (x, 0)) == var))
1764 {
1765 /* Get TEM as a valid MEM in the mode presently in the insn.
1766
1767 We don't worry about the possibility of MATCH_DUP here; it
1768 is highly unlikely and would be tricky to handle. */
1769
1770 tem = XEXP (x, 0);
1771 if (GET_CODE (tem) == SUBREG)
1772 {
1773 if (GET_MODE_BITSIZE (GET_MODE (tem))
1774 > GET_MODE_BITSIZE (GET_MODE (var)))
1775 {
1776 replacement = find_fixup_replacement (replacements, var);
1777 if (replacement->new == 0)
1778 replacement->new = gen_reg_rtx (GET_MODE (var));
1779 SUBREG_REG (tem) = replacement->new;
1780 }
1781
1782 tem = fixup_memory_subreg (tem, insn, 0);
1783 }
1784 else
1785 tem = fixup_stack_1 (tem, insn);
1786
1787 /* Unless we want to load from memory, get TEM into the proper mode
1788 for an extract from memory. This can only be done if the
1789 extract is at a constant position and length. */
1790
1791 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1792 && GET_CODE (XEXP (x, 2)) == CONST_INT
1793 && ! mode_dependent_address_p (XEXP (tem, 0))
1794 && ! MEM_VOLATILE_P (tem))
1795 {
1796 enum machine_mode wanted_mode = VOIDmode;
1797 enum machine_mode is_mode = GET_MODE (tem);
1798 int width = INTVAL (XEXP (x, 1));
1799 int pos = INTVAL (XEXP (x, 2));
1800
1801 #ifdef HAVE_extzv
1802 if (GET_CODE (x) == ZERO_EXTRACT)
1803 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1804 #endif
1805 #ifdef HAVE_extv
1806 if (GET_CODE (x) == SIGN_EXTRACT)
1807 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1808 #endif
1809 /* If we have a narrower mode, we can do something. */
1810 if (wanted_mode != VOIDmode
1811 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1812 {
1813 int offset = pos / BITS_PER_UNIT;
1814 rtx old_pos = XEXP (x, 2);
1815 rtx newmem;
1816
1817 /* If the bytes and bits are counted differently, we
1818 must adjust the offset. */
1819 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1820 offset = (GET_MODE_SIZE (is_mode)
1821 - GET_MODE_SIZE (wanted_mode) - offset);
1822
1823 pos %= GET_MODE_BITSIZE (wanted_mode);
1824
1825 newmem = gen_rtx (MEM, wanted_mode,
1826 plus_constant (XEXP (tem, 0), offset));
1827 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1828 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1829 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1830
1831 /* Make the change and see if the insn remains valid. */
1832 INSN_CODE (insn) = -1;
1833 XEXP (x, 0) = newmem;
1834 XEXP (x, 2) = GEN_INT (pos);
1835
1836 if (recog_memoized (insn) >= 0)
1837 return;
1838
1839 /* Otherwise, restore old position. XEXP (x, 0) will be
1840 restored later. */
1841 XEXP (x, 2) = old_pos;
1842 }
1843 }
1844
1845 /* If we get here, the bitfield extract insn can't accept a memory
1846 reference. Copy the input into a register. */
1847
1848 tem1 = gen_reg_rtx (GET_MODE (tem));
1849 emit_insn_before (gen_move_insn (tem1, tem), insn);
1850 XEXP (x, 0) = tem1;
1851 return;
1852 }
1853 break;
1854
1855 case SUBREG:
1856 if (SUBREG_REG (x) == var)
1857 {
1858 /* If this is a special SUBREG made because VAR was promoted
1859 to a wider mode, replace it with VAR and call ourself
1860 recursively, this time saying that the object previously
1861 had its current mode (by virtue of the SUBREG). */
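/* For example, a QImode variable promoted to SImode lived in
   (reg:SI N), and its QImode references looked like
   (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set.  Now
   that the reg has been turned into VAR, a QImode MEM, the SUBREG
   has VAR's own mode, so we can use VAR itself.  */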
1862
1863 if (SUBREG_PROMOTED_VAR_P (x))
1864 {
1865 *loc = var;
1866 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1867 return;
1868 }
1869
1870 /* If this SUBREG makes VAR wider, it has become a paradoxical
1871 SUBREG with VAR in memory, but these aren't allowed at this
1872 stage of the compilation. So load VAR into a pseudo and take
1873 a SUBREG of that pseudo. */
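/* E.g. if VAR is a QImode MEM, (subreg:SI VAR 0) would be
   paradoxical here; we rewrite it as (subreg:SI (reg:QI P) 0)
   for a fresh pseudo P, and the replacement loop in
   fixup_var_refs_insns emits the copy of VAR into P.  */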
1874 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1875 {
1876 replacement = find_fixup_replacement (replacements, var);
1877 if (replacement->new == 0)
1878 replacement->new = gen_reg_rtx (GET_MODE (var));
1879 SUBREG_REG (x) = replacement->new;
1880 return;
1881 }
1882
1883 /* See if we have already found a replacement for this SUBREG.
1884 If so, use it. Otherwise, make a MEM and see if the insn
1885 is recognized. If not, or if we should force MEM into a register,
1886 make a pseudo for this SUBREG. */
1887 replacement = find_fixup_replacement (replacements, x);
1888 if (replacement->new)
1889 {
1890 *loc = replacement->new;
1891 return;
1892 }
1893
1894 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1895
1896 INSN_CODE (insn) = -1;
1897 if (! flag_force_mem && recog_memoized (insn) >= 0)
1898 return;
1899
1900 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1901 return;
1902 }
1903 break;
1904
1905 case SET:
1906 /* First do special simplification of bit-field references. */
1907 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1908 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1909 optimize_bit_field (x, insn, NULL_PTR);
1910 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1911 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1912 optimize_bit_field (x, insn, NULL_PTR);
1913
1914 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1915 into a register and then store it back out. */
1916 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1917 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
1918 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
1919 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
1920 > GET_MODE_SIZE (GET_MODE (var))))
1921 {
1922 replacement = find_fixup_replacement (replacements, var);
1923 if (replacement->new == 0)
1924 replacement->new = gen_reg_rtx (GET_MODE (var));
1925
1926 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
1927 emit_insn_after (gen_move_insn (var, replacement->new), insn);
1928 }
1929
1930 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1931 insn into a pseudo and store the low part of the pseudo into VAR. */
1932 if (GET_CODE (SET_DEST (x)) == SUBREG
1933 && SUBREG_REG (SET_DEST (x)) == var
1934 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1935 > GET_MODE_SIZE (GET_MODE (var))))
1936 {
1937 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1938 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1939 tem)),
1940 insn);
1941 break;
1942 }
1943
1944 {
1945 rtx dest = SET_DEST (x);
1946 rtx src = SET_SRC (x);
1947 rtx outerdest = dest;
1948
1949 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1950 || GET_CODE (dest) == SIGN_EXTRACT
1951 || GET_CODE (dest) == ZERO_EXTRACT)
1952 dest = XEXP (dest, 0);
1953
1954 if (GET_CODE (src) == SUBREG)
1955 src = XEXP (src, 0);
1956
1957 /* If VAR does not appear at the top level of the SET
1958 just scan the lower levels of the tree. */
1959
1960 if (src != var && dest != var)
1961 break;
1962
1963 /* We will need to rerecognize this insn. */
1964 INSN_CODE (insn) = -1;
1965
1966 #ifdef HAVE_insv
1967 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1968 {
1969 /* Since this case will return, ensure we fix up all the
1970 operands here. */
1971 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1972 insn, replacements);
1973 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1974 insn, replacements);
1975 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1976 insn, replacements);
1977
1978 tem = XEXP (outerdest, 0);
1979
1980 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1981 that may appear inside a ZERO_EXTRACT.
1982 This was legitimate when the MEM was a REG. */
1983 if (GET_CODE (tem) == SUBREG
1984 && SUBREG_REG (tem) == var)
1985 tem = fixup_memory_subreg (tem, insn, 0);
1986 else
1987 tem = fixup_stack_1 (tem, insn);
1988
1989 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1990 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1991 && ! mode_dependent_address_p (XEXP (tem, 0))
1992 && ! MEM_VOLATILE_P (tem))
1993 {
1994 enum machine_mode wanted_mode
1995 = insn_operand_mode[(int) CODE_FOR_insv][0];
1996 enum machine_mode is_mode = GET_MODE (tem);
1997 int width = INTVAL (XEXP (outerdest, 1));
1998 int pos = INTVAL (XEXP (outerdest, 2));
1999
2000 /* If we have a narrower mode, we can do something. */
2001 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2002 {
2003 int offset = pos / BITS_PER_UNIT;
2004 rtx old_pos = XEXP (outerdest, 2);
2005 rtx newmem;
2006
2007 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2008 offset = (GET_MODE_SIZE (is_mode)
2009 - GET_MODE_SIZE (wanted_mode) - offset);
2010
2011 pos %= GET_MODE_BITSIZE (wanted_mode);
2012
2013 newmem = gen_rtx (MEM, wanted_mode,
2014 plus_constant (XEXP (tem, 0), offset));
2015 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2016 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2017 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2018
2019 /* Make the change and see if the insn remains valid. */
2020 INSN_CODE (insn) = -1;
2021 XEXP (outerdest, 0) = newmem;
2022 XEXP (outerdest, 2) = GEN_INT (pos);
2023
2024 if (recog_memoized (insn) >= 0)
2025 return;
2026
2027 /* Otherwise, restore the old position. XEXP (outerdest, 0)
2028 will be restored later. */
2029 XEXP (outerdest, 2) = old_pos;
2030 }
2031 }
2032
2033 /* If we get here, the bit-field store doesn't allow memory
2034 or isn't located at a constant position. Load the value into
2035 a register, do the store, and put it back into memory. */
2036
2037 tem1 = gen_reg_rtx (GET_MODE (tem));
2038 emit_insn_before (gen_move_insn (tem1, tem), insn);
2039 emit_insn_after (gen_move_insn (tem, tem1), insn);
2040 XEXP (outerdest, 0) = tem1;
2041 return;
2042 }
2043 #endif
2044
2045 /* STRICT_LOW_PART is a no-op on memory references
2046 and it can cause combinations to be unrecognizable,
2047 so eliminate it. */
2048
2049 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2050 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2051
2052 /* A valid insn to copy VAR into or out of a register
2053 must be left alone, to avoid an infinite loop here.
2054 If the reference to VAR is by a subreg, fix that up,
2055 since SUBREG is not valid for a memref.
2056 Also fix up the address of the stack slot.
2057
2058 Note that we must not try to recognize the insn until
2059 after we know that we have valid addresses and no
2060 (subreg (mem ...) ...) constructs, since these interfere
2061 with determining the validity of the insn. */
2062
2063 if ((SET_SRC (x) == var
2064 || (GET_CODE (SET_SRC (x)) == SUBREG
2065 && SUBREG_REG (SET_SRC (x)) == var))
2066 && (GET_CODE (SET_DEST (x)) == REG
2067 || (GET_CODE (SET_DEST (x)) == SUBREG
2068 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2069 && GET_MODE (var) == promoted_mode
2070 && x == single_set (insn))
2071 {
2072 rtx pat;
2073
2074 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2075 if (replacement->new)
2076 SET_SRC (x) = replacement->new;
2077 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2078 SET_SRC (x) = replacement->new
2079 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2080 else
2081 SET_SRC (x) = replacement->new
2082 = fixup_stack_1 (SET_SRC (x), insn);
2083
2084 if (recog_memoized (insn) >= 0)
2085 return;
2086
2087 /* INSN is not valid, but we know that we want to
2088 copy SET_SRC (x) to SET_DEST (x) in some way. So
2089 we generate the move and see whether it requires more
2090 than one insn. If it does, we emit those insns and
2091 delete INSN. Otherwise, we can just replace the pattern
2092 of INSN; we have already verified above that INSN has
2093 no other function than to do X. */
2094
2095 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2096 if (GET_CODE (pat) == SEQUENCE)
2097 {
2098 emit_insn_after (pat, insn);
2099 PUT_CODE (insn, NOTE);
2100 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2101 NOTE_SOURCE_FILE (insn) = 0;
2102 }
2103 else
2104 PATTERN (insn) = pat;
2105
2106 return;
2107 }
2108
2109 if ((SET_DEST (x) == var
2110 || (GET_CODE (SET_DEST (x)) == SUBREG
2111 && SUBREG_REG (SET_DEST (x)) == var))
2112 && (GET_CODE (SET_SRC (x)) == REG
2113 || (GET_CODE (SET_SRC (x)) == SUBREG
2114 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2115 && GET_MODE (var) == promoted_mode
2116 && x == single_set (insn))
2117 {
2118 rtx pat;
2119
2120 if (GET_CODE (SET_DEST (x)) == SUBREG)
2121 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2122 else
2123 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2124
2125 if (recog_memoized (insn) >= 0)
2126 return;
2127
2128 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2129 if (GET_CODE (pat) == SEQUENCE)
2130 {
2131 emit_insn_after (pat, insn);
2132 PUT_CODE (insn, NOTE);
2133 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2134 NOTE_SOURCE_FILE (insn) = 0;
2135 }
2136 else
2137 PATTERN (insn) = pat;
2138
2139 return;
2140 }
2141
2142 /* Otherwise, storing into VAR must be handled specially
2143 by storing into a temporary and copying that into VAR
2144 with a new insn after this one. Note that this case
2145 will be used when storing into a promoted scalar since
2146 the insn will now have different modes on the input
2147 and output and hence will be invalid (except for the case
2148 of setting it to a constant, which does not need any
2149 change if it is valid). We generate extra code in that case,
2150 but combine.c will eliminate it. */
2151
2152 if (dest == var)
2153 {
2154 rtx temp;
2155 rtx fixeddest = SET_DEST (x);
2156
2157 /* A STRICT_LOW_PART around a MEM can be discarded. */
2158 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2159 fixeddest = XEXP (fixeddest, 0);
2160 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2161 if (GET_CODE (fixeddest) == SUBREG)
2162 {
2163 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2164 promoted_mode = GET_MODE (fixeddest);
2165 }
2166 else
2167 fixeddest = fixup_stack_1 (fixeddest, insn);
2168
2169 temp = gen_reg_rtx (promoted_mode);
2170
2171 emit_insn_after (gen_move_insn (fixeddest,
2172 gen_lowpart (GET_MODE (fixeddest),
2173 temp)),
2174 insn);
2175
2176 SET_DEST (x) = temp;
2177 }
2178 }
2179 }
2180
2181 /* Nothing special about this RTX; fix its operands. */
2182
2183 fmt = GET_RTX_FORMAT (code);
2184 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2185 {
2186 if (fmt[i] == 'e')
2187 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2188 if (fmt[i] == 'E')
2189 {
2190 register int j;
2191 for (j = 0; j < XVECLEN (x, i); j++)
2192 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2193 insn, replacements);
2194 }
2195 }
2196 }
2197 \f
2198 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2199 return an rtx (MEM:m1 newaddr) which is equivalent.
2200 If any insns must be emitted to compute NEWADDR, put them before INSN.
2201
2202 UNCRITICAL nonzero means accept paradoxical subregs.
2203 This is used for subregs found inside REG_NOTES. */
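/* A sketch of the transformation, assuming a little-endian target
   with 4-byte words:

	(subreg:SI (mem:DI (reg:SI 100)) 1)

   becomes

	(mem:SI (plus:SI (reg:SI 100) (const_int 4)))

   with any insns needed to compute the new address emitted before
   INSN.  */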
2204
2205 static rtx
2206 fixup_memory_subreg (x, insn, uncritical)
2207 rtx x;
2208 rtx insn;
2209 int uncritical;
2210 {
2211 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2212 rtx addr = XEXP (SUBREG_REG (x), 0);
2213 enum machine_mode mode = GET_MODE (x);
2214 rtx saved, result;
2215
2216 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2217 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2218 && ! uncritical)
2219 abort ();
2220
2221 if (BYTES_BIG_ENDIAN)
2222 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2223 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2224 addr = plus_constant (addr, offset);
2225 if (!flag_force_addr && memory_address_p (mode, addr))
2226 /* Shortcut if no insns need be emitted. */
2227 return change_address (SUBREG_REG (x), mode, addr);
2228 start_sequence ();
2229 result = change_address (SUBREG_REG (x), mode, addr);
2230 emit_insn_before (gen_sequence (), insn);
2231 end_sequence ();
2232 return result;
2233 }
2234
2235 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2236 Replace subexpressions of X in place.
2237 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2238 Otherwise return X, with its contents possibly altered.
2239
2240 If any insns must be emitted to compute NEWADDR, put them before INSN.
2241
2242 UNCRITICAL is as in fixup_memory_subreg. */
2243
2244 static rtx
2245 walk_fixup_memory_subreg (x, insn, uncritical)
2246 register rtx x;
2247 rtx insn;
2248 int uncritical;
2249 {
2250 register enum rtx_code code;
2251 register char *fmt;
2252 register int i;
2253
2254 if (x == 0)
2255 return 0;
2256
2257 code = GET_CODE (x);
2258
2259 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2260 return fixup_memory_subreg (x, insn, uncritical);
2261
2262 /* Nothing special about this RTX; fix its operands. */
2263
2264 fmt = GET_RTX_FORMAT (code);
2265 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2266 {
2267 if (fmt[i] == 'e')
2268 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2269 if (fmt[i] == 'E')
2270 {
2271 register int j;
2272 for (j = 0; j < XVECLEN (x, i); j++)
2273 XVECEXP (x, i, j)
2274 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2275 }
2276 }
2277 return x;
2278 }
2279 \f
2280 /* For each memory ref within X, if it refers to a stack slot
2281 with an out of range displacement, put the address in a temp register
2282 (emitting new insns before INSN to load these registers)
2283 and alter the memory ref to use that register.
2284 Replace each such MEM rtx with a copy, to avoid clobberage. */
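/* Purely for illustration: on a machine that only accepts small
   displacements, something like

	(mem:SI (plus:SI (reg virtual-stack-vars) (const_int 8000)))

   has its address computed into a temporary register before INSN
   and becomes (mem:SI (reg:SI temp)).  */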
2285
2286 static rtx
2287 fixup_stack_1 (x, insn)
2288 rtx x;
2289 rtx insn;
2290 {
2291 register int i;
2292 register RTX_CODE code = GET_CODE (x);
2293 register char *fmt;
2294
2295 if (code == MEM)
2296 {
2297 register rtx ad = XEXP (x, 0);
2298 /* If we have the address of a stack slot but it's not valid
2299 (the displacement is too large), compute the sum in a register. */
2300 if (GET_CODE (ad) == PLUS
2301 && GET_CODE (XEXP (ad, 0)) == REG
2302 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2303 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2304 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2305 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2306 {
2307 rtx temp, seq;
2308 if (memory_address_p (GET_MODE (x), ad))
2309 return x;
2310
2311 start_sequence ();
2312 temp = copy_to_reg (ad);
2313 seq = gen_sequence ();
2314 end_sequence ();
2315 emit_insn_before (seq, insn);
2316 return change_address (x, VOIDmode, temp);
2317 }
2318 return x;
2319 }
2320
2321 fmt = GET_RTX_FORMAT (code);
2322 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2323 {
2324 if (fmt[i] == 'e')
2325 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2326 if (fmt[i] == 'E')
2327 {
2328 register int j;
2329 for (j = 0; j < XVECLEN (x, i); j++)
2330 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2331 }
2332 }
2333 return x;
2334 }
2335 \f
2336 /* Optimization: a bit-field instruction whose field
2337 happens to be a byte or halfword in memory
2338 can be changed to a move instruction.
2339
2340 We call here when INSN is an insn to examine or store into a bit-field.
2341 BODY is the SET-rtx to be altered.
2342
2343 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2344 (Currently this is called only from function.c, and EQUIV_MEM
2345 is always 0.) */
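/* A hypothetical example: extracting the eight bits at bit
   position 8 of a word in memory,

	(set (reg:SI 100)
	     (zero_extract:SI (mem:SI (reg:SI 101))
			      (const_int 8) (const_int 8)))

   can, when bits and bytes are counted the same way, be done with a
   QImode load from (mem:QI (plus:SI (reg:SI 101) (const_int 1))),
   zero-extended into the destination by convert_move.  */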
2346
2347 static void
2348 optimize_bit_field (body, insn, equiv_mem)
2349 rtx body;
2350 rtx insn;
2351 rtx *equiv_mem;
2352 {
2353 register rtx bitfield;
2354 int destflag;
2355 rtx seq = 0;
2356 enum machine_mode mode;
2357
2358 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2359 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2360 bitfield = SET_DEST (body), destflag = 1;
2361 else
2362 bitfield = SET_SRC (body), destflag = 0;
2363
2364 /* First check that the field being stored has constant size and position
2365 and is in fact a byte or halfword suitably aligned. */
2366
2367 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2368 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2369 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2370 != BLKmode)
2371 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2372 {
2373 register rtx memref = 0;
2374
2375 /* Now check that the containing word is memory, not a register,
2376 and that it is safe to change the machine mode. */
2377
2378 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2379 memref = XEXP (bitfield, 0);
2380 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2381 && equiv_mem != 0)
2382 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2383 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2384 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2385 memref = SUBREG_REG (XEXP (bitfield, 0));
2386 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2387 && equiv_mem != 0
2388 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2389 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2390
2391 if (memref
2392 && ! mode_dependent_address_p (XEXP (memref, 0))
2393 && ! MEM_VOLATILE_P (memref))
2394 {
2395 /* Now adjust the address, first for any subreg'ing
2396 that we are now getting rid of,
2397 and then for which byte of the word is wanted. */
2398
2399 register int offset = INTVAL (XEXP (bitfield, 2));
2400 rtx insns;
2401
2402 /* Adjust OFFSET to count bits from low-address byte. */
2403 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2404 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2405 - offset - INTVAL (XEXP (bitfield, 1)));
2406
2407 /* Adjust OFFSET to count bytes from low-address byte. */
2408 offset /= BITS_PER_UNIT;
2409 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2410 {
2411 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2412 if (BYTES_BIG_ENDIAN)
2413 offset -= (MIN (UNITS_PER_WORD,
2414 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2415 - MIN (UNITS_PER_WORD,
2416 GET_MODE_SIZE (GET_MODE (memref))));
2417 }
2418
2419 start_sequence ();
2420 memref = change_address (memref, mode,
2421 plus_constant (XEXP (memref, 0), offset));
2422 insns = get_insns ();
2423 end_sequence ();
2424 emit_insns_before (insns, insn);
2425
2426 /* Store this memory reference where
2427 we found the bit field reference. */
2428
2429 if (destflag)
2430 {
2431 validate_change (insn, &SET_DEST (body), memref, 1);
2432 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2433 {
2434 rtx src = SET_SRC (body);
2435 while (GET_CODE (src) == SUBREG
2436 && SUBREG_WORD (src) == 0)
2437 src = SUBREG_REG (src);
2438 if (GET_MODE (src) != GET_MODE (memref))
2439 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2440 validate_change (insn, &SET_SRC (body), src, 1);
2441 }
2442 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2443 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2444 /* This shouldn't happen because anything that didn't have
2445 one of these modes should have got converted explicitly
2446 and then referenced through a subreg.
2447 This is so because the original bit-field was
2448 handled by agg_mode and so its tree structure had
2449 the same mode that memref now has. */
2450 abort ();
2451 }
2452 else
2453 {
2454 rtx dest = SET_DEST (body);
2455
2456 while (GET_CODE (dest) == SUBREG
2457 && SUBREG_WORD (dest) == 0
2458 && (GET_MODE_CLASS (GET_MODE (dest))
2459 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2460 dest = SUBREG_REG (dest);
2461
2462 validate_change (insn, &SET_DEST (body), dest, 1);
2463
2464 if (GET_MODE (dest) == GET_MODE (memref))
2465 validate_change (insn, &SET_SRC (body), memref, 1);
2466 else
2467 {
2468 /* Convert the mem ref to the destination mode. */
2469 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2470
2471 start_sequence ();
2472 convert_move (newreg, memref,
2473 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2474 seq = get_insns ();
2475 end_sequence ();
2476
2477 validate_change (insn, &SET_SRC (body), newreg, 1);
2478 }
2479 }
2480
2481 /* See if we can convert this extraction or insertion into
2482 a simple move insn. We might not be able to do so if this
2483 was, for example, part of a PARALLEL.
2484
2485 If we succeed, write out any needed conversions. If we fail,
2486 it is hard to guess why we failed, so don't do anything
2487 special; just let the optimization be suppressed. */
2488
2489 if (apply_change_group () && seq)
2490 emit_insns_before (seq, insn);
2491 }
2492 }
2493 }
2494 \f
2495 /* These routines are responsible for converting virtual register references
2496 to the actual hard register references once RTL generation is complete.
2497
2498 The following four variables are used for communication between the
2499 routines. They contain the offsets of the virtual registers from their
2500 respective hard registers. */
2501
2502 static int in_arg_offset;
2503 static int var_offset;
2504 static int dynamic_offset;
2505 static int out_arg_offset;
2506
2507 /* On most machines, the stack pointer register is equivalent to the bottom
2508 of the stack. */
2509
2510 #ifndef STACK_POINTER_OFFSET
2511 #define STACK_POINTER_OFFSET 0
2512 #endif
2513
2514 /* If not defined, pick an appropriate default for the offset of dynamically
2515 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2516 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2517
2518 #ifndef STACK_DYNAMIC_OFFSET
2519
2520 #ifdef ACCUMULATE_OUTGOING_ARGS
2521 /* The bottom of the stack points to the actual arguments. If
2522 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2523 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2524 stack space for register parameters is not pushed by the caller, but
2525 rather is part of the fixed stack areas and hence not included in
2526 `current_function_outgoing_args_size'. Nevertheless, we must allow
2527 for it when allocating dynamic stack objects. */
2528
2529 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2530 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2531 (current_function_outgoing_args_size \
2532 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2533
2534 #else
2535 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2536 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2537 #endif
2538
2539 #else
2540 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2541 #endif
2542 #endif
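/* Illustrative arithmetic for the first definition above: with
   current_function_outgoing_args_size == 16,
   REG_PARM_STACK_SPACE (FNDECL) == 24 and STACK_POINTER_OFFSET == 0,
   dynamically allocated memory would start 40 bytes above the
   stack pointer.  */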
2543
2544 /* Pass through the INSNS of function FNDECL and convert virtual register
2545 references to hard register references. */
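/* As a sketch (the offsets are target-dependent): a reference such as

	(plus:SI (reg virtual-stack-vars) (const_int 8))

   ends up as

	(plus:SI (reg frame-pointer) (const_int STARTING_FRAME_OFFSET + 8))

   and similarly for the incoming-args, stack-dynamic and
   outgoing-args virtual registers.  */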
2546
2547 void
2548 instantiate_virtual_regs (fndecl, insns)
2549 tree fndecl;
2550 rtx insns;
2551 {
2552 rtx insn;
2553
2554 /* Compute the offsets to use for this function. */
2555 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2556 var_offset = STARTING_FRAME_OFFSET;
2557 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2558 out_arg_offset = STACK_POINTER_OFFSET;
2559
2560 /* Scan all variables and parameters of this function. For each that is
2561 in memory, instantiate all virtual registers if the result is a valid
2562 address. If not, we do it later. That will handle most uses of virtual
2563 regs on many machines. */
2564 instantiate_decls (fndecl, 1);
2565
2566 /* Initialize recognition, indicating that volatile is OK. */
2567 init_recog ();
2568
2569 /* Scan through all the insns, instantiating every virtual register still
2570 present. */
2571 for (insn = insns; insn; insn = NEXT_INSN (insn))
2572 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2573 || GET_CODE (insn) == CALL_INSN)
2574 {
2575 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2576 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2577 }
2578
2579 /* Now instantiate the remaining register equivalences for debugging info.
2580 These will not be valid addresses. */
2581 instantiate_decls (fndecl, 0);
2582
2583 /* Indicate that, from now on, assign_stack_local should use
2584 frame_pointer_rtx. */
2585 virtuals_instantiated = 1;
2586 }
2587
2588 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2589 all virtual registers in their DECL_RTL's.
2590
2591 If VALID_ONLY, do this only if the resulting address is still valid.
2592 Otherwise, always do it. */
2593
2594 static void
2595 instantiate_decls (fndecl, valid_only)
2596 tree fndecl;
2597 int valid_only;
2598 {
2599 tree decl;
2600
2601 if (DECL_SAVED_INSNS (fndecl))
2602 /* When compiling an inline function, the obstack used for
2603 rtl allocation is the maybepermanent_obstack. Calling
2604 `resume_temporary_allocation' switches us back to that
2605 obstack while we process this function's parameters. */
2606 resume_temporary_allocation ();
2607
2608 /* Process all parameters of the function. */
2609 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2610 {
2611 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2612 valid_only);
2613 instantiate_decl (DECL_INCOMING_RTL (decl),
2614 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2615 }
2616
2617 /* Now process all variables defined in the function or its subblocks. */
2618 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2619
2620 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2621 {
2622 /* Save all rtl allocated for this function by raising the
2623 high-water mark on the maybepermanent_obstack. */
2624 preserve_data ();
2625 /* All further rtl allocation is now done in the current_obstack. */
2626 rtl_in_current_obstack ();
2627 }
2628 }
2629
2630 /* Subroutine of instantiate_decls: Process all decls in the given
2631 BLOCK node and all its subblocks. */
2632
2633 static void
2634 instantiate_decls_1 (let, valid_only)
2635 tree let;
2636 int valid_only;
2637 {
2638 tree t;
2639
2640 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2641 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2642 valid_only);
2643
2644 /* Process all subblocks. */
2645 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2646 instantiate_decls_1 (t, valid_only);
2647 }
2648
2649 /* Subroutine of the preceding procedures: Given RTL representing a
2650 decl and the size of the object, do any instantiation required.
2651
2652 If VALID_ONLY is non-zero, it means that the RTL should only be
2653 changed if the new address is valid. */
2654
2655 static void
2656 instantiate_decl (x, size, valid_only)
2657 rtx x;
2658 int size;
2659 int valid_only;
2660 {
2661 enum machine_mode mode;
2662 rtx addr;
2663
2664 /* If this is not a MEM, no need to do anything. Similarly if the
2665 address is a constant or a register that is not a virtual register. */
2666
2667 if (x == 0 || GET_CODE (x) != MEM)
2668 return;
2669
2670 addr = XEXP (x, 0);
2671 if (CONSTANT_P (addr)
2672 || (GET_CODE (addr) == REG
2673 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2674 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2675 return;
2676
2677 /* If we should only do this if the address is valid, copy the address.
2678 We need to do this so we can undo any changes that might make the
2679 address invalid. This copy is unfortunate, but probably can't be
2680 avoided. */
2681
2682 if (valid_only)
2683 addr = copy_rtx (addr);
2684
2685 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2686
2687 if (valid_only)
2688 {
2689 /* Now verify that the resulting address is valid for every integer or
2690 floating-point mode up to and including SIZE bytes long. We do this
2691 since the object might be accessed in any mode and frame addresses
2692 are shared. */
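/* E.g. for an 8-byte object this checks QImode, HImode, SImode and
   DImode addresses, then SFmode and DFmode; if any check fails, the
   address is left alone for now and the VALID_ONLY == 0 pass picks
   it up later.  */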
2693
2694 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2695 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2696 mode = GET_MODE_WIDER_MODE (mode))
2697 if (! memory_address_p (mode, addr))
2698 return;
2699
2700 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2701 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2702 mode = GET_MODE_WIDER_MODE (mode))
2703 if (! memory_address_p (mode, addr))
2704 return;
2705 }
2706
2707 /* Put back the address now that we have updated it and we either know
2708 it is valid or we don't care whether it is valid. */
2709
2710 XEXP (x, 0) = addr;
2711 }
2712 \f
2713 /* Given a pointer to a piece of rtx and an optional pointer to the
2714 containing object, instantiate any virtual registers present in it.
2715
2716 If EXTRA_INSNS, we always do the replacement and generate
2717 any extra insns before OBJECT. If it is zero, we do nothing if replacement
2718 is not valid.
2719
2720 Return 1 if we either had nothing to do or if we were able to do the
2721 needed replacement. Return 0 otherwise; we only return zero if
2722 EXTRA_INSNS is zero.
2723
2724 We first try some simple transformations to avoid the creation of extra
2725 pseudos. */
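/* Schematically: for a bare (reg virtual-incoming-args) we first try
   validate_change with (plus (reg arg-pointer) (const_int in_arg_offset));
   if the insn will not accept that and EXTRA_INSNS is nonzero, the
   sum is computed into a new pseudo before OBJECT and that pseudo
   is substituted instead.  */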
2726
2727 static int
2728 instantiate_virtual_regs_1 (loc, object, extra_insns)
2729 rtx *loc;
2730 rtx object;
2731 int extra_insns;
2732 {
2733 rtx x;
2734 RTX_CODE code;
2735 rtx new = 0;
2736 int offset;
2737 rtx temp;
2738 rtx seq;
2739 int i, j;
2740 char *fmt;
2741
2742 /* Re-start here to avoid recursion in common cases. */
2743 restart:
2744
2745 x = *loc;
2746 if (x == 0)
2747 return 1;
2748
2749 code = GET_CODE (x);
2750
2751 /* Check for some special cases. */
2752 switch (code)
2753 {
2754 case CONST_INT:
2755 case CONST_DOUBLE:
2756 case CONST:
2757 case SYMBOL_REF:
2758 case CODE_LABEL:
2759 case PC:
2760 case CC0:
2761 case ASM_INPUT:
2762 case ADDR_VEC:
2763 case ADDR_DIFF_VEC:
2764 case RETURN:
2765 return 1;
2766
2767 case SET:
2768 /* We are allowed to set the virtual registers. This means that
2769 the actual register should receive the source minus the
2770 appropriate offset. This is used, for example, in the handling
2771 of non-local gotos. */
2772 if (SET_DEST (x) == virtual_incoming_args_rtx)
2773 new = arg_pointer_rtx, offset = - in_arg_offset;
2774 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2775 new = frame_pointer_rtx, offset = - var_offset;
2776 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2777 new = stack_pointer_rtx, offset = - dynamic_offset;
2778 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2779 new = stack_pointer_rtx, offset = - out_arg_offset;
2780
2781 if (new)
2782 {
2783 /* The only valid sources here are PLUS or REG. Just do
2784 the simplest possible thing to handle them. */
2785 if (GET_CODE (SET_SRC (x)) != REG
2786 && GET_CODE (SET_SRC (x)) != PLUS)
2787 abort ();
2788
2789 start_sequence ();
2790 if (GET_CODE (SET_SRC (x)) != REG)
2791 temp = force_operand (SET_SRC (x), NULL_RTX);
2792 else
2793 temp = SET_SRC (x);
2794 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2795 seq = get_insns ();
2796 end_sequence ();
2797
2798 emit_insns_before (seq, object);
2799 SET_DEST (x) = new;
2800
2801 if (!validate_change (object, &SET_SRC (x), temp, 0)
2802 || ! extra_insns)
2803 abort ();
2804
2805 return 1;
2806 }
2807
2808 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2809 loc = &SET_SRC (x);
2810 goto restart;
2811
2812 case PLUS:
2813 /* Handle special case of virtual register plus constant. */
2814 if (CONSTANT_P (XEXP (x, 1)))
2815 {
2816 rtx old, new_offset;
2817
2818 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2819 if (GET_CODE (XEXP (x, 0)) == PLUS)
2820 {
2821 rtx inner = XEXP (XEXP (x, 0), 0);
2822
2823 if (inner == virtual_incoming_args_rtx)
2824 new = arg_pointer_rtx, offset = in_arg_offset;
2825 else if (inner == virtual_stack_vars_rtx)
2826 new = frame_pointer_rtx, offset = var_offset;
2827 else if (inner == virtual_stack_dynamic_rtx)
2828 new = stack_pointer_rtx, offset = dynamic_offset;
2829 else if (inner == virtual_outgoing_args_rtx)
2830 new = stack_pointer_rtx, offset = out_arg_offset;
2831 else
2832 {
2833 loc = &XEXP (x, 0);
2834 goto restart;
2835 }
2836
2837 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2838 extra_insns);
2839 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2840 }
2841
2842 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2843 new = arg_pointer_rtx, offset = in_arg_offset;
2844 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2845 new = frame_pointer_rtx, offset = var_offset;
2846 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2847 new = stack_pointer_rtx, offset = dynamic_offset;
2848 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2849 new = stack_pointer_rtx, offset = out_arg_offset;
2850 else
2851 {
2852 /* We know the second operand is a constant. Unless the
2853 first operand is a REG (which has already been checked),
2854 it needs to be checked. */
2855 if (GET_CODE (XEXP (x, 0)) != REG)
2856 {
2857 loc = &XEXP (x, 0);
2858 goto restart;
2859 }
2860 return 1;
2861 }
2862
2863 new_offset = plus_constant (XEXP (x, 1), offset);
2864
2865 /* If the new constant is zero, try to replace the sum with just
2866 the register. */
2867 if (new_offset == const0_rtx
2868 && validate_change (object, loc, new, 0))
2869 return 1;
2870
2871 /* Next try to replace the register and new offset.
2872 There are two changes to validate here and we can't assume that
2873 in the case where the old offset equals the new one, just changing
2874 the register will yield a valid insn. In the interests of a little
2875 efficiency, however, we only call validate_change once (we don't
2876 changes and then call apply_change_group). */
2877
2878 old = XEXP (x, 0);
2879 if (offset == 0
2880 ? ! validate_change (object, &XEXP (x, 0), new, 0)
2881 : (XEXP (x, 0) = new,
2882 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
2883 {
2884 if (! extra_insns)
2885 {
2886 XEXP (x, 0) = old;
2887 return 0;
2888 }
2889
2890 /* Otherwise copy the new constant into a register and replace
2891 the constant with that register. */
2892 temp = gen_reg_rtx (Pmode);
2893 XEXP (x, 0) = new;
2894 if (validate_change (object, &XEXP (x, 1), temp, 0))
2895 emit_insn_before (gen_move_insn (temp, new_offset), object);
2896 else
2897 {
2898 /* If that didn't work, replace this expression with a
2899 register containing the sum. */
2900
2901 XEXP (x, 0) = old;
2902 new = gen_rtx (PLUS, Pmode, new, new_offset);
2903
2904 start_sequence ();
2905 temp = force_operand (new, NULL_RTX);
2906 seq = get_insns ();
2907 end_sequence ();
2908
2909 emit_insns_before (seq, object);
2910 if (! validate_change (object, loc, temp, 0)
2911 && ! validate_replace_rtx (x, temp, object))
2912 abort ();
2913 }
2914 }
2915
2916 return 1;
2917 }
2918
2919 /* Fall through to generic two-operand expression case. */
2920 case EXPR_LIST:
2921 case CALL:
2922 case COMPARE:
2923 case MINUS:
2924 case MULT:
2925 case DIV: case UDIV:
2926 case MOD: case UMOD:
2927 case AND: case IOR: case XOR:
2928 case ROTATERT: case ROTATE:
2929 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2930 case NE: case EQ:
2931 case GE: case GT: case GEU: case GTU:
2932 case LE: case LT: case LEU: case LTU:
2933 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2934 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2935 loc = &XEXP (x, 0);
2936 goto restart;
2937
2938 case MEM:
2939 /* Most cases of MEM that convert to valid addresses have already been
2940 handled by our scan of regno_reg_rtx. The only special handling we
2941 need here is to make a copy of the rtx to ensure it isn't being
2942 shared if we have to change it to a pseudo.
2943
2944 If the rtx is a simple reference to an address via a virtual register,
2945 it can potentially be shared. In such cases, first try to make it
2946 a valid address, which can also be shared. Otherwise, copy it and
2947 proceed normally.
2948
2949 First check for common cases that need no processing. These are
2950 usually due to instantiation already being done on a previous instance
2951 of a shared rtx. */
2952
2953 temp = XEXP (x, 0);
2954 if (CONSTANT_ADDRESS_P (temp)
2955 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2956 || temp == arg_pointer_rtx
2957 #endif
2958 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2959 || temp == hard_frame_pointer_rtx
2960 #endif
2961 || temp == frame_pointer_rtx)
2962 return 1;
2963
2964 if (GET_CODE (temp) == PLUS
2965 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2966 && (XEXP (temp, 0) == frame_pointer_rtx
2967 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2968 || XEXP (temp, 0) == hard_frame_pointer_rtx
2969 #endif
2970 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2971 || XEXP (temp, 0) == arg_pointer_rtx
2972 #endif
2973 ))
2974 return 1;
2975
2976 if (temp == virtual_stack_vars_rtx
2977 || temp == virtual_incoming_args_rtx
2978 || (GET_CODE (temp) == PLUS
2979 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2980 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2981 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2982 {
2983 /* This MEM may be shared. If the substitution can be done without
2984 the need to generate new pseudos, we want to do it in place
2985 so all copies of the shared rtx benefit. The call below will
2986 only make substitutions if the resulting address is still
2987 valid.
2988
2989 Note that we cannot pass X as the object in the recursive call
2990 since the insn being processed may not allow all valid
2991 addresses. However, if we were not passed an object, we can
2992 only modify X without copying it if X will have a valid
2993 address.
2994
2995 ??? Also note that this can still lose if OBJECT is an insn that
2996 has fewer restrictions on an address than some other insn.
2997 In that case, we will modify the shared address. This case
2998 doesn't seem very likely, though. */
2999
3000 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3001 object ? object : x, 0))
3002 return 1;
3003
3004 /* Otherwise make a copy and process that copy. We copy the entire
3005 RTL expression since it might be a PLUS which could also be
3006 shared. */
3007 *loc = x = copy_rtx (x);
3008 }
3009
3010 /* Fall through to generic unary operation case. */
3011 case USE:
3012 case CLOBBER:
3013 case SUBREG:
3014 case STRICT_LOW_PART:
3015 case NEG: case NOT:
3016 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3017 case SIGN_EXTEND: case ZERO_EXTEND:
3018 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3019 case FLOAT: case FIX:
3020 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3021 case ABS:
3022 case SQRT:
3023 case FFS:
3024 /* These cases either have just one operand or we know that we need not
3025 check the rest of the operands. */
3026 loc = &XEXP (x, 0);
3027 goto restart;
3028
3029 case REG:
3030 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3031 in front of this insn and substitute the temporary. */
3032 if (x == virtual_incoming_args_rtx)
3033 new = arg_pointer_rtx, offset = in_arg_offset;
3034 else if (x == virtual_stack_vars_rtx)
3035 new = frame_pointer_rtx, offset = var_offset;
3036 else if (x == virtual_stack_dynamic_rtx)
3037 new = stack_pointer_rtx, offset = dynamic_offset;
3038 else if (x == virtual_outgoing_args_rtx)
3039 new = stack_pointer_rtx, offset = out_arg_offset;
3040
3041 if (new)
3042 {
3043 temp = plus_constant (new, offset);
3044 if (!validate_change (object, loc, temp, 0))
3045 {
3046 if (! extra_insns)
3047 return 0;
3048
3049 start_sequence ();
3050 temp = force_operand (temp, NULL_RTX);
3051 seq = get_insns ();
3052 end_sequence ();
3053
3054 emit_insns_before (seq, object);
3055 if (! validate_change (object, loc, temp, 0)
3056 && ! validate_replace_rtx (x, temp, object))
3057 abort ();
3058 }
3059 }
3060
3061 return 1;
3062 }
3063
3064 /* Scan all subexpressions. */
3065 fmt = GET_RTX_FORMAT (code);
3066 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3067 if (*fmt == 'e')
3068 {
3069 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3070 return 0;
3071 }
3072 else if (*fmt == 'E')
3073 for (j = 0; j < XVECLEN (x, i); j++)
3074 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3075 extra_insns))
3076 return 0;
3077
3078 return 1;
3079 }
3080 \f
3081 /* Optimization: assuming this function does not receive nonlocal gotos,
3082 delete the handlers for such, as well as the insns to establish
3083 and disestablish them. */
3084
3085 static void
3086 delete_handlers ()
3087 {
3088 rtx insn;
3089 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3090 {
3091 /* Delete the handler by turning off the flag that would
3092 prevent jump_optimize from deleting it.
3093 Also permit deletion of the nonlocal labels themselves
3094 if nothing local refers to them. */
3095 if (GET_CODE (insn) == CODE_LABEL)
3096 {
3097 tree t, last_t;
3098
3099 LABEL_PRESERVE_P (insn) = 0;
3100
3101 /* Remove it from the nonlocal_label list, to avoid confusing
3102 flow. */
3103 for (t = nonlocal_labels, last_t = 0; t;
3104 last_t = t, t = TREE_CHAIN (t))
3105 if (DECL_RTL (TREE_VALUE (t)) == insn)
3106 break;
3107 if (t)
3108 {
3109 if (! last_t)
3110 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3111 else
3112 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3113 }
3114 }
3115 if (GET_CODE (insn) == INSN
3116 && ((nonlocal_goto_handler_slot != 0
3117 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3118 || (nonlocal_goto_stack_level != 0
3119 && reg_mentioned_p (nonlocal_goto_stack_level,
3120 PATTERN (insn)))))
3121 delete_insn (insn);
3122 }
3123 }
3124
3125 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3126 of the current function. */
3127
3128 rtx
3129 nonlocal_label_rtx_list ()
3130 {
3131 tree t;
3132 rtx x = 0;
3133
3134 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3135 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3136
3137 return x;
3138 }
3139 \f
3140 /* Output a USE for any register use in RTL.
3141 This is used with -noreg to mark the extent of lifespan
3142 of any registers used in a user-visible variable's DECL_RTL. */
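/* E.g. a register variable living in pseudo 42 gets
   (use (reg:SI 42)) emitted, and a variable-sized structure whose
   DECL_RTL is (mem:BLK (reg:SI 43)) gets (use (reg:SI 43)).  */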
3143
3144 void
3145 use_variable (rtl)
3146 rtx rtl;
3147 {
3148 if (GET_CODE (rtl) == REG)
3149 /* This is a register variable. */
3150 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3151 else if (GET_CODE (rtl) == MEM
3152 && GET_CODE (XEXP (rtl, 0)) == REG
3153 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3154 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3155 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3156 /* This is a variable-sized structure. */
3157 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3158 }
3159
3160 /* Like use_variable except that it outputs the USEs after INSN
3161 instead of at the end of the insn-chain. */
3162
3163 void
3164 use_variable_after (rtl, insn)
3165 rtx rtl, insn;
3166 {
3167 if (GET_CODE (rtl) == REG)
3168 /* This is a register variable. */
3169 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3170 else if (GET_CODE (rtl) == MEM
3171 && GET_CODE (XEXP (rtl, 0)) == REG
3172 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3173 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3174 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3175 /* This is a variable-sized structure. */
3176 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3177 }
3178 \f
3179 int
3180 max_parm_reg_num ()
3181 {
3182 return max_parm_reg;
3183 }
3184
3185 /* Return the first insn following those generated by `assign_parms'. */
3186
3187 rtx
3188 get_first_nonparm_insn ()
3189 {
3190 if (last_parm_insn)
3191 return NEXT_INSN (last_parm_insn);
3192 return get_insns ();
3193 }
3194
3195 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3196 Crash if there is none. */
3197
3198 rtx
3199 get_first_block_beg ()
3200 {
3201 register rtx searcher;
3202 register rtx insn = get_first_nonparm_insn ();
3203
3204 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3205 if (GET_CODE (searcher) == NOTE
3206 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3207 return searcher;
3208
3209 abort (); /* Invalid call to this function. (See comments above.) */
3210 return NULL_RTX;
3211 }
3212
3213 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3214 This means a type for which function calls must pass an address to the
3215 function or get an address back from the function.
3216 EXP may be a type node or an expression (whose type is tested). */
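/* A worked example: given

	struct s { int a, b; };
	struct s f ();

   the return value of `f' is forced into memory (we return 1) under
   -fpcc-struct-return, or whenever RETURN_IN_MEMORY says so, while a
   plain `int f ();' normally yields 0 here.  */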
3217
3218 int
3219 aggregate_value_p (exp)
3220 tree exp;
3221 {
3222 int i, regno, nregs;
3223 rtx reg;
3224 tree type;
3225 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3226 type = exp;
3227 else
3228 type = TREE_TYPE (exp);
3229
3230 if (RETURN_IN_MEMORY (type))
3231 return 1;
3232 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3233 and thus can't be returned in registers. */
3234 if (TREE_ADDRESSABLE (type))
3235 return 1;
3236 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3237 return 1;
3238 /* Make sure we have suitable call-clobbered regs to return
3239 the value in; if not, we must return it in memory. */
3240 reg = hard_function_value (type, 0);
3241
3242 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3243 it is OK. */
3244 if (GET_CODE (reg) != REG)
3245 return 0;
3246
3247 regno = REGNO (reg);
3248 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3249 for (i = 0; i < nregs; i++)
3250 if (! call_used_regs[regno + i])
3251 return 1;
3252 return 0;
3253 }
3254 \f
3255 /* Assign RTL expressions to the function's parameters.
3256 This may involve copying them into registers and using
3257 those registers as the RTL for them.
3258
3259 If SECOND_TIME is non-zero it means that this function is being
3260 called a second time. This is done by integrate.c when a function's
3261 compilation is deferred. We need to come back here in case the
3262 FUNCTION_ARG macro computes items needed for the rest of the compilation
3263 (such as changing which registers are fixed or caller-saved). But suppress
3264 writing any insns or setting DECL_RTL of anything in this case. */
3265
3266 void
3267 assign_parms (fndecl, second_time)
3268 tree fndecl;
3269 int second_time;
3270 {
3271 register tree parm;
3272 register rtx entry_parm = 0;
3273 register rtx stack_parm = 0;
3274 CUMULATIVE_ARGS args_so_far;
3275 enum machine_mode promoted_mode, passed_mode;
3276 enum machine_mode nominal_mode, promoted_nominal_mode;
3277 int unsignedp;
3278 /* Total space needed so far for args on the stack,
3279 given as a constant and a tree-expression. */
3280 struct args_size stack_args_size;
3281 tree fntype = TREE_TYPE (fndecl);
3282 tree fnargs = DECL_ARGUMENTS (fndecl);
3283 /* This is used for the arg pointer when referring to stack args. */
3284 rtx internal_arg_pointer;
3285 /* This is a dummy PARM_DECL that we used for the function result if
3286 the function returns a structure. */
3287 tree function_result_decl = 0;
3288 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
3289 int varargs_setup = 0;
3290 rtx conversion_insns = 0;
3291
3292 /* Nonzero if the last arg is named `__builtin_va_alist',
3293 which is used on some machines for old-fashioned non-ANSI varargs.h;
3294 this should be stuck onto the stack as if it had arrived there. */
3295 int hide_last_arg
3296 = (current_function_varargs
3297 && fnargs
3298 && (parm = tree_last (fnargs)) != 0
3299 && DECL_NAME (parm)
3300 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3301 "__builtin_va_alist")));
3302
3303 /* Nonzero if function takes extra anonymous args.
3304 This means the last named arg must be on the stack
3305 right before the anonymous ones. */
3306 int stdarg
3307 = (TYPE_ARG_TYPES (fntype) != 0
3308 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3309 != void_type_node));
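/* E.g. `int f (int a, ...)' makes STDARG nonzero, while
   `int f (int a)' has an argument-type list terminated by
   void_type_node and leaves it zero, as does an unprototyped
   function whose TYPE_ARG_TYPES is 0.  */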
3310
3311 current_function_stdarg = stdarg;
3312
3313 /* If the reg that the virtual arg pointer will be translated into is
3314 not a fixed reg or is the stack pointer, make a copy of the virtual
3315 arg pointer, and address parms via the copy. The frame pointer is
3316 considered fixed even though it is not marked as such.
3317
3318 The second time through, simply use ap to avoid generating rtx. */
3319
3320 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3321 || ! (fixed_regs[ARG_POINTER_REGNUM]
3322 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3323 && ! second_time)
3324 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3325 else
3326 internal_arg_pointer = virtual_incoming_args_rtx;
3327 current_function_internal_arg_pointer = internal_arg_pointer;
3328
3329 stack_args_size.constant = 0;
3330 stack_args_size.var = 0;
3331
3332 /* If struct value address is treated as the first argument, make it so. */
3333 if (aggregate_value_p (DECL_RESULT (fndecl))
3334 && ! current_function_returns_pcc_struct
3335 && struct_value_incoming_rtx == 0)
3336 {
3337 tree type = build_pointer_type (TREE_TYPE (fntype));
3338
3339 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3340
3341 DECL_ARG_TYPE (function_result_decl) = type;
3342 TREE_CHAIN (function_result_decl) = fnargs;
3343 fnargs = function_result_decl;
3344 }
3345
3346 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
3347 bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));
3348
3349 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3350 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3351 #else
3352 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3353 #endif
3354
3355 /* We haven't yet found an argument that we must push and pretend the
3356 caller did. */
3357 current_function_pretend_args_size = 0;
3358
3359 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3360 {
3361 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3362 struct args_size stack_offset;
3363 struct args_size arg_size;
3364 int passed_pointer = 0;
3365 int did_conversion = 0;
3366 tree passed_type = DECL_ARG_TYPE (parm);
3367 tree nominal_type = TREE_TYPE (parm);
3368
3369 /* Set LAST_NAMED if this is the last named arg before some
3370 anonymous args. We treat it as if it were anonymous too. */
3371 int last_named = ((TREE_CHAIN (parm) == 0
3372 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3373 && (stdarg || current_function_varargs));
3374
3375 if (TREE_TYPE (parm) == error_mark_node
3376 /* This can happen after weird syntax errors
3377 or if an enum type is defined among the parms. */
3378 || TREE_CODE (parm) != PARM_DECL
3379 || passed_type == NULL)
3380 {
3381 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3382 const0_rtx);
3383 TREE_USED (parm) = 1;
3384 continue;
3385 }
3386
3387 /* For a varargs.h function, save info about regs and stack space
3388 used by the individual args, not including the va_alist arg. */
3389 if (hide_last_arg && last_named)
3390 current_function_args_info = args_so_far;
3391
3392 /* Find mode of arg as it is passed, and mode of arg
3393 as it should be during execution of this function. */
3394 passed_mode = TYPE_MODE (passed_type);
3395 nominal_mode = TYPE_MODE (nominal_type);
3396
3397 /* If the parm's mode is VOID, its value doesn't matter,
3398 so avoid the usual things like emit_move_insn that could crash. */
3399 if (nominal_mode == VOIDmode)
3400 {
3401 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3402 continue;
3403 }
3404
3405 /* If the parm is to be passed as a transparent union, use the
3406 type of the first field for the tests below. We have already
3407 verified that the modes are the same. */
3408 if (DECL_TRANSPARENT_UNION (parm)
3409 || TYPE_TRANSPARENT_UNION (passed_type))
3410 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3411
3412 /* See if this arg was passed by invisible reference. It is if
3413 it is an object whose size depends on the contents of the
3414 object itself or if the machine requires these objects be passed
3415 that way. */
3416
3417 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3418 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3419 || TREE_ADDRESSABLE (passed_type)
3420 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3421 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3422 passed_type, ! last_named)
3423 #endif
3424 )
3425 {
3426 passed_type = nominal_type = build_pointer_type (passed_type);
3427 passed_pointer = 1;
3428 passed_mode = nominal_mode = Pmode;
3429 }
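/* For example (a hypothetical target): if FUNCTION_ARG_PASS_BY_REFERENCE
   says a large structure is passed by reference, the parm seen here is
   really a pointer; PASSED_TYPE becomes `struct s *' and both modes
   become Pmode.  */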
3430
3431 promoted_mode = passed_mode;
3432
3433 #ifdef PROMOTE_FUNCTION_ARGS
3434 /* Compute the mode to which the arg is actually extended. */
3435 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3436 #endif
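/* For example (hypothetical target values): on a machine that promotes
   args to word size, a `short' parm (HImode) arrives extended in
   SImode; promote_mode returns SImode and sets UNSIGNEDP from the
   signedness of PASSED_TYPE.  */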
3437
3438 /* Let machine desc say which reg (if any) the parm arrives in.
3439 0 means it arrives on the stack. */
3440 #ifdef FUNCTION_INCOMING_ARG
3441 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3442 passed_type, ! last_named);
3443 #else
3444 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3445 passed_type, ! last_named);
3446 #endif
3447
3448 if (entry_parm == 0)
3449 promoted_mode = passed_mode;
3450
3451 #ifdef SETUP_INCOMING_VARARGS
3452 /* If this is the last named parameter, do any required setup for
3453 varargs or stdargs. We need to know about the case of this being an
3454 addressable type, in which case we skip the registers it
3455 would have arrived in.
3456
3457 For stdarg, LAST_NAMED will be set for two parameters, the one that
3458 is actually the last named, and the dummy parameter. We only
3459 want to do this action once.
3460
3461 Also, indicate when RTL generation is to be suppressed. */
3462 if (last_named && !varargs_setup)
3463 {
3464 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3465 current_function_pretend_args_size,
3466 second_time);
3467 varargs_setup = 1;
3468 }
3469 #endif
3470
3471 /* Determine parm's home in the stack,
3472 in case it arrives in the stack or we should pretend it did.
3473
3474 Compute the stack position and rtx where the argument arrives
3475 and its size.
3476
3477 There is one complexity here: If this was a parameter that would
3478 have been passed in registers, but wasn't only because it is
3479 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3480 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3481 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3482 0 as it was the previous time. */
3483
3484 locate_and_pad_parm (promoted_mode, passed_type,
3485 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3486 1,
3487 #else
3488 #ifdef FUNCTION_INCOMING_ARG
3489 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3490 passed_type,
3491 (! last_named
3492 || varargs_setup)) != 0,
3493 #else
3494 FUNCTION_ARG (args_so_far, promoted_mode,
3495 passed_type,
3496 ! last_named || varargs_setup) != 0,
3497 #endif
3498 #endif
3499 fndecl, &stack_args_size, &stack_offset, &arg_size);
3500
3501 if (! second_time)
3502 {
3503 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3504
3505 if (offset_rtx == const0_rtx)
3506 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3507 else
3508 stack_parm = gen_rtx (MEM, promoted_mode,
3509 gen_rtx (PLUS, Pmode,
3510 internal_arg_pointer, offset_rtx));
3511
3512 /* If this is a memory ref that contains aggregate components,
3513 mark it as such for cse and loop optimize. Likewise if it
3514 is readonly. */
3515 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3516 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3517 }
3518
3519 /* If this parameter was passed both in registers and in the stack,
3520 use the copy on the stack. */
3521 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3522 entry_parm = 0;
3523
3524 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3525 /* If this parm was passed part in regs and part in memory,
3526 pretend it arrived entirely in memory
3527 by pushing the register-part onto the stack.
3528
3529 In the special case of a DImode or DFmode that is split,
3530 we could put it together in a pseudoreg directly,
3531 but for now that's not worth bothering with. */
3532
3533 if (entry_parm)
3534 {
3535 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3536 passed_type, ! last_named);
3537
3538 if (nregs > 0)
3539 {
3540 current_function_pretend_args_size
3541 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3542 / (PARM_BOUNDARY / BITS_PER_UNIT)
3543 * (PARM_BOUNDARY / BITS_PER_UNIT));
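/* A worked example with hypothetical values: if NREGS is 3,
   UNITS_PER_WORD is 4 and PARM_BOUNDARY is 64 bits (8 bytes), then
   ((12 + 8 - 1) / 8) * 8 == 16, i.e. the 12 bytes of register-passed
   data are rounded up to a 16 byte pretend area.  */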
3544
3545 if (! second_time)
3546 {
3547 /* Handle calls that pass values in multiple non-contiguous
3548 locations. The Irix 6 ABI has examples of this. */
3549 if (GET_CODE (entry_parm) == PARALLEL)
3550 emit_group_store (validize_mem (stack_parm),
3551 entry_parm);
3552 else
3553 move_block_from_reg (REGNO (entry_parm),
3554 validize_mem (stack_parm), nregs,
3555 int_size_in_bytes (TREE_TYPE (parm)));
3556 }
3557 entry_parm = stack_parm;
3558 }
3559 }
3560 #endif
3561
3562 /* If we didn't decide this parm came in a register,
3563 by default it came on the stack. */
3564 if (entry_parm == 0)
3565 entry_parm = stack_parm;
3566
3567 /* Record permanently how this parm was passed. */
3568 if (! second_time)
3569 DECL_INCOMING_RTL (parm) = entry_parm;
3570
3571 /* If there is actually space on the stack for this parm,
3572 count it in stack_args_size; otherwise set stack_parm to 0
3573 to indicate there is no preallocated stack slot for the parm. */
3574
3575 if (entry_parm == stack_parm
3576 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3577 /* On some machines, even if a parm value arrives in a register
3578 there is still an (uninitialized) stack slot allocated for it.
3579
3580 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3581 whether this parameter already has a stack slot allocated,
3582 because an arg block exists only if current_function_args_size
3583 is larger than some threshold, and we haven't calculated that
3584 yet. So, for now, we just assume that stack slots never exist
3585 in this case. */
3586 || REG_PARM_STACK_SPACE (fndecl) > 0
3587 #endif
3588 )
3589 {
3590 stack_args_size.constant += arg_size.constant;
3591 if (arg_size.var)
3592 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3593 }
3594 else
3595 /* No stack slot was pushed for this parm. */
3596 stack_parm = 0;
3597
3598 /* Update info on where the next arg arrives in registers. */
3599
3600 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3601 passed_type, ! last_named);
3602
3603 /* If this is our second time through, we are done with this parm. */
3604 if (second_time)
3605 continue;
3606
3607 /* If we can't trust the parm stack slot to be aligned enough
3608 for its ultimate type, don't use that slot after entry.
3609 We'll make another stack slot, if we need one. */
3610 {
3611 int thisparm_boundary
3612 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3613
3614 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3615 stack_parm = 0;
3616 }
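/* For example (hypothetical values): if NOMINAL_MODE is DFmode with
   GET_MODE_ALIGNMENT of 64 bits but FUNCTION_ARG_BOUNDARY only
   guarantees 32, the incoming slot may be misaligned, so we forget it
   here and let a properly aligned slot be made later if needed.  */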
3617
3618 /* If parm was passed in memory, and we need to convert it on entry,
3619 don't store it back in that same slot. */
3620 if (entry_parm != 0
3621 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3622 stack_parm = 0;
3623
3624 #if 0
3625 /* Now adjust STACK_PARM to the mode and precise location
3626 where this parameter should live during execution,
3627 if we discover that it must live in the stack during execution.
3628 To make debuggers happier on big-endian machines, we store
3629 the value in the last bytes of the space available. */
3630
3631 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3632 && stack_parm != 0)
3633 {
3634 rtx offset_rtx;
3635
3636 if (BYTES_BIG_ENDIAN
3637 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3638 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3639 - GET_MODE_SIZE (nominal_mode));
3640
3641 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3642 if (offset_rtx == const0_rtx)
3643 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3644 else
3645 stack_parm = gen_rtx (MEM, nominal_mode,
3646 gen_rtx (PLUS, Pmode,
3647 internal_arg_pointer, offset_rtx));
3648
3649 /* If this is a memory ref that contains aggregate components,
3650 mark it as such for cse and loop optimize. */
3651 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3652 }
3653 #endif /* 0 */
3654
3655 #ifdef STACK_REGS
3656 /* We need this "use" info, because the gcc-register->stack-register
3657 converter in reg-stack.c needs to know which registers are active
3658 at the start of the function call. The actual parameter loading
3659 instructions are not always available by then, since they might
3660 have been optimized away. */
3661
3662 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3663 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
3664 #endif
3665
3666 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3667 in the mode in which it arrives.
3668 STACK_PARM is an RTX for a stack slot where the parameter can live
3669 during the function (in case we want to put it there).
3670 STACK_PARM is 0 if no stack slot was pushed for it.
3671
3672 Now output code if necessary to convert ENTRY_PARM to
3673 the type in which this function declares it,
3674 and store that result in an appropriate place,
3675 which may be a pseudo reg, may be STACK_PARM,
3676 or may be a local stack slot if STACK_PARM is 0.
3677
3678 Set DECL_RTL to that place. */
3679
3680 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
3681 {
3682 /* If a BLKmode arrives in registers, copy it to a stack slot.
3683 Handle calls that pass values in multiple non-contiguous
3684 locations. The Irix 6 ABI has examples of this. */
3685 if (GET_CODE (entry_parm) == REG
3686 || GET_CODE (entry_parm) == PARALLEL)
3687 {
3688 int size_stored
3689 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3690 UNITS_PER_WORD);
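/* A worked example with hypothetical sizes: a 10 byte BLKmode parm
   with UNITS_PER_WORD == 4 gives CEIL_ROUND (10, 4)
   == (10 + 3) & ~3 == 12, i.e. three full words are stored.  */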
3691
3692 /* Note that we will be storing an integral number of words.
3693 So we have to be careful to ensure that we allocate an
3694 integral number of words. We do this below, in the call to
3695 assign_stack_local, if space was not allocated in the argument
3696 list. If it was, this will not work if PARM_BOUNDARY is not
3697 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3698 if it becomes a problem. */
3699
3700 if (stack_parm == 0)
3701 {
3702 stack_parm
3703 = assign_stack_local (GET_MODE (entry_parm),
3704 size_stored, 0);
3705
3706 /* If this is a memory ref that contains aggregate
3707 components, mark it as such for cse and loop optimize. */
3708 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3709 }
3710
3711 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3712 abort ();
3713
3714 if (TREE_READONLY (parm))
3715 RTX_UNCHANGING_P (stack_parm) = 1;
3716
3717 /* Handle calls that pass values in multiple non-contiguous
3718 locations. The Irix 6 ABI has examples of this. */
3719 if (GET_CODE (entry_parm) == PARALLEL)
3720 emit_group_store (validize_mem (stack_parm), entry_parm);
3721 else
3722 move_block_from_reg (REGNO (entry_parm),
3723 validize_mem (stack_parm),
3724 size_stored / UNITS_PER_WORD,
3725 int_size_in_bytes (TREE_TYPE (parm)));
3726 }
3727 DECL_RTL (parm) = stack_parm;
3728 }
3729 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3730 && ! DECL_INLINE (fndecl))
3731 /* layout_decl may set this. */
3732 || TREE_ADDRESSABLE (parm)
3733 || TREE_SIDE_EFFECTS (parm)
3734 /* If -ffloat-store specified, don't put explicit
3735 float variables into registers. */
3736 || (flag_float_store
3737 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3738 /* Always assign pseudo to structure return or item passed
3739 by invisible reference. */
3740 || passed_pointer || parm == function_result_decl)
3741 {
3742 /* Store the parm in a pseudoregister during the function, but we
3743 may need to do it in a wider mode. */
3744
3745 register rtx parmreg;
3746 int regno, regnoi, regnor;
3747
3748 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3749
3750 promoted_nominal_mode
3751 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
3752
3753 parmreg = gen_reg_rtx (promoted_nominal_mode);
3754 mark_user_reg (parmreg);
3755
3756 /* If this was an item that we received a pointer to, set DECL_RTL
3757 appropriately. */
3758 if (passed_pointer)
3759 {
3760 DECL_RTL (parm)
3761 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3762 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3763 }
3764 else
3765 DECL_RTL (parm) = parmreg;
3766
3767 /* Copy the value into the register. */
3768 if (nominal_mode != passed_mode
3769 || promoted_nominal_mode != promoted_mode)
3770 {
3771 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3772 mode, by the caller. We now have to convert it to
3773 NOMINAL_MODE, if different. However, PARMREG may be in
3774 a different mode than NOMINAL_MODE if it is being stored
3775 promoted.
3776
3777 If ENTRY_PARM is a hard register, it might be in a register
3778 not valid for operating in its mode (e.g., an odd-numbered
3779 register for a DFmode). In that case, moves are the only
3780 thing valid, so we can't do a convert from there. This
3781 occurs when the calling sequence allow such misaligned
3782 usages.
3783
3784 In addition, the conversion may involve a call, which could
3785 clobber parameters which haven't been copied to pseudo
3786 registers yet. Therefore, we must first copy the parm to
3787 a pseudo reg here, and save the conversion until after all
3788 parameters have been moved. */
3789
3790 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3791
3792 emit_move_insn (tempreg, validize_mem (entry_parm));
3793
3794 push_to_sequence (conversion_insns);
3795 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
3796
3797 expand_assignment (parm,
3798 make_tree (nominal_type, tempreg), 0, 0);
3799 conversion_insns = get_insns ();
3800 did_conversion = 1;
3801 end_sequence ();
3802 }
3803 else
3804 emit_move_insn (parmreg, validize_mem (entry_parm));
3805
3806 /* If we were passed a pointer but the actual value
3807 can safely live in a register, put it in one. */
3808 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3809 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3810 && ! DECL_INLINE (fndecl))
3811 /* layout_decl may set this. */
3812 || TREE_ADDRESSABLE (parm)
3813 || TREE_SIDE_EFFECTS (parm)
3814 /* If -ffloat-store specified, don't put explicit
3815 float variables into registers. */
3816 || (flag_float_store
3817 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3818 {
3819 /* We can't use nominal_mode, because it will have been set to
3820 Pmode above. We must use the actual mode of the parm. */
3821 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3822 mark_user_reg (parmreg);
3823 emit_move_insn (parmreg, DECL_RTL (parm));
3824 DECL_RTL (parm) = parmreg;
3825 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3826 now the parm. */
3827 stack_parm = 0;
3828 }
3829 #ifdef FUNCTION_ARG_CALLEE_COPIES
3830 /* If we are passed an arg by reference and it is our responsibility
3831 to make a copy, do it now.
3832 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3833 original argument, so we must recreate them in the call to
3834 FUNCTION_ARG_CALLEE_COPIES. */
3835 /* ??? Later add code to skip the copy if the argument isn't
3836 modified. */
3837
3838 else if (passed_pointer
3839 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3840 TYPE_MODE (DECL_ARG_TYPE (parm)),
3841 DECL_ARG_TYPE (parm),
3842 ! last_named)
3843 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
3844 {
3845 rtx copy;
3846 tree type = DECL_ARG_TYPE (parm);
3847
3848 /* This sequence may involve a library call perhaps clobbering
3849 registers that haven't been copied to pseudos yet. */
3850
3851 push_to_sequence (conversion_insns);
3852
3853 if (TYPE_SIZE (type) == 0
3854 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3855 /* This is a variable sized object. */
3856 copy = gen_rtx (MEM, BLKmode,
3857 allocate_dynamic_stack_space
3858 (expr_size (parm), NULL_RTX,
3859 TYPE_ALIGN (type)));
3860 else
3861 copy = assign_stack_temp (TYPE_MODE (type),
3862 int_size_in_bytes (type), 1);
3863 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
3864
3865 store_expr (parm, copy, 0);
3866 emit_move_insn (parmreg, XEXP (copy, 0));
3867 conversion_insns = get_insns ();
3868 did_conversion = 1;
3869 end_sequence ();
3870 }
3871 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3872
3873 /* In any case, record the parm's desired stack location
3874 in case we later discover it must live in the stack.
3875
3876 If it is a COMPLEX value, store the stack location for both
3877 halves. */
3878
3879 if (GET_CODE (parmreg) == CONCAT)
3880 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
3881 else
3882 regno = REGNO (parmreg);
3883
3884 if (regno >= nparmregs)
3885 {
3886 rtx *new;
3887 int old_nparmregs = nparmregs;
3888
3889 nparmregs = regno + 5;
3890 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3891 bcopy ((char *) parm_reg_stack_loc, (char *) new,
3892 old_nparmregs * sizeof (rtx));
3893 bzero ((char *) (new + old_nparmregs),
3894 (nparmregs - old_nparmregs) * sizeof (rtx));
3895 parm_reg_stack_loc = new;
3896 }
3897
3898 if (GET_CODE (parmreg) == CONCAT)
3899 {
3900 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
3901
3902 regnor = REGNO (gen_realpart (submode, parmreg));
3903 regnoi = REGNO (gen_imagpart (submode, parmreg));
3904
3905 if (stack_parm != 0)
3906 {
3907 parm_reg_stack_loc[regnor]
3908 = gen_realpart (submode, stack_parm);
3909 parm_reg_stack_loc[regnoi]
3910 = gen_imagpart (submode, stack_parm);
3911 }
3912 else
3913 {
3914 parm_reg_stack_loc[regnor] = 0;
3915 parm_reg_stack_loc[regnoi] = 0;
3916 }
3917 }
3918 else
3919 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
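/* For example: a complex double parm gives a CONCAT of two DFmode
   pseudos; REGNOR and REGNOI index the real and imaginary halves, and
   each half remembers the matching half of STACK_PARM, if any.  */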
3920
3921 /* Mark the register as eliminable if we did no conversion
3922 and it was copied from memory at a fixed offset,
3923 and the arg pointer was not copied to a pseudo-reg.
3924 If the arg pointer is a pseudo reg or the offset formed
3925 an invalid address, such memory-equivalences
3926 as we make here would screw up life analysis for it. */
3927 if (nominal_mode == passed_mode
3928 && ! did_conversion
3929 && GET_CODE (entry_parm) == MEM
3930 && entry_parm == stack_parm
3931 && stack_offset.var == 0
3932 && reg_mentioned_p (virtual_incoming_args_rtx,
3933 XEXP (entry_parm, 0)))
3934 {
3935 rtx linsn = get_last_insn ();
3936 rtx sinsn, set;
3937
3938 /* Mark complex types separately. */
3939 if (GET_CODE (parmreg) == CONCAT)
3940 /* Scan backwards for the set of the real and
3941 imaginary parts. */
3942 for (sinsn = linsn; sinsn != 0;
3943 sinsn = prev_nonnote_insn (sinsn))
3944 {
3945 set = single_set (sinsn);
3946 if (set != 0
3947 && SET_DEST (set) == regno_reg_rtx [regnoi])
3948 REG_NOTES (sinsn)
3949 = gen_rtx (EXPR_LIST, REG_EQUIV,
3950 parm_reg_stack_loc[regnoi],
3951 REG_NOTES (sinsn));
3952 else if (set != 0
3953 && SET_DEST (set) == regno_reg_rtx [regnor])
3954 REG_NOTES (sinsn)
3955 = gen_rtx (EXPR_LIST, REG_EQUIV,
3956 parm_reg_stack_loc[regnor],
3957 REG_NOTES (sinsn));
3958 }
3959 else if ((set = single_set (linsn)) != 0
3960 && SET_DEST (set) == parmreg)
3961 REG_NOTES (linsn)
3962 = gen_rtx (EXPR_LIST, REG_EQUIV,
3963 entry_parm, REG_NOTES (linsn));
3964 }
3965
3966 /* For pointer data type, suggest pointer register. */
3967 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3968 mark_reg_pointer (parmreg,
3969 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
3970 / BITS_PER_UNIT));
3971 }
3972 else
3973 {
3974 /* Value must be stored in the stack slot STACK_PARM
3975 during function execution. */
3976
3977 if (promoted_mode != nominal_mode)
3978 {
3979 /* Conversion is required. */
3980 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3981
3982 emit_move_insn (tempreg, validize_mem (entry_parm));
3983
3984 push_to_sequence (conversion_insns);
3985 entry_parm = convert_to_mode (nominal_mode, tempreg,
3986 TREE_UNSIGNED (TREE_TYPE (parm)));
3987 conversion_insns = get_insns ();
3988 did_conversion = 1;
3989 end_sequence ();
3990 }
3991
3992 if (entry_parm != stack_parm)
3993 {
3994 if (stack_parm == 0)
3995 {
3996 stack_parm
3997 = assign_stack_local (GET_MODE (entry_parm),
3998 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
3999 /* If this is a memory ref that contains aggregate components,
4000 mark it as such for cse and loop optimize. */
4001 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4002 }
4003
4004 if (promoted_mode != nominal_mode)
4005 {
4006 push_to_sequence (conversion_insns);
4007 emit_move_insn (validize_mem (stack_parm),
4008 validize_mem (entry_parm));
4009 conversion_insns = get_insns ();
4010 end_sequence ();
4011 }
4012 else
4013 emit_move_insn (validize_mem (stack_parm),
4014 validize_mem (entry_parm));
4015 }
4016
4017 DECL_RTL (parm) = stack_parm;
4018 }
4019
4020 /* If this "parameter" was the place where we are receiving the
4021 function's incoming structure pointer, set up the result. */
4022 if (parm == function_result_decl)
4023 {
4024 tree result = DECL_RESULT (fndecl);
4025 tree restype = TREE_TYPE (result);
4026
4027 DECL_RTL (result)
4028 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
4029
4030 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4031 }
4032
4033 if (TREE_THIS_VOLATILE (parm))
4034 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4035 if (TREE_READONLY (parm))
4036 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4037 }
4038
4039 /* Output all parameter conversion instructions (possibly including calls)
4040 now that all parameters have been copied out of hard registers. */
4041 emit_insns (conversion_insns);
4042
4043 max_parm_reg = max_reg_num ();
4044 last_parm_insn = get_last_insn ();
4045
4046 current_function_args_size = stack_args_size.constant;
4047
4048 /* Adjust function incoming argument size for alignment and
4049 minimum length. */
4050
4051 #ifdef REG_PARM_STACK_SPACE
4052 #ifndef MAYBE_REG_PARM_STACK_SPACE
4053 current_function_args_size = MAX (current_function_args_size,
4054 REG_PARM_STACK_SPACE (fndecl));
4055 #endif
4056 #endif
4057
4058 #ifdef STACK_BOUNDARY
4059 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4060
4061 current_function_args_size
4062 = ((current_function_args_size + STACK_BYTES - 1)
4063 / STACK_BYTES) * STACK_BYTES;
4064 #endif
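/* A worked example with hypothetical values: with STACK_BOUNDARY == 64,
   STACK_BYTES is 8, so an args size of 14 becomes
   ((14 + 8 - 1) / 8) * 8 == 16.  */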
4065
4066 #ifdef ARGS_GROW_DOWNWARD
4067 current_function_arg_offset_rtx
4068 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4069 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4070 size_int (-stack_args_size.constant)),
4071 NULL_RTX, VOIDmode, 0));
4072 #else
4073 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4074 #endif
4075
4076 /* See how many bytes, if any, of its args a function should try to pop
4077 on return. */
4078
4079 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4080 current_function_args_size);
4081
4082 /* For a stdarg.h function, save info about
4083 regs and stack space used by the named args. */
4084
4085 if (!hide_last_arg)
4086 current_function_args_info = args_so_far;
4087
4088 /* Set the rtx used for the function return value. Put this in its
4089 own variable so any optimizers that need this information don't have
4090 to include tree.h. Do this here so it gets done when an inlined
4091 function gets output. */
4092
4093 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4094 }
4095 \f
4096 /* Indicate whether REGNO is an incoming argument to the current function
4097 that was promoted to a wider mode. If so, return the RTX for the
4098 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4099 that REGNO is promoted from and whether the promotion was signed or
4100 unsigned. */
4101
4102 #ifdef PROMOTE_FUNCTION_ARGS
4103
4104 rtx
4105 promoted_input_arg (regno, pmode, punsignedp)
4106 int regno;
4107 enum machine_mode *pmode;
4108 int *punsignedp;
4109 {
4110 tree arg;
4111
4112 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4113 arg = TREE_CHAIN (arg))
4114 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4115 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4116 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4117 {
4118 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4119 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4120
4121 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4122 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4123 && mode != DECL_MODE (arg))
4124 {
4125 *pmode = DECL_MODE (arg);
4126 *punsignedp = unsignedp;
4127 return DECL_INCOMING_RTL (arg);
4128 }
4129 }
4130
4131 return 0;
4132 }
4133
4134 #endif
4135 \f
4136 /* Compute the size and offset from the start of the stacked arguments for a
4137 parm passed in mode PASSED_MODE and with type TYPE.
4138
4139 INITIAL_OFFSET_PTR points to the current offset into the stacked
4140 arguments.
4141
4142 The starting offset and size for this parm are returned in *OFFSET_PTR
4143 and *ARG_SIZE_PTR, respectively.
4144
4145 IN_REGS is non-zero if the argument will be passed in registers. It will
4146 never be set if REG_PARM_STACK_SPACE is not defined.
4147
4148 FNDECL is the function in which the argument was defined.
4149
4150 There are two types of rounding that are done. The first, controlled by
4151 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4152 list to be aligned to the specified boundary (in bits). This rounding
4153 affects the initial and starting offsets, but not the argument size.
4154
4155 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4156 optionally rounds the size of the parm to PARM_BOUNDARY. The
4157 initial offset is not affected by this rounding, while the size always
4158 is and the starting offset may be. */
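/* For example (hypothetical values): with a FUNCTION_ARG_BOUNDARY of 64
   bits, an initial offset of 4 bytes is first rounded up to 8; with a
   PARM_BOUNDARY of 32 bits and padding enabled, a 6 byte arg then
   occupies 8 bytes, so the next arg's initial offset is 16.  */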
4159
4160 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4161 initial_offset_ptr is positive because locate_and_pad_parm's
4162 callers pass in the total size of args so far as
4163 initial_offset_ptr. arg_size_ptr is always positive. */
4164
4165 void
4166 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4167 initial_offset_ptr, offset_ptr, arg_size_ptr)
4168 enum machine_mode passed_mode;
4169 tree type;
4170 int in_regs;
4171 tree fndecl;
4172 struct args_size *initial_offset_ptr;
4173 struct args_size *offset_ptr;
4174 struct args_size *arg_size_ptr;
4175 {
4176 tree sizetree
4177 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4178 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4179 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4180 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4181 int reg_parm_stack_space = 0;
4182
4183 #ifdef REG_PARM_STACK_SPACE
4184 /* If we have found a stack parm before we reach the end of the
4185 area reserved for registers, skip that area. */
4186 if (! in_regs)
4187 {
4188 #ifdef MAYBE_REG_PARM_STACK_SPACE
4189 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4190 #else
4191 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4192 #endif
4193 if (reg_parm_stack_space > 0)
4194 {
4195 if (initial_offset_ptr->var)
4196 {
4197 initial_offset_ptr->var
4198 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4199 size_int (reg_parm_stack_space));
4200 initial_offset_ptr->constant = 0;
4201 }
4202 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4203 initial_offset_ptr->constant = reg_parm_stack_space;
4204 }
4205 }
4206 #endif /* REG_PARM_STACK_SPACE */
4207
4208 arg_size_ptr->var = 0;
4209 arg_size_ptr->constant = 0;
4210
4211 #ifdef ARGS_GROW_DOWNWARD
4212 if (initial_offset_ptr->var)
4213 {
4214 offset_ptr->constant = 0;
4215 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4216 initial_offset_ptr->var);
4217 }
4218 else
4219 {
4220 offset_ptr->constant = - initial_offset_ptr->constant;
4221 offset_ptr->var = 0;
4222 }
4223 if (where_pad != none
4224 && (TREE_CODE (sizetree) != INTEGER_CST
4225 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4226 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4227 SUB_PARM_SIZE (*offset_ptr, sizetree);
4228 if (where_pad != downward)
4229 pad_to_arg_alignment (offset_ptr, boundary);
4230 if (initial_offset_ptr->var)
4231 {
4232 arg_size_ptr->var = size_binop (MINUS_EXPR,
4233 size_binop (MINUS_EXPR,
4234 integer_zero_node,
4235 initial_offset_ptr->var),
4236 offset_ptr->var);
4237 }
4238 else
4239 {
4240 arg_size_ptr->constant = (- initial_offset_ptr->constant -
4241 offset_ptr->constant);
4242 }
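/* A worked example with hypothetical constant sizes (ignoring any
   boundary padding): with an initial offset of 16 and a SIZETREE of 8,
   OFFSET becomes -16 - 8 == -24 and the arg size becomes
   -16 - (-24) == 8.  */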
4243 #else /* !ARGS_GROW_DOWNWARD */
4244 pad_to_arg_alignment (initial_offset_ptr, boundary);
4245 *offset_ptr = *initial_offset_ptr;
4246
4247 #ifdef PUSH_ROUNDING
4248 if (passed_mode != BLKmode)
4249 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4250 #endif
4251
4252 /* Pad_below needs the pre-rounded size to know how much to pad below
4253 so this must be done before rounding up. */
4254 if (where_pad == downward
4255 /* However, BLKmode args passed in regs have their padding done elsewhere.
4256 The stack slot must be able to hold the entire register. */
4257 && !(in_regs && passed_mode == BLKmode))
4258 pad_below (offset_ptr, passed_mode, sizetree);
4259
4260 if (where_pad != none
4261 && (TREE_CODE (sizetree) != INTEGER_CST
4262 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4263 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4264
4265 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4266 #endif /* ARGS_GROW_DOWNWARD */
4267 }
4268
4269 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4270 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4271
4272 static void
4273 pad_to_arg_alignment (offset_ptr, boundary)
4274 struct args_size *offset_ptr;
4275 int boundary;
4276 {
4277 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4278
4279 if (boundary > BITS_PER_UNIT)
4280 {
4281 if (offset_ptr->var)
4282 {
4283 offset_ptr->var =
4284 #ifdef ARGS_GROW_DOWNWARD
4285 round_down
4286 #else
4287 round_up
4288 #endif
4289 (ARGS_SIZE_TREE (*offset_ptr),
4290 boundary / BITS_PER_UNIT);
4291 offset_ptr->constant = 0; /*?*/
4292 }
4293 else
4294 offset_ptr->constant =
4295 #ifdef ARGS_GROW_DOWNWARD
4296 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4297 #else
4298 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4299 #endif
4300 }
4301 }
4302
4303 static void
4304 pad_below (offset_ptr, passed_mode, sizetree)
4305 struct args_size *offset_ptr;
4306 enum machine_mode passed_mode;
4307 tree sizetree;
4308 {
4309 if (passed_mode != BLKmode)
4310 {
4311 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4312 offset_ptr->constant
4313 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4314 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4315 - GET_MODE_SIZE (passed_mode));
4316 }
4317 else
4318 {
4319 if (TREE_CODE (sizetree) != INTEGER_CST
4320 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4321 {
4322 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4323 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4324 /* Add it in. */
4325 ADD_PARM_SIZE (*offset_ptr, s2);
4326 SUB_PARM_SIZE (*offset_ptr, sizetree);
4327 }
4328 }
4329 }
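/* A worked example with hypothetical values: for an HImode parm
   (16 bits, 2 bytes) with PARM_BOUNDARY == 32 and BITS_PER_UNIT == 8,
   the offset grows by 32/8 - 2 == 2 bytes of padding below the value,
   placing it in the high-address half of its 4 byte slot.  */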
4330
4331 static tree
4332 round_down (value, divisor)
4333 tree value;
4334 int divisor;
4335 {
4336 return size_binop (MULT_EXPR,
4337 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4338 size_int (divisor));
4339 }
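/* For example: applied to a size of 10 with a divisor of 4, this
   yields (10 / 4) * 4 == 8.  */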
4340 \f
4341 /* Walk the tree of blocks describing the binding levels within a function
4342 and warn about uninitialized variables.
4343 This is done after calling flow_analysis and before global_alloc
4344 clobbers the pseudo-regs to hard regs. */
4345
4346 void
4347 uninitialized_vars_warning (block)
4348 tree block;
4349 {
4350 register tree decl, sub;
4351 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4352 {
4353 if (TREE_CODE (decl) == VAR_DECL
4354 /* These warnings are unreliable for aggregates
4355 because assigning the fields one by one can fail to convince
4356 flow.c that the entire aggregate was initialized.
4357 Unions are troublesome because members may be shorter. */
4358 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4359 && DECL_RTL (decl) != 0
4360 && GET_CODE (DECL_RTL (decl)) == REG
4361 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4362 warning_with_decl (decl,
4363 "`%s' might be used uninitialized in this function");
4364 if (TREE_CODE (decl) == VAR_DECL
4365 && DECL_RTL (decl) != 0
4366 && GET_CODE (DECL_RTL (decl)) == REG
4367 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4368 warning_with_decl (decl,
4369 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4370 }
4371 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4372 uninitialized_vars_warning (sub);
4373 }
4374
4375 /* Do the appropriate part of uninitialized_vars_warning
4376 but for arguments instead of local variables. */
4377
4378 void
4379 setjmp_args_warning ()
4380 {
4381 register tree decl;
4382 for (decl = DECL_ARGUMENTS (current_function_decl);
4383 decl; decl = TREE_CHAIN (decl))
4384 if (DECL_RTL (decl) != 0
4385 && GET_CODE (DECL_RTL (decl)) == REG
4386 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4387 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4388 }
4389
4390 /* If this function calls setjmp, put all vars into the stack
4391 unless they were declared `register'. */
4392
4393 void
4394 setjmp_protect (block)
4395 tree block;
4396 {
4397 register tree decl, sub;
4398 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4399 if ((TREE_CODE (decl) == VAR_DECL
4400 || TREE_CODE (decl) == PARM_DECL)
4401 && DECL_RTL (decl) != 0
4402 && GET_CODE (DECL_RTL (decl)) == REG
4403 /* If this variable came from an inline function, it must be
4404 that its lifetime doesn't overlap the setjmp. If there was a
4405 setjmp in the function, it would already be in memory. We
4406 must exclude such variables because their DECL_RTL might be
4407 set to strange things such as virtual_stack_vars_rtx. */
4408 && ! DECL_FROM_INLINE (decl)
4409 && (
4410 #ifdef NON_SAVING_SETJMP
4411 /* If longjmp doesn't restore the registers,
4412 don't put anything in them. */
4413 NON_SAVING_SETJMP
4414 ||
4415 #endif
4416 ! DECL_REGISTER (decl)))
4417 put_var_into_stack (decl);
4418 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4419 setjmp_protect (sub);
4420 }
4421 \f
4422 /* Like the previous function, but for args instead of local variables. */
4423
4424 void
4425 setjmp_protect_args ()
4426 {
4427 register tree decl, sub;
4428 for (decl = DECL_ARGUMENTS (current_function_decl);
4429 decl; decl = TREE_CHAIN (decl))
4430 if ((TREE_CODE (decl) == VAR_DECL
4431 || TREE_CODE (decl) == PARM_DECL)
4432 && DECL_RTL (decl) != 0
4433 && GET_CODE (DECL_RTL (decl)) == REG
4434 && (
4435 /* If longjmp doesn't restore the registers,
4436 don't put anything in them. */
4437 #ifdef NON_SAVING_SETJMP
4438 NON_SAVING_SETJMP
4439 ||
4440 #endif
4441 ! DECL_REGISTER (decl)))
4442 put_var_into_stack (decl);
4443 }
4444 \f
4445 /* Return the context-pointer register corresponding to DECL,
4446 or 0 if it does not need one. */
4447
4448 rtx
4449 lookup_static_chain (decl)
4450 tree decl;
4451 {
4452 tree context = decl_function_context (decl);
4453 tree link;
4454
4455 if (context == 0
4456 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4457 return 0;
4458
4459 /* We treat inline_function_decl as an alias for the current function
4460 because that is the inline function whose vars, types, etc.
4461 are being merged into the current function.
4462 See expand_inline_function. */
4463 if (context == current_function_decl || context == inline_function_decl)
4464 return virtual_stack_vars_rtx;
4465
4466 for (link = context_display; link; link = TREE_CHAIN (link))
4467 if (TREE_PURPOSE (link) == context)
4468 return RTL_EXPR_RTL (TREE_VALUE (link));
4469
4470 abort ();
4471 }
4472 \f
4473 /* Convert a stack slot address ADDR for variable VAR
4474 (from a containing function)
4475 into an address valid in this function (using a static chain). */
4476
4477 rtx
4478 fix_lexical_addr (addr, var)
4479 rtx addr;
4480 tree var;
4481 {
4482 rtx basereg;
4483 int displacement;
4484 tree context = decl_function_context (var);
4485 struct function *fp;
4486 rtx base = 0;
4487
4488 /* If this is the present function, we need not do anything. */
4489 if (context == current_function_decl || context == inline_function_decl)
4490 return addr;
4491
4492 for (fp = outer_function_chain; fp; fp = fp->next)
4493 if (fp->decl == context)
4494 break;
4495
4496 if (fp == 0)
4497 abort ();
4498
4499 /* Decode given address as base reg plus displacement. */
4500 if (GET_CODE (addr) == REG)
4501 basereg = addr, displacement = 0;
4502 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4503 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4504 else
4505 abort ();
4506
4507 /* We accept vars reached via the containing function's
4508 incoming arg pointer and via its stack variables pointer. */
4509 if (basereg == fp->internal_arg_pointer)
4510 {
4511 /* If reached via arg pointer, get the arg pointer value
4512 out of that function's stack frame.
4513
4514 There are two cases: If a separate ap is needed, allocate a
4515 slot in the outer function for it and dereference it that way.
4516 This is correct even if the real ap is actually a pseudo.
4517 Otherwise, just adjust the offset from the frame pointer to
4518 compensate. */
4519
4520 #ifdef NEED_SEPARATE_AP
4521 rtx addr;
4522
4523 if (fp->arg_pointer_save_area == 0)
4524 fp->arg_pointer_save_area
4525 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4526
4527 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4528 addr = memory_address (Pmode, addr);
4529
4530 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4531 #else
4532 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4533 base = lookup_static_chain (var);
4534 #endif
4535 }
4536
4537 else if (basereg == virtual_stack_vars_rtx)
4538 {
4539 /* This is the same code as lookup_static_chain, duplicated here to
4540 avoid an extra call to decl_function_context. */
4541 tree link;
4542
4543 for (link = context_display; link; link = TREE_CHAIN (link))
4544 if (TREE_PURPOSE (link) == context)
4545 {
4546 base = RTL_EXPR_RTL (TREE_VALUE (link));
4547 break;
4548 }
4549 }
4550
4551 if (base == 0)
4552 abort ();
4553
4554 /* Use same offset, relative to appropriate static chain or argument
4555 pointer. */
4556 return plus_constant (base, displacement);
4557 }
4558 \f
4559 /* Return the address of the trampoline for entering nested fn FUNCTION.
4560 If necessary, allocate a trampoline (in the stack frame)
4561 and emit rtl to initialize its contents (at entry to this function). */
4562
4563 rtx
4564 trampoline_address (function)
4565 tree function;
4566 {
4567 tree link;
4568 tree rtlexp;
4569 rtx tramp;
4570 struct function *fp;
4571 tree fn_context;
4572
4573 /* Find an existing trampoline and return it. */
4574 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4575 if (TREE_PURPOSE (link) == function)
4576 return
4577 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4578
4579 for (fp = outer_function_chain; fp; fp = fp->next)
4580 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4581 if (TREE_PURPOSE (link) == function)
4582 {
4583 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4584 function);
4585 return round_trampoline_addr (tramp);
4586 }
4587
4588 /* None exists; we must make one. */
4589
4590 /* Find the `struct function' for the function containing FUNCTION. */
4591 fp = 0;
4592 fn_context = decl_function_context (function);
4593 if (fn_context != current_function_decl)
4594 for (fp = outer_function_chain; fp; fp = fp->next)
4595 if (fp->decl == fn_context)
4596 break;
4597
4598 /* Allocate run-time space for this trampoline
4599 (usually in the defining function's stack frame). */
4600 #ifdef ALLOCATE_TRAMPOLINE
4601 tramp = ALLOCATE_TRAMPOLINE (fp);
4602 #else
4603 /* If rounding is needed, allocate extra space
4604 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4605 #ifdef TRAMPOLINE_ALIGNMENT
4606 #define TRAMPOLINE_REAL_SIZE \
4607 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4608 #else
4609 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4610 #endif
4611 if (fp != 0)
4612 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4613 else
4614 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4615 #endif
4616
4617 /* Record the trampoline for reuse and note it for later initialization
4618 by expand_function_end. */
4619 if (fp != 0)
4620 {
4621 push_obstacks (fp->function_maybepermanent_obstack,
4622 fp->function_maybepermanent_obstack);
4623 rtlexp = make_node (RTL_EXPR);
4624 RTL_EXPR_RTL (rtlexp) = tramp;
4625 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4626 pop_obstacks ();
4627 }
4628 else
4629 {
4630 /* Make the RTL_EXPR node temporary, not momentary, so that the
4631 trampoline_list doesn't become garbage. */
4632 int momentary = suspend_momentary ();
4633 rtlexp = make_node (RTL_EXPR);
4634 resume_momentary (momentary);
4635
4636 RTL_EXPR_RTL (rtlexp) = tramp;
4637 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4638 }
4639
4640 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4641 return round_trampoline_addr (tramp);
4642 }
4643
4644 /* Given a trampoline address,
4645 round it to multiple of TRAMPOLINE_ALIGNMENT. */
4646
4647 static rtx
4648 round_trampoline_addr (tramp)
4649 rtx tramp;
4650 {
4651 #ifdef TRAMPOLINE_ALIGNMENT
4652 /* Round address up to desired boundary. */
4653 rtx temp = gen_reg_rtx (Pmode);
4654 temp = expand_binop (Pmode, add_optab, tramp,
4655 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4656 temp, 0, OPTAB_LIB_WIDEN);
4657 tramp = expand_binop (Pmode, and_optab, temp,
4658 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4659 temp, 0, OPTAB_LIB_WIDEN);
4660 #endif
4661 return tramp;
4662 }
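/* A worked example with hypothetical values: with
   TRAMPOLINE_ALIGNMENT == 64 (8 bytes), an address of 0x1003 has 7
   added (giving 0x100a) and is then masked with -8, yielding 0x1008.  */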
4663 \f
4664 /* The functions identify_blocks and reorder_blocks provide a way to
4665 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4666 duplicate portions of the RTL code. Call identify_blocks before
4667 changing the RTL, and call reorder_blocks after. */
4668
4669 /* Put all this function's BLOCK nodes, including those that are chained
4670 onto the first block, into a vector, and return it.
4671 Also store in each NOTE for the beginning or end of a block
4672 the index of that block in the vector.
4673 The arguments are BLOCK, the chain of top-level blocks of the function,
4674 and INSNS, the insn chain of the function. */
4675
4676 tree *
4677 identify_blocks (block, insns)
4678 tree block;
4679 rtx insns;
4680 {
4681 int n_blocks;
4682 tree *block_vector;
4683 int *block_stack;
4684 int depth = 0;
4685 int next_block_number = 1;
4686 int current_block_number = 1;
4687 rtx insn;
4688
4689 if (block == 0)
4690 return 0;
4691
4692 n_blocks = all_blocks (block, 0);
4693 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4694 block_stack = (int *) alloca (n_blocks * sizeof (int));
4695
4696 all_blocks (block, block_vector);
4697
4698 for (insn = insns; insn; insn = NEXT_INSN (insn))
4699 if (GET_CODE (insn) == NOTE)
4700 {
4701 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4702 {
4703 block_stack[depth++] = current_block_number;
4704 current_block_number = next_block_number;
4705 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4706 }
4707 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4708 {
4709 current_block_number = block_stack[--depth];
4710 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4711 }
4712 }
4713
4714 if (n_blocks != next_block_number)
4715 abort ();
4716
4717 return block_vector;
4718 }
4719
4720 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4721 and a revised instruction chain, rebuild the tree structure
4722 of BLOCK nodes to correspond to the new order of RTL.
4723 The new block tree is inserted below TOP_BLOCK.
4724 Returns the current top-level block. */
4725
4726 tree
4727 reorder_blocks (block_vector, block, insns)
4728 tree *block_vector;
4729 tree block;
4730 rtx insns;
4731 {
4732 tree current_block = block;
4733 rtx insn;
4734
4735 if (block_vector == 0)
4736 return block;
4737
4738 /* Prune the old trees away, so that they don't get in the way. */
4739 BLOCK_SUBBLOCKS (current_block) = 0;
4740 BLOCK_CHAIN (current_block) = 0;
4741
4742 for (insn = insns; insn; insn = NEXT_INSN (insn))
4743 if (GET_CODE (insn) == NOTE)
4744 {
4745 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4746 {
4747 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4748 /* If we have seen this block before, copy it. */
4749 if (TREE_ASM_WRITTEN (block))
4750 block = copy_node (block);
4751 BLOCK_SUBBLOCKS (block) = 0;
4752 TREE_ASM_WRITTEN (block) = 1;
4753 BLOCK_SUPERCONTEXT (block) = current_block;
4754 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4755 BLOCK_SUBBLOCKS (current_block) = block;
4756 current_block = block;
4757 NOTE_SOURCE_FILE (insn) = 0;
4758 }
4759 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4760 {
4761 BLOCK_SUBBLOCKS (current_block)
4762 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4763 current_block = BLOCK_SUPERCONTEXT (current_block);
4764 NOTE_SOURCE_FILE (insn) = 0;
4765 }
4766 }
4767
4768 BLOCK_SUBBLOCKS (current_block)
4769 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4770 return current_block;
4771 }
4772
4773 /* Reverse the order of elements in the chain T of blocks,
4774 and return the new head of the chain (old last element). */
4775
4776 static tree
4777 blocks_nreverse (t)
4778 tree t;
4779 {
4780 register tree prev = 0, decl, next;
4781 for (decl = t; decl; decl = next)
4782 {
4783 next = BLOCK_CHAIN (decl);
4784 BLOCK_CHAIN (decl) = prev;
4785 prev = decl;
4786 }
4787 return prev;
4788 }
4789
4790 /* Count the subblocks of the list starting with BLOCK, and list them
4791 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
4792 blocks. */
4793
4794 static int
4795 all_blocks (block, vector)
4796 tree block;
4797 tree *vector;
4798 {
4799 int n_blocks = 0;
4800
4801 while (block)
4802 {
4803 TREE_ASM_WRITTEN (block) = 0;
4804
4805 /* Record this block. */
4806 if (vector)
4807 vector[n_blocks] = block;
4808
4809 ++n_blocks;
4810
4811 /* Record the subblocks, and their subblocks... */
4812 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4813 vector ? vector + n_blocks : 0);
4814 block = BLOCK_CHAIN (block);
4815 }
4816
4817 return n_blocks;
4818 }
4819 \f
4820 /* Build bytecode call descriptor for function SUBR. */
4821
4822 rtx
4823 bc_build_calldesc (subr)
4824 tree subr;
4825 {
4826 tree calldesc = 0, arg;
4827 int nargs = 0;
4828
4829 /* Build the argument description vector in reverse order. */
4830 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4831 nargs = 0;
4832
4833 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4834 {
4835 ++nargs;
4836
4837 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4838 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4839 }
4840
4841 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4842
4843 /* Prepend the function's return type. */
4844 calldesc = tree_cons ((tree) 0,
4845 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4846 calldesc);
4847
4848 calldesc = tree_cons ((tree) 0,
4849 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4850 calldesc);
4851
4852 /* Prepend the arg count. */
4853 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
4854
4855 /* Output the call description vector and get its address. */
4856 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4857 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4858 build_index_type (build_int_2 (nargs * 2, 0)));
4859
4860 return output_constant_def (calldesc);
4861 }
4862
4863
4864 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4865 and initialize static variables for generating RTL for the statements
4866 of the function. */
4867
4868 void
4869 init_function_start (subr, filename, line)
4870 tree subr;
4871 char *filename;
4872 int line;
4873 {
4874 char *junk;
4875
4876 if (output_bytecode)
4877 {
4878 this_function_decl = subr;
4879 this_function_calldesc = bc_build_calldesc (subr);
4880 local_vars_size = 0;
4881 stack_depth = 0;
4882 max_stack_depth = 0;
4883 stmt_expr_depth = 0;
4884 return;
4885 }
4886
4887 init_stmt_for_function ();
4888
4889 cse_not_expected = ! optimize;
4890
4891 /* Caller save not needed yet. */
4892 caller_save_needed = 0;
4893
4894 /* No stack slots have been made yet. */
4895 stack_slot_list = 0;
4896
4897 /* There is no stack slot for handling nonlocal gotos. */
4898 nonlocal_goto_handler_slot = 0;
4899 nonlocal_goto_stack_level = 0;
4900
4901 /* No labels have been declared for nonlocal use. */
4902 nonlocal_labels = 0;
4903
4904 /* No function calls so far in this function. */
4905 function_call_count = 0;
4906
4907 /* No parm regs have been allocated.
4908 (This is important for output_inline_function.) */
4909 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4910
4911 /* Initialize the RTL mechanism. */
4912 init_emit ();
4913
4914 /* Initialize the queue of pending postincrement and postdecrements,
4915 and some other info in expr.c. */
4916 init_expr ();
4917
4918 /* We haven't done register allocation yet. */
4919 reg_renumber = 0;
4920
4921 init_const_rtx_hash_table ();
4922
4923 current_function_name = (*decl_printable_name) (subr, &junk);
4924
4925 /* Nonzero if this is a nested function that uses a static chain. */
4926
4927 current_function_needs_context
4928 = (decl_function_context (current_function_decl) != 0
4929 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
4930
4931 /* Set if a call to setjmp is seen. */
4932 current_function_calls_setjmp = 0;
4933
4934 /* Set if a call to longjmp is seen. */
4935 current_function_calls_longjmp = 0;
4936
4937 current_function_calls_alloca = 0;
4938 current_function_has_nonlocal_label = 0;
4939 current_function_has_nonlocal_goto = 0;
4940 current_function_contains_functions = 0;
4941
4942 current_function_returns_pcc_struct = 0;
4943 current_function_returns_struct = 0;
4944 current_function_epilogue_delay_list = 0;
4945 current_function_uses_const_pool = 0;
4946 current_function_uses_pic_offset_table = 0;
4947
4948 /* We have not yet needed to make a label to jump to for tail-recursion. */
4949 tail_recursion_label = 0;
4950
4951 /* We haven't had a need to make a save area for ap yet. */
4952
4953 arg_pointer_save_area = 0;
4954
4955 /* No stack slots allocated yet. */
4956 frame_offset = 0;
4957
4958 /* No SAVE_EXPRs in this function yet. */
4959 save_expr_regs = 0;
4960
4961 /* No RTL_EXPRs in this function yet. */
4962 rtl_expr_chain = 0;
4963
4964 /* Set up to allocate temporaries. */
4965 init_temp_slots ();
4966
4967 /* Within function body, compute a type's size as soon as it is laid out. */
4968 immediate_size_expand++;
4969
4970 /* We haven't made any trampolines for this function yet. */
4971 trampoline_list = 0;
4972
4973 init_pending_stack_adjust ();
4974 inhibit_defer_pop = 0;
4975
4976 current_function_outgoing_args_size = 0;
4977
4978 /* Prevent ever trying to delete the first instruction of a function.
4979 Also tell final how to output a linenum before the function prologue. */
4980 emit_line_note (filename, line);
4981
4982 /* Make sure first insn is a note even if we don't want linenums.
4983 This makes sure the first insn will never be deleted.
4984 Also, final expects a note to appear there. */
4985 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4986
4987 /* Set flags used by final.c. */
4988 if (aggregate_value_p (DECL_RESULT (subr)))
4989 {
4990 #ifdef PCC_STATIC_STRUCT_RETURN
4991 current_function_returns_pcc_struct = 1;
4992 #endif
4993 current_function_returns_struct = 1;
4994 }
4995
4996 /* Warn if this value is an aggregate type,
4997 regardless of which calling convention we are using for it. */
4998 if (warn_aggregate_return
4999 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5000 warning ("function returns an aggregate");
5001
5002 current_function_returns_pointer
5003 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5004
5005 /* Indicate that we need to distinguish between the return value of the
5006 present function and the return value of a function being called. */
5007 rtx_equal_function_value_matters = 1;
5008
5009 /* Indicate that we have not instantiated virtual registers yet. */
5010 virtuals_instantiated = 0;
5011
5012 /* Indicate we have no need of a frame pointer yet. */
5013 frame_pointer_needed = 0;
5014
5015 /* By default assume not varargs or stdarg. */
5016 current_function_varargs = 0;
5017 current_function_stdarg = 0;
5018 }
5019
5020 /* Indicate that the current function uses extra args
5021 not explicitly mentioned in the argument list in any fashion. */
5022
5023 void
5024 mark_varargs ()
5025 {
5026 current_function_varargs = 1;
5027 }
5028
5029 /* Expand a call to __main at the beginning of a possible main function. */
5030
5031 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5032 #undef HAS_INIT_SECTION
5033 #define HAS_INIT_SECTION
5034 #endif
5035
5036 void
5037 expand_main_function ()
5038 {
5039 if (!output_bytecode)
5040 {
5041 /* The zero below avoids a possible parse error */
5042 0;
5043 #if !defined (HAS_INIT_SECTION)
5044 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
5045 VOIDmode, 0);
5046 #endif /* not HAS_INIT_SECTION */
5047 }
5048 }
5049 \f
5050 extern struct obstack permanent_obstack;
5051
5052 /* Expand start of bytecode function. See comment at
5053 expand_function_start below for details. */
5054
5055 void
5056 bc_expand_function_start (subr, parms_have_cleanups)
5057 tree subr;
5058 int parms_have_cleanups;
5059 {
5060 char label[20], *name;
5061 static int nlab;
5062 tree thisarg;
5063 int argsz;
5064
5065 if (TREE_PUBLIC (subr))
5066 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
5067
5068 #ifdef DEBUG_PRINT_CODE
5069 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
5070 #endif
5071
5072 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
5073 {
5074 if (DECL_RTL (thisarg))
5075 abort (); /* Should be NULL here I think. */
5076 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
5077 {
5078 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5079 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
5080 }
5081 else
5082 {
5083 /* Variable-sized objects are passed as pointers to their storage. */
5084 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5085 argsz += POINTER_SIZE;
5086 }
5087 }
5088
5089 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
5090
5091 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
5092
5093 ++nlab;
5094 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
5095 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
5096 this_function_bytecode =
5097 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
5098 }
5099
5100
5101 /* Expand end of bytecode function. See the comment of
5102 expand_function_end (), below, for details. */
5103
5104 void
5105 bc_expand_function_end ()
5106 {
5107 char *ptrconsts;
5108
5109 expand_null_return ();
5110
5111 /* Emit any fixup code. This must be done before the call to
5112 BC_END_FUNCTION (), since that will cause the bytecode
5113 segment to be finished off and closed. */
5114
5115 expand_fixups (NULL_RTX);
5116
5117 ptrconsts = bc_end_function ();
5118
5119 bc_align_const (2 /* INT_ALIGN */);
5120
5121 /* If this changes also make sure to change bc-interp.h! */
5122
5123 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
5124 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
5125 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
5126 bc_emit_const_labelref (this_function_bytecode, 0);
5127 bc_emit_const_labelref (ptrconsts, 0);
5128 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
5129 }
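
/* Layout sketch (added comment): the constants emitted above form the
   per-function callinfo record consumed by the bytecode interpreter,
   roughly

	<callinfo label>:
	    max_stack_depth
	    local_vars_size
	    labelref -> bytecode trampoline
	    labelref -> pointer constants
	    labelref -> call description

   Keep this in sync with bc-interp.h, as the comment above warns.  */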


/* Start the RTL for a new function, and set variables used for
   emitting RTL.
   SUBR is the FUNCTION_DECL node.
   PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
   the function's parameters, which must be run at any return statement.  */

void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  register int i;
  tree tem;
  rtx last_ptr;

  if (output_bytecode)
    {
      bc_expand_function_start (subr, parms_have_cleanups);
      return;
    }

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* If the function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

#ifdef SMALL_REGISTER_CLASSES
      /* Delay copying the static chain if it is not a register, to avoid
         conflicts with regs used for parameters.  */
      if (GET_CODE (static_chain_incoming_rtx) == REG)
#endif
        emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to, if this machine
     does not have a one-instruction return and uses an epilogue,
     or if it returns a structure, or if it has parm cleanups.  */
#ifdef HAVE_return
  if (cleanup_label == 0 && HAVE_return
      && ! current_function_returns_pcc_struct
      && ! (current_function_returns_struct && ! optimize))
    return_label = 0;
  else
    return_label = gen_label_rtx ();
#else
  return_label = gen_label_rtx ();
#endif

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      register rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      if (current_function_returns_pcc_struct)
        {
          int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
          value_address = assemble_static_space (size);
        }
      else
#endif
        {
          /* Expect to be passed the address of a place to store the value.
             If it is passed as an argument, assign_parms will take care of
             it.  */
          if (struct_value_incoming_rtx)
            {
              value_address = gen_reg_rtx (Pmode);
              emit_move_insn (value_address, struct_value_incoming_rtx);
            }
        }
      if (value_address)
        {
          DECL_RTL (DECL_RESULT (subr))
            = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
          MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
            = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
        }
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    DECL_RTL (DECL_RESULT (subr)) = 0;
  else if (parms_have_cleanups)
    {
      /* If the function will end with cleanup code for parms,
         compute the return values into a pseudo reg,
         which we will copy into the true return register
         after the cleanups are done.  */

      enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));

#ifdef PROMOTE_FUNCTION_RETURN
      tree type = TREE_TYPE (DECL_RESULT (subr));
      int unsignedp = TREE_UNSIGNED (type);

      mode = promote_mode (type, mode, &unsignedp, 1);
#endif
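
      /* Example (added comment): on a typical 32-bit target that defines
         PROMOTE_FUNCTION_RETURN, a function declared to return `short'
         has its value computed here in a full-word (SImode) pseudo; the
         copy into the hard return register after the cleanups then also
         uses the promoted mode.  */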

      DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
    }
  else
    /* Scalar, returned in a register.  */
    {
#ifdef FUNCTION_OUTGOING_VALUE
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#else
      DECL_RTL (DECL_RESULT (subr))
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
#endif

      /* Mark this reg as the function's return value.  */
      if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
        {
          REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
          /* Needed because we may need to move this to memory
             in case it's a named return value whose address is taken.  */
          DECL_REGISTER (DECL_RESULT (subr)) = 1;
        }
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr, 0);

#ifdef SMALL_REGISTER_CLASSES
  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);
#endif

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);

  /* If doing stupid allocation, mark parms as born here.  */

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  if (obey_regdecls)
    {
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* If not doing stupid register allocation, copy the static chain
         pointer into a pseudo.  If we have small register classes, copy
         the value from memory if static_chain_incoming_rtx is a REG.  If
         we do stupid register allocation, we use the stack address
         generated above.  */
      if (tem && ! obey_regdecls)
        {
#ifdef SMALL_REGISTER_CLASSES
          /* If the static chain originally came in a register, put it back
             there, then move it out in the next insn.  The reason for
             this peculiar code is to satisfy function integration.  */
          if (GET_CODE (static_chain_incoming_rtx) == REG)
            emit_move_insn (static_chain_incoming_rtx, last_ptr);
#endif

          last_ptr = copy_to_reg (static_chain_incoming_rtx);
        }

      while (tem)
        {
          tree rtlexp = make_node (RTL_EXPR);

          RTL_EXPR_RTL (rtlexp) = last_ptr;
          context_display = tree_cons (tem, rtlexp, context_display);
          tem = decl_function_context (tem);
          if (tem == 0)
            break;
          /* Chain thru stack frames, assuming the pointer to the next
             lexical frame is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
          last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
          last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
                                           memory_address (Pmode, last_ptr)));

          /* If we are not optimizing, ensure that we know that this
             piece of context is live over the entire function.  */
          if (! optimize)
            save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
                                      save_expr_regs);
        }
    }

  /* The tail-recursion label, if we end up needing one, should go right
     after the display initializations.  Ensure we have a NOTE here since
     some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
    expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
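
/* Usage sketch (added comment, not in the original): a language front end
   pairs these entry points roughly as

	expand_function_start (fndecl, parms_have_cleanups);
	... expand the statements of the function body ...
	expand_function_end (input_filename, lineno, end_bindings);

   The exact arguments vary between front ends; this only illustrates the
   intended pairing.  */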
\f
/* Generate RTL for the end of the current function.
   FILENAME and LINE are the current position in the source file.

   It is up to language-specific callers to do cleanups for parameters--
   or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */

void
expand_function_end (filename, line, end_bindings)
     char *filename;
     int line;
     int end_bindings;
{
  register int i;
  tree link;

#ifdef TRAMPOLINE_TEMPLATE
  static rtx initial_trampoline;
#endif

  if (output_bytecode)
    {
      bc_expand_function_end ();
      return;
    }

#ifdef NON_SAVING_SETJMP
  /* Don't put any variables in registers if we call setjmp
     on a machine that fails to restore the registers.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      if (DECL_INITIAL (current_function_decl) != error_mark_node)
        setjmp_protect (DECL_INITIAL (current_function_decl));

      setjmp_protect_args ();
    }
#endif

  /* Save the argument pointer if a save area was made for it.  */
  if (arg_pointer_save_area)
    {
      rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
      emit_insn_before (x, tail_recursion_reentry);
    }

  /* Initialize any trampolines required by this function.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    {
      tree function = TREE_PURPOSE (link);
      rtx context = lookup_static_chain (function);
      rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
      rtx blktramp;
      rtx seq;

#ifdef TRAMPOLINE_TEMPLATE
      /* First make sure this compilation has a template for
         initializing trampolines.  */
      if (initial_trampoline == 0)
        {
          end_temporary_allocation ();
          initial_trampoline
            = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
          resume_temporary_allocation ();
        }
#endif

      /* Generate insns to initialize the trampoline.  */
      start_sequence ();
      tramp = round_trampoline_addr (XEXP (tramp, 0));
#ifdef TRAMPOLINE_TEMPLATE
      blktramp = change_address (initial_trampoline, BLKmode, tramp);
      emit_block_move (blktramp, initial_trampoline,
                       GEN_INT (TRAMPOLINE_SIZE),
                       FUNCTION_BOUNDARY / BITS_PER_UNIT);
#endif
      INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
      seq = get_insns ();
      end_sequence ();

      /* Put those insns at entry to the containing function (this one).  */
      emit_insns_before (seq, tail_recursion_reentry);
    }

  /* Warn about unused parms if extra warnings were specified.  */
  if (warn_unused && extra_warnings)
    {
      tree decl;

      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl; decl = TREE_CHAIN (decl))
        if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
            && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
          warning_with_decl (decl, "unused parameter `%s'");
    }

  /* Delete handlers for nonlocal gotos if nothing uses them.  */
  if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
    delete_handlers ();

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside the function body, we can't compute a type's actual size
     until the next function's body starts.  */
  immediate_size_expand--;

  /* If doing stupid register allocation,
     mark register parms as dying here.  */

  if (obey_regdecls)
    {
      rtx tem;
      for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
        use_variable (regno_reg_rtx[i]);

      /* Likewise for the regs of all the SAVE_EXPRs in the function.  */

      for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
        {
          use_variable (XEXP (tem, 0));
          use_variable_after (XEXP (tem, 0), parm_birth_insn);
        }

      if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
        use_variable (current_function_internal_arg_pointer);
    }

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Mark the end of the function body.
     If control reaches this insn, the function can drop through
     without returning a value.  */
  emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);

  /* Output a line number for the end of the function.
     SDB depends on this.  */
  emit_line_note_force (filename, line);

  /* Output the label for the actual return from the function,
     if one is expected.  This happens either because a function epilogue
     is used instead of a return instruction, or because a return was done
     with a goto in order to run local cleanups, or because of pcc-style
     structure returning.  */

  if (return_label)
    emit_label (return_label);

  /* C++ uses this.  */
  if (end_bindings)
    expand_end_bindings (0, 0, 0);

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
#ifdef EXIT_IGNORE_STACK
  if (! EXIT_IGNORE_STACK)
#endif
    if (current_function_calls_alloca)
      {
        rtx tem = 0;

        emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
        emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
      }
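
  /* Note (added comment): the save is inserted just after parm setup
     (PARM_BIRTH_INSN) and the restore at the common exit point, so any
     stack space grabbed by alloca during the body is released on every
     path that reaches the end of the function.  */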

  /* If the scalar return value was computed in a pseudo-reg,
     copy that to the hard return register.  */
  if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
      && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
          >= FIRST_PSEUDO_REGISTER))
    {
      rtx real_decl_result;

#ifdef FUNCTION_OUTGOING_VALUE
      real_decl_result
        = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                                   current_function_decl);
#else
      real_decl_result
        = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
                          current_function_decl);
#endif
      REG_FUNCTION_VALUE_P (real_decl_result) = 1;
      emit_move_insn (real_decl_result,
                      DECL_RTL (DECL_RESULT (current_function_decl)));
      emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
    }
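
  /* Note (added comment): the USE emitted above keeps the hard return
     register live from the copy out of the pseudo until the epilogue, so
     later passes do not consider the returned value dead.  */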

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And current_function_returns_pcc_struct is not necessarily set.  */
  if (current_function_returns_struct
      || current_function_returns_pcc_struct)
    {
      rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
#ifdef FUNCTION_OUTGOING_VALUE
      rtx outgoing
        = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
                                   current_function_decl);
#else
      rtx outgoing
        = FUNCTION_VALUE (build_pointer_type (type),
                          current_function_decl);
#endif

      /* Mark this as a function return value so integrate will delete the
         assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      emit_move_insn (outgoing, value_address);
      use_variable (outgoing);
    }

  /* Output a return insn if we are using one.
     Otherwise, let the rtl chain end here, to drop through
     into the epilogue.  */

#ifdef HAVE_return
  if (HAVE_return)
    {
      emit_jump_insn (gen_return ());
      emit_barrier ();
    }
#endif

  /* Fix up any gotos that jumped out to the outermost
     binding level of the function.
     Must follow emitting RETURN_LABEL.  */

  /* If you have any cleanups to do at this point,
     and they need to create temporary variables,
     then you will lose.  */
  expand_fixups (get_insns ());
}
\f
/* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */

static int *prologue;
static int *epilogue;

/* Create an array that records the INSN_UIDs of INSNS (either a sequence
   or a single insn).  */

static int *
record_insns (insns)
     rtx insns;
{
  int *vec;

  if (GET_CODE (insns) == SEQUENCE)
    {
      int len = XVECLEN (insns, 0);
      vec = (int *) oballoc ((len + 1) * sizeof (int));
      vec[len] = 0;
      while (--len >= 0)
        vec[len] = INSN_UID (XVECEXP (insns, 0, len));
    }
  else
    {
      vec = (int *) oballoc (2 * sizeof (int));
      vec[0] = INSN_UID (insns);
      vec[1] = 0;
    }
  return vec;
}

/* Determine how many INSN_UIDs in VEC are part of INSN.  */

static int
contains (insn, vec)
     rtx insn;
     int *vec;
{
  register int i, j;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      int count = 0;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        for (j = 0; vec[j]; j++)
          if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
            count++;
      return count;
    }
  else
    {
      for (j = 0; vec[j]; j++)
        if (INSN_UID (insn) == vec[j])
          return 1;
    }
  return 0;
}
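
/* Usage sketch (added comment): the maps built by record_insns are
   zero-terminated UID arrays, consumed by walking the insn stream and
   counting matches, e.g.

	for (len = 0; prologue[len]; len++)
	  ;
	for (insn = f; len && insn; insn = NEXT_INSN (insn))
	  if ((len -= contains (insn, prologue)) == 0)
	    ... INSN is the last prologue insn ...

   which is how reposition_prologue_and_epilogue_notes below locates the
   last prologue insn once scheduling has moved things around.  */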

/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.  */

void
thread_prologue_and_epilogue_insns (f)
     rtx f;
{
#ifdef HAVE_prologue
  if (HAVE_prologue)
    {
      rtx head, seq, insn;

      /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
         prologue insns and a NOTE_INSN_PROLOGUE_END.  */
      emit_note_after (NOTE_INSN_PROLOGUE_END, f);
      seq = gen_prologue ();
      head = emit_insn_after (seq, f);

      /* Include the new prologue insns in the first block.  Ignore them
         if they form a basic block unto themselves.  */
      if (basic_block_head && n_basic_blocks
          && GET_CODE (basic_block_head[0]) != CODE_LABEL)
        basic_block_head[0] = NEXT_INSN (f);

      /* Retain a map of the prologue insns.  */
      prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
    }
  else
#endif
    prologue = 0;

#ifdef HAVE_epilogue
  if (HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      rtx prev = prev_nonnote_insn (insn);

      /* If we end with a BARRIER, we don't need an epilogue.  */
      if (! (prev && GET_CODE (prev) == BARRIER))
        {
          rtx tail, seq, tem;
          rtx first_use = 0;
          rtx last_use = 0;

          /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
             epilogue insns, the USE insns at the end of a function,
             the jump insn that returns, and then a BARRIER.  */

          /* Move the USE insns at the end of a function onto a list.  */
          while (prev
                 && GET_CODE (prev) == INSN
                 && GET_CODE (PATTERN (prev)) == USE)
            {
              tem = prev;
              prev = prev_nonnote_insn (prev);

              NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
              PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
              if (first_use)
                {
                  NEXT_INSN (tem) = first_use;
                  PREV_INSN (first_use) = tem;
                }
              first_use = tem;
              if (!last_use)
                last_use = tem;
            }

          emit_barrier_after (insn);

          seq = gen_epilogue ();
          tail = emit_jump_insn_after (seq, insn);

          /* Insert the USE insns immediately before the return insn, which
             must be the first instruction before the final barrier.  */
          if (first_use)
            {
              tem = prev_nonnote_insn (get_last_insn ());
              NEXT_INSN (PREV_INSN (tem)) = first_use;
              PREV_INSN (first_use) = PREV_INSN (tem);
              PREV_INSN (tem) = last_use;
              NEXT_INSN (last_use) = tem;
            }

          emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);

          /* Include the new epilogue insns in the last block.  Ignore
             them if they form a basic block unto themselves.  */
          if (basic_block_end && n_basic_blocks
              && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
            basic_block_end[n_basic_blocks - 1] = tail;

          /* Retain a map of the epilogue insns.  */
          epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
          return;
        }
    }
#endif
  epilogue = 0;
}
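
/* Illustration (added comment): after threading, the insn stream for a
   target providing both patterns looks roughly like

	NOTE_INSN_DELETED		(the first insn, F)
	<prologue insns>
	NOTE_INSN_PROLOGUE_END
	<function body>
	NOTE_INSN_EPILOGUE_BEG
	<USE insns and epilogue insns, ending in the return jump>
	BARRIER

   The two notes are what reposition_prologue_and_epilogue_notes below
   keeps in place once scheduling has reordered the real insns.  */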

/* Reposition the prologue-end and epilogue-begin notes after instruction
   scheduling and delayed branch scheduling.  */

void
reposition_prologue_and_epilogue_notes (f)
     rtx f;
{
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
  /* Reposition the prologue and epilogue notes.  */
  if (n_basic_blocks)
    {
      rtx next, prev;
      int len;

      if (prologue)
        {
          register rtx insn, note = 0;

          /* Scan from the beginning until we reach the last prologue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; prologue[len]; len++)
            ;
          for (insn = f; len && insn; insn = NEXT_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
                    note = insn;
                }
              else if ((len -= contains (insn, prologue)) == 0)
                {
                  /* Find the prologue-end note if we haven't already, and
                     move it to just after the last prologue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = NEXT_INSN (note)); )
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
                          break;
                    }
                  next = NEXT_INSN (note);
                  prev = PREV_INSN (note);
                  if (prev)
                    NEXT_INSN (prev) = next;
                  if (next)
                    PREV_INSN (next) = prev;
                  add_insn_after (note, insn);
                }
            }
        }

      if (epilogue)
        {
          register rtx insn, note = 0;

          /* Scan from the end until we reach the first epilogue insn.
             We apparently can't depend on basic_block_{head,end} after
             reorg has run.  */
          for (len = 0; epilogue[len]; len++)
            ;
          for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
            {
              if (GET_CODE (insn) == NOTE)
                {
                  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
                    note = insn;
                }
              else if ((len -= contains (insn, epilogue)) == 0)
                {
                  /* Find the epilogue-begin note if we haven't already, and
                     move it to just before the first epilogue insn.  */
                  if (note == 0)
                    {
                      for (note = insn; (note = PREV_INSN (note)); )
                        if (GET_CODE (note) == NOTE
                            && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
                          break;
                    }
                  next = NEXT_INSN (note);
                  prev = PREV_INSN (note);
                  if (prev)
                    NEXT_INSN (prev) = next;
                  if (next)
                    PREV_INSN (next) = prev;
                  add_insn_after (note, PREV_INSN (insn));
                }
            }
        }
    }
#endif /* HAVE_prologue or HAVE_epilogue */
}