/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg,
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "bytecode.h"
#include "bc-emit.h"

/* Some systems use __main in a way incompatible with its use in gcc; in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))

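/* A worked example of the rounding macros above (illustrative commentary
   only, assuming two's complement arithmetic and ALIGN a power of two):

     FLOOR_ROUND (13, 8)  == 13 & ~7        ==  8
     FLOOR_ROUND (-13, 8) == -13 & ~7       == -16   (toward more negative)
     CEIL_ROUND (13, 8)   == (13 + 7) & ~7  == 16
     CEIL_ROUND (16, 8)   == 16                      (already aligned)

   The masking form is used because it stays well defined for negative
   VALUEs, where division-based rounding would not be.  */
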
/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  Always equal to
   DECL_RTL (DECL_RESULT (current_function_decl)), but provided
   independently of the tree structures.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number used for loading a copy
   of a parameter of this function.  */
static int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  */
static rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  int size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  int base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  int full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;
\f
/* The FUNCTION_DECL node for the current function.  */
static tree this_function_decl;

/* Callinfo pointer for the current function.  */
static rtx this_function_callinfo;

/* The label in the bytecode file of this function's actual bytecode.
   Not an rtx.  */
static char *this_function_bytecode;

/* The call description vector for the current function.  */
static rtx this_function_calldesc;

/* Size of the local variables allocated for the current function.  */
int local_vars_size;

/* Current depth of the bytecode evaluation stack.  */
int stack_depth;

/* Maximum depth of the evaluation stack in this function.  */
int max_stack_depth;

/* Current depth in statement expressions.  */
static int stmt_expr_depth;

/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
static void pad_below PROTO((struct args_size *, enum machine_mode, tree));
static tree round_down PROTO((tree, int));
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  init_emit ();
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p);

  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;

  restore_tree_status (p);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  {
    struct var_refs_queue *queue = p->fixup_var_refs_queue;
    for (; queue; queue = queue->next)
      fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
  }

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
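
/* Illustrative sketch (commentary only, not part of the compiler): a
   language front end compiling a nested function brackets the nested
   compilation with the pair above, e.g.

     push_function_context ();
     ... expand the body of the nested FUNCTION_DECL to RTL ...
     pop_function_context ();

   All the language-independent per-function state is parked in a
   `struct function' on outer_function_chain in between; machine- and
   language-specific state must be handled by the hooks and the caller,
   as the comments above note.  */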
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     int size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);

  return x;
}
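
/* Illustrative sketch (commentary only): a caller wanting a word-sized
   slot aligned according to its mode might write

     rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   An ALIGN of -1 instead requests BIGGEST_ALIGNMENT and rounds the size
   up to a multiple of it; a positive ALIGN gives an explicit boundary in
   bits.  The returned MEM's address is based on virtual_stack_vars_rtx
   until virtual registers are instantiated, after which it is expressed
   directly in terms of the frame pointer.  */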

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     int size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx (MEM, mode, addr);

  function->stack_slot_list
    = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     int size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          int rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx (MEM, BLKmode,
                                 plus_constant (XEXP (best_p->slot, 0),
                                                rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
                                         stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      int frame_offset_old = frame_offset;
      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif
      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }
  return p->slot;
}
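
/* Illustrative sketch (commentary only): expanding one statement that
   needs a scratch aggregate might do

     rtx t = assign_stack_temp (BLKmode, int_size_in_bytes (type), 0);
     ... use t while expanding the statement ...
     free_temp_slots ();

   after which the slot becomes available for reuse by a later statement.
   KEEP of 1 exempts the slot from free_temp_slots; KEEP of 2 files it at
   target_temp_slot_level so that CLEANUP_POINT_EXPRs control its
   lifetime.  */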
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      int size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  /* Determine where to free back to after this function.  */
  rtx free_pointer = rtx_alloc (CONST_INT);

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;
      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }

  /* Free all the RTL made by plus_constant.  */
  rtx_free (free_pointer);
}
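
/* Illustrative example (commentary only): if two free BLKmode slots P
   and Q satisfy

     p->base_offset + p->full_size == q->base_offset

   then Q begins exactly where P ends, so the loop above folds Q into P,
   leaving a single slot of p->full_size + q->full_size bytes that a
   later assign_stack_temp can hand out for a larger request.  */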
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;
      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose
     addresses were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
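
/* Illustrative example (commentary only): for a GNU statement expression
   such as

     x = ({ struct S tmp = f (); g (&tmp); tmp; });

   the value of the last statement may live in a temporary slot.  Calling
   preserve_temp_slots on that value moves the matching slot (or, failing
   a match, every non-kept slot at this level) up one nesting level, so
   the enclosing statement can still read it after the inner level's
   temporaries are freed.  */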

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}
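
/* Illustrative sketch (commentary only): statement expansion typically
   brackets each nesting level of temporaries as

     push_temp_slots ();
     ... expand, allocating temporaries with assign_stack_temp ...
     preserve_temp_slots (result);
     pop_temp_slots ();

   where the preserve_temp_slots call is needed only if RESULT must
   outlive the level being popped.  */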

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;

  if (output_bytecode)
    return;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    put_reg_into_stack (function, reg, TREE_TYPE (decl),
                        promoted_mode, decl_mode, TREE_SIDE_EFFECTS (decl));
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl));
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
}
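
/* Illustrative note (commentary only): a complex value expanded as
   (concat:DC (reg:DF real) (reg:DF imag)) is handled above by stacking
   the two DFmode parts adjacently, ordered so that part 0 ends up at the
   lower address whichever way the frame grows; the CONCAT rtx is then
   rewritten in place into a single MEM covering both parts.  */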

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
{
  rtx new = 0;

  if (function)
    {
      if (REGNO (reg) < function->max_parm_reg)
        new = function->parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (REGNO (reg) < max_parm_reg)
        new = parm_reg_stack_loc[REGNO (reg)];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */
  if (function)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
/* Fix up references to VAR, a MEM that used to be a pseudo register with
   mode PROMOTED_MODE, in every insn chain generated so far: the main
   chain, all pending sequences, and all saved RTL_EXPR sequences.
   UNSIGNEDP says how to extend when a mode conversion is needed.  */

static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}

/* Scan the insn-chain starting with INSN for refs to VAR
   and fix them up.  TOPLEVEL is nonzero if this chain is the
   main chain of insns for the current function.  */

static void
fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     rtx insn;
     int toplevel;
{
  rtx call_dest = 0;

  while (insn)
    {
      rtx next = NEXT_INSN (insn);
      rtx note;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* If this is a CLOBBER of VAR, delete it.

             If it has a REG_LIBCALL note, delete the REG_LIBCALL
             and REG_RETVAL notes too.  */
          if (GET_CODE (PATTERN (insn)) == CLOBBER
              && XEXP (PATTERN (insn), 0) == var)
            {
              if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
                /* The REG_LIBCALL note will go away since we are going to
                   turn INSN into a NOTE, so just delete the
                   corresponding REG_RETVAL note.  */
                remove_note (XEXP (note, 0),
                             find_reg_note (XEXP (note, 0), REG_RETVAL,
                                            NULL_RTX));

              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }

          /* The insn to load VAR from a home in the arglist
             is now a no-op.  When we see it, just delete it.  */
          else if (toplevel
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == var
                   /* If this represents the result of an insn group,
                      don't delete the insn.  */
                   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
                   && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
            {
              /* In unoptimized compilation, we shouldn't call delete_insn
                 except in jump.c doing warnings.  */
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next);
            }
          else
            {
              struct fixup_replacement *replacements = 0;
              rtx next_insn = NEXT_INSN (insn);

#ifdef SMALL_REGISTER_CLASSES
              /* If the insn that copies the results of a CALL_INSN
                 into a pseudo now references VAR, we have to use an
                 intermediate pseudo since we want the life of the
                 return value register to be only a single insn.

                 If we don't use an intermediate pseudo, such things as
                 address computations to make the address of VAR valid
                 if it is not can be placed between the CALL_INSN and INSN.

                 To make sure this doesn't happen, we record the destination
                 of the CALL_INSN and see if the next insn uses both that
                 and VAR.  */

              if (SMALL_REGISTER_CLASSES)
                {
                  if (call_dest != 0 && GET_CODE (insn) == INSN
                      && reg_mentioned_p (var, PATTERN (insn))
                      && reg_mentioned_p (call_dest, PATTERN (insn)))
                    {
                      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

                      emit_insn_before (gen_move_insn (temp, call_dest), insn);

                      PATTERN (insn) = replace_rtx (PATTERN (insn),
                                                    call_dest, temp);
                    }

                  if (GET_CODE (insn) == CALL_INSN
                      && GET_CODE (PATTERN (insn)) == SET)
                    call_dest = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (insn) == CALL_INSN
                           && GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    call_dest = 0;
                }
#endif

              /* See if we have to do anything to INSN now that VAR is in
                 memory.  If it needs to be loaded into a pseudo, use a single
                 pseudo for the entire insn in case there is a MATCH_DUP
                 between two operands.  We pass a pointer to the head of
                 a list of struct fixup_replacements.  If fixup_var_refs_1
                 needs to allocate pseudos or replacement MEMs (for SUBREGs),
                 it will record them in this list.

                 If it allocated a pseudo for any replacement, we copy into
                 it here.  */

              fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
                                &replacements);

              /* If this is last_parm_insn, and any instructions were output
                 after it to fix it up, then we must set last_parm_insn to
                 the last such instruction emitted.  */
              if (insn == last_parm_insn)
                last_parm_insn = PREV_INSN (next_insn);

              while (replacements)
                {
                  if (GET_CODE (replacements->new) == REG)
                    {
                      rtx insert_before;
                      rtx seq;

                      /* OLD might be a (subreg (mem)).  */
                      if (GET_CODE (replacements->old) == SUBREG)
                        replacements->old
                          = fixup_memory_subreg (replacements->old, insn, 0);
                      else
                        replacements->old
                          = fixup_stack_1 (replacements->old, insn);

                      insert_before = insn;

                      /* If we are changing the mode, do a conversion.
                         This might be wasteful, but combine.c will
                         eliminate much of the waste.  */

                      if (GET_MODE (replacements->new)
                          != GET_MODE (replacements->old))
                        {
                          start_sequence ();
                          convert_move (replacements->new,
                                        replacements->old, unsignedp);
                          seq = gen_sequence ();
                          end_sequence ();
                        }
                      else
                        seq = gen_move_insn (replacements->new,
                                             replacements->old);

                      emit_insn_before (seq, insert_before);
                    }

                  replacements = replacements->next;
                }
            }

          /* Also fix up any invalid exprs in the REG_NOTES of this insn.
             But don't touch other insns referred to by reg-notes;
             we will get them elsewhere.  */
          for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
            if (GET_CODE (note) != INSN_LIST)
              XEXP (note, 0)
                = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
        }
      insn = next;
    }
}
1679 \f
1680 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1681 See if the rtx expression at *LOC in INSN needs to be changed.
1682
1683 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1684 contain a list of original rtx's and replacements. If we find that we need
1685 to modify this insn by replacing a memory reference with a pseudo or by
1686 making a new MEM to implement a SUBREG, we consult that list to see if
1687 we have already chosen a replacement. If none has already been allocated,
1688 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1689 or the SUBREG, as appropriate, to the pseudo. */
1690
1691 static void
1692 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1693 register rtx var;
1694 enum machine_mode promoted_mode;
1695 register rtx *loc;
1696 rtx insn;
1697 struct fixup_replacement **replacements;
1698 {
1699 register int i;
1700 register rtx x = *loc;
1701 RTX_CODE code = GET_CODE (x);
1702 register char *fmt;
1703 register rtx tem, tem1;
1704 struct fixup_replacement *replacement;
1705
1706 switch (code)
1707 {
1708 case MEM:
1709 if (var == x)
1710 {
1711 /* If we already have a replacement, use it. Otherwise,
1712 try to fix up this address in case it is invalid. */
1713
1714 replacement = find_fixup_replacement (replacements, var);
1715 if (replacement->new)
1716 {
1717 *loc = replacement->new;
1718 return;
1719 }
1720
1721 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1722
1723 /* Unless we are forcing memory to register or we changed the mode,
1724 we can leave things the way they are if the insn is valid. */
1725
1726 INSN_CODE (insn) = -1;
1727 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1728 && recog_memoized (insn) >= 0)
1729 return;
1730
1731 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1732 return;
1733 }
1734
1735 /* If X contains VAR, we need to unshare it here so that we update
1736 each occurrence separately. But all identical MEMs in one insn
1737 must be replaced with the same rtx because of the possibility of
1738 MATCH_DUPs. */
1739
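      /* For instance, if VAR is now a MEM used as part of an address,
	 an insn may contain (mem:SI (plus:SI <VAR> (const_int 4)));
	 each such containing MEM gets its own copy, while textually
	 identical MEMs within a single insn share one replacement.  */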
1740 if (reg_mentioned_p (var, x))
1741 {
1742 replacement = find_fixup_replacement (replacements, x);
1743 if (replacement->new == 0)
1744 replacement->new = copy_most_rtx (x, var);
1745
1746 *loc = x = replacement->new;
1747 }
1748 break;
1749
1750 case REG:
1751 case CC0:
1752 case PC:
1753 case CONST_INT:
1754 case CONST:
1755 case SYMBOL_REF:
1756 case LABEL_REF:
1757 case CONST_DOUBLE:
1758 return;
1759
1760 case SIGN_EXTRACT:
1761 case ZERO_EXTRACT:
1762 /* Note that in some cases those types of expressions are altered
1763 by optimize_bit_field, and do not survive to get here. */
1764 if (XEXP (x, 0) == var
1765 || (GET_CODE (XEXP (x, 0)) == SUBREG
1766 && SUBREG_REG (XEXP (x, 0)) == var))
1767 {
1768 /* Get TEM as a valid MEM in the mode presently in the insn.
1769
1770 We don't worry about the possibility of MATCH_DUP here; it
1771 is highly unlikely and would be tricky to handle. */
1772
1773 tem = XEXP (x, 0);
1774 if (GET_CODE (tem) == SUBREG)
1775 {
1776 if (GET_MODE_BITSIZE (GET_MODE (tem))
1777 > GET_MODE_BITSIZE (GET_MODE (var)))
1778 {
1779 replacement = find_fixup_replacement (replacements, var);
1780 if (replacement->new == 0)
1781 replacement->new = gen_reg_rtx (GET_MODE (var));
1782 SUBREG_REG (tem) = replacement->new;
1783 }
1784 else
1785 tem = fixup_memory_subreg (tem, insn, 0);
1786 }
1787 else
1788 tem = fixup_stack_1 (tem, insn);
1789
1790 /* Unless we want to load from memory, get TEM into the proper mode
1791 for an extract from memory. This can only be done if the
1792 extract is at a constant position and length. */
1793
1794 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1795 && GET_CODE (XEXP (x, 2)) == CONST_INT
1796 && ! mode_dependent_address_p (XEXP (tem, 0))
1797 && ! MEM_VOLATILE_P (tem))
1798 {
1799 enum machine_mode wanted_mode = VOIDmode;
1800 enum machine_mode is_mode = GET_MODE (tem);
1801 int width = INTVAL (XEXP (x, 1));
1802 int pos = INTVAL (XEXP (x, 2));
1803
1804 #ifdef HAVE_extzv
1805 if (GET_CODE (x) == ZERO_EXTRACT)
1806 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1807 #endif
1808 #ifdef HAVE_extv
1809 if (GET_CODE (x) == SIGN_EXTRACT)
1810 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1811 #endif
1812 /* If we have a narrower mode, we can do something. */
1813 if (wanted_mode != VOIDmode
1814 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1815 {
1816 int offset = pos / BITS_PER_UNIT;
1817 rtx old_pos = XEXP (x, 2);
1818 rtx newmem;
1819
1820 /* If the bytes and bits are counted differently, we
1821 must adjust the offset. */
1822 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1823 offset = (GET_MODE_SIZE (is_mode)
1824 - GET_MODE_SIZE (wanted_mode) - offset);
1825
1826 pos %= GET_MODE_BITSIZE (wanted_mode);
1827
1828 newmem = gen_rtx (MEM, wanted_mode,
1829 plus_constant (XEXP (tem, 0), offset));
1830 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1831 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1832 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1833
1834 /* Make the change and see if the insn remains valid. */
1835 INSN_CODE (insn) = -1;
1836 XEXP (x, 0) = newmem;
1837 XEXP (x, 2) = GEN_INT (pos);
1838
1839 if (recog_memoized (insn) >= 0)
1840 return;
1841
1842 /* Otherwise, restore old position. XEXP (x, 0) will be
1843 restored later. */
1844 XEXP (x, 2) = old_pos;
1845 }
1846 }
1847
1848 /* If we get here, the bitfield extract insn can't accept a memory
1849 reference. Copy the input into a register. */
1850
1851 tem1 = gen_reg_rtx (GET_MODE (tem));
1852 emit_insn_before (gen_move_insn (tem1, tem), insn);
1853 XEXP (x, 0) = tem1;
1854 return;
1855 }
1856 break;
1857
1858 case SUBREG:
1859 if (SUBREG_REG (x) == var)
1860 {
1861 /* If this is a special SUBREG made because VAR was promoted
1862 from a wider mode, replace it with VAR and call ourself
1863 recursively, this time saying that the object previously
1864 had its current mode (by virtue of the SUBREG). */
1865
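	  /* For example, a QImode variable promoted into an SImode
	     register was referenced as (subreg:QI (reg:SI n) 0) with
	     SUBREG_PROMOTED_VAR_P set; we peel off the SUBREG here and
	     process the reference again in the variable's own mode.  */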
1866 if (SUBREG_PROMOTED_VAR_P (x))
1867 {
1868 *loc = var;
1869 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1870 return;
1871 }
1872
1873 /* If this SUBREG makes VAR wider, it has become a paradoxical
1874 SUBREG with VAR in memory, but these aren't allowed at this
1875 stage of the compilation. So load VAR into a pseudo and take
1876 a SUBREG of that pseudo. */
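	  /* E.g. (subreg:DI (mem:SI <slot>) 0) would be paradoxical; it
	     becomes (subreg:DI (reg:SI <new pseudo>) 0), and our caller
	     copies the memory slot into the new pseudo.  */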
1877 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1878 {
1879 replacement = find_fixup_replacement (replacements, var);
1880 if (replacement->new == 0)
1881 replacement->new = gen_reg_rtx (GET_MODE (var));
1882 SUBREG_REG (x) = replacement->new;
1883 return;
1884 }
1885
1886 /* See if we have already found a replacement for this SUBREG.
1887 If so, use it. Otherwise, make a MEM and see if the insn
1888 is recognized. If not, or if we should force MEM into a register,
1889 make a pseudo for this SUBREG. */
1890 replacement = find_fixup_replacement (replacements, x);
1891 if (replacement->new)
1892 {
1893 *loc = replacement->new;
1894 return;
1895 }
1896
1897 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1898
1899 INSN_CODE (insn) = -1;
1900 if (! flag_force_mem && recog_memoized (insn) >= 0)
1901 return;
1902
1903 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1904 return;
1905 }
1906 break;
1907
1908 case SET:
1909 /* First do special simplification of bit-field references. */
1910 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1911 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1912 optimize_bit_field (x, insn, 0);
1913 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1914 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1915 optimize_bit_field (x, insn, NULL_PTR);
1916
1917 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1918 into a register and then store it back out. */
1919 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1920 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
1921 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
1922 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
1923 > GET_MODE_SIZE (GET_MODE (var))))
1924 {
1925 replacement = find_fixup_replacement (replacements, var);
1926 if (replacement->new == 0)
1927 replacement->new = gen_reg_rtx (GET_MODE (var));
1928
1929 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
1930 emit_insn_after (gen_move_insn (var, replacement->new), insn);
1931 }
1932
1933 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1934 insn into a pseudo and store the low part of the pseudo into VAR. */
1935 if (GET_CODE (SET_DEST (x)) == SUBREG
1936 && SUBREG_REG (SET_DEST (x)) == var
1937 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1938 > GET_MODE_SIZE (GET_MODE (var))))
1939 {
1940 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1941 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1942 tem)),
1943 insn);
1944 break;
1945 }
1946
1947 {
1948 rtx dest = SET_DEST (x);
1949 rtx src = SET_SRC (x);
1950 rtx outerdest = dest;
1951
1952 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1953 || GET_CODE (dest) == SIGN_EXTRACT
1954 || GET_CODE (dest) == ZERO_EXTRACT)
1955 dest = XEXP (dest, 0);
1956
1957 if (GET_CODE (src) == SUBREG)
1958 src = XEXP (src, 0);
1959
1960 /* If VAR does not appear at the top level of the SET
1961 just scan the lower levels of the tree. */
1962
1963 if (src != var && dest != var)
1964 break;
1965
1966 /* We will need to rerecognize this insn. */
1967 INSN_CODE (insn) = -1;
1968
1969 #ifdef HAVE_insv
1970 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1971 {
1972 /* Since this case will return, ensure we fixup all the
1973 operands here. */
1974 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1975 insn, replacements);
1976 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1977 insn, replacements);
1978 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1979 insn, replacements);
1980
1981 tem = XEXP (outerdest, 0);
1982
1983 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1984 that may appear inside a ZERO_EXTRACT.
1985 This was legitimate when the MEM was a REG. */
1986 if (GET_CODE (tem) == SUBREG
1987 && SUBREG_REG (tem) == var)
1988 tem = fixup_memory_subreg (tem, insn, 0);
1989 else
1990 tem = fixup_stack_1 (tem, insn);
1991
1992 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
1993 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
1994 && ! mode_dependent_address_p (XEXP (tem, 0))
1995 && ! MEM_VOLATILE_P (tem))
1996 {
1997 enum machine_mode wanted_mode
1998 = insn_operand_mode[(int) CODE_FOR_insv][0];
1999 enum machine_mode is_mode = GET_MODE (tem);
2000 int width = INTVAL (XEXP (outerdest, 1));
2001 int pos = INTVAL (XEXP (outerdest, 2));
2002
2003 /* If we have a narrower mode, we can do something. */
2004 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2005 {
2006 int offset = pos / BITS_PER_UNIT;
2007 rtx old_pos = XEXP (outerdest, 2);
2008 rtx newmem;
2009
2010 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2011 offset = (GET_MODE_SIZE (is_mode)
2012 - GET_MODE_SIZE (wanted_mode) - offset);
2013
2014 pos %= GET_MODE_BITSIZE (wanted_mode);
2015
2016 newmem = gen_rtx (MEM, wanted_mode,
2017 plus_constant (XEXP (tem, 0), offset));
2018 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2019 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2020 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2021
2022 /* Make the change and see if the insn remains valid. */
2023 INSN_CODE (insn) = -1;
2024 XEXP (outerdest, 0) = newmem;
2025 XEXP (outerdest, 2) = GEN_INT (pos);
2026
2027 if (recog_memoized (insn) >= 0)
2028 return;
2029
2030 		    /* Otherwise, restore the old position.  XEXP (outerdest, 0)
2031 		       will be restored later.  */
2032 XEXP (outerdest, 2) = old_pos;
2033 }
2034 }
2035
2036 /* If we get here, the bit-field store doesn't allow memory
2037 or isn't located at a constant position. Load the value into
2038 a register, do the store, and put it back into memory. */
2039
2040 tem1 = gen_reg_rtx (GET_MODE (tem));
2041 emit_insn_before (gen_move_insn (tem1, tem), insn);
2042 emit_insn_after (gen_move_insn (tem, tem1), insn);
2043 XEXP (outerdest, 0) = tem1;
2044 return;
2045 }
2046 #endif
2047
2048 /* STRICT_LOW_PART is a no-op on memory references
2049 and it can cause combinations to be unrecognizable,
2050 so eliminate it. */
2051
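	    /* E.g. a store written as
	       (set (strict_low_part (subreg:QI <VAR> 0)) (reg:QI n))
	       modifies the same bytes without the STRICT_LOW_PART once
	       VAR lives in memory; the SUBREG itself is fixed up below.  */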
2052 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2053 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2054
2055 /* A valid insn to copy VAR into or out of a register
2056 must be left alone, to avoid an infinite loop here.
2057 If the reference to VAR is by a subreg, fix that up,
2058 since SUBREG is not valid for a memref.
2059 Also fix up the address of the stack slot.
2060
2061 Note that we must not try to recognize the insn until
2062 after we know that we have valid addresses and no
2063 (subreg (mem ...) ...) constructs, since these interfere
2064 with determining the validity of the insn. */
2065
2066 if ((SET_SRC (x) == var
2067 || (GET_CODE (SET_SRC (x)) == SUBREG
2068 && SUBREG_REG (SET_SRC (x)) == var))
2069 && (GET_CODE (SET_DEST (x)) == REG
2070 || (GET_CODE (SET_DEST (x)) == SUBREG
2071 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2072 && GET_MODE (var) == promoted_mode
2073 && x == single_set (insn))
2074 {
2075 rtx pat;
2076
2077 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2078 if (replacement->new)
2079 SET_SRC (x) = replacement->new;
2080 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2081 SET_SRC (x) = replacement->new
2082 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2083 else
2084 SET_SRC (x) = replacement->new
2085 = fixup_stack_1 (SET_SRC (x), insn);
2086
2087 if (recog_memoized (insn) >= 0)
2088 return;
2089
2090 /* INSN is not valid, but we know that we want to
2091 copy SET_SRC (x) to SET_DEST (x) in some way. So
2092 we generate the move and see whether it requires more
2093 than one insn. If it does, we emit those insns and
2094 			 delete INSN.  Otherwise, we can just replace the pattern
2095 			 of INSN; we have already verified above that INSN has
2096 			 no other function than to do X.  */
2097
2098 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2099 if (GET_CODE (pat) == SEQUENCE)
2100 {
2101 emit_insn_after (pat, insn);
2102 PUT_CODE (insn, NOTE);
2103 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2104 NOTE_SOURCE_FILE (insn) = 0;
2105 }
2106 else
2107 PATTERN (insn) = pat;
2108
2109 return;
2110 }
2111
2112 if ((SET_DEST (x) == var
2113 || (GET_CODE (SET_DEST (x)) == SUBREG
2114 && SUBREG_REG (SET_DEST (x)) == var))
2115 && (GET_CODE (SET_SRC (x)) == REG
2116 || (GET_CODE (SET_SRC (x)) == SUBREG
2117 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2118 && GET_MODE (var) == promoted_mode
2119 && x == single_set (insn))
2120 {
2121 rtx pat;
2122
2123 if (GET_CODE (SET_DEST (x)) == SUBREG)
2124 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2125 else
2126 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2127
2128 if (recog_memoized (insn) >= 0)
2129 return;
2130
2131 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2132 if (GET_CODE (pat) == SEQUENCE)
2133 {
2134 emit_insn_after (pat, insn);
2135 PUT_CODE (insn, NOTE);
2136 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2137 NOTE_SOURCE_FILE (insn) = 0;
2138 }
2139 else
2140 PATTERN (insn) = pat;
2141
2142 return;
2143 }
2144
2145 /* Otherwise, storing into VAR must be handled specially
2146 by storing into a temporary and copying that into VAR
2147 with a new insn after this one. Note that this case
2148 will be used when storing into a promoted scalar since
2149 the insn will now have different modes on the input
2150 and output and hence will be invalid (except for the case
2151 of setting it to a constant, which does not need any
2152 change if it is valid). We generate extra code in that case,
2153 but combine.c will eliminate it. */
2154
2155 if (dest == var)
2156 {
2157 rtx temp;
2158 rtx fixeddest = SET_DEST (x);
2159
2160 	  /* STRICT_LOW_PART can be discarded around a MEM.  */
2161 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2162 fixeddest = XEXP (fixeddest, 0);
2163 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2164 if (GET_CODE (fixeddest) == SUBREG)
2165 {
2166 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2167 promoted_mode = GET_MODE (fixeddest);
2168 }
2169 else
2170 fixeddest = fixup_stack_1 (fixeddest, insn);
2171
2172 temp = gen_reg_rtx (promoted_mode);
2173
2174 emit_insn_after (gen_move_insn (fixeddest,
2175 gen_lowpart (GET_MODE (fixeddest),
2176 temp)),
2177 insn);
2178
2179 SET_DEST (x) = temp;
2180 }
2181 }
2182 }
2183
2184 /* Nothing special about this RTX; fix its operands. */
2185
2186 fmt = GET_RTX_FORMAT (code);
2187 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2188 {
2189 if (fmt[i] == 'e')
2190 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2191 if (fmt[i] == 'E')
2192 {
2193 register int j;
2194 for (j = 0; j < XVECLEN (x, i); j++)
2195 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2196 insn, replacements);
2197 }
2198 }
2199 }
2200 \f
2201 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2202 return an rtx (MEM:m1 newaddr) which is equivalent.
2203 If any insns must be emitted to compute NEWADDR, put them before INSN.
2204
2205 UNCRITICAL nonzero means accept paradoxical subregs.
2206 This is used for subregs found inside REG_NOTES. */
2207
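/* For example, on a little-endian machine with 4-byte words,
   (subreg:SI (mem:DI ADDR) 1) denotes the high word of the DImode value
   and is rewritten here as (mem:SI (plus ADDR (const_int 4))).  */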
2208 static rtx
2209 fixup_memory_subreg (x, insn, uncritical)
2210 rtx x;
2211 rtx insn;
2212 int uncritical;
2213 {
2214 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2215 rtx addr = XEXP (SUBREG_REG (x), 0);
2216 enum machine_mode mode = GET_MODE (x);
2217 rtx saved, result;
2218
2219 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2220 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2221 && ! uncritical)
2222 abort ();
2223
2224 if (BYTES_BIG_ENDIAN)
2225 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2226 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2227 addr = plus_constant (addr, offset);
2228 if (!flag_force_addr && memory_address_p (mode, addr))
2229 /* Shortcut if no insns need be emitted. */
2230 return change_address (SUBREG_REG (x), mode, addr);
2231 start_sequence ();
2232 result = change_address (SUBREG_REG (x), mode, addr);
2233 emit_insn_before (gen_sequence (), insn);
2234 end_sequence ();
2235 return result;
2236 }
2237
2238 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2239 Replace subexpressions of X in place.
2240 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2241 Otherwise return X, with its contents possibly altered.
2242
2243 If any insns must be emitted to compute NEWADDR, put them before INSN.
2244
2245 UNCRITICAL is as in fixup_memory_subreg. */
2246
2247 static rtx
2248 walk_fixup_memory_subreg (x, insn, uncritical)
2249 register rtx x;
2250 rtx insn;
2251 int uncritical;
2252 {
2253 register enum rtx_code code;
2254 register char *fmt;
2255 register int i;
2256
2257 if (x == 0)
2258 return 0;
2259
2260 code = GET_CODE (x);
2261
2262 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2263 return fixup_memory_subreg (x, insn, uncritical);
2264
2265 /* Nothing special about this RTX; fix its operands. */
2266
2267 fmt = GET_RTX_FORMAT (code);
2268 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2269 {
2270 if (fmt[i] == 'e')
2271 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2272 if (fmt[i] == 'E')
2273 {
2274 register int j;
2275 for (j = 0; j < XVECLEN (x, i); j++)
2276 XVECEXP (x, i, j)
2277 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2278 }
2279 }
2280 return x;
2281 }
2282 \f
2283 /* For each memory ref within X, if it refers to a stack slot
2284 with an out of range displacement, put the address in a temp register
2285 (emitting new insns before INSN to load these registers)
2286 and alter the memory ref to use that register.
2287 Replace each such MEM rtx with a copy, to avoid clobberage. */
2288
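/* For example, if (mem:SI (plus (reg VIRT) (const_int 40000))) has a
   displacement too large for the target's addressing modes, the sum is
   copied into a fresh pseudo R and the reference becomes
   (mem:SI (reg R)).  */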
2289 static rtx
2290 fixup_stack_1 (x, insn)
2291 rtx x;
2292 rtx insn;
2293 {
2294 register int i;
2295 register RTX_CODE code = GET_CODE (x);
2296 register char *fmt;
2297
2298 if (code == MEM)
2299 {
2300 register rtx ad = XEXP (x, 0);
2301       /* If we have the address of a stack slot but it is not valid
2302 	 (the displacement is too large), compute the sum in a register.  */
2303 if (GET_CODE (ad) == PLUS
2304 && GET_CODE (XEXP (ad, 0)) == REG
2305 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2306 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2307 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2308 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2309 {
2310 rtx temp, seq;
2311 if (memory_address_p (GET_MODE (x), ad))
2312 return x;
2313
2314 start_sequence ();
2315 temp = copy_to_reg (ad);
2316 seq = gen_sequence ();
2317 end_sequence ();
2318 emit_insn_before (seq, insn);
2319 return change_address (x, VOIDmode, temp);
2320 }
2321 return x;
2322 }
2323
2324 fmt = GET_RTX_FORMAT (code);
2325 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2326 {
2327 if (fmt[i] == 'e')
2328 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2329 if (fmt[i] == 'E')
2330 {
2331 register int j;
2332 for (j = 0; j < XVECLEN (x, i); j++)
2333 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2334 }
2335 }
2336 return x;
2337 }
2338 \f
2339 /* Optimization: a bit-field instruction whose field
2340 happens to be a byte or halfword in memory
2341 can be changed to a move instruction.
2342
2343 We call here when INSN is an insn to examine or store into a bit-field.
2344 BODY is the SET-rtx to be altered.
2345
2346 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2347 (Currently this is called only from function.c, and EQUIV_MEM
2348 is always 0.) */
2349
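/* As an illustration, the byte-aligned extraction
   (set (reg:SI N) (zero_extract:SI (mem:SI ADDR) (const_int 8)
				    (const_int 8)))
   can be rewritten as a QImode load of the addressed byte followed by a
   zero-extension into the destination register.  */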
2350 static void
2351 optimize_bit_field (body, insn, equiv_mem)
2352 rtx body;
2353 rtx insn;
2354 rtx *equiv_mem;
2355 {
2356 register rtx bitfield;
2357 int destflag;
2358 rtx seq = 0;
2359 enum machine_mode mode;
2360
2361 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2362 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2363 bitfield = SET_DEST (body), destflag = 1;
2364 else
2365 bitfield = SET_SRC (body), destflag = 0;
2366
2367 /* First check that the field being stored has constant size and position
2368 and is in fact a byte or halfword suitably aligned. */
2369
2370 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2371 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2372 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2373 != BLKmode)
2374 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2375 {
2376 register rtx memref = 0;
2377
2378 /* Now check that the containing word is memory, not a register,
2379 and that it is safe to change the machine mode. */
2380
2381 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2382 memref = XEXP (bitfield, 0);
2383 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2384 && equiv_mem != 0)
2385 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2386 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2387 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2388 memref = SUBREG_REG (XEXP (bitfield, 0));
2389 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2390 && equiv_mem != 0
2391 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2392 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2393
2394 if (memref
2395 && ! mode_dependent_address_p (XEXP (memref, 0))
2396 && ! MEM_VOLATILE_P (memref))
2397 {
2398 /* Now adjust the address, first for any subreg'ing
2399 that we are now getting rid of,
2400 and then for which byte of the word is wanted. */
2401
2402 register int offset = INTVAL (XEXP (bitfield, 2));
2403 rtx insns;
2404
2405 /* Adjust OFFSET to count bits from low-address byte. */
2406 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2407 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2408 - offset - INTVAL (XEXP (bitfield, 1)));
2409
2410 /* Adjust OFFSET to count bytes from low-address byte. */
2411 offset /= BITS_PER_UNIT;
2412 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2413 {
2414 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2415 if (BYTES_BIG_ENDIAN)
2416 offset -= (MIN (UNITS_PER_WORD,
2417 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2418 - MIN (UNITS_PER_WORD,
2419 GET_MODE_SIZE (GET_MODE (memref))));
2420 }
2421
2422 start_sequence ();
2423 memref = change_address (memref, mode,
2424 plus_constant (XEXP (memref, 0), offset));
2425 insns = get_insns ();
2426 end_sequence ();
2427 emit_insns_before (insns, insn);
2428
2429 /* Store this memory reference where
2430 we found the bit field reference. */
2431
2432 if (destflag)
2433 {
2434 validate_change (insn, &SET_DEST (body), memref, 1);
2435 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2436 {
2437 rtx src = SET_SRC (body);
2438 while (GET_CODE (src) == SUBREG
2439 && SUBREG_WORD (src) == 0)
2440 src = SUBREG_REG (src);
2441 if (GET_MODE (src) != GET_MODE (memref))
2442 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2443 validate_change (insn, &SET_SRC (body), src, 1);
2444 }
2445 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2446 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2447 /* This shouldn't happen because anything that didn't have
2448 one of these modes should have got converted explicitly
2449 and then referenced through a subreg.
2450 This is so because the original bit-field was
2451 handled by agg_mode and so its tree structure had
2452 the same mode that memref now has. */
2453 abort ();
2454 }
2455 else
2456 {
2457 rtx dest = SET_DEST (body);
2458
2459 while (GET_CODE (dest) == SUBREG
2460 && SUBREG_WORD (dest) == 0
2461 && (GET_MODE_CLASS (GET_MODE (dest))
2462 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2463 dest = SUBREG_REG (dest);
2464
2465 validate_change (insn, &SET_DEST (body), dest, 1);
2466
2467 if (GET_MODE (dest) == GET_MODE (memref))
2468 validate_change (insn, &SET_SRC (body), memref, 1);
2469 else
2470 {
2471 /* Convert the mem ref to the destination mode. */
2472 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2473
2474 start_sequence ();
2475 convert_move (newreg, memref,
2476 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2477 seq = get_insns ();
2478 end_sequence ();
2479
2480 validate_change (insn, &SET_SRC (body), newreg, 1);
2481 }
2482 }
2483
2484 /* See if we can convert this extraction or insertion into
2485 a simple move insn. We might not be able to do so if this
2486 was, for example, part of a PARALLEL.
2487
2488 If we succeed, write out any needed conversions. If we fail,
2489 it is hard to guess why we failed, so don't do anything
2490 special; just let the optimization be suppressed. */
2491
2492 if (apply_change_group () && seq)
2493 emit_insns_before (seq, insn);
2494 }
2495 }
2496 }
2497 \f
2498 /* These routines are responsible for converting virtual register references
2499 to the actual hard register references once RTL generation is complete.
2500
2501 The following four variables are used for communication between the
2502 routines. They contain the offsets of the virtual registers from their
2503 respective hard registers. */
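/* For instance, once in_arg_offset is known, a reference such as
   (plus virtual_incoming_args_rtx (const_int 8)) is rewritten, in
   effect, as (plus arg_pointer_rtx (const_int (8 + in_arg_offset))).  */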
2504
2505 static int in_arg_offset;
2506 static int var_offset;
2507 static int dynamic_offset;
2508 static int out_arg_offset;
2509
2510 /* On most machines, the stack pointer register is equivalent to the bottom
2511 of the stack. */
2512
2513 #ifndef STACK_POINTER_OFFSET
2514 #define STACK_POINTER_OFFSET 0
2515 #endif
2516
2517 /* If not defined, pick an appropriate default for the offset of dynamically
2518 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2519 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2520
2521 #ifndef STACK_DYNAMIC_OFFSET
2522
2523 #ifdef ACCUMULATE_OUTGOING_ARGS
2524 /* The bottom of the stack points to the actual arguments. If
2525 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2526    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2527 stack space for register parameters is not pushed by the caller, but
2528 rather part of the fixed stack areas and hence not included in
2529 `current_function_outgoing_args_size'. Nevertheless, we must allow
2530 for it when allocating stack dynamic objects. */
2531
2532 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2533 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2534 (current_function_outgoing_args_size \
2535 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2536
2537 #else
2538 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2539 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2540 #endif
2541
2542 #else
2543 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2544 #endif
2545 #endif
2546
2547 /* Pass through the INSNS of function FNDECL and convert virtual register
2548 references to hard register references. */
2549
2550 void
2551 instantiate_virtual_regs (fndecl, insns)
2552 tree fndecl;
2553 rtx insns;
2554 {
2555 rtx insn;
2556
2557 /* Compute the offsets to use for this function. */
2558 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2559 var_offset = STARTING_FRAME_OFFSET;
2560 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2561 out_arg_offset = STACK_POINTER_OFFSET;
2562
2563 /* Scan all variables and parameters of this function. For each that is
2564 in memory, instantiate all virtual registers if the result is a valid
2565 address. If not, we do it later. That will handle most uses of virtual
2566 regs on many machines. */
2567 instantiate_decls (fndecl, 1);
2568
2569 /* Initialize recognition, indicating that volatile is OK. */
2570 init_recog ();
2571
2572 /* Scan through all the insns, instantiating every virtual register still
2573 present. */
2574 for (insn = insns; insn; insn = NEXT_INSN (insn))
2575 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2576 || GET_CODE (insn) == CALL_INSN)
2577 {
2578 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2579 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2580 }
2581
2582 /* Now instantiate the remaining register equivalences for debugging info.
2583 These will not be valid addresses. */
2584 instantiate_decls (fndecl, 0);
2585
2586 /* Indicate that, from now on, assign_stack_local should use
2587 frame_pointer_rtx. */
2588 virtuals_instantiated = 1;
2589 }
2590
2591 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2592 all virtual registers in their DECL_RTL's.
2593
2594 If VALID_ONLY, do this only if the resulting address is still valid.
2595 Otherwise, always do it. */
2596
2597 static void
2598 instantiate_decls (fndecl, valid_only)
2599 tree fndecl;
2600 int valid_only;
2601 {
2602 tree decl;
2603
2604 if (DECL_SAVED_INSNS (fndecl))
2605 /* When compiling an inline function, the obstack used for
2606 rtl allocation is the maybepermanent_obstack. Calling
2607 `resume_temporary_allocation' switches us back to that
2608 obstack while we process this function's parameters. */
2609 resume_temporary_allocation ();
2610
2611 /* Process all parameters of the function. */
2612 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2613 {
2614 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2615 valid_only);
2616 instantiate_decl (DECL_INCOMING_RTL (decl),
2617 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2618 }
2619
2620 /* Now process all variables defined in the function or its subblocks. */
2621 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2622
2623 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2624 {
2625 /* Save all rtl allocated for this function by raising the
2626 high-water mark on the maybepermanent_obstack. */
2627 preserve_data ();
2628 /* All further rtl allocation is now done in the current_obstack. */
2629 rtl_in_current_obstack ();
2630 }
2631 }
2632
2633 /* Subroutine of instantiate_decls: Process all decls in the given
2634 BLOCK node and all its subblocks. */
2635
2636 static void
2637 instantiate_decls_1 (let, valid_only)
2638 tree let;
2639 int valid_only;
2640 {
2641 tree t;
2642
2643 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2644 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2645 valid_only);
2646
2647 /* Process all subblocks. */
2648 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2649 instantiate_decls_1 (t, valid_only);
2650 }
2651
2652 /* Subroutine of the preceding procedures: Given RTL representing a
2653 decl and the size of the object, do any instantiation required.
2654
2655 If VALID_ONLY is non-zero, it means that the RTL should only be
2656 changed if the new address is valid. */
2657
2658 static void
2659 instantiate_decl (x, size, valid_only)
2660 rtx x;
2661 int size;
2662 int valid_only;
2663 {
2664 enum machine_mode mode;
2665 rtx addr;
2666
2667 /* If this is not a MEM, no need to do anything. Similarly if the
2668 address is a constant or a register that is not a virtual register. */
2669
2670 if (x == 0 || GET_CODE (x) != MEM)
2671 return;
2672
2673 addr = XEXP (x, 0);
2674 if (CONSTANT_P (addr)
2675 || (GET_CODE (addr) == REG
2676 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2677 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2678 return;
2679
2680 /* If we should only do this if the address is valid, copy the address.
2681 We need to do this so we can undo any changes that might make the
2682 address invalid. This copy is unfortunate, but probably can't be
2683 avoided. */
2684
2685 if (valid_only)
2686 addr = copy_rtx (addr);
2687
2688 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2689
2690 if (valid_only)
2691 {
2692 /* Now verify that the resulting address is valid for every integer or
2693 floating-point mode up to and including SIZE bytes long. We do this
2694 since the object might be accessed in any mode and frame addresses
2695 are shared. */
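      /* E.g. the slot of `union { int i; float f; }' may be accessed in
	 both SImode and SFmode, so its address must be valid in each.  */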
2696
2697 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2698 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2699 mode = GET_MODE_WIDER_MODE (mode))
2700 if (! memory_address_p (mode, addr))
2701 return;
2702
2703 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2704 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2705 mode = GET_MODE_WIDER_MODE (mode))
2706 if (! memory_address_p (mode, addr))
2707 return;
2708 }
2709
2710 /* Put back the address now that we have updated it and we either know
2711 it is valid or we don't care whether it is valid. */
2712
2713 XEXP (x, 0) = addr;
2714 }
2715 \f
2716 /* Given a pointer to a piece of rtx and an optional pointer to the
2717 containing object, instantiate any virtual registers present in it.
2718
2719 If EXTRA_INSNS, we always do the replacement and generate
2720    any extra insns before OBJECT.  If it is zero, we do nothing if replacement
2721 is not valid.
2722
2723 Return 1 if we either had nothing to do or if we were able to do the
2724 needed replacement. Return 0 otherwise; we only return zero if
2725 EXTRA_INSNS is zero.
2726
2727 We first try some simple transformations to avoid the creation of extra
2728 pseudos. */
2729
2730 static int
2731 instantiate_virtual_regs_1 (loc, object, extra_insns)
2732 rtx *loc;
2733 rtx object;
2734 int extra_insns;
2735 {
2736 rtx x;
2737 RTX_CODE code;
2738 rtx new = 0;
2739 int offset;
2740 rtx temp;
2741 rtx seq;
2742 int i, j;
2743 char *fmt;
2744
2745 /* Re-start here to avoid recursion in common cases. */
2746 restart:
2747
2748 x = *loc;
2749 if (x == 0)
2750 return 1;
2751
2752 code = GET_CODE (x);
2753
2754 /* Check for some special cases. */
2755 switch (code)
2756 {
2757 case CONST_INT:
2758 case CONST_DOUBLE:
2759 case CONST:
2760 case SYMBOL_REF:
2761 case CODE_LABEL:
2762 case PC:
2763 case CC0:
2764 case ASM_INPUT:
2765 case ADDR_VEC:
2766 case ADDR_DIFF_VEC:
2767 case RETURN:
2768 return 1;
2769
2770 case SET:
2771     /* We are allowed to set the virtual registers.  This means
2772 	 that the actual register should receive the source minus the
2773 appropriate offset. This is used, for example, in the handling
2774 of non-local gotos. */
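      /* For example, (set (reg virtual-stack-vars) (reg X)) is rewritten
	 so that the frame pointer receives, in effect,
	 (plus (reg X) (const_int -var_offset)), since the virtual
	 register stands for fp plus var_offset.  */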
2775 if (SET_DEST (x) == virtual_incoming_args_rtx)
2776 new = arg_pointer_rtx, offset = - in_arg_offset;
2777 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2778 new = frame_pointer_rtx, offset = - var_offset;
2779 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2780 new = stack_pointer_rtx, offset = - dynamic_offset;
2781 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2782 new = stack_pointer_rtx, offset = - out_arg_offset;
2783
2784 if (new)
2785 {
2786 /* The only valid sources here are PLUS or REG. Just do
2787 the simplest possible thing to handle them. */
2788 if (GET_CODE (SET_SRC (x)) != REG
2789 && GET_CODE (SET_SRC (x)) != PLUS)
2790 abort ();
2791
2792 start_sequence ();
2793 if (GET_CODE (SET_SRC (x)) != REG)
2794 temp = force_operand (SET_SRC (x), NULL_RTX);
2795 else
2796 temp = SET_SRC (x);
2797 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2798 seq = get_insns ();
2799 end_sequence ();
2800
2801 emit_insns_before (seq, object);
2802 SET_DEST (x) = new;
2803
2804 if (!validate_change (object, &SET_SRC (x), temp, 0)
2805 || ! extra_insns)
2806 abort ();
2807
2808 return 1;
2809 }
2810
2811 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2812 loc = &SET_SRC (x);
2813 goto restart;
2814
2815 case PLUS:
2816 /* Handle special case of virtual register plus constant. */
2817 if (CONSTANT_P (XEXP (x, 1)))
2818 {
2819 rtx old, new_offset;
2820
2821 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2822 if (GET_CODE (XEXP (x, 0)) == PLUS)
2823 {
2824 rtx inner = XEXP (XEXP (x, 0), 0);
2825
2826 if (inner == virtual_incoming_args_rtx)
2827 new = arg_pointer_rtx, offset = in_arg_offset;
2828 else if (inner == virtual_stack_vars_rtx)
2829 new = frame_pointer_rtx, offset = var_offset;
2830 else if (inner == virtual_stack_dynamic_rtx)
2831 new = stack_pointer_rtx, offset = dynamic_offset;
2832 else if (inner == virtual_outgoing_args_rtx)
2833 new = stack_pointer_rtx, offset = out_arg_offset;
2834 else
2835 {
2836 loc = &XEXP (x, 0);
2837 goto restart;
2838 }
2839
2840 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2841 extra_insns);
2842 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2843 }
2844
2845 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2846 new = arg_pointer_rtx, offset = in_arg_offset;
2847 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2848 new = frame_pointer_rtx, offset = var_offset;
2849 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2850 new = stack_pointer_rtx, offset = dynamic_offset;
2851 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2852 new = stack_pointer_rtx, offset = out_arg_offset;
2853 else
2854 {
2855 /* We know the second operand is a constant. Unless the
2856 		 first operand is a REG (which has already been checked),
2857 it needs to be checked. */
2858 if (GET_CODE (XEXP (x, 0)) != REG)
2859 {
2860 loc = &XEXP (x, 0);
2861 goto restart;
2862 }
2863 return 1;
2864 }
2865
2866 new_offset = plus_constant (XEXP (x, 1), offset);
2867
2868 /* If the new constant is zero, try to replace the sum with just
2869 the register. */
2870 if (new_offset == const0_rtx
2871 && validate_change (object, loc, new, 0))
2872 return 1;
2873
2874 /* Next try to replace the register and new offset.
2875 	     There are two changes to validate here and we can't assume that,
2876 	     in the case where the old offset equals the new one, just changing
2877 	     the register will yield a valid insn.  In the interests of a little
2878 	     efficiency, however, we only call validate_change once (we don't
2879 	     queue up the changes and then call apply_change_group).  */
2880
2881 old = XEXP (x, 0);
2882 if (offset == 0
2883 ? ! validate_change (object, &XEXP (x, 0), new, 0)
2884 : (XEXP (x, 0) = new,
2885 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
2886 {
2887 if (! extra_insns)
2888 {
2889 XEXP (x, 0) = old;
2890 return 0;
2891 }
2892
2893 /* Otherwise copy the new constant into a register and replace
2894 		 the constant with that register.  */
2895 temp = gen_reg_rtx (Pmode);
2896 XEXP (x, 0) = new;
2897 if (validate_change (object, &XEXP (x, 1), temp, 0))
2898 emit_insn_before (gen_move_insn (temp, new_offset), object);
2899 else
2900 {
2901 /* If that didn't work, replace this expression with a
2902 register containing the sum. */
2903
2904 XEXP (x, 0) = old;
2905 new = gen_rtx (PLUS, Pmode, new, new_offset);
2906
2907 start_sequence ();
2908 temp = force_operand (new, NULL_RTX);
2909 seq = get_insns ();
2910 end_sequence ();
2911
2912 emit_insns_before (seq, object);
2913 if (! validate_change (object, loc, temp, 0)
2914 && ! validate_replace_rtx (x, temp, object))
2915 abort ();
2916 }
2917 }
2918
2919 return 1;
2920 }
2921
2922 /* Fall through to generic two-operand expression case. */
2923 case EXPR_LIST:
2924 case CALL:
2925 case COMPARE:
2926 case MINUS:
2927 case MULT:
2928 case DIV: case UDIV:
2929 case MOD: case UMOD:
2930 case AND: case IOR: case XOR:
2931 case ROTATERT: case ROTATE:
2932 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2933 case NE: case EQ:
2934 case GE: case GT: case GEU: case GTU:
2935 case LE: case LT: case LEU: case LTU:
2936 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2937 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2938 loc = &XEXP (x, 0);
2939 goto restart;
2940
2941 case MEM:
2942 /* Most cases of MEM that convert to valid addresses have already been
2943 handled by our scan of regno_reg_rtx. The only special handling we
2944 need here is to make a copy of the rtx to ensure it isn't being
2945 shared if we have to change it to a pseudo.
2946
2947 If the rtx is a simple reference to an address via a virtual register,
2948 it can potentially be shared. In such cases, first try to make it
2949 a valid address, which can also be shared. Otherwise, copy it and
2950 proceed normally.
2951
2952 First check for common cases that need no processing. These are
2953 usually due to instantiation already being done on a previous instance
2954 of a shared rtx. */
2955
2956 temp = XEXP (x, 0);
2957 if (CONSTANT_ADDRESS_P (temp)
2958 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2959 || temp == arg_pointer_rtx
2960 #endif
2961 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2962 || temp == hard_frame_pointer_rtx
2963 #endif
2964 || temp == frame_pointer_rtx)
2965 return 1;
2966
2967 if (GET_CODE (temp) == PLUS
2968 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2969 && (XEXP (temp, 0) == frame_pointer_rtx
2970 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2971 || XEXP (temp, 0) == hard_frame_pointer_rtx
2972 #endif
2973 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2974 || XEXP (temp, 0) == arg_pointer_rtx
2975 #endif
2976 ))
2977 return 1;
2978
2979 if (temp == virtual_stack_vars_rtx
2980 || temp == virtual_incoming_args_rtx
2981 || (GET_CODE (temp) == PLUS
2982 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2983 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2984 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2985 {
2986 /* This MEM may be shared. If the substitution can be done without
2987 the need to generate new pseudos, we want to do it in place
2988 so all copies of the shared rtx benefit. The call below will
2989 only make substitutions if the resulting address is still
2990 valid.
2991
2992 Note that we cannot pass X as the object in the recursive call
2993 since the insn being processed may not allow all valid
2994 	     addresses.  However, if we were not passed an object, we can
2995 only modify X without copying it if X will have a valid
2996 address.
2997
2998 ??? Also note that this can still lose if OBJECT is an insn that
2999 	     has fewer restrictions on an address than some other insn.
3000 In that case, we will modify the shared address. This case
3001 doesn't seem very likely, though. */
3002
3003 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3004 object ? object : x, 0))
3005 return 1;
3006
3007 /* Otherwise make a copy and process that copy. We copy the entire
3008 RTL expression since it might be a PLUS which could also be
3009 shared. */
3010 *loc = x = copy_rtx (x);
3011 }
3012
3013 /* Fall through to generic unary operation case. */
3014 case USE:
3015 case CLOBBER:
3016 case SUBREG:
3017 case STRICT_LOW_PART:
3018 case NEG: case NOT:
3019 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3020 case SIGN_EXTEND: case ZERO_EXTEND:
3021 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3022 case FLOAT: case FIX:
3023 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3024 case ABS:
3025 case SQRT:
3026 case FFS:
3027       /* These cases either have just one operand or we know that we need not
3028 check the rest of the operands. */
3029 loc = &XEXP (x, 0);
3030 goto restart;
3031
3032 case REG:
3033 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3034 in front of this insn and substitute the temporary. */
3035 if (x == virtual_incoming_args_rtx)
3036 new = arg_pointer_rtx, offset = in_arg_offset;
3037 else if (x == virtual_stack_vars_rtx)
3038 new = frame_pointer_rtx, offset = var_offset;
3039 else if (x == virtual_stack_dynamic_rtx)
3040 new = stack_pointer_rtx, offset = dynamic_offset;
3041 else if (x == virtual_outgoing_args_rtx)
3042 new = stack_pointer_rtx, offset = out_arg_offset;
3043
3044 if (new)
3045 {
3046 temp = plus_constant (new, offset);
3047 if (!validate_change (object, loc, temp, 0))
3048 {
3049 if (! extra_insns)
3050 return 0;
3051
3052 start_sequence ();
3053 temp = force_operand (temp, NULL_RTX);
3054 seq = get_insns ();
3055 end_sequence ();
3056
3057 emit_insns_before (seq, object);
3058 if (! validate_change (object, loc, temp, 0)
3059 && ! validate_replace_rtx (x, temp, object))
3060 abort ();
3061 }
3062 }
3063
3064 return 1;
3065 }
3066
3067 /* Scan all subexpressions. */
3068 fmt = GET_RTX_FORMAT (code);
3069 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3070 if (*fmt == 'e')
3071 {
3072 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3073 return 0;
3074 }
3075 else if (*fmt == 'E')
3076 for (j = 0; j < XVECLEN (x, i); j++)
3077 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3078 extra_insns))
3079 return 0;
3080
3081 return 1;
3082 }
3083 \f
3084 /* Optimization: assuming this function does not receive nonlocal gotos,
3085 delete the handlers for such, as well as the insns to establish
3086 and disestablish them. */
3087
3088 static void
3089 delete_handlers ()
3090 {
3091 rtx insn;
3092 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3093 {
3094 /* Delete the handler by turning off the flag that would
3095 prevent jump_optimize from deleting it.
3096 Also permit deletion of the nonlocal labels themselves
3097 if nothing local refers to them. */
3098 if (GET_CODE (insn) == CODE_LABEL)
3099 {
3100 tree t, last_t;
3101
3102 LABEL_PRESERVE_P (insn) = 0;
3103
3104 /* Remove it from the nonlocal_label list, to avoid confusing
3105 flow. */
3106 for (t = nonlocal_labels, last_t = 0; t;
3107 last_t = t, t = TREE_CHAIN (t))
3108 if (DECL_RTL (TREE_VALUE (t)) == insn)
3109 break;
3110 if (t)
3111 {
3112 if (! last_t)
3113 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3114 else
3115 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3116 }
3117 }
3118 if (GET_CODE (insn) == INSN
3119 && ((nonlocal_goto_handler_slot != 0
3120 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3121 || (nonlocal_goto_stack_level != 0
3122 && reg_mentioned_p (nonlocal_goto_stack_level,
3123 PATTERN (insn)))))
3124 delete_insn (insn);
3125 }
3126 }
3127
3128 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3129 of the current function. */
3130
3131 rtx
3132 nonlocal_label_rtx_list ()
3133 {
3134 tree t;
3135 rtx x = 0;
3136
3137 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3138 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3139
3140 return x;
3141 }
3142 \f
3143 /* Output a USE for any register use in RTL.
3144 This is used with -noreg to mark the extent of lifespan
3145 of any registers used in a user-visible variable's DECL_RTL. */
3146
3147 void
3148 use_variable (rtl)
3149 rtx rtl;
3150 {
3151 if (GET_CODE (rtl) == REG)
3152 /* This is a register variable. */
3153 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3154 else if (GET_CODE (rtl) == MEM
3155 && GET_CODE (XEXP (rtl, 0)) == REG
3156 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3157 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3158 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3159 /* This is a variable-sized structure. */
3160 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3161 }
3162
3163 /* Like use_variable except that it outputs the USEs after INSN
3164 instead of at the end of the insn-chain. */
3165
3166 void
3167 use_variable_after (rtl, insn)
3168 rtx rtl, insn;
3169 {
3170 if (GET_CODE (rtl) == REG)
3171 /* This is a register variable. */
3172 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3173 else if (GET_CODE (rtl) == MEM
3174 && GET_CODE (XEXP (rtl, 0)) == REG
3175 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3176 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3177 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3178 /* This is a variable-sized structure. */
3179 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3180 }
3181 \f
3182 int
3183 max_parm_reg_num ()
3184 {
3185 return max_parm_reg;
3186 }
3187
3188 /* Return the first insn following those generated by `assign_parms'. */
3189
3190 rtx
3191 get_first_nonparm_insn ()
3192 {
3193 if (last_parm_insn)
3194 return NEXT_INSN (last_parm_insn);
3195 return get_insns ();
3196 }
3197
3198 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3199 Crash if there is none. */
3200
3201 rtx
3202 get_first_block_beg ()
3203 {
3204 register rtx searcher;
3205 register rtx insn = get_first_nonparm_insn ();
3206
3207 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3208 if (GET_CODE (searcher) == NOTE
3209 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3210 return searcher;
3211
3212 abort (); /* Invalid call to this function. (See comments above.) */
3213 return NULL_RTX;
3214 }
3215
3216 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3217 This means a type for which function calls must pass an address to the
3218 function or get an address back from the function.
3219 EXP may be a type node or an expression (whose type is tested). */
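/* For instance, `int f ()' normally returns in a register, while
   `struct big { int a[8]; } g ()' has no suitable set of call-clobbered
   registers on most machines, so this function returns 1 for it and the
   value comes back in memory.  */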
3220
3221 int
3222 aggregate_value_p (exp)
3223 tree exp;
3224 {
3225 int i, regno, nregs;
3226 rtx reg;
3227 tree type;
3228 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3229 type = exp;
3230 else
3231 type = TREE_TYPE (exp);
3232
3233 if (RETURN_IN_MEMORY (type))
3234 return 1;
3235   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3236 and thus can't be returned in registers. */
3237 if (TREE_ADDRESSABLE (type))
3238 return 1;
3239 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3240 return 1;
3241 /* Make sure we have suitable call-clobbered regs to return
3242 the value in; if not, we must return it in memory. */
3243 reg = hard_function_value (type, 0);
3244
3245 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3246 it is OK. */
3247 if (GET_CODE (reg) != REG)
3248 return 0;
3249
3250 regno = REGNO (reg);
3251 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3252 for (i = 0; i < nregs; i++)
3253 if (! call_used_regs[regno + i])
3254 return 1;
3255 return 0;
3256 }
3257 \f
3258 /* Assign RTL expressions to the function's parameters.
3259 This may involve copying them into registers and using
3260 those registers as the RTL for them.
3261
3262 If SECOND_TIME is non-zero it means that this function is being
3263 called a second time. This is done by integrate.c when a function's
3264 compilation is deferred. We need to come back here in case the
3265 FUNCTION_ARG macro computes items needed for the rest of the compilation
3266 (such as changing which registers are fixed or caller-saved). But suppress
3267 writing any insns or setting DECL_RTL of anything in this case. */
3268
3269 void
3270 assign_parms (fndecl, second_time)
3271 tree fndecl;
3272 int second_time;
3273 {
3274 register tree parm;
3275 register rtx entry_parm = 0;
3276 register rtx stack_parm = 0;
3277 CUMULATIVE_ARGS args_so_far;
3278 enum machine_mode promoted_mode, passed_mode;
3279 enum machine_mode nominal_mode, promoted_nominal_mode;
3280 int unsignedp;
3281 /* Total space needed so far for args on the stack,
3282 given as a constant and a tree-expression. */
3283 struct args_size stack_args_size;
3284 tree fntype = TREE_TYPE (fndecl);
3285 tree fnargs = DECL_ARGUMENTS (fndecl);
3286 /* This is used for the arg pointer when referring to stack args. */
3287 rtx internal_arg_pointer;
3288   /* This is a dummy PARM_DECL that we use for the function result if
3289 the function returns a structure. */
3290 tree function_result_decl = 0;
3291 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
3292 int varargs_setup = 0;
3293 rtx conversion_insns = 0;
3294
3295 /* Nonzero if the last arg is named `__builtin_va_alist',
3296 which is used on some machines for old-fashioned non-ANSI varargs.h;
3297 this should be stuck onto the stack as if it had arrived there. */
3298 int hide_last_arg
3299 = (current_function_varargs
3300 && fnargs
3301 && (parm = tree_last (fnargs)) != 0
3302 && DECL_NAME (parm)
3303 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3304 "__builtin_va_alist")));
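  /* (Under the old <varargs.h> scheme, `va_dcl' declares a trailing
     parameter named __builtin_va_alist; that name is what we look for.)  */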
3305
3306 /* Nonzero if function takes extra anonymous args.
3307 This means the last named arg must be on the stack
3308 right before the anonymous ones. */
3309 int stdarg
3310 = (TYPE_ARG_TYPES (fntype) != 0
3311 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3312 != void_type_node));
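  /* For instance, `int f (int a, ...)' has TYPE_ARG_TYPES ending with
     the type of A rather than with void_type_node, so STDARG is set;
     the prototype `int g (int a)' ends with void_type_node and is not
     stdarg.  */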
3313
3314 current_function_stdarg = stdarg;
3315
3316 /* If the reg that the virtual arg pointer will be translated into is
3317 not a fixed reg or is the stack pointer, make a copy of the virtual
3318 arg pointer, and address parms via the copy. The frame pointer is
3319 considered fixed even though it is not marked as such.
3320
3321 The second time through, simply use ap to avoid generating rtx. */
3322
3323 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3324 || ! (fixed_regs[ARG_POINTER_REGNUM]
3325 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3326 && ! second_time)
3327 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3328 else
3329 internal_arg_pointer = virtual_incoming_args_rtx;
3330 current_function_internal_arg_pointer = internal_arg_pointer;
3331
3332 stack_args_size.constant = 0;
3333 stack_args_size.var = 0;
3334
3335 /* If struct value address is treated as the first argument, make it so. */
3336 if (aggregate_value_p (DECL_RESULT (fndecl))
3337 && ! current_function_returns_pcc_struct
3338 && struct_value_incoming_rtx == 0)
3339 {
3340 tree type = build_pointer_type (TREE_TYPE (fntype));
3341
3342 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3343
3344 DECL_ARG_TYPE (function_result_decl) = type;
3345 TREE_CHAIN (function_result_decl) = fnargs;
3346 fnargs = function_result_decl;
3347 }
3348
3349 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
3350 bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));
3351
3352 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3353 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3354 #else
3355 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3356 #endif
3357
3358 /* We haven't yet found an argument that we must push and pretend the
3359 caller did. */
3360 current_function_pretend_args_size = 0;
3361
3362 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3363 {
3364 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3365 struct args_size stack_offset;
3366 struct args_size arg_size;
3367 int passed_pointer = 0;
3368 int did_conversion = 0;
3369 tree passed_type = DECL_ARG_TYPE (parm);
3370 tree nominal_type = TREE_TYPE (parm);
3371
3372       /* Set LAST_NAMED if this is the last named arg before some
3373 anonymous args. We treat it as if it were anonymous too. */
3374 int last_named = ((TREE_CHAIN (parm) == 0
3375 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3376 && (stdarg || current_function_varargs));
3377
3378 if (TREE_TYPE (parm) == error_mark_node
3379 /* This can happen after weird syntax errors
3380 or if an enum type is defined among the parms. */
3381 || TREE_CODE (parm) != PARM_DECL
3382 || passed_type == NULL)
3383 {
3384 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3385 const0_rtx);
3386 TREE_USED (parm) = 1;
3387 continue;
3388 }
3389
3390       /* For a varargs.h function, save info about regs and stack space
3391 used by the individual args, not including the va_alist arg. */
3392 if (hide_last_arg && last_named)
3393 current_function_args_info = args_so_far;
3394
3395 /* Find mode of arg as it is passed, and mode of arg
3396 as it should be during execution of this function. */
3397 passed_mode = TYPE_MODE (passed_type);
3398 nominal_mode = TYPE_MODE (nominal_type);
3399
 3400	      /* If the parm's mode is VOID, its value doesn't matter, so avoid
 3401	         the usual things like emit_move_insn that could crash.  */
3402 if (nominal_mode == VOIDmode)
3403 {
3404 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3405 continue;
3406 }
3407
3408 /* If the parm is to be passed as a transparent union, use the
3409 type of the first field for the tests below. We have already
3410 verified that the modes are the same. */
3411 if (DECL_TRANSPARENT_UNION (parm)
3412 || TYPE_TRANSPARENT_UNION (passed_type))
3413 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3414
3415 /* See if this arg was passed by invisible reference. It is if
3416 it is an object whose size depends on the contents of the
 3417	         object itself or if the machine requires that these objects be
 3418	         passed that way.  */
3419
3420 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3421 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3422 || TREE_ADDRESSABLE (passed_type)
3423 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3424 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3425 passed_type, ! last_named)
3426 #endif
3427 )
3428 {
3429 passed_type = nominal_type = build_pointer_type (passed_type);
3430 passed_pointer = 1;
3431 passed_mode = nominal_mode = Pmode;
3432 }
3433
3434 promoted_mode = passed_mode;
3435
3436 #ifdef PROMOTE_FUNCTION_ARGS
 3437	      /* Compute the mode to which the arg is actually extended.  */
3438 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3439 #endif
3440
3441 /* Let machine desc say which reg (if any) the parm arrives in.
3442 0 means it arrives on the stack. */
3443 #ifdef FUNCTION_INCOMING_ARG
3444 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3445 passed_type, ! last_named);
3446 #else
3447 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3448 passed_type, ! last_named);
3449 #endif
3450
3451 if (entry_parm == 0)
3452 promoted_mode = passed_mode;
3453
3454 #ifdef SETUP_INCOMING_VARARGS
3455 /* If this is the last named parameter, do any required setup for
3456 varargs or stdargs. We need to know about the case of this being an
3457 addressable type, in which case we skip the registers it
3458 would have arrived in.
3459
3460 For stdargs, LAST_NAMED will be set for two parameters, the one that
3461 is actually the last named, and the dummy parameter. We only
3462 want to do this action once.
3463
3464 Also, indicate when RTL generation is to be suppressed. */
3465 if (last_named && !varargs_setup)
3466 {
3467 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3468 current_function_pretend_args_size,
3469 second_time);
3470 varargs_setup = 1;
3471 }
3472 #endif
3473
3474 /* Determine parm's home in the stack,
3475 in case it arrives in the stack or we should pretend it did.
3476
3477 Compute the stack position and rtx where the argument arrives
3478 and its size.
3479
3480 There is one complexity here: If this was a parameter that would
 3481	         have been passed in registers, but wasn't, only because it is
3482 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3483 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3484 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3485 0 as it was the previous time. */
3486
3487 locate_and_pad_parm (promoted_mode, passed_type,
3488 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3489 1,
3490 #else
3491 #ifdef FUNCTION_INCOMING_ARG
3492 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3493 passed_type,
3494 (! last_named
3495 || varargs_setup)) != 0,
3496 #else
3497 FUNCTION_ARG (args_so_far, promoted_mode,
3498 passed_type,
3499 ! last_named || varargs_setup) != 0,
3500 #endif
3501 #endif
3502 fndecl, &stack_args_size, &stack_offset, &arg_size);
3503
3504 if (! second_time)
3505 {
3506 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3507
3508 if (offset_rtx == const0_rtx)
3509 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3510 else
3511 stack_parm = gen_rtx (MEM, promoted_mode,
3512 gen_rtx (PLUS, Pmode,
3513 internal_arg_pointer, offset_rtx));
3514
3515 /* If this is a memory ref that contains aggregate components,
3516 mark it as such for cse and loop optimize. Likewise if it
3517 is readonly. */
3518 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3519 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3520 }
3521
3522 /* If this parameter was passed both in registers and in the stack,
3523 use the copy on the stack. */
3524 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3525 entry_parm = 0;
3526
3527 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3528 /* If this parm was passed part in regs and part in memory,
3529 pretend it arrived entirely in memory
3530 by pushing the register-part onto the stack.
3531
3532 In the special case of a DImode or DFmode that is split,
3533 we could put it together in a pseudoreg directly,
3534 but for now that's not worth bothering with. */
3535
3536 if (entry_parm)
3537 {
3538 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3539 passed_type, ! last_named);
3540
3541 if (nregs > 0)
3542 {
3543 current_function_pretend_args_size
3544 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3545 / (PARM_BOUNDARY / BITS_PER_UNIT)
3546 * (PARM_BOUNDARY / BITS_PER_UNIT));
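	    /* A worked example with illustrative values: NREGS == 3,
	       UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 give a 12-byte
	       register part, which rounds up to 16 bytes of pretend
	       args.  */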
3547
3548 if (! second_time)
3549 {
3550 /* Handle calls that pass values in multiple non-contiguous
3551 locations. The Irix 6 ABI has examples of this. */
3552 if (GET_CODE (entry_parm) == PARALLEL)
3553 emit_group_store (validize_mem (stack_parm),
3554 entry_parm);
3555 else
3556 move_block_from_reg (REGNO (entry_parm),
3557 validize_mem (stack_parm), nregs,
3558 int_size_in_bytes (TREE_TYPE (parm)));
3559 }
3560 entry_parm = stack_parm;
3561 }
3562 }
3563 #endif
3564
3565 /* If we didn't decide this parm came in a register,
3566 by default it came on the stack. */
3567 if (entry_parm == 0)
3568 entry_parm = stack_parm;
3569
3570 /* Record permanently how this parm was passed. */
3571 if (! second_time)
3572 DECL_INCOMING_RTL (parm) = entry_parm;
3573
3574 /* If there is actually space on the stack for this parm,
3575 count it in stack_args_size; otherwise set stack_parm to 0
3576 to indicate there is no preallocated stack slot for the parm. */
3577
3578 if (entry_parm == stack_parm
3579 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3580 /* On some machines, even if a parm value arrives in a register
3581 there is still an (uninitialized) stack slot allocated for it.
3582
3583 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3584 whether this parameter already has a stack slot allocated,
3585 because an arg block exists only if current_function_args_size
3586 is larger than some threshold, and we haven't calculated that
3587 yet. So, for now, we just assume that stack slots never exist
3588 in this case. */
3589 || REG_PARM_STACK_SPACE (fndecl) > 0
3590 #endif
3591 )
3592 {
3593 stack_args_size.constant += arg_size.constant;
3594 if (arg_size.var)
3595 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3596 }
3597 else
3598 /* No stack slot was pushed for this parm. */
3599 stack_parm = 0;
3600
3601 /* Update info on where next arg arrives in registers. */
3602
3603 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3604 passed_type, ! last_named);
3605
3606 /* If this is our second time through, we are done with this parm. */
3607 if (second_time)
3608 continue;
3609
3610 /* If we can't trust the parm stack slot to be aligned enough
3611 for its ultimate type, don't use that slot after entry.
3612 We'll make another stack slot, if we need one. */
3613 {
3614 int thisparm_boundary
3615 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3616
3617 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3618 stack_parm = 0;
3619 }
3620
3621 /* If parm was passed in memory, and we need to convert it on entry,
3622 don't store it back in that same slot. */
3623 if (entry_parm != 0
3624 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3625 stack_parm = 0;
3626
3627 #if 0
3628 /* Now adjust STACK_PARM to the mode and precise location
3629 where this parameter should live during execution,
3630 if we discover that it must live in the stack during execution.
3631 To make debuggers happier on big-endian machines, we store
3632 the value in the last bytes of the space available. */
3633
3634 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3635 && stack_parm != 0)
3636 {
3637 rtx offset_rtx;
3638
3639 if (BYTES_BIG_ENDIAN
3640 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3641 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3642 - GET_MODE_SIZE (nominal_mode));
3643
3644 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3645 if (offset_rtx == const0_rtx)
3646 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3647 else
3648 stack_parm = gen_rtx (MEM, nominal_mode,
3649 gen_rtx (PLUS, Pmode,
3650 internal_arg_pointer, offset_rtx));
3651
3652 /* If this is a memory ref that contains aggregate components,
3653 mark it as such for cse and loop optimize. */
3654 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3655 }
3656 #endif /* 0 */
3657
3658 #ifdef STACK_REGS
3659 /* We need this "use" info, because the gcc-register->stack-register
3660 converter in reg-stack.c needs to know which registers are active
3661 at the start of the function call. The actual parameter loading
 3662	     instructions are not always available by then, since they might
3663 have been optimised away. */
3664
3665 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3666 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
3667 #endif
3668
3669 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3670 in the mode in which it arrives.
3671 STACK_PARM is an RTX for a stack slot where the parameter can live
3672 during the function (in case we want to put it there).
3673 STACK_PARM is 0 if no stack slot was pushed for it.
3674
3675 Now output code if necessary to convert ENTRY_PARM to
3676 the type in which this function declares it,
3677 and store that result in an appropriate place,
3678 which may be a pseudo reg, may be STACK_PARM,
3679 or may be a local stack slot if STACK_PARM is 0.
3680
3681 Set DECL_RTL to that place. */
3682
3683 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
3684 {
3685 /* If a BLKmode arrives in registers, copy it to a stack slot.
3686 Handle calls that pass values in multiple non-contiguous
3687 locations. The Irix 6 ABI has examples of this. */
3688 if (GET_CODE (entry_parm) == REG
3689 || GET_CODE (entry_parm) == PARALLEL)
3690 {
3691 int size_stored
3692 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3693 UNITS_PER_WORD);
3694
3695 /* Note that we will be storing an integral number of words.
3696 So we have to be careful to ensure that we allocate an
3697 integral number of words. We do this below in the
3698 assign_stack_local if space was not allocated in the argument
3699 list. If it was, this will not work if PARM_BOUNDARY is not
3700 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3701 if it becomes a problem. */
3702
3703 if (stack_parm == 0)
3704 {
3705 stack_parm
3706 = assign_stack_local (GET_MODE (entry_parm),
3707 size_stored, 0);
3708
3709 /* If this is a memory ref that contains aggregate
3710 components, mark it as such for cse and loop optimize. */
3711 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3712 }
3713
3714 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3715 abort ();
3716
3717 if (TREE_READONLY (parm))
3718 RTX_UNCHANGING_P (stack_parm) = 1;
3719
3720 /* Handle calls that pass values in multiple non-contiguous
3721 locations. The Irix 6 ABI has examples of this. */
3722 if (GET_CODE (entry_parm) == PARALLEL)
3723 emit_group_store (validize_mem (stack_parm), entry_parm);
3724 else
3725 move_block_from_reg (REGNO (entry_parm),
3726 validize_mem (stack_parm),
3727 size_stored / UNITS_PER_WORD,
3728 int_size_in_bytes (TREE_TYPE (parm)));
3729 }
3730 DECL_RTL (parm) = stack_parm;
3731 }
3732 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3733 && ! DECL_INLINE (fndecl))
3734 /* layout_decl may set this. */
3735 || TREE_ADDRESSABLE (parm)
3736 || TREE_SIDE_EFFECTS (parm)
3737 /* If -ffloat-store specified, don't put explicit
3738 float variables into registers. */
3739 || (flag_float_store
3740 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3741 /* Always assign pseudo to structure return or item passed
3742 by invisible reference. */
3743 || passed_pointer || parm == function_result_decl)
3744 {
3745 /* Store the parm in a pseudoregister during the function, but we
3746 may need to do it in a wider mode. */
3747
3748 register rtx parmreg;
3749 int regno, regnoi, regnor;
3750
3751 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3752
3753 promoted_nominal_mode
3754 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
3755
3756 parmreg = gen_reg_rtx (promoted_nominal_mode);
3757 mark_user_reg (parmreg);
3758
3759 /* If this was an item that we received a pointer to, set DECL_RTL
3760 appropriately. */
3761 if (passed_pointer)
3762 {
3763 DECL_RTL (parm)
3764 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3765 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3766 }
3767 else
3768 DECL_RTL (parm) = parmreg;
3769
3770 /* Copy the value into the register. */
3771 if (nominal_mode != passed_mode
3772 || promoted_nominal_mode != promoted_mode)
3773 {
3774 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3775 mode, by the caller. We now have to convert it to
3776 NOMINAL_MODE, if different. However, PARMREG may be in
 3777	         a different mode from NOMINAL_MODE if it is being stored
3778 promoted.
3779
3780 If ENTRY_PARM is a hard register, it might be in a register
3781 not valid for operating in its mode (e.g., an odd-numbered
3782 register for a DFmode). In that case, moves are the only
3783 thing valid, so we can't do a convert from there. This
 3784	         occurs when the calling sequence allows such misaligned
3785 usages.
3786
3787 In addition, the conversion may involve a call, which could
3788 clobber parameters which haven't been copied to pseudo
3789 registers yet. Therefore, we must first copy the parm to
3790 a pseudo reg here, and save the conversion until after all
3791 parameters have been moved. */
3792
3793 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3794
3795 emit_move_insn (tempreg, validize_mem (entry_parm));
3796
3797 push_to_sequence (conversion_insns);
3798 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
3799
3800 expand_assignment (parm,
3801 make_tree (nominal_type, tempreg), 0, 0);
3802 conversion_insns = get_insns ();
3803 did_conversion = 1;
3804 end_sequence ();
3805 }
3806 else
3807 emit_move_insn (parmreg, validize_mem (entry_parm));
3808
3809 /* If we were passed a pointer but the actual value
3810 can safely live in a register, put it in one. */
3811 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3812 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3813 && ! DECL_INLINE (fndecl))
3814 /* layout_decl may set this. */
3815 || TREE_ADDRESSABLE (parm)
3816 || TREE_SIDE_EFFECTS (parm)
3817 /* If -ffloat-store specified, don't put explicit
3818 float variables into registers. */
3819 || (flag_float_store
3820 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3821 {
3822 /* We can't use nominal_mode, because it will have been set to
3823 Pmode above. We must use the actual mode of the parm. */
3824 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3825 mark_user_reg (parmreg);
3826 emit_move_insn (parmreg, DECL_RTL (parm));
3827 DECL_RTL (parm) = parmreg;
3828 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3829 now the parm. */
3830 stack_parm = 0;
3831 }
3832 #ifdef FUNCTION_ARG_CALLEE_COPIES
3833 /* If we are passed an arg by reference and it is our responsibility
3834 to make a copy, do it now.
 3835	         PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3836 original argument, so we must recreate them in the call to
3837 FUNCTION_ARG_CALLEE_COPIES. */
 3838	      /* ??? Later add code to handle the case where the argument isn't
 3839	         modified, so that the copy can be skipped.  */
3840
3841 else if (passed_pointer
3842 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3843 TYPE_MODE (DECL_ARG_TYPE (parm)),
3844 DECL_ARG_TYPE (parm),
3845 ! last_named)
3846 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
3847 {
3848 rtx copy;
3849 tree type = DECL_ARG_TYPE (parm);
3850
3851 /* This sequence may involve a library call perhaps clobbering
3852 registers that haven't been copied to pseudos yet. */
3853
3854 push_to_sequence (conversion_insns);
3855
3856 if (TYPE_SIZE (type) == 0
3857 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3858 /* This is a variable sized object. */
3859 copy = gen_rtx (MEM, BLKmode,
3860 allocate_dynamic_stack_space
3861 (expr_size (parm), NULL_RTX,
3862 TYPE_ALIGN (type)));
3863 else
3864 copy = assign_stack_temp (TYPE_MODE (type),
3865 int_size_in_bytes (type), 1);
3866 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
3867
3868 store_expr (parm, copy, 0);
3869 emit_move_insn (parmreg, XEXP (copy, 0));
3870 conversion_insns = get_insns ();
3871 did_conversion = 1;
3872 end_sequence ();
3873 }
3874 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3875
3876 /* In any case, record the parm's desired stack location
3877 in case we later discover it must live in the stack.
3878
3879 If it is a COMPLEX value, store the stack location for both
3880 halves. */
3881
3882 if (GET_CODE (parmreg) == CONCAT)
3883 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
3884 else
3885 regno = REGNO (parmreg);
3886
3887 if (regno >= nparmregs)
3888 {
3889 rtx *new;
3890 int old_nparmregs = nparmregs;
3891
3892 nparmregs = regno + 5;
3893 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3894 bcopy ((char *) parm_reg_stack_loc, (char *) new,
3895 old_nparmregs * sizeof (rtx));
3896 bzero ((char *) (new + old_nparmregs),
3897 (nparmregs - old_nparmregs) * sizeof (rtx));
3898 parm_reg_stack_loc = new;
3899 }
3900
3901 if (GET_CODE (parmreg) == CONCAT)
3902 {
3903 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
3904
3905 regnor = REGNO (gen_realpart (submode, parmreg));
3906 regnoi = REGNO (gen_imagpart (submode, parmreg));
3907
3908 if (stack_parm != 0)
3909 {
3910 parm_reg_stack_loc[regnor]
3911 = gen_realpart (submode, stack_parm);
3912 parm_reg_stack_loc[regnoi]
3913 = gen_imagpart (submode, stack_parm);
3914 }
3915 else
3916 {
3917 parm_reg_stack_loc[regnor] = 0;
3918 parm_reg_stack_loc[regnoi] = 0;
3919 }
3920 }
3921 else
3922 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3923
3924 /* Mark the register as eliminable if we did no conversion
3925 and it was copied from memory at a fixed offset,
3926 and the arg pointer was not copied to a pseudo-reg.
3927 If the arg pointer is a pseudo reg or the offset formed
3928 an invalid address, such memory-equivalences
3929 as we make here would screw up life analysis for it. */
3930 if (nominal_mode == passed_mode
3931 && ! did_conversion
3932 && GET_CODE (entry_parm) == MEM
3933 && entry_parm == stack_parm
3934 && stack_offset.var == 0
3935 && reg_mentioned_p (virtual_incoming_args_rtx,
3936 XEXP (entry_parm, 0)))
3937 {
3938 rtx linsn = get_last_insn ();
3939 rtx sinsn, set;
3940
3941 /* Mark complex types separately. */
3942 if (GET_CODE (parmreg) == CONCAT)
3943 /* Scan backwards for the set of the real and
3944 imaginary parts. */
3945 for (sinsn = linsn; sinsn != 0;
3946 sinsn = prev_nonnote_insn (sinsn))
3947 {
3948 set = single_set (sinsn);
3949 if (set != 0
3950 && SET_DEST (set) == regno_reg_rtx [regnoi])
3951 REG_NOTES (sinsn)
3952 = gen_rtx (EXPR_LIST, REG_EQUIV,
3953 parm_reg_stack_loc[regnoi],
3954 REG_NOTES (sinsn));
3955 else if (set != 0
3956 && SET_DEST (set) == regno_reg_rtx [regnor])
3957 REG_NOTES (sinsn)
3958 = gen_rtx (EXPR_LIST, REG_EQUIV,
3959 parm_reg_stack_loc[regnor],
3960 REG_NOTES (sinsn));
3961 }
3962 else if ((set = single_set (linsn)) != 0
3963 && SET_DEST (set) == parmreg)
3964 REG_NOTES (linsn)
3965 = gen_rtx (EXPR_LIST, REG_EQUIV,
3966 entry_parm, REG_NOTES (linsn));
3967 }
3968
3969 /* For pointer data type, suggest pointer register. */
3970 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3971 mark_reg_pointer (parmreg,
3972 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
3973 / BITS_PER_UNIT));
3974 }
3975 else
3976 {
3977 /* Value must be stored in the stack slot STACK_PARM
3978 during function execution. */
3979
3980 if (promoted_mode != nominal_mode)
3981 {
3982 /* Conversion is required. */
3983 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3984
3985 emit_move_insn (tempreg, validize_mem (entry_parm));
3986
3987 push_to_sequence (conversion_insns);
3988 entry_parm = convert_to_mode (nominal_mode, tempreg,
3989 TREE_UNSIGNED (TREE_TYPE (parm)));
3990 conversion_insns = get_insns ();
3991 did_conversion = 1;
3992 end_sequence ();
3993 }
3994
3995 if (entry_parm != stack_parm)
3996 {
3997 if (stack_parm == 0)
3998 {
3999 stack_parm
4000 = assign_stack_local (GET_MODE (entry_parm),
4001 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4002 /* If this is a memory ref that contains aggregate components,
4003 mark it as such for cse and loop optimize. */
4004 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4005 }
4006
4007 if (promoted_mode != nominal_mode)
4008 {
4009 push_to_sequence (conversion_insns);
4010 emit_move_insn (validize_mem (stack_parm),
4011 validize_mem (entry_parm));
4012 conversion_insns = get_insns ();
4013 end_sequence ();
4014 }
4015 else
4016 emit_move_insn (validize_mem (stack_parm),
4017 validize_mem (entry_parm));
4018 }
4019
4020 DECL_RTL (parm) = stack_parm;
4021 }
4022
4023 /* If this "parameter" was the place where we are receiving the
4024 function's incoming structure pointer, set up the result. */
4025 if (parm == function_result_decl)
4026 {
4027 tree result = DECL_RESULT (fndecl);
4028 tree restype = TREE_TYPE (result);
4029
4030 DECL_RTL (result)
4031 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
4032
4033 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4034 }
4035
4036 if (TREE_THIS_VOLATILE (parm))
4037 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4038 if (TREE_READONLY (parm))
4039 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4040 }
4041
4042 /* Output all parameter conversion instructions (possibly including calls)
4043 now that all parameters have been copied out of hard registers. */
4044 emit_insns (conversion_insns);
4045
4046 max_parm_reg = max_reg_num ();
4047 last_parm_insn = get_last_insn ();
4048
4049 current_function_args_size = stack_args_size.constant;
4050
4051 /* Adjust function incoming argument size for alignment and
4052 minimum length. */
4053
4054 #ifdef REG_PARM_STACK_SPACE
4055 #ifndef MAYBE_REG_PARM_STACK_SPACE
4056 current_function_args_size = MAX (current_function_args_size,
4057 REG_PARM_STACK_SPACE (fndecl));
4058 #endif
4059 #endif
4060
4061 #ifdef STACK_BOUNDARY
4062 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4063
4064 current_function_args_size
4065 = ((current_function_args_size + STACK_BYTES - 1)
4066 / STACK_BYTES) * STACK_BYTES;
4067 #endif
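  /* For instance, with STACK_BOUNDARY == 64, STACK_BYTES is 8 and an
     args size of 20 bytes is rounded up to 24.  */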
4068
4069 #ifdef ARGS_GROW_DOWNWARD
4070 current_function_arg_offset_rtx
4071 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4072 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4073 size_int (-stack_args_size.constant)),
4074 NULL_RTX, VOIDmode, 0));
4075 #else
4076 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4077 #endif
4078
4079 /* See how many bytes, if any, of its args a function should try to pop
4080 on return. */
4081
4082 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4083 current_function_args_size);
4084
 4085	  /* For a stdarg.h function, save info about
4086 regs and stack space used by the named args. */
4087
4088 if (!hide_last_arg)
4089 current_function_args_info = args_so_far;
4090
4091 /* Set the rtx used for the function return value. Put this in its
4092 own variable so any optimizers that need this information don't have
4093 to include tree.h. Do this here so it gets done when an inlined
4094 function gets output. */
4095
4096 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4097 }
4098 \f
4099 /* Indicate whether REGNO is an incoming argument to the current function
4100 that was promoted to a wider mode. If so, return the RTX for the
4101 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4102 that REGNO is promoted from and whether the promotion was signed or
4103 unsigned. */
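/* For example, on a target whose PROMOTE_FUNCTION_ARGS promotes HImode
   to SImode, a `short' parm arriving in (reg:SI 3) makes
   promoted_input_arg (3, &mode, &unsignedp) return that register, with
   *PMODE set to HImode and *PUNSIGNEDP set from the parm's type.  */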
4104
4105 #ifdef PROMOTE_FUNCTION_ARGS
4106
4107 rtx
4108 promoted_input_arg (regno, pmode, punsignedp)
4109 int regno;
4110 enum machine_mode *pmode;
4111 int *punsignedp;
4112 {
4113 tree arg;
4114
4115 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4116 arg = TREE_CHAIN (arg))
4117 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4118 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4119 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4120 {
4121 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4122 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4123
4124 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4125 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4126 && mode != DECL_MODE (arg))
4127 {
4128 *pmode = DECL_MODE (arg);
4129 *punsignedp = unsignedp;
4130 return DECL_INCOMING_RTL (arg);
4131 }
4132 }
4133
4134 return 0;
4135 }
4136
4137 #endif
4138 \f
4139 /* Compute the size and offset from the start of the stacked arguments for a
4140 parm passed in mode PASSED_MODE and with type TYPE.
4141
4142 INITIAL_OFFSET_PTR points to the current offset into the stacked
4143 arguments.
4144
4145 The starting offset and size for this parm are returned in *OFFSET_PTR
4146 and *ARG_SIZE_PTR, respectively.
4147
4148 IN_REGS is non-zero if the argument will be passed in registers. It will
4149 never be set if REG_PARM_STACK_SPACE is not defined.
4150
4151 FNDECL is the function in which the argument was defined.
4152
4153 There are two types of rounding that are done. The first, controlled by
4154 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4155 list to be aligned to the specific boundary (in bits). This rounding
4156 affects the initial and starting offsets, but not the argument size.
4157
4158 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4159 optionally rounds the size of the parm to PARM_BOUNDARY. The
4160 initial offset is not affected by this rounding, while the size always
4161 is and the starting offset may be. */
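/* As an illustration (the numbers are hypothetical): if
   *INITIAL_OFFSET_PTR is 4 and FUNCTION_ARG_BOUNDARY is 64 bits, the
   starting offset is first rounded up to 8; if the parm is 5 bytes
   long, padding is requested and PARM_BOUNDARY is also 64, its size is
   then rounded up to 8 bytes.  */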
4162
 4163	/* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
 4164	   initial_offset_ptr is positive because locate_and_pad_parm's
 4165	   callers pass in the total size of args so far as
 4166	   initial_offset_ptr.  arg_size_ptr is always positive.  */
4167
4168 void
4169 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4170 initial_offset_ptr, offset_ptr, arg_size_ptr)
4171 enum machine_mode passed_mode;
4172 tree type;
4173 int in_regs;
4174 tree fndecl;
4175 struct args_size *initial_offset_ptr;
4176 struct args_size *offset_ptr;
4177 struct args_size *arg_size_ptr;
4178 {
4179 tree sizetree
4180 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4181 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4182 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4183 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4184 int reg_parm_stack_space = 0;
4185
4186 #ifdef REG_PARM_STACK_SPACE
4187 /* If we have found a stack parm before we reach the end of the
4188 area reserved for registers, skip that area. */
4189 if (! in_regs)
4190 {
4191 #ifdef MAYBE_REG_PARM_STACK_SPACE
4192 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4193 #else
4194 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4195 #endif
4196 if (reg_parm_stack_space > 0)
4197 {
4198 if (initial_offset_ptr->var)
4199 {
4200 initial_offset_ptr->var
4201 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4202 size_int (reg_parm_stack_space));
4203 initial_offset_ptr->constant = 0;
4204 }
4205 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4206 initial_offset_ptr->constant = reg_parm_stack_space;
4207 }
4208 }
4209 #endif /* REG_PARM_STACK_SPACE */
4210
4211 arg_size_ptr->var = 0;
4212 arg_size_ptr->constant = 0;
4213
4214 #ifdef ARGS_GROW_DOWNWARD
4215 if (initial_offset_ptr->var)
4216 {
4217 offset_ptr->constant = 0;
4218 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4219 initial_offset_ptr->var);
4220 }
4221 else
4222 {
4223 offset_ptr->constant = - initial_offset_ptr->constant;
4224 offset_ptr->var = 0;
4225 }
4226 if (where_pad != none
4227 && (TREE_CODE (sizetree) != INTEGER_CST
4228 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4229 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4230 SUB_PARM_SIZE (*offset_ptr, sizetree);
4231 if (where_pad != downward)
4232 pad_to_arg_alignment (offset_ptr, boundary);
4233 if (initial_offset_ptr->var)
4234 {
4235 arg_size_ptr->var = size_binop (MINUS_EXPR,
4236 size_binop (MINUS_EXPR,
4237 integer_zero_node,
4238 initial_offset_ptr->var),
4239 offset_ptr->var);
4240 }
4241 else
4242 {
4243 arg_size_ptr->constant = (- initial_offset_ptr->constant -
4244 offset_ptr->constant);
4245 }
4246 #else /* !ARGS_GROW_DOWNWARD */
4247 pad_to_arg_alignment (initial_offset_ptr, boundary);
4248 *offset_ptr = *initial_offset_ptr;
4249
4250 #ifdef PUSH_ROUNDING
4251 if (passed_mode != BLKmode)
4252 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4253 #endif
4254
4255 /* Pad_below needs the pre-rounded size to know how much to pad below
4256 so this must be done before rounding up. */
4257 if (where_pad == downward
4258 /* However, BLKmode args passed in regs have their padding done elsewhere.
4259 The stack slot must be able to hold the entire register. */
4260 && !(in_regs && passed_mode == BLKmode))
4261 pad_below (offset_ptr, passed_mode, sizetree);
4262
4263 if (where_pad != none
4264 && (TREE_CODE (sizetree) != INTEGER_CST
4265 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4266 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4267
4268 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4269 #endif /* ARGS_GROW_DOWNWARD */
4270 }
4271
4272 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4273 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
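/* For example, with BOUNDARY == 32 a constant offset of 6 ceil-rounds
   to 8; in the ARGS_GROW_DOWNWARD case, where offsets are negative, an
   offset of -6 floor-rounds to -8.  */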
4274
4275 static void
4276 pad_to_arg_alignment (offset_ptr, boundary)
4277 struct args_size *offset_ptr;
4278 int boundary;
4279 {
4280 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4281
4282 if (boundary > BITS_PER_UNIT)
4283 {
4284 if (offset_ptr->var)
4285 {
4286 offset_ptr->var =
4287 #ifdef ARGS_GROW_DOWNWARD
4288 round_down
4289 #else
4290 round_up
4291 #endif
4292 (ARGS_SIZE_TREE (*offset_ptr),
4293 boundary / BITS_PER_UNIT);
4294 offset_ptr->constant = 0; /*?*/
4295 }
4296 else
4297 offset_ptr->constant =
4298 #ifdef ARGS_GROW_DOWNWARD
4299 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4300 #else
4301 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4302 #endif
4303 }
4304 }
4305
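/* Account for downward padding below a value: advance *OFFSET_PTR past
   the padding so the value itself lands in the last bytes of its
   PARM_BOUNDARY-aligned slot.  For example (illustrative values), a
   1-byte QImode parm with PARM_BOUNDARY == 32 occupies a 4-byte slot,
   so the offset is advanced by 3.  */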
4306 static void
4307 pad_below (offset_ptr, passed_mode, sizetree)
4308 struct args_size *offset_ptr;
4309 enum machine_mode passed_mode;
4310 tree sizetree;
4311 {
4312 if (passed_mode != BLKmode)
4313 {
4314 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4315 offset_ptr->constant
4316 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4317 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4318 - GET_MODE_SIZE (passed_mode));
4319 }
4320 else
4321 {
4322 if (TREE_CODE (sizetree) != INTEGER_CST
4323 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4324 {
4325 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4326 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4327 /* Add it in. */
4328 ADD_PARM_SIZE (*offset_ptr, s2);
4329 SUB_PARM_SIZE (*offset_ptr, sizetree);
4330 }
4331 }
4332 }
4333
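/* Round the tree VALUE down to a multiple of DIVISOR; for example,
   round_down (size_int (10), 4) yields a size tree for 8.  */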
4334 static tree
4335 round_down (value, divisor)
4336 tree value;
4337 int divisor;
4338 {
4339 return size_binop (MULT_EXPR,
4340 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4341 size_int (divisor));
4342 }
4343 \f
4344 /* Walk the tree of blocks describing the binding levels within a function
4345 and warn about uninitialized variables.
4346 This is done after calling flow_analysis and before global_alloc
4347 clobbers the pseudo-regs to hard regs. */
4348
4349 void
4350 uninitialized_vars_warning (block)
4351 tree block;
4352 {
4353 register tree decl, sub;
4354 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4355 {
4356 if (TREE_CODE (decl) == VAR_DECL
 4357	      /* These warnings are unreliable for aggregates
4358 because assigning the fields one by one can fail to convince
4359 flow.c that the entire aggregate was initialized.
4360 Unions are troublesome because members may be shorter. */
4361 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4362 && DECL_RTL (decl) != 0
4363 && GET_CODE (DECL_RTL (decl)) == REG
4364 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4365 warning_with_decl (decl,
4366 "`%s' might be used uninitialized in this function");
4367 if (TREE_CODE (decl) == VAR_DECL
4368 && DECL_RTL (decl) != 0
4369 && GET_CODE (DECL_RTL (decl)) == REG
4370 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4371 warning_with_decl (decl,
4372 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4373 }
4374 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4375 uninitialized_vars_warning (sub);
4376 }
4377
4378 /* Do the appropriate part of uninitialized_vars_warning
4379 but for arguments instead of local variables. */
4380
4381 void
4382 setjmp_args_warning ()
4383 {
4384 register tree decl;
4385 for (decl = DECL_ARGUMENTS (current_function_decl);
4386 decl; decl = TREE_CHAIN (decl))
4387 if (DECL_RTL (decl) != 0
4388 && GET_CODE (DECL_RTL (decl)) == REG
4389 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4390 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4391 }
4392
 4393	/* If this function calls setjmp, put all vars into the stack
4394 unless they were declared `register'. */
4395
4396 void
4397 setjmp_protect (block)
4398 tree block;
4399 {
4400 register tree decl, sub;
4401 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4402 if ((TREE_CODE (decl) == VAR_DECL
4403 || TREE_CODE (decl) == PARM_DECL)
4404 && DECL_RTL (decl) != 0
4405 && GET_CODE (DECL_RTL (decl)) == REG
4406 /* If this variable came from an inline function, it must be
 4407	       that its life doesn't overlap the setjmp.  If there was a
 4408	       setjmp in the function, it would already be in memory.  We
 4409	       must exclude such variables because their DECL_RTL might be
4410 set to strange things such as virtual_stack_vars_rtx. */
4411 && ! DECL_FROM_INLINE (decl)
4412 && (
4413 #ifdef NON_SAVING_SETJMP
4414 /* If longjmp doesn't restore the registers,
4415 don't put anything in them. */
4416 NON_SAVING_SETJMP
4417 ||
4418 #endif
4419 ! DECL_REGISTER (decl)))
4420 put_var_into_stack (decl);
4421 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4422 setjmp_protect (sub);
4423 }
4424 \f
4425 /* Like the previous function, but for args instead of local variables. */
4426
4427 void
4428 setjmp_protect_args ()
4429 {
4430 register tree decl, sub;
4431 for (decl = DECL_ARGUMENTS (current_function_decl);
4432 decl; decl = TREE_CHAIN (decl))
4433 if ((TREE_CODE (decl) == VAR_DECL
4434 || TREE_CODE (decl) == PARM_DECL)
4435 && DECL_RTL (decl) != 0
4436 && GET_CODE (DECL_RTL (decl)) == REG
4437 && (
4438 /* If longjmp doesn't restore the registers,
4439 don't put anything in them. */
4440 #ifdef NON_SAVING_SETJMP
4441 NON_SAVING_SETJMP
4442 ||
4443 #endif
4444 ! DECL_REGISTER (decl)))
4445 put_var_into_stack (decl);
4446 }
4447 \f
4448 /* Return the context-pointer register corresponding to DECL,
4449 or 0 if it does not need one. */
4450
4451 rtx
4452 lookup_static_chain (decl)
4453 tree decl;
4454 {
4455 tree context = decl_function_context (decl);
4456 tree link;
4457
4458 if (context == 0
4459 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4460 return 0;
4461
4462 /* We treat inline_function_decl as an alias for the current function
4463 because that is the inline function whose vars, types, etc.
4464 are being merged into the current function.
4465 See expand_inline_function. */
4466 if (context == current_function_decl || context == inline_function_decl)
4467 return virtual_stack_vars_rtx;
4468
4469 for (link = context_display; link; link = TREE_CHAIN (link))
4470 if (TREE_PURPOSE (link) == context)
4471 return RTL_EXPR_RTL (TREE_VALUE (link));
4472
4473 abort ();
4474 }
4475 \f
4476 /* Convert a stack slot address ADDR for variable VAR
4477 (from a containing function)
4478 into an address valid in this function (using a static chain). */
4479
4480 rtx
4481 fix_lexical_addr (addr, var)
4482 rtx addr;
4483 tree var;
4484 {
4485 rtx basereg;
4486 int displacement;
4487 tree context = decl_function_context (var);
4488 struct function *fp;
4489 rtx base = 0;
4490
4491 /* If this is the present function, we need not do anything. */
4492 if (context == current_function_decl || context == inline_function_decl)
4493 return addr;
4494
4495 for (fp = outer_function_chain; fp; fp = fp->next)
4496 if (fp->decl == context)
4497 break;
4498
4499 if (fp == 0)
4500 abort ();
4501
4502 /* Decode given address as base reg plus displacement. */
4503 if (GET_CODE (addr) == REG)
4504 basereg = addr, displacement = 0;
4505 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4506 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4507 else
4508 abort ();
4509
4510 /* We accept vars reached via the containing function's
4511 incoming arg pointer and via its stack variables pointer. */
4512 if (basereg == fp->internal_arg_pointer)
4513 {
4514 /* If reached via arg pointer, get the arg pointer value
4515 out of that function's stack frame.
4516
4517 There are two cases: If a separate ap is needed, allocate a
4518 slot in the outer function for it and dereference it that way.
4519 This is correct even if the real ap is actually a pseudo.
4520 Otherwise, just adjust the offset from the frame pointer to
4521 compensate. */
4522
4523 #ifdef NEED_SEPARATE_AP
4524 rtx addr;
4525
4526 if (fp->arg_pointer_save_area == 0)
4527 fp->arg_pointer_save_area
4528 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4529
4530 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4531 addr = memory_address (Pmode, addr);
4532
4533 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4534 #else
4535 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4536 base = lookup_static_chain (var);
4537 #endif
4538 }
4539
4540 else if (basereg == virtual_stack_vars_rtx)
4541 {
4542 /* This is the same code as lookup_static_chain, duplicated here to
4543 avoid an extra call to decl_function_context. */
4544 tree link;
4545
4546 for (link = context_display; link; link = TREE_CHAIN (link))
4547 if (TREE_PURPOSE (link) == context)
4548 {
4549 base = RTL_EXPR_RTL (TREE_VALUE (link));
4550 break;
4551 }
4552 }
4553
4554 if (base == 0)
4555 abort ();
4556
4557 /* Use same offset, relative to appropriate static chain or argument
4558 pointer. */
4559 return plus_constant (base, displacement);
4560 }
4561 \f
4562 /* Return the address of the trampoline for entering nested fn FUNCTION.
4563 If necessary, allocate a trampoline (in the stack frame)
4564 and emit rtl to initialize its contents (at entry to this function). */
4565
4566 rtx
4567 trampoline_address (function)
4568 tree function;
4569 {
4570 tree link;
4571 tree rtlexp;
4572 rtx tramp;
4573 struct function *fp;
4574 tree fn_context;
4575
4576 /* Find an existing trampoline and return it. */
4577 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4578 if (TREE_PURPOSE (link) == function)
4579 return
4580 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4581
4582 for (fp = outer_function_chain; fp; fp = fp->next)
4583 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4584 if (TREE_PURPOSE (link) == function)
4585 {
4586 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4587 function);
4588 return round_trampoline_addr (tramp);
4589 }
4590
4591 /* None exists; we must make one. */
4592
4593 /* Find the `struct function' for the function containing FUNCTION. */
4594 fp = 0;
4595 fn_context = decl_function_context (function);
4596 if (fn_context != current_function_decl)
4597 for (fp = outer_function_chain; fp; fp = fp->next)
4598 if (fp->decl == fn_context)
4599 break;
4600
4601 /* Allocate run-time space for this trampoline
4602 (usually in the defining function's stack frame). */
4603 #ifdef ALLOCATE_TRAMPOLINE
4604 tramp = ALLOCATE_TRAMPOLINE (fp);
4605 #else
4606 /* If rounding needed, allocate extra space
4607 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4608 #ifdef TRAMPOLINE_ALIGNMENT
4609 #define TRAMPOLINE_REAL_SIZE \
4610 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4611 #else
4612 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4613 #endif
4614 if (fp != 0)
4615 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4616 else
4617 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4618 #endif
4619
4620 /* Record the trampoline for reuse and note it for later initialization
4621 by expand_function_end. */
4622 if (fp != 0)
4623 {
4624 push_obstacks (fp->function_maybepermanent_obstack,
4625 fp->function_maybepermanent_obstack);
4626 rtlexp = make_node (RTL_EXPR);
4627 RTL_EXPR_RTL (rtlexp) = tramp;
4628 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4629 pop_obstacks ();
4630 }
4631 else
4632 {
4633 /* Make the RTL_EXPR node temporary, not momentary, so that the
4634 trampoline_list doesn't become garbage. */
4635 int momentary = suspend_momentary ();
4636 rtlexp = make_node (RTL_EXPR);
4637 resume_momentary (momentary);
4638
4639 RTL_EXPR_RTL (rtlexp) = tramp;
4640 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4641 }
4642
4643 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4644 return round_trampoline_addr (tramp);
4645 }
4646
4647 /* Given a trampoline address,
 4648	   round it to a multiple of TRAMPOLINE_ALIGNMENT.  */
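/* For example (illustrative numbers): with TRAMPOLINE_ALIGNMENT == 64
   the mask is 8 bytes, so an address of 0x1003 is rounded up to 0x1008;
   if TRAMPOLINE_ALIGNMENT is not defined, the address is returned
   unchanged.  */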
4649
4650 static rtx
4651 round_trampoline_addr (tramp)
4652 rtx tramp;
4653 {
4654 #ifdef TRAMPOLINE_ALIGNMENT
4655 /* Round address up to desired boundary. */
4656 rtx temp = gen_reg_rtx (Pmode);
4657 temp = expand_binop (Pmode, add_optab, tramp,
4658 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4659 temp, 0, OPTAB_LIB_WIDEN);
4660 tramp = expand_binop (Pmode, and_optab, temp,
4661 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4662 temp, 0, OPTAB_LIB_WIDEN);
4663 #endif
4664 return tramp;
4665 }
4666 \f
4667 /* The functions identify_blocks and reorder_blocks provide a way to
4668 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4669 duplicate portions of the RTL code. Call identify_blocks before
4670 changing the RTL, and call reorder_blocks after. */
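/* A minimal usage sketch (hypothetical caller; TOP_BLOCK and INSNS
   stand for the function's outermost BLOCK and its insn chain):

	tree *vec = identify_blocks (top_block, insns);
	... reshuffle or duplicate insns here ...
	top_block = reorder_blocks (vec, top_block, insns);
	free (vec);

   The vector comes from xmalloc, so the caller may free it when
   done.  */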
4671
4672 /* Put all this function's BLOCK nodes including those that are chained
4673 onto the first block into a vector, and return it.
4674 Also store in each NOTE for the beginning or end of a block
4675 the index of that block in the vector.
4676 The arguments are BLOCK, the chain of top-level blocks of the function,
4677 and INSNS, the insn chain of the function. */
4678
4679 tree *
4680 identify_blocks (block, insns)
4681 tree block;
4682 rtx insns;
4683 {
4684 int n_blocks;
4685 tree *block_vector;
4686 int *block_stack;
4687 int depth = 0;
4688 int next_block_number = 1;
4689 int current_block_number = 1;
4690 rtx insn;
4691
4692 if (block == 0)
4693 return 0;
4694
4695 n_blocks = all_blocks (block, 0);
4696 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4697 block_stack = (int *) alloca (n_blocks * sizeof (int));
4698
4699 all_blocks (block, block_vector);
4700
4701 for (insn = insns; insn; insn = NEXT_INSN (insn))
4702 if (GET_CODE (insn) == NOTE)
4703 {
4704 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4705 {
4706 block_stack[depth++] = current_block_number;
4707 current_block_number = next_block_number;
4708 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4709 }
4710 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4711 {
4712 current_block_number = block_stack[--depth];
4713 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4714 }
4715 }
4716
4717 if (n_blocks != next_block_number)
4718 abort ();
4719
4720 return block_vector;
4721 }
4722
4723 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4724 and a revised instruction chain, rebuild the tree structure
4725 of BLOCK nodes to correspond to the new order of RTL.
4726 The new block tree is inserted below TOP_BLOCK.
4727 Returns the current top-level block. */
4728
4729 tree
4730 reorder_blocks (block_vector, block, insns)
4731 tree *block_vector;
4732 tree block;
4733 rtx insns;
4734 {
4735 tree current_block = block;
4736 rtx insn;
4737
4738 if (block_vector == 0)
4739 return block;
4740
 4741	  /* Prune the old trees away, so that they don't get in the way.  */
4742 BLOCK_SUBBLOCKS (current_block) = 0;
4743 BLOCK_CHAIN (current_block) = 0;
4744
4745 for (insn = insns; insn; insn = NEXT_INSN (insn))
4746 if (GET_CODE (insn) == NOTE)
4747 {
4748 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4749 {
4750 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4751 /* If we have seen this block before, copy it. */
4752 if (TREE_ASM_WRITTEN (block))
4753 block = copy_node (block);
4754 BLOCK_SUBBLOCKS (block) = 0;
4755 TREE_ASM_WRITTEN (block) = 1;
4756 BLOCK_SUPERCONTEXT (block) = current_block;
4757 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4758 BLOCK_SUBBLOCKS (current_block) = block;
4759 current_block = block;
4760 NOTE_SOURCE_FILE (insn) = 0;
4761 }
4762 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4763 {
4764 BLOCK_SUBBLOCKS (current_block)
4765 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4766 current_block = BLOCK_SUPERCONTEXT (current_block);
4767 NOTE_SOURCE_FILE (insn) = 0;
4768 }
4769 }
4770
4771 BLOCK_SUBBLOCKS (current_block)
4772 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4773 return current_block;
4774 }
4775
4776 /* Reverse the order of elements in the chain T of blocks,
4777 and return the new head of the chain (old last element). */
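/* For instance, a chain A -> B -> C becomes C -> B -> A, and C is
   returned as the new head.  */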
4778
4779 static tree
4780 blocks_nreverse (t)
4781 tree t;
4782 {
4783 register tree prev = 0, decl, next;
4784 for (decl = t; decl; decl = next)
4785 {
4786 next = BLOCK_CHAIN (decl);
4787 BLOCK_CHAIN (decl) = prev;
4788 prev = decl;
4789 }
4790 return prev;
4791 }
4792
4793 /* Count the subblocks of the list starting with BLOCK, and list them
4794 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
4795 blocks. */
4796
4797 static int
4798 all_blocks (block, vector)
4799 tree block;
4800 tree *vector;
4801 {
4802 int n_blocks = 0;
4803
4804 while (block)
4805 {
4806 TREE_ASM_WRITTEN (block) = 0;
4807
4808 /* Record this block. */
4809 if (vector)
4810 vector[n_blocks] = block;
4811
4812 ++n_blocks;
4813
4814 /* Record the subblocks, and their subblocks... */
4815 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4816 vector ? vector + n_blocks : 0);
4817 block = BLOCK_CHAIN (block);
4818 }
4819
4820 return n_blocks;
4821 }
4822 \f
4823 /* Build bytecode call descriptor for function SUBR. */
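/* Read front to back, the descriptor built below lists the argument
   count, then the return value's type code and size, then each
   parameter's type code and size in declaration order (this follows
   from the consing order below).  */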
4824
4825 rtx
4826 bc_build_calldesc (subr)
4827 tree subr;
4828 {
4829 tree calldesc = 0, arg;
4830 int nargs = 0;
4831
4832 /* Build the argument description vector in reverse order. */
4833 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4834 nargs = 0;
4835
4836 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4837 {
4838 ++nargs;
4839
4840 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4841 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4842 }
4843
4844 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4845
4846 /* Prepend the function's return type. */
4847 calldesc = tree_cons ((tree) 0,
4848 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4849 calldesc);
4850
4851 calldesc = tree_cons ((tree) 0,
4852 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4853 calldesc);
4854
4855 /* Prepend the arg count. */
4856 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
4857
4858 /* Output the call description vector and get its address. */
4859 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4860 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4861 build_index_type (build_int_2 (nargs * 2, 0)));
4862
4863 return output_constant_def (calldesc);
4864 }
4865
4866
4867 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4868 and initialize static variables for generating RTL for the statements
4869 of the function. */
4870
4871 void
4872 init_function_start (subr, filename, line)
4873 tree subr;
4874 char *filename;
4875 int line;
4876 {
4877 char *junk;
4878
4879 if (output_bytecode)
4880 {
4881 this_function_decl = subr;
4882 this_function_calldesc = bc_build_calldesc (subr);
4883 local_vars_size = 0;
4884 stack_depth = 0;
4885 max_stack_depth = 0;
4886 stmt_expr_depth = 0;
4887 return;
4888 }
4889
4890 init_stmt_for_function ();
4891
4892 cse_not_expected = ! optimize;
4893
4894 /* Caller save not needed yet. */
4895 caller_save_needed = 0;
4896
4897 /* No stack slots have been made yet. */
4898 stack_slot_list = 0;
4899
4900 /* There is no stack slot for handling nonlocal gotos. */
4901 nonlocal_goto_handler_slot = 0;
4902 nonlocal_goto_stack_level = 0;
4903
4904 /* No labels have been declared for nonlocal use. */
4905 nonlocal_labels = 0;
4906
4907 /* No function calls so far in this function. */
4908 function_call_count = 0;
4909
4910 /* No parm regs have been allocated.
4911 (This is important for output_inline_function.) */
4912 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4913
4914 /* Initialize the RTL mechanism. */
4915 init_emit ();
4916
4917 /* Initialize the queue of pending postincrement and postdecrements,
4918 and some other info in expr.c. */
4919 init_expr ();
4920
4921 /* We haven't done register allocation yet. */
4922 reg_renumber = 0;
4923
4924 init_const_rtx_hash_table ();
4925
4926 current_function_name = (*decl_printable_name) (subr, &junk);
4927
4928 /* Nonzero if this is a nested function that uses a static chain. */
4929
4930 current_function_needs_context
4931 = (decl_function_context (current_function_decl) != 0
4932 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
4933
4934 /* Set if a call to setjmp is seen. */
4935 current_function_calls_setjmp = 0;
4936
4937 /* Set if a call to longjmp is seen. */
4938 current_function_calls_longjmp = 0;
4939
4940 current_function_calls_alloca = 0;
4941 current_function_has_nonlocal_label = 0;
4942 current_function_has_nonlocal_goto = 0;
4943 current_function_contains_functions = 0;
4944
4945 current_function_returns_pcc_struct = 0;
4946 current_function_returns_struct = 0;
4947 current_function_epilogue_delay_list = 0;
4948 current_function_uses_const_pool = 0;
4949 current_function_uses_pic_offset_table = 0;
4950
4951 /* We have not yet needed to make a label to jump to for tail-recursion. */
4952 tail_recursion_label = 0;
4953
4954 /* We haven't had a need to make a save area for ap yet. */
4955
4956 arg_pointer_save_area = 0;
4957
4958 /* No stack slots allocated yet. */
4959 frame_offset = 0;
4960
4961 /* No SAVE_EXPRs in this function yet. */
4962 save_expr_regs = 0;
4963
4964 /* No RTL_EXPRs in this function yet. */
4965 rtl_expr_chain = 0;
4966
4967 /* Set up to allocate temporaries. */
4968 init_temp_slots ();
4969
 4970	  /* Within the function body, compute a type's size as soon as it is laid out.  */
4971 immediate_size_expand++;
4972
4973 /* We haven't made any trampolines for this function yet. */
4974 trampoline_list = 0;
4975
4976 init_pending_stack_adjust ();
4977 inhibit_defer_pop = 0;
4978
4979 current_function_outgoing_args_size = 0;
4980
4981 /* Prevent ever trying to delete the first instruction of a function.
4982 Also tell final how to output a linenum before the function prologue. */
4983 emit_line_note (filename, line);
4984
4985 /* Make sure first insn is a note even if we don't want linenums.
4986 This makes sure the first insn will never be deleted.
4987 Also, final expects a note to appear there. */
4988 emit_note (NULL_PTR, NOTE_INSN_DELETED);
4989
4990 /* Set flags used by final.c. */
4991 if (aggregate_value_p (DECL_RESULT (subr)))
4992 {
4993 #ifdef PCC_STATIC_STRUCT_RETURN
4994 current_function_returns_pcc_struct = 1;
4995 #endif
4996 current_function_returns_struct = 1;
4997 }
4998
4999 /* Warn if this value is an aggregate type,
5000 regardless of which calling convention we are using for it. */
5001 if (warn_aggregate_return
5002 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5003 warning ("function returns an aggregate");
5004
5005 current_function_returns_pointer
5006 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5007
5008 /* Indicate that we need to distinguish between the return value of the
5009 present function and the return value of a function being called. */
5010 rtx_equal_function_value_matters = 1;
5011
5012 /* Indicate that we have not instantiated virtual registers yet. */
5013 virtuals_instantiated = 0;
5014
5015 /* Indicate we have no need of a frame pointer yet. */
5016 frame_pointer_needed = 0;
5017
5018 /* By default assume not varargs or stdarg. */
5019 current_function_varargs = 0;
5020 current_function_stdarg = 0;
5021 }
5022
5023 /* Indicate that the current function uses extra args
5024 not explicitly mentioned in the argument list in any fashion. */
5025
5026 void
5027 mark_varargs ()
5028 {
5029 current_function_varargs = 1;
5030 }
5031
5032 /* Expand a call to __main at the beginning of a possible main function. */
5033
5034 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5035 #undef HAS_INIT_SECTION
5036 #define HAS_INIT_SECTION
5037 #endif
5038
5039 void
5040 expand_main_function ()
5041 {
5042 if (!output_bytecode)
5043 {
5044 /* The zero below avoids a possible parse error */
5045 0;
5046 #if !defined (HAS_INIT_SECTION)
5047 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
5048 VOIDmode, 0);
5049 #endif /* not HAS_INIT_SECTION */
5050 }
5051 }
5052 \f
5053 extern struct obstack permanent_obstack;
5054
5055 /* Expand start of bytecode function. See comment at
5056 expand_function_start below for details. */
5057
5058 void
5059 bc_expand_function_start (subr, parms_have_cleanups)
5060 tree subr;
5061 int parms_have_cleanups;
5062 {
5063 char label[20], *name;
5064 static int nlab;
5065 tree thisarg;
5066 int argsz;
5067
5068 if (TREE_PUBLIC (subr))
5069 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
5070
5071 #ifdef DEBUG_PRINT_CODE
5072 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
5073 #endif
5074
5075 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
5076 {
5077 if (DECL_RTL (thisarg))
5078 abort (); /* Should be NULL here I think. */
5079 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
5080 {
5081 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5082 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
5083 }
5084 else
5085 {
5086 /* Variable-sized objects are pointers to their storage. */
5087 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5088 argsz += POINTER_SIZE;
5089 }
5090 }
5091
5092 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
5093
5094 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
5095
5096 ++nlab;
5097 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
5098 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
5099 this_function_bytecode =
5100 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
5101 }
5102
5103
 5104	/* Expand end of bytecode function.  For details, see the comment at
 5105	   expand_function_end (), below.  */
5106
5107 void
5108 bc_expand_function_end ()
5109 {
5110 char *ptrconsts;
5111
5112 expand_null_return ();
5113
5114 /* Emit any fixup code. This must be done before the call to
 5115	     BC_END_FUNCTION (), since that will cause the bytecode
5116 segment to be finished off and closed. */
5117
5118 expand_fixups (NULL_RTX);
5119
5120 ptrconsts = bc_end_function ();
5121
5122 bc_align_const (2 /* INT_ALIGN */);
5123
5124   /* If this changes, also make sure to change bc-interp.h!  */
5125
5126 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
5127 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
5128 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
5129 bc_emit_const_labelref (this_function_bytecode, 0);
5130 bc_emit_const_labelref (ptrconsts, 0);
5131 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
5132 }
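
/* Illustrative sketch, added for exposition; not part of the compiler.
   The constants emitted above form a call-info record read by the
   bytecode interpreter; very roughly (bc-interp.h has the
   authoritative layout, and the field types here are guesses) it
   corresponds to:  */
#if 0
struct callinfo
{
  int max_stack_depth;          /* deepest eval-stack use */
  int local_vars_size;          /* bytes of local variables */
  void *bytecode;               /* this_function_bytecode */
  void *ptrconsts;              /* pointer-constants segment */
  void *calldesc;               /* this_function_calldesc */
};
#endif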
5133
5134
5135 /* Start the RTL for a new function, and set variables used for
5136 emitting RTL.
5137 SUBR is the FUNCTION_DECL node.
5138 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5139 the function's parameters, which must be run at any return statement. */
5140
5141 void
5142 expand_function_start (subr, parms_have_cleanups)
5143 tree subr;
5144 int parms_have_cleanups;
5145 {
5146 register int i;
5147 tree tem;
5148 rtx last_ptr;
5149
5150 if (output_bytecode)
5151 {
5152 bc_expand_function_start (subr, parms_have_cleanups);
5153 return;
5154 }
5155
5156 /* Make sure volatile mem refs aren't considered
5157 valid operands of arithmetic insns. */
5158 init_recog_no_volatile ();
5159
5160 /* If function gets a static chain arg, store it in the stack frame.
5161 Do this first, so it gets the first stack slot offset. */
5162 if (current_function_needs_context)
5163 {
5164 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5165
5166 #ifdef SMALL_REGISTER_CLASSES
5167 /* Delay copying static chain if it is not a register to avoid
5168 conflicts with regs used for parameters. */
5169 if (! SMALL_REGISTER_CLASSES
5170 || GET_CODE (static_chain_incoming_rtx) == REG)
5171 #endif
5172 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5173 }
5174
5175 /* If the parameters of this function need cleaning up, get a label
5176 for the beginning of the code which executes those cleanups. This must
5177 be done before doing anything with return_label. */
5178 if (parms_have_cleanups)
5179 cleanup_label = gen_label_rtx ();
5180 else
5181 cleanup_label = 0;
5182
5183 /* Make the label for return statements to jump to, if this machine
5184 does not have a one-instruction return and uses an epilogue,
5185 or if it returns a structure, or if it has parm cleanups. */
5186 #ifdef HAVE_return
5187 if (cleanup_label == 0 && HAVE_return
5188 && ! current_function_returns_pcc_struct
5189 && ! (current_function_returns_struct && ! optimize))
5190 return_label = 0;
5191 else
5192 return_label = gen_label_rtx ();
5193 #else
5194 return_label = gen_label_rtx ();
5195 #endif
5196
5197 /* Initialize rtx used to return the value. */
5198 /* Do this before assign_parms so that we copy the struct value address
5199 before any library calls that assign parms might generate. */
5200
5201 /* Decide whether to return the value in memory or in a register. */
5202 if (aggregate_value_p (DECL_RESULT (subr)))
5203 {
5204 /* Returning something that won't go in a register. */
5205 register rtx value_address = 0;
5206
5207 #ifdef PCC_STATIC_STRUCT_RETURN
5208 if (current_function_returns_pcc_struct)
5209 {
5210 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5211 value_address = assemble_static_space (size);
5212 }
5213 else
5214 #endif
5215 {
5216 /* Expect to be passed the address of a place to store the value.
5217 If it is passed as an argument, assign_parms will take care of
5218 it. */
5219 if (struct_value_incoming_rtx)
5220 {
5221 value_address = gen_reg_rtx (Pmode);
5222 emit_move_insn (value_address, struct_value_incoming_rtx);
5223 }
5224 }
5225 if (value_address)
5226 {
5227 DECL_RTL (DECL_RESULT (subr))
5228 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
5229 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5230 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5231 }
5232 }
5233 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5234 /* If return mode is void, this decl rtl should not be used. */
5235 DECL_RTL (DECL_RESULT (subr)) = 0;
5236 else if (parms_have_cleanups)
5237 {
5238       /* If the function will end with cleanup code for parms,
5239 	 compute the return value into a pseudo reg,
5240 which we will copy into the true return register
5241 after the cleanups are done. */
5242
5243 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5244
5245 #ifdef PROMOTE_FUNCTION_RETURN
5246 tree type = TREE_TYPE (DECL_RESULT (subr));
5247 int unsignedp = TREE_UNSIGNED (type);
5248
5249 mode = promote_mode (type, mode, &unsignedp, 1);
5250 #endif
5251
5252 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5253 }
5254 else
5255 /* Scalar, returned in a register. */
5256 {
5257 #ifdef FUNCTION_OUTGOING_VALUE
5258 DECL_RTL (DECL_RESULT (subr))
5259 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5260 #else
5261 DECL_RTL (DECL_RESULT (subr))
5262 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5263 #endif
5264
5265 /* Mark this reg as the function's return value. */
5266 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5267 {
5268 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5269 /* Needed because we may need to move this to memory
5270 in case it's a named return value whose address is taken. */
5271 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5272 }
5273 }
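
  /* Illustrative sketch, added for exposition; not part of the
     compiler.  In source terms, the cases handled above are:  */
#if 0
  struct big { int x[8]; };

  struct big f ();   /* aggregate_value_p: returned in memory, through
			an address the caller supplies */
  void g ();         /* VOIDmode result: DECL_RTL left 0 */
  char h ();         /* scalar: returned in the FUNCTION_VALUE register;
			with PROMOTE_FUNCTION_RETURN the pseudo holding
			it may be widened, e.g. QImode to SImode */
#endif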
5274
5275 /* Initialize rtx for parameters and local variables.
5276 In some cases this requires emitting insns. */
5277
5278 assign_parms (subr, 0);
5279
5280 #ifdef SMALL_REGISTER_CLASSES
5281 /* Copy the static chain now if it wasn't a register. The delay is to
5282 avoid conflicts with the parameter passing registers. */
5283
5284 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5285 if (GET_CODE (static_chain_incoming_rtx) != REG)
5286 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5287 #endif
5288
5289 /* The following was moved from init_function_start.
5290 The move is supposed to make sdb output more accurate. */
5291 /* Indicate the beginning of the function body,
5292 as opposed to parm setup. */
5293 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5294
5295 /* If doing stupid allocation, mark parms as born here. */
5296
5297 if (GET_CODE (get_last_insn ()) != NOTE)
5298 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5299 parm_birth_insn = get_last_insn ();
5300
5301 if (obey_regdecls)
5302 {
5303 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5304 use_variable (regno_reg_rtx[i]);
5305
5306 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5307 use_variable (current_function_internal_arg_pointer);
5308 }
5309
5310 context_display = 0;
5311 if (current_function_needs_context)
5312 {
5313 /* Fetch static chain values for containing functions. */
5314 tem = decl_function_context (current_function_decl);
5315 /* If not doing stupid register allocation copy the static chain
5316 pointer into a pseudo. If we have small register classes, copy
5317 the value from memory if static_chain_incoming_rtx is a REG. If
5318 we do stupid register allocation, we use the stack address
5319 generated above. */
5320 if (tem && ! obey_regdecls)
5321 {
5322 #ifdef SMALL_REGISTER_CLASSES
5323 /* If the static chain originally came in a register, put it back
5324 there, then move it out in the next insn. The reason for
5325 this peculiar code is to satisfy function integration. */
5326 if (SMALL_REGISTER_CLASSES
5327 && GET_CODE (static_chain_incoming_rtx) == REG)
5328 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5329 #endif
5330
5331 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5332 }
5333
5334 while (tem)
5335 {
5336 tree rtlexp = make_node (RTL_EXPR);
5337
5338 RTL_EXPR_RTL (rtlexp) = last_ptr;
5339 context_display = tree_cons (tem, rtlexp, context_display);
5340 tem = decl_function_context (tem);
5341 if (tem == 0)
5342 break;
5343 	  /* Chain through stack frames, assuming the pointer to the next
5344 	     lexical frame is found at the place we always store it. */
5345 #ifdef FRAME_GROWS_DOWNWARD
5346 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5347 #endif
5348 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
5349 memory_address (Pmode, last_ptr)));
5350
5351 /* If we are not optimizing, ensure that we know that this
5352 piece of context is live over the entire function. */
5353 if (! optimize)
5354 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
5355 save_expr_regs);
5356 }
5357 }
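
  /* Illustrative sketch, added for exposition; not part of the
     compiler.  With GNU C nested functions such as these, the loop
     above emits one load per enclosing frame, following the pointer
     to the next lexical frame stored at the usual slot:  */
#if 0
  int
  outer (x)
       int x;
  {
    int middle ()
    {
      int inner ()
      {
	return x;	/* needs outer's frame: the chain runs through
			   middle's frame to reach it */
      }
      return inner ();
    }
    return middle ();
  }
#endif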
5358
5359   /* The tail-recursion label, if we end up needing one, belongs after
5360      the display initializations.  Ensure we have a NOTE here, since
5361      some things (like trampolines) get placed before this. */
5362 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5363
5364 /* Evaluate now the sizes of any types declared among the arguments. */
5365 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5366 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
5367
5368 /* Make sure there is a line number after the function entry setup code. */
5369 force_next_line_note ();
5370 }
5371 \f
5372 /* Generate RTL for the end of the current function.
5373 FILENAME and LINE are the current position in the source file.
5374
5375 It is up to language-specific callers to do cleanups for parameters--
5376 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5377
5378 void
5379 expand_function_end (filename, line, end_bindings)
5380 char *filename;
5381 int line;
5382 int end_bindings;
5383 {
5384 register int i;
5385 tree link;
5386
5387 #ifdef TRAMPOLINE_TEMPLATE
5388 static rtx initial_trampoline;
5389 #endif
5390
5391 if (output_bytecode)
5392 {
5393 bc_expand_function_end ();
5394 return;
5395 }
5396
5397 #ifdef NON_SAVING_SETJMP
5398 /* Don't put any variables in registers if we call setjmp
5399 on a machine that fails to restore the registers. */
5400 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5401 {
5402 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5403 setjmp_protect (DECL_INITIAL (current_function_decl));
5404
5405 setjmp_protect_args ();
5406 }
5407 #endif
5408
5409 /* Save the argument pointer if a save area was made for it. */
5410 if (arg_pointer_save_area)
5411 {
5412 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5413 emit_insn_before (x, tail_recursion_reentry);
5414 }
5415
5416 /* Initialize any trampolines required by this function. */
5417 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5418 {
5419 tree function = TREE_PURPOSE (link);
5420 rtx context = lookup_static_chain (function);
5421 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5422 rtx blktramp;
5423 rtx seq;
5424
5425 #ifdef TRAMPOLINE_TEMPLATE
5426 /* First make sure this compilation has a template for
5427 initializing trampolines. */
5428 if (initial_trampoline == 0)
5429 {
5430 end_temporary_allocation ();
5431 initial_trampoline
5432 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
5433 resume_temporary_allocation ();
5434 }
5435 #endif
5436
5437 /* Generate insns to initialize the trampoline. */
5438 start_sequence ();
5439 tramp = round_trampoline_addr (XEXP (tramp, 0));
5440 #ifdef TRAMPOLINE_TEMPLATE
5441 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5442 emit_block_move (blktramp, initial_trampoline,
5443 GEN_INT (TRAMPOLINE_SIZE),
5444 FUNCTION_BOUNDARY / BITS_PER_UNIT);
5445 #endif
5446 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5447 seq = get_insns ();
5448 end_sequence ();
5449
5450 /* Put those insns at entry to the containing function (this one). */
5451 emit_insns_before (seq, tail_recursion_reentry);
5452 }
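
  /* Illustrative sketch, added for exposition; not part of the
     compiler.  Taking the address of a nested function is what puts an
     entry on trampoline_list; `apply' below is a hypothetical consumer
     of the function pointer:  */
#if 0
  extern void apply ();

  void
  container (x)
       int x;
  {
    void nested ()
    {
      x++;		/* uses the static chain */
    }
    apply (nested);	/* address taken: needs a run-time trampoline
			   that loads the static chain and jumps */
  }
#endif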
5453
5454 /* Warn about unused parms if extra warnings were specified. */
5455 if (warn_unused && extra_warnings)
5456 {
5457 tree decl;
5458
5459 for (decl = DECL_ARGUMENTS (current_function_decl);
5460 decl; decl = TREE_CHAIN (decl))
5461 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5462 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5463 warning_with_decl (decl, "unused parameter `%s'");
5464 }
5465
5466 /* Delete handlers for nonlocal gotos if nothing uses them. */
5467 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5468 delete_handlers ();
5469
5470 /* End any sequences that failed to be closed due to syntax errors. */
5471 while (in_sequence_p ())
5472 end_sequence ();
5473
5474   /* Outside a function body, we can't compute a type's actual size
5475      until the next function's body starts. */
5476 immediate_size_expand--;
5477
5478 /* If doing stupid register allocation,
5479 mark register parms as dying here. */
5480
5481 if (obey_regdecls)
5482 {
5483 rtx tem;
5484 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5485 use_variable (regno_reg_rtx[i]);
5486
5487 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5488
5489 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5490 {
5491 use_variable (XEXP (tem, 0));
5492 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5493 }
5494
5495 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5496 use_variable (current_function_internal_arg_pointer);
5497 }
5498
5499 clear_pending_stack_adjust ();
5500 do_pending_stack_adjust ();
5501
5502 /* Mark the end of the function body.
5503 If control reaches this insn, the function can drop through
5504 without returning a value. */
5505 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5506
5507 /* Output a linenumber for the end of the function.
5508 SDB depends on this. */
5509 emit_line_note_force (filename, line);
5510
5511 /* Output the label for the actual return from the function,
5512 if one is expected. This happens either because a function epilogue
5513 is used instead of a return instruction, or because a return was done
5514 with a goto in order to run local cleanups, or because of pcc-style
5515 structure returning. */
5516
5517 if (return_label)
5518 emit_label (return_label);
5519
5520 /* C++ uses this. */
5521 if (end_bindings)
5522 expand_end_bindings (0, 0, 0);
5523
5524 /* If we had calls to alloca, and this machine needs
5525 an accurate stack pointer to exit the function,
5526 insert some code to save and restore the stack pointer. */
5527 #ifdef EXIT_IGNORE_STACK
5528 if (! EXIT_IGNORE_STACK)
5529 #endif
5530 if (current_function_calls_alloca)
5531 {
5532 rtx tem = 0;
5533
5534 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5535 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5536 }
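
  /* Illustrative sketch, added for exposition; not part of the
     compiler.  A function like this moves the stack pointer by a
     run-time amount, so the entry value saved at parm_birth_insn must
     be restored here, before the epilogue:  */
#if 0
  extern char *alloca ();

  int
  f (n)
       int n;
  {
    char *p = alloca (n);	/* lowers the stack pointer at run time */
    p[0] = 0;
    return p[0];		/* stack pointer restored before return */
  }
#endif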
5537
5538 /* If scalar return value was computed in a pseudo-reg,
5539 copy that to the hard return register. */
5540 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5541 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5542 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5543 >= FIRST_PSEUDO_REGISTER))
5544 {
5545 rtx real_decl_result;
5546
5547 #ifdef FUNCTION_OUTGOING_VALUE
5548 real_decl_result
5549 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5550 current_function_decl);
5551 #else
5552 real_decl_result
5553 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5554 current_function_decl);
5555 #endif
5556 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5557 /* If this is a BLKmode structure being returned in registers, then use
5558 the mode computed in expand_return. */
5559 if (GET_MODE (real_decl_result) == BLKmode)
5560 PUT_MODE (real_decl_result,
5561 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
5562 emit_move_insn (real_decl_result,
5563 DECL_RTL (DECL_RESULT (current_function_decl)));
5564 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
5565 }
5566
5567 /* If returning a structure, arrange to return the address of the value
5568 in a place where debuggers expect to find it.
5569
5570 If returning a structure PCC style,
5571 the caller also depends on this value.
5572 And current_function_returns_pcc_struct is not necessarily set. */
5573 if (current_function_returns_struct
5574 || current_function_returns_pcc_struct)
5575 {
5576 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5577 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5578 #ifdef FUNCTION_OUTGOING_VALUE
5579 rtx outgoing
5580 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5581 current_function_decl);
5582 #else
5583 rtx outgoing
5584 = FUNCTION_VALUE (build_pointer_type (type),
5585 current_function_decl);
5586 #endif
5587
5588 /* Mark this as a function return value so integrate will delete the
5589 assignment and USE below when inlining this function. */
5590 REG_FUNCTION_VALUE_P (outgoing) = 1;
5591
5592 emit_move_insn (outgoing, value_address);
5593 use_variable (outgoing);
5594 }
5595
5596 /* Output a return insn if we are using one.
5597 Otherwise, let the rtl chain end here, to drop through
5598 into the epilogue. */
5599
5600 #ifdef HAVE_return
5601 if (HAVE_return)
5602 {
5603 emit_jump_insn (gen_return ());
5604 emit_barrier ();
5605 }
5606 #endif
5607
5608 /* Fix up any gotos that jumped out to the outermost
5609 binding level of the function.
5610 Must follow emitting RETURN_LABEL. */
5611
5612 /* If you have any cleanups to do at this point,
5613 and they need to create temporary variables,
5614 then you will lose. */
5615 expand_fixups (get_insns ());
5616 }
5617 \f
5618 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5619
5620 static int *prologue;
5621 static int *epilogue;
5622
5623 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5624 or a single insn). */
5625
5626 static int *
5627 record_insns (insns)
5628 rtx insns;
5629 {
5630 int *vec;
5631
5632 if (GET_CODE (insns) == SEQUENCE)
5633 {
5634 int len = XVECLEN (insns, 0);
5635 vec = (int *) oballoc ((len + 1) * sizeof (int));
5636 vec[len] = 0;
5637 while (--len >= 0)
5638 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5639 }
5640 else
5641 {
5642 vec = (int *) oballoc (2 * sizeof (int));
5643 vec[0] = INSN_UID (insns);
5644 vec[1] = 0;
5645 }
5646 return vec;
5647 }
5648
5649 /* Determine how many INSN_UIDs in VEC are part of INSN. */
5650
5651 static int
5652 contains (insn, vec)
5653 rtx insn;
5654 int *vec;
5655 {
5656 register int i, j;
5657
5658 if (GET_CODE (insn) == INSN
5659 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5660 {
5661 int count = 0;
5662 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5663 for (j = 0; vec[j]; j++)
5664 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
5665 count++;
5666 return count;
5667 }
5668 else
5669 {
5670 for (j = 0; vec[j]; j++)
5671 if (INSN_UID (insn) == vec[j])
5672 return 1;
5673 }
5674 return 0;
5675 }
5676
5677 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5678 this into place with notes indicating where the prologue ends and where
5679 the epilogue begins. Update the basic block information when possible. */
5680
5681 void
5682 thread_prologue_and_epilogue_insns (f)
5683 rtx f;
5684 {
5685 #ifdef HAVE_prologue
5686 if (HAVE_prologue)
5687 {
5688 rtx head, seq, insn;
5689
5690 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5691 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5692 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
5693 seq = gen_prologue ();
5694 head = emit_insn_after (seq, f);
5695
5696 /* Include the new prologue insns in the first block. Ignore them
5697 if they form a basic block unto themselves. */
5698 if (basic_block_head && n_basic_blocks
5699 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
5700 basic_block_head[0] = NEXT_INSN (f);
5701
5702 /* Retain a map of the prologue insns. */
5703 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
5704 }
5705 else
5706 #endif
5707 prologue = 0;
5708
5709 #ifdef HAVE_epilogue
5710 if (HAVE_epilogue)
5711 {
5712 rtx insn = get_last_insn ();
5713 rtx prev = prev_nonnote_insn (insn);
5714
5715 /* If we end with a BARRIER, we don't need an epilogue. */
5716 if (! (prev && GET_CODE (prev) == BARRIER))
5717 {
5718 rtx tail, seq, tem;
5719 rtx first_use = 0;
5720 rtx last_use = 0;
5721
5722 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5723 epilogue insns, the USE insns at the end of a function,
5724 the jump insn that returns, and then a BARRIER. */
5725
5726 /* Move the USE insns at the end of a function onto a list. */
5727 while (prev
5728 && GET_CODE (prev) == INSN
5729 && GET_CODE (PATTERN (prev)) == USE)
5730 {
5731 tem = prev;
5732 prev = prev_nonnote_insn (prev);
5733
5734 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
5735 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
5736 if (first_use)
5737 {
5738 NEXT_INSN (tem) = first_use;
5739 PREV_INSN (first_use) = tem;
5740 }
5741 first_use = tem;
5742 if (!last_use)
5743 last_use = tem;
5744 }
5745
5746 emit_barrier_after (insn);
5747
5748 seq = gen_epilogue ();
5749 tail = emit_jump_insn_after (seq, insn);
5750
5751 	  /* Insert the USE insns immediately before the return insn, which
5752 	     must be the insn directly before the final barrier. */
5753 if (first_use)
5754 {
5755 tem = prev_nonnote_insn (get_last_insn ());
5756 NEXT_INSN (PREV_INSN (tem)) = first_use;
5757 PREV_INSN (first_use) = PREV_INSN (tem);
5758 PREV_INSN (tem) = last_use;
5759 NEXT_INSN (last_use) = tem;
5760 }
5761
5762 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
5763
5764 /* Include the new epilogue insns in the last block. Ignore
5765 them if they form a basic block unto themselves. */
5766 if (basic_block_end && n_basic_blocks
5767 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
5768 basic_block_end[n_basic_blocks - 1] = tail;
5769
5770 /* Retain a map of the epilogue insns. */
5771 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
5772 return;
5773 }
5774 }
5775 #endif
5776 epilogue = 0;
5777 }
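
/* After threading, the insn stream is laid out as sketched below
   (illustrative; a target lacking HAVE_prologue or HAVE_epilogue
   omits the corresponding pieces):

     NOTE_INSN_DELETED			(the first insn, F)
     prologue insns			(from gen_prologue)
     NOTE_INSN_PROLOGUE_END
       ... function body ...
     NOTE_INSN_EPILOGUE_BEG
     epilogue insns			(from gen_epilogue)
     USE insns				(moved next to the return)
     jump insn that returns
     BARRIER  */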
5778
5779 /* Reposition the prologue-end and epilogue-begin notes after instruction
5780 scheduling and delayed branch scheduling. */
5781
5782 void
5783 reposition_prologue_and_epilogue_notes (f)
5784 rtx f;
5785 {
5786 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5787 /* Reposition the prologue and epilogue notes. */
5788 if (n_basic_blocks)
5789 {
5790 rtx next, prev;
5791 int len;
5792
5793 if (prologue)
5794 {
5795 register rtx insn, note = 0;
5796
5797 /* Scan from the beginning until we reach the last prologue insn.
5798 We apparently can't depend on basic_block_{head,end} after
5799 reorg has run. */
5800 for (len = 0; prologue[len]; len++)
5801 ;
5802 for (insn = f; len && insn; insn = NEXT_INSN (insn))
5803 {
5804 if (GET_CODE (insn) == NOTE)
5805 {
5806 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5807 note = insn;
5808 }
5809 else if ((len -= contains (insn, prologue)) == 0)
5810 {
5811 /* Find the prologue-end note if we haven't already, and
5812 move it to just after the last prologue insn. */
5813 if (note == 0)
5814 {
5815 		  for (note = insn; (note = NEXT_INSN (note)) != 0;)
5816 if (GET_CODE (note) == NOTE
5817 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5818 break;
5819 }
5820 next = NEXT_INSN (note);
5821 prev = PREV_INSN (note);
5822 if (prev)
5823 NEXT_INSN (prev) = next;
5824 if (next)
5825 PREV_INSN (next) = prev;
5826 add_insn_after (note, insn);
5827 }
5828 }
5829 }
5830
5831 if (epilogue)
5832 {
5833 register rtx insn, note = 0;
5834
5835 /* Scan from the end until we reach the first epilogue insn.
5836 We apparently can't depend on basic_block_{head,end} after
5837 reorg has run. */
5838 for (len = 0; epilogue[len]; len++)
5839 ;
5840 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
5841 {
5842 if (GET_CODE (insn) == NOTE)
5843 {
5844 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5845 note = insn;
5846 }
5847 else if ((len -= contains (insn, epilogue)) == 0)
5848 {
5849 /* Find the epilogue-begin note if we haven't already, and
5850 move it to just before the first epilogue insn. */
5851 if (note == 0)
5852 {
5853 		  for (note = insn; (note = PREV_INSN (note)) != 0;)
5854 if (GET_CODE (note) == NOTE
5855 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5856 break;
5857 }
5858 next = NEXT_INSN (note);
5859 prev = PREV_INSN (note);
5860 if (prev)
5861 NEXT_INSN (prev) = next;
5862 if (next)
5863 PREV_INSN (next) = prev;
5864 add_insn_after (note, PREV_INSN (insn));
5865 }
5866 }
5867 }
5868 }
5869 #endif /* HAVE_prologue or HAVE_epilogue */
5870 }