1 /* Expands front end tree to back end RTL for GNU C-Compiler
2 Copyright (C) 1987, 88, 89, 91-96, 1997 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 /* This file handles the generation of rtl code from tree structure
23 at the level of the function as a whole.
24 It creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 `expand_function_start' is called at the beginning of a function,
28 before the function body is parsed, and `expand_function_end' is
29 called after parsing the body.
30
31 Call `assign_stack_local' to allocate a stack slot for a local variable.
32 This is usually done during the RTL generation for the function body,
33 but it can also be done in the reload pass when a pseudo-register does
34 not get a hard register.
35
36 Call `put_var_into_stack' when you learn, belatedly, that a variable
37 previously given a pseudo-register must in fact go in the stack.
38 This function changes the DECL_RTL to be a stack slot instead of a reg,
39 then scans all the RTL instructions so far generated to correct them. */
40
41 #include "config.h"
42 #include <stdio.h>
43 #include "rtl.h"
44 #include "tree.h"
45 #include "flags.h"
46 #include "except.h"
47 #include "function.h"
48 #include "insn-flags.h"
49 #include "expr.h"
50 #include "insn-codes.h"
51 #include "regs.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
54 #include "recog.h"
55 #include "output.h"
56 #include "basic-block.h"
57 #include "obstack.h"
58 #include "bytecode.h"
59 #include "bc-emit.h"
60
61 #ifndef TRAMPOLINE_ALIGNMENT
62 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
63 #endif
64
65 /* Some systems use __main in a way incompatible with its use in gcc; in
66 these cases use the macros NAME__MAIN to give a quoted symbol and
67 SYMBOL__MAIN to give the same symbol without quotes for an alternative
68 entry point. You must define both, or neither. */
69 #ifndef NAME__MAIN
70 #define NAME__MAIN "__main"
71 #define SYMBOL__MAIN __main
72 #endif
73
74 /* Round a value down to the largest multiple of the required alignment
75 that does not exceed it. Avoid using division in case the value is
76 negative. Assume the alignment is a power of two. */
77 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
78
79 /* Similar, but round up to the smallest multiple of the alignment that is
80 not less than the value. */
81 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
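/* A worked sketch (illustrative only, not part of the build): with an
   alignment of 8, CEIL_ROUND (13, 8) is (13 + 7) & ~7 == 16, and
   FLOOR_ROUND (-13, 8) is -13 & ~7 == -16, so both stay well defined
   for negative values, which is why frame offsets below are rounded
   with masks rather than division. */
#if 0
int ceil_example = CEIL_ROUND (13, 8);    /* == 16 */
int floor_example = FLOOR_ROUND (-13, 8); /* == -16 */
#endif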
82
83 /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
84 during rtl generation. If they are different register numbers, this is
85 always true. It may also be true if
86 FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
87 generation. See fix_lexical_addr for details. */
88
89 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
90 #define NEED_SEPARATE_AP
91 #endif
92
93 /* Number of bytes of args popped by function being compiled on its return.
94 Zero if no bytes are to be popped.
95 May affect compilation of return insn or of function epilogue. */
96
97 int current_function_pops_args;
98
99 /* Nonzero if function being compiled needs to be given an address
100 where the value should be stored. */
101
102 int current_function_returns_struct;
103
104 /* Nonzero if function being compiled needs to
105 return the address of where it has put a structure value. */
106
107 int current_function_returns_pcc_struct;
108
109 /* Nonzero if function being compiled needs to be passed a static chain. */
110
111 int current_function_needs_context;
112
113 /* Nonzero if function being compiled can call setjmp. */
114
115 int current_function_calls_setjmp;
116
117 /* Nonzero if function being compiled can call longjmp. */
118
119 int current_function_calls_longjmp;
120
121 /* Nonzero if function being compiled receives nonlocal gotos
122 from nested functions. */
123
124 int current_function_has_nonlocal_label;
125
126 /* Nonzero if function being compiled has nonlocal gotos to parent
127 function. */
128
129 int current_function_has_nonlocal_goto;
130
131 /* Nonzero if function being compiled contains nested functions. */
132
133 int current_function_contains_functions;
134
135 /* Nonzero if function being compiled can call alloca,
136 either as a subroutine or builtin. */
137
138 int current_function_calls_alloca;
139
140 /* Nonzero if the current function returns a pointer type. */
141
142 int current_function_returns_pointer;
143
144 /* If some insns can be deferred to the delay slots of the epilogue, the
145 delay list for them is recorded here. */
146
147 rtx current_function_epilogue_delay_list;
148
149 /* If function's args have a fixed size, this is that size, in bytes.
150 Otherwise, it is -1.
151 May affect compilation of return insn or of function epilogue. */
152
153 int current_function_args_size;
154
155 /* # bytes the prologue should push and pretend that the caller pushed them.
156 The prologue must do this, but only if parms can be passed in registers. */
157
158 int current_function_pretend_args_size;
159
160 /* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
161 defined, the needed space is pushed by the prologue. */
162
163 int current_function_outgoing_args_size;
164
165 /* This is the offset from the arg pointer to the place where the first
166 anonymous arg can be found, if there is one. */
167
168 rtx current_function_arg_offset_rtx;
169
170 /* Nonzero if current function uses varargs.h or equivalent.
171 Zero for functions that use stdarg.h. */
172
173 int current_function_varargs;
174
175 /* Nonzero if current function uses stdarg.h or equivalent.
176 Zero for functions that use varargs.h. */
177
178 int current_function_stdarg;
179
180 /* Quantities of various kinds of registers
181 used for the current function's args. */
182
183 CUMULATIVE_ARGS current_function_args_info;
184
185 /* Name of function now being compiled. */
186
187 char *current_function_name;
188
189 /* If non-zero, an RTL expression for the location at which the current
190 function returns its result. Always equal to
191 DECL_RTL (DECL_RESULT (current_function_decl)), but provided
192 independently of the tree structures. */
193
194 rtx current_function_return_rtx;
195
196 /* Nonzero if the current function uses the constant pool. */
197
198 int current_function_uses_const_pool;
199
200 /* Nonzero if the current function uses pic_offset_table_rtx. */
201 int current_function_uses_pic_offset_table;
202
203 /* The arg pointer hard register, or the pseudo into which it was copied. */
204 rtx current_function_internal_arg_pointer;
205
206 /* The FUNCTION_DECL for an inline function currently being expanded. */
207 tree inline_function_decl;
208
209 /* Number of function calls seen so far in current function. */
210
211 int function_call_count;
212
213 /* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
214 (labels to which there can be nonlocal gotos from nested functions)
215 in this function. */
216
217 tree nonlocal_labels;
218
219 /* RTX for stack slot that holds the current handler for nonlocal gotos.
220 Zero when function does not have nonlocal labels. */
221
222 rtx nonlocal_goto_handler_slot;
223
224 /* RTX for stack slot that holds the stack pointer value to restore
225 for a nonlocal goto.
226 Zero when function does not have nonlocal labels. */
227
228 rtx nonlocal_goto_stack_level;
229
230 /* Label that will go on parm cleanup code, if any.
231 Jumping to this label runs cleanup code for parameters, if
232 such code must be run. Following this code is the logical return label. */
233
234 rtx cleanup_label;
235
236 /* Label that will go on function epilogue.
237 Jumping to this label serves as a "return" instruction
238 on machines which require execution of the epilogue on all returns. */
239
240 rtx return_label;
241
242 /* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
243 So we can mark them all live at the end of the function, if nonopt. */
244 rtx save_expr_regs;
245
246 /* List (chain of EXPR_LISTs) of all stack slots in this function.
247 Made for the sake of unshare_all_rtl. */
248 rtx stack_slot_list;
249
250 /* Chain of all RTL_EXPRs that have insns in them. */
251 tree rtl_expr_chain;
252
253 /* Label to jump back to for tail recursion, or 0 if we have
254 not yet needed one for this function. */
255 rtx tail_recursion_label;
256
257 /* Place after which to insert the tail_recursion_label if we need one. */
258 rtx tail_recursion_reentry;
259
260 /* Location at which to save the argument pointer if it will need to be
261 referenced. There are two cases where this is done: if nonlocal gotos
262 exist, or if vars stored at an offset from the argument pointer will be
263 needed by inner routines. */
264
265 rtx arg_pointer_save_area;
266
267 /* Offset to end of allocated area of stack frame.
268 If stack grows down, this is the offset to the last stack slot allocated.
269 If stack grows up, this is the offset for the next slot. */
270 HOST_WIDE_INT frame_offset;
271
272 /* List (chain of TREE_LISTs) of static chains for containing functions.
273 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
274 in an RTL_EXPR in the TREE_VALUE. */
275 static tree context_display;
276
277 /* List (chain of TREE_LISTs) of trampolines for nested functions.
278 The trampoline sets up the static chain and jumps to the function.
279 We supply the trampoline's address when the function's address is requested.
280
281 Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
282 in an RTL_EXPR in the TREE_VALUE. */
283 static tree trampoline_list;
284
285 /* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
286 static rtx parm_birth_insn;
287
288 #if 0
289 /* Nonzero if a stack slot has been generated whose address is not
290 actually valid. It means that the generated rtl must all be scanned
291 to detect and correct the invalid addresses where they occur. */
292 static int invalid_stack_slot;
293 #endif
294
295 /* Last insn of those whose job was to put parms into their nominal homes. */
296 static rtx last_parm_insn;
297
298 /* 1 + last pseudo register number used for loading a copy
299 of a parameter of this function. */
300 static int max_parm_reg;
301
302 /* Vector indexed by REGNO, containing location on stack in which
303 to put the parm which is nominally in pseudo register REGNO,
304 if we discover that that parm must go in the stack. */
305 static rtx *parm_reg_stack_loc;
306
307 /* Nonzero once virtual register instantiation has been done.
308 assign_stack_local uses frame_pointer_rtx when this is nonzero. */
309 static int virtuals_instantiated;
310
311 /* These variables hold pointers to functions to
312 save and restore machine-specific data,
313 in push_function_context and pop_function_context. */
314 void (*save_machine_status) PROTO((struct function *));
315 void (*restore_machine_status) PROTO((struct function *));
316
317 /* Nonzero if we need to distinguish between the return value of this function
318 and the return value of a function called by this function. This helps
319 integrate.c. */
320
321 extern int rtx_equal_function_value_matters;
322 extern tree sequence_rtl_expr;
323 \f
324 /* In order to evaluate some expressions, such as function calls returning
325 structures in memory, we need to temporarily allocate stack locations.
326 We record each allocated temporary in the following structure.
327
328 Associated with each temporary slot is a nesting level. When we pop up
329 one level, all temporaries associated with the previous level are freed.
330 Normally, all temporaries are freed after the execution of the statement
331 in which they were created. However, if we are inside a ({...}) grouping,
332 the result may be in a temporary and hence must be preserved. If the
333 result could be in a temporary, we preserve it if we can determine which
334 one it is in. If we cannot determine which temporary may contain the
335 result, all temporaries are preserved. A temporary is preserved by
336 pretending it was allocated at the previous nesting level.
337
338 Automatic variables are also assigned temporary slots, at the nesting
339 level where they are defined. They are marked as "kept" so that
340 free_temp_slots will not free them. */
341
342 struct temp_slot
343 {
344 /* Points to next temporary slot. */
345 struct temp_slot *next;
346 /* The rtx used to reference the slot. */
347 rtx slot;
348 /* The rtx used to represent the address if not the address of the
349 slot above. May be an EXPR_LIST if multiple addresses exist. */
350 rtx address;
351 /* The size, in units, of the slot. */
352 int size;
353 /* The value of `sequence_rtl_expr' when this temporary is allocated. */
354 tree rtl_expr;
355 /* Non-zero if this temporary is currently in use. */
356 char in_use;
357 /* Non-zero if this temporary has its address taken. */
358 char addr_taken;
359 /* Nesting level at which this slot is being used. */
360 int level;
361 /* Non-zero if this should survive a call to free_temp_slots. */
362 int keep;
363 /* The offset of the slot from the frame_pointer, including extra space
364 for alignment. This info is for combine_temp_slots. */
365 int base_offset;
366 /* The size of the slot, including extra space for alignment. This
367 info is for combine_temp_slots. */
368 int full_size;
369 };
370
371 /* List of all temporaries allocated, both available and in use. */
372
373 struct temp_slot *temp_slots;
374
375 /* Current nesting level for temporaries. */
376
377 int temp_slot_level;
378 \f
379 /* The FUNCTION_DECL node for the current function. */
380 static tree this_function_decl;
381
382 /* Callinfo pointer for the current function. */
383 static rtx this_function_callinfo;
384
385 /* The label in the bytecode file of this function's actual bytecode.
386 Not an rtx. */
387 static char *this_function_bytecode;
388
389 /* The call description vector for the current function. */
390 static rtx this_function_calldesc;
391
392 /* Size of the local variables allocated for the current function. */
393 int local_vars_size;
394
395 /* Current depth of the bytecode evaluation stack. */
396 int stack_depth;
397
398 /* Maximum depth of the evaluation stack in this function. */
399 int max_stack_depth;
400
401 /* Current depth in statement expressions. */
402 static int stmt_expr_depth;
403
404 /* This structure is used to record MEMs or pseudos used to replace VAR, any
405 SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
406 maintain this list in case two operands of an insn were required to match;
407 in that case we must ensure we use the same replacement. */
408
409 struct fixup_replacement
410 {
411 rtx old;
412 rtx new;
413 struct fixup_replacement *next;
414 };
415
416 /* Forward declarations. */
417
418 static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
419 static void put_reg_into_stack PROTO((struct function *, rtx, tree,
420 enum machine_mode, enum machine_mode,
421 int));
422 static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
423 static struct fixup_replacement
424 *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
425 static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
426 rtx, int));
427 static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
428 struct fixup_replacement **));
429 static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
430 static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
431 static rtx fixup_stack_1 PROTO((rtx, rtx));
432 static void optimize_bit_field PROTO((rtx, rtx, rtx *));
433 static void instantiate_decls PROTO((tree, int));
434 static void instantiate_decls_1 PROTO((tree, int));
435 static void instantiate_decl PROTO((rtx, int, int));
436 static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
437 static void delete_handlers PROTO((void));
438 static void pad_to_arg_alignment PROTO((struct args_size *, int));
439 static void pad_below PROTO((struct args_size *, enum machine_mode,
440 tree));
441 static tree round_down PROTO((tree, int));
442 static rtx round_trampoline_addr PROTO((rtx));
443 static tree blocks_nreverse PROTO((tree));
444 static int all_blocks PROTO((tree, tree *));
445 static int *record_insns PROTO((rtx));
446 static int contains PROTO((rtx, int *));
447 \f
448 /* Pointer to chain of `struct function' for containing functions. */
449 struct function *outer_function_chain;
450
451 /* Given a function decl for a containing function,
452 return the `struct function' for it. */
453
454 struct function *
455 find_function_data (decl)
456 tree decl;
457 {
458 struct function *p;
459 for (p = outer_function_chain; p; p = p->next)
460 if (p->decl == decl)
461 return p;
462 abort ();
463 }
464
465 /* Save the current context for compilation of a nested function.
466 This is called from language-specific code.
467 The caller is responsible for saving any language-specific status,
468 since this function knows only about language-independent variables. */
469
470 void
471 push_function_context_to (context)
472 tree context;
473 {
474 struct function *p = (struct function *) xmalloc (sizeof (struct function));
475
476 p->next = outer_function_chain;
477 outer_function_chain = p;
478
479 p->name = current_function_name;
480 p->decl = current_function_decl;
481 p->pops_args = current_function_pops_args;
482 p->returns_struct = current_function_returns_struct;
483 p->returns_pcc_struct = current_function_returns_pcc_struct;
484 p->returns_pointer = current_function_returns_pointer;
485 p->needs_context = current_function_needs_context;
486 p->calls_setjmp = current_function_calls_setjmp;
487 p->calls_longjmp = current_function_calls_longjmp;
488 p->calls_alloca = current_function_calls_alloca;
489 p->has_nonlocal_label = current_function_has_nonlocal_label;
490 p->has_nonlocal_goto = current_function_has_nonlocal_goto;
491 p->contains_functions = current_function_contains_functions;
492 p->args_size = current_function_args_size;
493 p->pretend_args_size = current_function_pretend_args_size;
494 p->arg_offset_rtx = current_function_arg_offset_rtx;
495 p->varargs = current_function_varargs;
496 p->stdarg = current_function_stdarg;
497 p->uses_const_pool = current_function_uses_const_pool;
498 p->uses_pic_offset_table = current_function_uses_pic_offset_table;
499 p->internal_arg_pointer = current_function_internal_arg_pointer;
500 p->max_parm_reg = max_parm_reg;
501 p->parm_reg_stack_loc = parm_reg_stack_loc;
502 p->outgoing_args_size = current_function_outgoing_args_size;
503 p->return_rtx = current_function_return_rtx;
504 p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
505 p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
506 p->nonlocal_labels = nonlocal_labels;
507 p->cleanup_label = cleanup_label;
508 p->return_label = return_label;
509 p->save_expr_regs = save_expr_regs;
510 p->stack_slot_list = stack_slot_list;
511 p->parm_birth_insn = parm_birth_insn;
512 p->frame_offset = frame_offset;
513 p->tail_recursion_label = tail_recursion_label;
514 p->tail_recursion_reentry = tail_recursion_reentry;
515 p->arg_pointer_save_area = arg_pointer_save_area;
516 p->rtl_expr_chain = rtl_expr_chain;
517 p->last_parm_insn = last_parm_insn;
518 p->context_display = context_display;
519 p->trampoline_list = trampoline_list;
520 p->function_call_count = function_call_count;
521 p->temp_slots = temp_slots;
522 p->temp_slot_level = temp_slot_level;
523 p->fixup_var_refs_queue = 0;
524 p->epilogue_delay_list = current_function_epilogue_delay_list;
525 p->args_info = current_function_args_info;
526
527 save_tree_status (p, context);
528 save_storage_status (p);
529 save_emit_status (p);
530 init_emit ();
531 save_expr_status (p);
532 save_stmt_status (p);
533 save_varasm_status (p);
534
535 if (save_machine_status)
536 (*save_machine_status) (p);
537 }
538
539 void
540 push_function_context ()
541 {
542 push_function_context_to (current_function_decl);
543 }
544
545 /* Restore the last saved context, at the end of a nested function.
546 This function is called from language-specific code. */
547
548 void
549 pop_function_context_from (context)
550 tree context;
551 {
552 struct function *p = outer_function_chain;
553
554 outer_function_chain = p->next;
555
556 current_function_contains_functions
557 = p->contains_functions || p->inline_obstacks
558 || context == current_function_decl;
559 current_function_name = p->name;
560 current_function_decl = p->decl;
561 current_function_pops_args = p->pops_args;
562 current_function_returns_struct = p->returns_struct;
563 current_function_returns_pcc_struct = p->returns_pcc_struct;
564 current_function_returns_pointer = p->returns_pointer;
565 current_function_needs_context = p->needs_context;
566 current_function_calls_setjmp = p->calls_setjmp;
567 current_function_calls_longjmp = p->calls_longjmp;
568 current_function_calls_alloca = p->calls_alloca;
569 current_function_has_nonlocal_label = p->has_nonlocal_label;
570 current_function_has_nonlocal_goto = p->has_nonlocal_goto;
571 current_function_args_size = p->args_size;
572 current_function_pretend_args_size = p->pretend_args_size;
573 current_function_arg_offset_rtx = p->arg_offset_rtx;
574 current_function_varargs = p->varargs;
575 current_function_stdarg = p->stdarg;
576 current_function_uses_const_pool = p->uses_const_pool;
577 current_function_uses_pic_offset_table = p->uses_pic_offset_table;
578 current_function_internal_arg_pointer = p->internal_arg_pointer;
579 max_parm_reg = p->max_parm_reg;
580 parm_reg_stack_loc = p->parm_reg_stack_loc;
581 current_function_outgoing_args_size = p->outgoing_args_size;
582 current_function_return_rtx = p->return_rtx;
583 nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
584 nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
585 nonlocal_labels = p->nonlocal_labels;
586 cleanup_label = p->cleanup_label;
587 return_label = p->return_label;
588 save_expr_regs = p->save_expr_regs;
589 stack_slot_list = p->stack_slot_list;
590 parm_birth_insn = p->parm_birth_insn;
591 frame_offset = p->frame_offset;
592 tail_recursion_label = p->tail_recursion_label;
593 tail_recursion_reentry = p->tail_recursion_reentry;
594 arg_pointer_save_area = p->arg_pointer_save_area;
595 rtl_expr_chain = p->rtl_expr_chain;
596 last_parm_insn = p->last_parm_insn;
597 context_display = p->context_display;
598 trampoline_list = p->trampoline_list;
599 function_call_count = p->function_call_count;
600 temp_slots = p->temp_slots;
601 temp_slot_level = p->temp_slot_level;
602 current_function_epilogue_delay_list = p->epilogue_delay_list;
603 reg_renumber = 0;
604 current_function_args_info = p->args_info;
605
606 restore_tree_status (p);
607 restore_storage_status (p);
608 restore_expr_status (p);
609 restore_emit_status (p);
610 restore_stmt_status (p);
611 restore_varasm_status (p);
612
613 if (restore_machine_status)
614 (*restore_machine_status) (p);
615
616 /* Finish doing put_var_into_stack for any of our variables
617 which became addressable during the nested function. */
618 {
619 struct var_refs_queue *queue = p->fixup_var_refs_queue;
620 for (; queue; queue = queue->next)
621 fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);
622 }
623
624 free (p);
625
626 /* Reset variables that have known state during rtx generation. */
627 rtx_equal_function_value_matters = 1;
628 virtuals_instantiated = 0;
629 }
630
631 void pop_function_context ()
632 {
633 pop_function_context_from (current_function_decl);
634 }
635 \f
636 /* Allocate fixed slots in the stack frame of the current function. */
637
638 /* Return size needed for stack frame based on slots so far allocated.
639 This size counts from zero. It is not rounded to STACK_BOUNDARY;
640 the caller may have to do that. */
641
642 HOST_WIDE_INT
643 get_frame_size ()
644 {
645 #ifdef FRAME_GROWS_DOWNWARD
646 return -frame_offset;
647 #else
648 return frame_offset;
649 #endif
650 }
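/* For example, a frame holding two 8-byte slots so far reports 16 here
   whether the frame grows up or down; rounding the result to
   STACK_BOUNDARY, when needed, is the caller's job. */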
651
652 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
653 with machine mode MODE.
654
655 ALIGN controls the amount of alignment for the address of the slot:
656 0 means according to MODE,
657 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
658 positive specifies alignment boundary in bits.
659
660 We do not round to stack_boundary here. */
661
662 rtx
663 assign_stack_local (mode, size, align)
664 enum machine_mode mode;
665 int size;
666 int align;
667 {
668 register rtx x, addr;
669 int bigend_correction = 0;
670 int alignment;
671
672 if (align == 0)
673 {
674 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
675 if (mode == BLKmode)
676 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
677 }
678 else if (align == -1)
679 {
680 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
681 size = CEIL_ROUND (size, alignment);
682 }
683 else
684 alignment = align / BITS_PER_UNIT;
685
686 /* Round frame offset to that alignment.
687 We must be careful here, since FRAME_OFFSET might be negative and
688 division with a negative dividend isn't as well defined as we might
689 like. So we instead assume that ALIGNMENT is a power of two and
690 use logical operations which are unambiguous. */
691 #ifdef FRAME_GROWS_DOWNWARD
692 frame_offset = FLOOR_ROUND (frame_offset, alignment);
693 #else
694 frame_offset = CEIL_ROUND (frame_offset, alignment);
695 #endif
696
697 /* On a big-endian machine, if we are allocating more space than we will use,
698 use the least significant bytes of those that are allocated. */
699 if (BYTES_BIG_ENDIAN && mode != BLKmode)
700 bigend_correction = size - GET_MODE_SIZE (mode);
701
702 #ifdef FRAME_GROWS_DOWNWARD
703 frame_offset -= size;
704 #endif
705
706 /* If we have already instantiated virtual registers, return the actual
707 address relative to the frame pointer. */
708 if (virtuals_instantiated)
709 addr = plus_constant (frame_pointer_rtx,
710 (frame_offset + bigend_correction
711 + STARTING_FRAME_OFFSET));
712 else
713 addr = plus_constant (virtual_stack_vars_rtx,
714 frame_offset + bigend_correction);
715
716 #ifndef FRAME_GROWS_DOWNWARD
717 frame_offset += size;
718 #endif
719
720 x = gen_rtx (MEM, mode, addr);
721
722 stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);
723
724 return x;
725 }
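/* A minimal usage sketch (illustrative only, not compiled): request a
   4-byte SImode slot with mode-derived alignment. The MEM returned is
   addressed off virtual_stack_vars_rtx until virtual registers have
   been instantiated. */
#if 0
rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);
#endif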
726
727 /* Assign a stack slot in a containing function.
728 The first three arguments are the same as in the preceding function.
729 The last argument specifies the function to allocate in. */
730
731 rtx
732 assign_outer_stack_local (mode, size, align, function)
733 enum machine_mode mode;
734 int size;
735 int align;
736 struct function *function;
737 {
738 register rtx x, addr;
739 int bigend_correction = 0;
740 int alignment;
741
742 /* Allocate in the memory associated with the function in whose frame
743 we are assigning. */
744 push_obstacks (function->function_obstack,
745 function->function_maybepermanent_obstack);
746
747 if (align == 0)
748 {
749 alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
750 if (mode == BLKmode)
751 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
752 }
753 else if (align == -1)
754 {
755 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
756 size = CEIL_ROUND (size, alignment);
757 }
758 else
759 alignment = align / BITS_PER_UNIT;
760
761 /* Round frame offset to that alignment. */
762 #ifdef FRAME_GROWS_DOWNWARD
763 function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
764 #else
765 function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
766 #endif
767
768 /* On a big-endian machine, if we are allocating more space than we will use,
769 use the least significant bytes of those that are allocated. */
770 if (BYTES_BIG_ENDIAN && mode != BLKmode)
771 bigend_correction = size - GET_MODE_SIZE (mode);
772
773 #ifdef FRAME_GROWS_DOWNWARD
774 function->frame_offset -= size;
775 #endif
776 addr = plus_constant (virtual_stack_vars_rtx,
777 function->frame_offset + bigend_correction);
778 #ifndef FRAME_GROWS_DOWNWARD
779 function->frame_offset += size;
780 #endif
781
782 x = gen_rtx (MEM, mode, addr);
783
784 function->stack_slot_list
785 = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
786
787 pop_obstacks ();
788
789 return x;
790 }
791 \f
792 /* Allocate a temporary stack slot and record it for possible later
793 reuse.
794
795 MODE is the machine mode to be given to the returned rtx.
796
797 SIZE is the size in units of the space required. We do no rounding here
798 since assign_stack_local will do any required rounding.
799
800 KEEP is 1 if this slot is to be retained after a call to
801 free_temp_slots. Automatic variables for a block are allocated
802 with this flag. KEEP is 2 if we allocate a longer-term temporary,
803 whose lifetime is controlled by CLEANUP_POINT_EXPRs. */
804
805 rtx
806 assign_stack_temp (mode, size, keep)
807 enum machine_mode mode;
808 int size;
809 int keep;
810 {
811 struct temp_slot *p, *best_p = 0;
812
813 /* If SIZE is -1 it means that somebody tried to allocate a temporary
814 of a variable size. */
815 if (size == -1)
816 abort ();
817
818 /* First try to find an available, already-allocated temporary that is the
819 exact size we require. */
820 for (p = temp_slots; p; p = p->next)
821 if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
822 break;
823
824 /* If we didn't find one, try one that is larger than what we want. We
825 find the smallest such. */
826 if (p == 0)
827 for (p = temp_slots; p; p = p->next)
828 if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
829 && (best_p == 0 || best_p->size > p->size))
830 best_p = p;
831
832 /* Make our best, if any, the one to use. */
833 if (best_p)
834 {
835 /* If there are enough aligned bytes left over, make them into a new
836 temp_slot so that the extra bytes don't get wasted. Do this only
837 for BLKmode slots, so that we can be sure of the alignment. */
838 if (GET_MODE (best_p->slot) == BLKmode)
839 {
840 int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
841 int rounded_size = CEIL_ROUND (size, alignment);
842
843 if (best_p->size - rounded_size >= alignment)
844 {
845 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
846 p->in_use = p->addr_taken = 0;
847 p->size = best_p->size - rounded_size;
848 p->base_offset = best_p->base_offset + rounded_size;
849 p->full_size = best_p->full_size - rounded_size;
850 p->slot = gen_rtx (MEM, BLKmode,
851 plus_constant (XEXP (best_p->slot, 0),
852 rounded_size));
853 p->address = 0;
854 p->rtl_expr = 0;
855 p->next = temp_slots;
856 temp_slots = p;
857
858 stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
859 stack_slot_list);
860
861 best_p->size = rounded_size;
862 best_p->full_size = rounded_size;
863 }
864 }
865
866 p = best_p;
867 }
868
869 /* If we still didn't find one, make a new temporary. */
870 if (p == 0)
871 {
872 int frame_offset_old = frame_offset;
873 p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
874 /* If the temp slot mode doesn't indicate the alignment,
875 use the largest possible, so no one will be disappointed. */
876 p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);
877 /* The following slot size computation is necessary because we don't
878 know the actual size of the temporary slot until assign_stack_local
879 has performed all the frame alignment and size rounding for the
880 requested temporary. Note that extra space added for alignment
881 can be either above or below this stack slot depending on which
882 way the frame grows. We include the extra space if and only if it
883 is above this slot. */
884 #ifdef FRAME_GROWS_DOWNWARD
885 p->size = frame_offset_old - frame_offset;
886 #else
887 p->size = size;
888 #endif
889 /* Now define the fields used by combine_temp_slots. */
890 #ifdef FRAME_GROWS_DOWNWARD
891 p->base_offset = frame_offset;
892 p->full_size = frame_offset_old - frame_offset;
893 #else
894 p->base_offset = frame_offset_old;
895 p->full_size = frame_offset - frame_offset_old;
896 #endif
897 p->address = 0;
898 p->next = temp_slots;
899 temp_slots = p;
900 }
901
902 p->in_use = 1;
903 p->addr_taken = 0;
904 p->rtl_expr = sequence_rtl_expr;
905
906 if (keep == 2)
907 {
908 p->level = target_temp_slot_level;
909 p->keep = 0;
910 }
911 else
912 {
913 p->level = temp_slot_level;
914 p->keep = keep;
915 }
916
917 /* We may be reusing an old slot, so clear any MEM flags that may have been
918 set from before. */
919 RTX_UNCHANGING_P (p->slot) = 0;
920 MEM_IN_STRUCT_P (p->slot) = 0;
921 return p->slot;
922 }
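/* Sketch of the intended pairing (illustrative only, not compiled):
   a temporary taken with KEEP == 0 lives until the end of the enclosing
   statement, when free_temp_slots makes its slot reusable. */
#if 0
rtx tem = assign_stack_temp (DFmode, GET_MODE_SIZE (DFmode), 0);
/* ... emit insns that store into and read from TEM ... */
free_temp_slots ();  /* TEM's slot may now be handed out again. */
#endif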
923 \f
924 /* Assign a temporary of given TYPE.
925 KEEP is as for assign_stack_temp.
926 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
927 it is 0 if a register is OK.
928 DONT_PROMOTE is 1 if we should not promote values in register
929 to wider modes. */
930
931 rtx
932 assign_temp (type, keep, memory_required, dont_promote)
933 tree type;
934 int keep;
935 int memory_required;
936 int dont_promote;
937 {
938 enum machine_mode mode = TYPE_MODE (type);
939 int unsignedp = TREE_UNSIGNED (type);
940
941 if (mode == BLKmode || memory_required)
942 {
943 int size = int_size_in_bytes (type);
944 rtx tmp;
945
946 /* Unfortunately, we don't yet know how to allocate variable-sized
947 temporaries. However, sometimes we have a fixed upper limit on
948 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
949 instead. This is the case for Chill variable-sized strings. */
950 if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
951 && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
952 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
953 size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));
954
955 tmp = assign_stack_temp (mode, size, keep);
956 MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
957 return tmp;
958 }
959
960 #ifndef PROMOTE_FOR_CALL_ONLY
961 if (! dont_promote)
962 mode = promote_mode (type, mode, &unsignedp, 0);
963 #endif
964
965 return gen_reg_rtx (mode);
966 }
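/* Sketch (illustrative only, not compiled): a scalar type normally
   yields a pseudo register here, while MEMORY_REQUIRED forces an
   addressable stack slot; TYPE stands for whatever tree type the
   caller has in hand. */
#if 0
rtx reg_tmp = assign_temp (integer_type_node, 0, 0, 0); /* pseudo REG */
rtx mem_tmp = assign_temp (type, 0, 1, 0);              /* stack MEM */
#endif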
967 \f
968 /* Combine temporary stack slots which are adjacent on the stack.
969
970 This allows for better use of already allocated stack space. This is only
971 done for BLKmode slots because we can be sure that we won't have alignment
972 problems in this case. */
973
974 void
975 combine_temp_slots ()
976 {
977 struct temp_slot *p, *q;
978 struct temp_slot *prev_p, *prev_q;
979 /* Determine where to free back to after this function. */
980 rtx free_pointer = rtx_alloc (CONST_INT);
981
982 for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
983 {
984 int delete_p = 0;
985 if (! p->in_use && GET_MODE (p->slot) == BLKmode)
986 for (q = p->next, prev_q = p; q; q = prev_q->next)
987 {
988 int delete_q = 0;
989 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
990 {
991 if (p->base_offset + p->full_size == q->base_offset)
992 {
993 /* Q comes after P; combine Q into P. */
994 p->size += q->size;
995 p->full_size += q->full_size;
996 delete_q = 1;
997 }
998 else if (q->base_offset + q->full_size == p->base_offset)
999 {
1000 /* P comes after Q; combine P into Q. */
1001 q->size += p->size;
1002 q->full_size += p->full_size;
1003 delete_p = 1;
1004 break;
1005 }
1006 }
1007 /* Either delete Q or advance past it. */
1008 if (delete_q)
1009 prev_q->next = q->next;
1010 else
1011 prev_q = q;
1012 }
1013 /* Either delete P or advance past it. */
1014 if (delete_p)
1015 {
1016 if (prev_p)
1017 prev_p->next = p->next;
1018 else
1019 temp_slots = p->next;
1020 }
1021 else
1022 prev_p = p;
1023 }
1024
1025 /* Free all the RTL made by plus_constant. */
1026 rtx_free (free_pointer);
1027 }
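/* Worked example: if free BLKmode slot P has base_offset 0 and
   full_size 16 while free BLKmode slot Q has base_offset 16, then
   P->base_offset + P->full_size == Q->base_offset, so Q is folded
   into P and the slot list shrinks by one entry. */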
1028 \f
1029 /* Find the temp slot corresponding to the object at address X. */
1030
1031 static struct temp_slot *
1032 find_temp_slot_from_address (x)
1033 rtx x;
1034 {
1035 struct temp_slot *p;
1036 rtx next;
1037
1038 for (p = temp_slots; p; p = p->next)
1039 {
1040 if (! p->in_use)
1041 continue;
1042 else if (XEXP (p->slot, 0) == x
1043 || p->address == x
1044 || (GET_CODE (x) == PLUS
1045 && XEXP (x, 0) == virtual_stack_vars_rtx
1046 && GET_CODE (XEXP (x, 1)) == CONST_INT
1047 && INTVAL (XEXP (x, 1)) >= p->base_offset
1048 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
1049 return p;
1050
1051 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1052 for (next = p->address; next; next = XEXP (next, 1))
1053 if (XEXP (next, 0) == x)
1054 return p;
1055 }
1056
1057 return 0;
1058 }
1059
1060 /* Indicate that NEW is an alternate way of referring to the temp slot
1061 that previously was known by OLD. */
1062
1063 void
1064 update_temp_slot_address (old, new)
1065 rtx old, new;
1066 {
1067 struct temp_slot *p = find_temp_slot_from_address (old);
1068
1069 /* If none, return. Else add NEW as an alias. */
1070 if (p == 0)
1071 return;
1072 else if (p->address == 0)
1073 p->address = new;
1074 else
1075 {
1076 if (GET_CODE (p->address) != EXPR_LIST)
1077 p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);
1078
1079 p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
1080 }
1081 }
1082
1083 /* If X could be a reference to a temporary slot, mark the fact that its
1084 address was taken. */
1085
1086 void
1087 mark_temp_addr_taken (x)
1088 rtx x;
1089 {
1090 struct temp_slot *p;
1091
1092 if (x == 0)
1093 return;
1094
1095 /* If X is not in memory or is at a constant address, it cannot be in
1096 a temporary slot. */
1097 if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1098 return;
1099
1100 p = find_temp_slot_from_address (XEXP (x, 0));
1101 if (p != 0)
1102 p->addr_taken = 1;
1103 }
1104
1105 /* If X could be a reference to a temporary slot, mark that slot as
1106 belonging to the level one higher than the current level. If X
1107 matched one of our slots, just mark that one. Otherwise, we can't
1108 easily predict which it is, so upgrade all of them. Kept slots
1109 need not be touched.
1110
1111 This is called when an ({...}) construct occurs and a statement
1112 returns a value in memory. */
1113
1114 void
1115 preserve_temp_slots (x)
1116 rtx x;
1117 {
1118 struct temp_slot *p = 0;
1119
1120 /* If there is no result, we still might have some objects whose addresses
1121 were taken, so we need to make sure they stay around. */
1122 if (x == 0)
1123 {
1124 for (p = temp_slots; p; p = p->next)
1125 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1126 p->level--;
1127
1128 return;
1129 }
1130
1131 /* If X is a register that is being used as a pointer, see if we have
1132 a temporary slot we know it points to. To be consistent with
1133 the code below, we really should preserve all non-kept slots
1134 if we can't find a match, but that seems to be much too costly. */
1135 if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
1136 p = find_temp_slot_from_address (x);
1137
1138 /* If X is not in memory or is at a constant address, it cannot be in
1139 a temporary slot, but it can contain something whose address was
1140 taken. */
1141 if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
1142 {
1143 for (p = temp_slots; p; p = p->next)
1144 if (p->in_use && p->level == temp_slot_level && p->addr_taken)
1145 p->level--;
1146
1147 return;
1148 }
1149
1150 /* First see if we can find a match. */
1151 if (p == 0)
1152 p = find_temp_slot_from_address (XEXP (x, 0));
1153
1154 if (p != 0)
1155 {
1156 /* Move everything at our level whose address was taken to our new
1157 level in case we used its address. */
1158 struct temp_slot *q;
1159
1160 if (p->level == temp_slot_level)
1161 {
1162 for (q = temp_slots; q; q = q->next)
1163 if (q != p && q->addr_taken && q->level == p->level)
1164 q->level--;
1165
1166 p->level--;
1167 p->addr_taken = 0;
1168 }
1169 return;
1170 }
1171
1172 /* Otherwise, preserve all non-kept slots at this level. */
1173 for (p = temp_slots; p; p = p->next)
1174 if (p->in_use && p->level == temp_slot_level && ! p->keep)
1175 p->level--;
1176 }
1177
1178 /* X is the result of an RTL_EXPR. If it is a temporary slot associated
1179 with that RTL_EXPR, promote it into a temporary slot at the present
1180 level so it will not be freed when we free slots made in the
1181 RTL_EXPR. */
1182
1183 void
1184 preserve_rtl_expr_result (x)
1185 rtx x;
1186 {
1187 struct temp_slot *p;
1188
1189 /* If X is not in memory or is at a constant address, it cannot be in
1190 a temporary slot. */
1191 if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1192 return;
1193
1194 /* If we can find a match, move it to our level unless it is already at
1195 an upper level. */
1196 p = find_temp_slot_from_address (XEXP (x, 0));
1197 if (p != 0)
1198 {
1199 p->level = MIN (p->level, temp_slot_level);
1200 p->rtl_expr = 0;
1201 }
1202
1203 return;
1204 }
1205
1206 /* Free all temporaries used so far. This is normally called at the end
1207 of generating code for a statement. Don't free any temporaries
1208 currently in use for an RTL_EXPR that hasn't yet been emitted.
1209 We could eventually do better than this since it can be reused while
1210 generating the same RTL_EXPR, but this is complex and probably not
1211 worthwhile. */
1212
1213 void
1214 free_temp_slots ()
1215 {
1216 struct temp_slot *p;
1217
1218 for (p = temp_slots; p; p = p->next)
1219 if (p->in_use && p->level == temp_slot_level && ! p->keep
1220 && p->rtl_expr == 0)
1221 p->in_use = 0;
1222
1223 combine_temp_slots ();
1224 }
1225
1226 /* Free all temporary slots used in T, an RTL_EXPR node. */
1227
1228 void
1229 free_temps_for_rtl_expr (t)
1230 tree t;
1231 {
1232 struct temp_slot *p;
1233
1234 for (p = temp_slots; p; p = p->next)
1235 if (p->rtl_expr == t)
1236 p->in_use = 0;
1237
1238 combine_temp_slots ();
1239 }
1240
1241 /* Mark all temporaries ever allocated in this function as not suitable
1242 for reuse until the current level is exited. */
1243
1244 void
1245 mark_all_temps_used ()
1246 {
1247 struct temp_slot *p;
1248
1249 for (p = temp_slots; p; p = p->next)
1250 {
1251 p->in_use = p->keep = 1;
1252 p->level = MIN (p->level, temp_slot_level);
1253 }
1254 }
1255
1256 /* Push deeper into the nesting level for stack temporaries. */
1257
1258 void
1259 push_temp_slots ()
1260 {
1261 temp_slot_level++;
1262 }
1263
1264 /* Pop a temporary nesting level. All slots in use in the current level
1265 are freed. */
1266
1267 void
1268 pop_temp_slots ()
1269 {
1270 struct temp_slot *p;
1271
1272 for (p = temp_slots; p; p = p->next)
1273 if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1274 p->in_use = 0;
1275
1276 combine_temp_slots ();
1277
1278 temp_slot_level--;
1279 }
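/* Sketch of the level discipline (illustrative only, not compiled):
   temporaries made after push_temp_slots die at the matching
   pop_temp_slots unless preserved into the outer level first. */
#if 0
push_temp_slots ();
{
  rtx t = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);
  /* ... use T ... */
  preserve_temp_slots (t);  /* optional: keep T alive one level up */
}
pop_temp_slots ();  /* frees this level's unpreserved slots */
#endif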
1280
1281 /* Initialize temporary slots. */
1282
1283 void
1284 init_temp_slots ()
1285 {
1286 /* We have not allocated any temporaries yet. */
1287 temp_slots = 0;
1288 temp_slot_level = 0;
1289 target_temp_slot_level = 0;
1290 }
1291 \f
1292 /* Retroactively move an auto variable from a register to a stack slot.
1293 This is done when an address-reference to the variable is seen. */
1294
1295 void
1296 put_var_into_stack (decl)
1297 tree decl;
1298 {
1299 register rtx reg;
1300 enum machine_mode promoted_mode, decl_mode;
1301 struct function *function = 0;
1302 tree context;
1303
1304 if (output_bytecode)
1305 return;
1306
1307 context = decl_function_context (decl);
1308
1309 /* Get the current rtl used for this object and its original mode. */
1310 reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);
1311
1312 /* No need to do anything if decl has no rtx yet
1313 since in that case caller is setting TREE_ADDRESSABLE
1314 and a stack slot will be assigned when the rtl is made. */
1315 if (reg == 0)
1316 return;
1317
1318 /* Get the declared mode for this object. */
1319 decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
1320 : DECL_MODE (decl));
1321 /* Get the mode it's actually stored in. */
1322 promoted_mode = GET_MODE (reg);
1323
1324 /* If this variable comes from an outer function,
1325 find that function's saved context. */
1326 if (context != current_function_decl)
1327 for (function = outer_function_chain; function; function = function->next)
1328 if (function->decl == context)
1329 break;
1330
1331 /* If this is a variable-size object with a pseudo to address it,
1332 put that pseudo into the stack, if the var is nonlocal. */
1333 if (DECL_NONLOCAL (decl)
1334 && GET_CODE (reg) == MEM
1335 && GET_CODE (XEXP (reg, 0)) == REG
1336 && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
1337 {
1338 reg = XEXP (reg, 0);
1339 decl_mode = promoted_mode = GET_MODE (reg);
1340 }
1341
1342 /* Now we should have a value that resides in one or more pseudo regs. */
1343
1344 if (GET_CODE (reg) == REG)
1345 put_reg_into_stack (function, reg, TREE_TYPE (decl),
1346 promoted_mode, decl_mode, TREE_SIDE_EFFECTS (decl));
1347 else if (GET_CODE (reg) == CONCAT)
1348 {
1349 /* A CONCAT contains two pseudos; put them both in the stack.
1350 We do it so they end up consecutive. */
1351 enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
1352 tree part_type = TREE_TYPE (TREE_TYPE (decl));
1353 #ifdef FRAME_GROWS_DOWNWARD
1354 /* Since part 0 should have a lower address, do it second. */
1355 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1356 part_mode, TREE_SIDE_EFFECTS (decl));
1357 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1358 part_mode, TREE_SIDE_EFFECTS (decl));
1359 #else
1360 put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
1361 part_mode, TREE_SIDE_EFFECTS (decl));
1362 put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
1363 part_mode, TREE_SIDE_EFFECTS (decl));
1364 #endif
1365
1366 /* Change the CONCAT into a combined MEM for both parts. */
1367 PUT_CODE (reg, MEM);
1368 MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
1369
1370 /* The two parts are in memory order already.
1371 Use the lower part's address as ours. */
1372 XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
1373 /* Prevent sharing of rtl that might lose. */
1374 if (GET_CODE (XEXP (reg, 0)) == PLUS)
1375 XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
1376 }
1377 }
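/* Sketch of the usual trigger (illustrative only, not compiled; DECL
   stands for the front end's VAR_DECL): a belated address-taken use of
   a local that already lives in a pseudo register. */
#if 0
if (TREE_ADDRESSABLE (decl) && DECL_RTL (decl) != 0
    && GET_CODE (DECL_RTL (decl)) == REG)
  put_var_into_stack (decl);
#endif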
1378
1379 /* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
1380 into the stack frame of FUNCTION (0 means the current function).
1381 DECL_MODE is the machine mode of the user-level data type.
1382 PROMOTED_MODE is the machine mode of the register.
1383 VOLATILE_P is nonzero if this is for a "volatile" decl. */
1384
1385 static void
1386 put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p)
1387 struct function *function;
1388 rtx reg;
1389 tree type;
1390 enum machine_mode promoted_mode, decl_mode;
1391 int volatile_p;
1392 {
1393 rtx new = 0;
1394
1395 if (function)
1396 {
1397 if (REGNO (reg) < function->max_parm_reg)
1398 new = function->parm_reg_stack_loc[REGNO (reg)];
1399 if (new == 0)
1400 new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
1401 0, function);
1402 }
1403 else
1404 {
1405 if (REGNO (reg) < max_parm_reg)
1406 new = parm_reg_stack_loc[REGNO (reg)];
1407 if (new == 0)
1408 new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
1409 }
1410
1411 PUT_MODE (reg, decl_mode);
1412 XEXP (reg, 0) = XEXP (new, 0);
1413 /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
1414 MEM_VOLATILE_P (reg) = volatile_p;
1415 PUT_CODE (reg, MEM);
1416
1417 /* If this is a memory ref that contains aggregate components,
1418 mark it as such for cse and loop optimize. */
1419 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
1420
1421 /* Now make sure that all refs to the variable, previously made
1422 when it was a register, are fixed up to be valid again. */
1423 if (function)
1424 {
1425 struct var_refs_queue *temp;
1426
1427 /* Variable is inherited; fix it up when we get back to its function. */
1428 push_obstacks (function->function_obstack,
1429 function->function_maybepermanent_obstack);
1430
1431 /* See comment in restore_tree_status in tree.c for why this needs to be
1432 on saveable obstack. */
1433 temp
1434 = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
1435 temp->modified = reg;
1436 temp->promoted_mode = promoted_mode;
1437 temp->unsignedp = TREE_UNSIGNED (type);
1438 temp->next = function->fixup_var_refs_queue;
1439 function->fixup_var_refs_queue = temp;
1440 pop_obstacks ();
1441 }
1442 else
1443 /* Variable is local; fix it up now. */
1444 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
1445 }
1446 \f
1447 static void
1448 fixup_var_refs (var, promoted_mode, unsignedp)
1449 rtx var;
1450 enum machine_mode promoted_mode;
1451 int unsignedp;
1452 {
1453 tree pending;
1454 rtx first_insn = get_insns ();
1455 struct sequence_stack *stack = sequence_stack;
1456 tree rtl_exps = rtl_expr_chain;
1457
1458 /* Must scan all insns for stack-refs that exceed the limit. */
1459 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
1460
1461 /* Scan all pending sequences too. */
1462 for (; stack; stack = stack->next)
1463 {
1464 push_to_sequence (stack->first);
1465 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1466 stack->first, stack->next != 0);
1467 /* Update remembered end of sequence
1468 in case we added an insn at the end. */
1469 stack->last = get_last_insn ();
1470 end_sequence ();
1471 }
1472
1473 /* Scan all waiting RTL_EXPRs too. */
1474 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1475 {
1476 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1477 if (seq != const0_rtx && seq != 0)
1478 {
1479 push_to_sequence (seq);
1480 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
1481 end_sequence ();
1482 }
1483 }
1484 }
1485 \f
1486 /* REPLACEMENTS is a pointer to a list of struct fixup_replacement, and X is
1487 some part of an insn. Return a struct fixup_replacement whose OLD
1488 value is equal to X. Allocate a new structure if no such entry exists. */
1489
1490 static struct fixup_replacement *
1491 find_fixup_replacement (replacements, x)
1492 struct fixup_replacement **replacements;
1493 rtx x;
1494 {
1495 struct fixup_replacement *p;
1496
1497 /* See if we have already replaced this. */
1498 for (p = *replacements; p && p->old != x; p = p->next)
1499 ;
1500
1501 if (p == 0)
1502 {
1503 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1504 p->old = x;
1505 p->new = 0;
1506 p->next = *replacements;
1507 *replacements = p;
1508 }
1509
1510 return p;
1511 }
1512
1513 /* Scan the insn-chain starting with INSN for refs to VAR
1514 and fix them up. TOPLEVEL is nonzero if this chain is the
1515 main chain of insns for the current function. */
1516
1517 static void
1518 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1519 rtx var;
1520 enum machine_mode promoted_mode;
1521 int unsignedp;
1522 rtx insn;
1523 int toplevel;
1524 {
1525 rtx call_dest = 0;
1526
1527 while (insn)
1528 {
1529 rtx next = NEXT_INSN (insn);
1530 rtx note;
1531 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1532 {
1533 /* If this is a CLOBBER of VAR, delete it.
1534
1535 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1536 and REG_RETVAL notes too. */
1537 if (GET_CODE (PATTERN (insn)) == CLOBBER
1538 && XEXP (PATTERN (insn), 0) == var)
1539 {
1540 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1541 /* The REG_LIBCALL note will go away since we are going to
1542 turn INSN into a NOTE, so just delete the
1543 corresponding REG_RETVAL note. */
1544 remove_note (XEXP (note, 0),
1545 find_reg_note (XEXP (note, 0), REG_RETVAL,
1546 NULL_RTX));
1547
1548 /* In unoptimized compilation, we shouldn't call delete_insn
1549 except in jump.c doing warnings. */
1550 PUT_CODE (insn, NOTE);
1551 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1552 NOTE_SOURCE_FILE (insn) = 0;
1553 }
1554
1555 /* The insn to load VAR from a home in the arglist
1556 is now a no-op. When we see it, just delete it. */
1557 else if (toplevel
1558 && GET_CODE (PATTERN (insn)) == SET
1559 && SET_DEST (PATTERN (insn)) == var
1560 /* If this represents the result of an insn group,
1561 don't delete the insn. */
1562 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1563 && rtx_equal_p (SET_SRC (PATTERN (insn)), var))
1564 {
1565 /* In unoptimized compilation, we shouldn't call delete_insn
1566 except in jump.c doing warnings. */
1567 PUT_CODE (insn, NOTE);
1568 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1569 NOTE_SOURCE_FILE (insn) = 0;
1570 if (insn == last_parm_insn)
1571 last_parm_insn = PREV_INSN (next);
1572 }
1573 else
1574 {
1575 struct fixup_replacement *replacements = 0;
1576 rtx next_insn = NEXT_INSN (insn);
1577
1578 #ifdef SMALL_REGISTER_CLASSES
1579 /* If the insn that copies the results of a CALL_INSN
1580 into a pseudo now references VAR, we have to use an
1581 intermediate pseudo since we want the life of the
1582 return value register to be only a single insn.
1583
1584 If we don't use an intermediate pseudo, such things as
1585 address computations to make the address of VAR valid
1586 if it is not can be placed between the CALL_INSN and INSN.
1587
1588 To make sure this doesn't happen, we record the destination
1589 of the CALL_INSN and see if the next insn uses both that
1590 and VAR. */
1591
1592 if (SMALL_REGISTER_CLASSES)
1593 {
1594 if (call_dest != 0 && GET_CODE (insn) == INSN
1595 && reg_mentioned_p (var, PATTERN (insn))
1596 && reg_mentioned_p (call_dest, PATTERN (insn)))
1597 {
1598 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1599
1600 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1601
1602 PATTERN (insn) = replace_rtx (PATTERN (insn),
1603 call_dest, temp);
1604 }
1605
1606 if (GET_CODE (insn) == CALL_INSN
1607 && GET_CODE (PATTERN (insn)) == SET)
1608 call_dest = SET_DEST (PATTERN (insn));
1609 else if (GET_CODE (insn) == CALL_INSN
1610 && GET_CODE (PATTERN (insn)) == PARALLEL
1611 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1612 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1613 else
1614 call_dest = 0;
1615 }
1616 #endif
1617
1618 /* See if we have to do anything to INSN now that VAR is in
1619 memory. If it needs to be loaded into a pseudo, use a single
1620 pseudo for the entire insn in case there is a MATCH_DUP
1621 between two operands. We pass a pointer to the head of
1622 a list of struct fixup_replacements. If fixup_var_refs_1
1623 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1624 it will record them in this list.
1625
1626 If it allocated a pseudo for any replacement, we copy into
1627 it here. */
1628
1629 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1630 &replacements);
1631
1632 /* If this is last_parm_insn, and any instructions were output
1633 after it to fix it up, then we must set last_parm_insn to
1634 the last such instruction emitted. */
1635 if (insn == last_parm_insn)
1636 last_parm_insn = PREV_INSN (next_insn);
1637
1638 while (replacements)
1639 {
1640 if (GET_CODE (replacements->new) == REG)
1641 {
1642 rtx insert_before;
1643 rtx seq;
1644
1645 /* OLD might be a (subreg (mem)). */
1646 if (GET_CODE (replacements->old) == SUBREG)
1647 replacements->old
1648 = fixup_memory_subreg (replacements->old, insn, 0);
1649 else
1650 replacements->old
1651 = fixup_stack_1 (replacements->old, insn);
1652
1653 insert_before = insn;
1654
1655 /* If we are changing the mode, do a conversion.
1656 This might be wasteful, but combine.c will
1657 eliminate much of the waste. */
1658
1659 if (GET_MODE (replacements->new)
1660 != GET_MODE (replacements->old))
1661 {
1662 start_sequence ();
1663 convert_move (replacements->new,
1664 replacements->old, unsignedp);
1665 seq = gen_sequence ();
1666 end_sequence ();
1667 }
1668 else
1669 seq = gen_move_insn (replacements->new,
1670 replacements->old);
1671
1672 emit_insn_before (seq, insert_before);
1673 }
1674
1675 replacements = replacements->next;
1676 }
1677 }
1678
1679 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1680 But don't touch other insns referred to by reg-notes;
1681 we will get them elsewhere. */
1682 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1683 if (GET_CODE (note) != INSN_LIST)
1684 XEXP (note, 0)
1685 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1686 }
1687 insn = next;
1688 }
1689 }
1690 \f
1691 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1692 See if the rtx expression at *LOC in INSN needs to be changed.
1693
1694 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1695 contain a list of original rtx's and replacements. If we find that we need
1696 to modify this insn by replacing a memory reference with a pseudo or by
1697 making a new MEM to implement a SUBREG, we consult that list to see if
1698 we have already chosen a replacement. If none has already been allocated,
1699 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1700 or the SUBREG, as appropriate, to the pseudo. */
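
/* A hypothetical sketch of the mechanism: if an insn refers to VAR
   twice through a MATCH_DUP, the first call records its chosen
   replacement (a pseudo or a new MEM) on REPLACEMENTS, and the second
   occurrence is then rewritten to that very same rtx.  */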
1701
1702 static void
1703 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1704 register rtx var;
1705 enum machine_mode promoted_mode;
1706 register rtx *loc;
1707 rtx insn;
1708 struct fixup_replacement **replacements;
1709 {
1710 register int i;
1711 register rtx x = *loc;
1712 RTX_CODE code = GET_CODE (x);
1713 register char *fmt;
1714 register rtx tem, tem1;
1715 struct fixup_replacement *replacement;
1716
1717 switch (code)
1718 {
1719 case MEM:
1720 if (var == x)
1721 {
1722 /* If we already have a replacement, use it. Otherwise,
1723 try to fix up this address in case it is invalid. */
1724
1725 replacement = find_fixup_replacement (replacements, var);
1726 if (replacement->new)
1727 {
1728 *loc = replacement->new;
1729 return;
1730 }
1731
1732 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1733
1734 /* Unless we are forcing memory to register or we changed the mode,
1735 we can leave things the way they are if the insn is valid. */
1736
1737 INSN_CODE (insn) = -1;
1738 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1739 && recog_memoized (insn) >= 0)
1740 return;
1741
1742 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1743 return;
1744 }
1745
1746 /* If X contains VAR, we need to unshare it here so that we update
1747 each occurrence separately. But all identical MEMs in one insn
1748 must be replaced with the same rtx because of the possibility of
1749 MATCH_DUPs. */
1750
1751 if (reg_mentioned_p (var, x))
1752 {
1753 replacement = find_fixup_replacement (replacements, x);
1754 if (replacement->new == 0)
1755 replacement->new = copy_most_rtx (x, var);
1756
1757 *loc = x = replacement->new;
1758 }
1759 break;
1760
1761 case REG:
1762 case CC0:
1763 case PC:
1764 case CONST_INT:
1765 case CONST:
1766 case SYMBOL_REF:
1767 case LABEL_REF:
1768 case CONST_DOUBLE:
1769 return;
1770
1771 case SIGN_EXTRACT:
1772 case ZERO_EXTRACT:
1773 /* Note that in some cases those types of expressions are altered
1774 by optimize_bit_field, and do not survive to get here. */
1775 if (XEXP (x, 0) == var
1776 || (GET_CODE (XEXP (x, 0)) == SUBREG
1777 && SUBREG_REG (XEXP (x, 0)) == var))
1778 {
1779 /* Get TEM as a valid MEM in the mode presently in the insn.
1780
1781 We don't worry about the possibility of MATCH_DUP here; it
1782 is highly unlikely and would be tricky to handle. */
1783
1784 tem = XEXP (x, 0);
1785 if (GET_CODE (tem) == SUBREG)
1786 {
1787 if (GET_MODE_BITSIZE (GET_MODE (tem))
1788 > GET_MODE_BITSIZE (GET_MODE (var)))
1789 {
1790 replacement = find_fixup_replacement (replacements, var);
1791 if (replacement->new == 0)
1792 replacement->new = gen_reg_rtx (GET_MODE (var));
1793 SUBREG_REG (tem) = replacement->new;
1794 }
1795 else
1796 tem = fixup_memory_subreg (tem, insn, 0);
1797 }
1798 else
1799 tem = fixup_stack_1 (tem, insn);
1800
1801 /* Unless we want to load from memory, get TEM into the proper mode
1802 for an extract from memory. This can only be done if the
1803 extract is at a constant position and length. */
1804
1805 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1806 && GET_CODE (XEXP (x, 2)) == CONST_INT
1807 && ! mode_dependent_address_p (XEXP (tem, 0))
1808 && ! MEM_VOLATILE_P (tem))
1809 {
1810 enum machine_mode wanted_mode = VOIDmode;
1811 enum machine_mode is_mode = GET_MODE (tem);
1812 int width = INTVAL (XEXP (x, 1));
1813 int pos = INTVAL (XEXP (x, 2));
1814
1815 #ifdef HAVE_extzv
1816 if (GET_CODE (x) == ZERO_EXTRACT)
1817 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1818 #endif
1819 #ifdef HAVE_extv
1820 if (GET_CODE (x) == SIGN_EXTRACT)
1821 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1822 #endif
1823 /* If we have a narrower mode, we can do something. */
1824 if (wanted_mode != VOIDmode
1825 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1826 {
1827 int offset = pos / BITS_PER_UNIT;
1828 rtx old_pos = XEXP (x, 2);
1829 rtx newmem;
1830
1831 /* If the bytes and bits are counted differently, we
1832 must adjust the offset. */
1833 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1834 offset = (GET_MODE_SIZE (is_mode)
1835 - GET_MODE_SIZE (wanted_mode) - offset);
1836
1837 pos %= GET_MODE_BITSIZE (wanted_mode);
1838
1839 newmem = gen_rtx (MEM, wanted_mode,
1840 plus_constant (XEXP (tem, 0), offset));
1841 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1842 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1843 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1844
1845 /* Make the change and see if the insn remains valid. */
1846 INSN_CODE (insn) = -1;
1847 XEXP (x, 0) = newmem;
1848 XEXP (x, 2) = GEN_INT (pos);
1849
1850 if (recog_memoized (insn) >= 0)
1851 return;
1852
1853 /* Otherwise, restore old position. XEXP (x, 0) will be
1854 restored later. */
1855 XEXP (x, 2) = old_pos;
1856 }
1857 }
1858
1859 /* If we get here, the bitfield extract insn can't accept a memory
1860 reference. Copy the input into a register. */
1861
1862 tem1 = gen_reg_rtx (GET_MODE (tem));
1863 emit_insn_before (gen_move_insn (tem1, tem), insn);
1864 XEXP (x, 0) = tem1;
1865 return;
1866 }
1867 break;
1868
1869 case SUBREG:
1870 if (SUBREG_REG (x) == var)
1871 {
1872 /* If this is a special SUBREG made because VAR was promoted
1873 from a wider mode, replace it with VAR and call ourselves
1874 recursively, this time saying that the object previously
1875 had its current mode (by virtue of the SUBREG). */
1876
1877 if (SUBREG_PROMOTED_VAR_P (x))
1878 {
1879 *loc = var;
1880 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
1881 return;
1882 }
1883
1884 /* If this SUBREG makes VAR wider, it has become a paradoxical
1885 SUBREG with VAR in memory, but these aren't allowed at this
1886 stage of the compilation. So load VAR into a pseudo and take
1887 a SUBREG of that pseudo. */
1888 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
1889 {
1890 replacement = find_fixup_replacement (replacements, var);
1891 if (replacement->new == 0)
1892 replacement->new = gen_reg_rtx (GET_MODE (var));
1893 SUBREG_REG (x) = replacement->new;
1894 return;
1895 }
1896
1897 /* See if we have already found a replacement for this SUBREG.
1898 If so, use it. Otherwise, make a MEM and see if the insn
1899 is recognized. If not, or if we should force MEM into a register,
1900 make a pseudo for this SUBREG. */
1901 replacement = find_fixup_replacement (replacements, x);
1902 if (replacement->new)
1903 {
1904 *loc = replacement->new;
1905 return;
1906 }
1907
1908 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
1909
1910 INSN_CODE (insn) = -1;
1911 if (! flag_force_mem && recog_memoized (insn) >= 0)
1912 return;
1913
1914 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
1915 return;
1916 }
1917 break;
1918
1919 case SET:
1920 /* First do special simplification of bit-field references. */
1921 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
1922 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
1923 optimize_bit_field (x, insn, 0);
1924 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
1925 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
1926 optimize_bit_field (x, insn, NULL_PTR);
1927
1928 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
1929 into a register and then store it back out. */
1930 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
1931 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
1932 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
1933 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
1934 > GET_MODE_SIZE (GET_MODE (var))))
1935 {
1936 replacement = find_fixup_replacement (replacements, var);
1937 if (replacement->new == 0)
1938 replacement->new = gen_reg_rtx (GET_MODE (var));
1939
1940 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
1941 emit_insn_after (gen_move_insn (var, replacement->new), insn);
1942 }
1943
1944 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
1945 insn into a pseudo and store the low part of the pseudo into VAR. */
1946 if (GET_CODE (SET_DEST (x)) == SUBREG
1947 && SUBREG_REG (SET_DEST (x)) == var
1948 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
1949 > GET_MODE_SIZE (GET_MODE (var))))
1950 {
1951 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
1952 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
1953 tem)),
1954 insn);
1955 break;
1956 }
1957
1958 {
1959 rtx dest = SET_DEST (x);
1960 rtx src = SET_SRC (x);
1961 rtx outerdest = dest;
1962
1963 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
1964 || GET_CODE (dest) == SIGN_EXTRACT
1965 || GET_CODE (dest) == ZERO_EXTRACT)
1966 dest = XEXP (dest, 0);
1967
1968 if (GET_CODE (src) == SUBREG)
1969 src = XEXP (src, 0);
1970
1971 /* If VAR does not appear at the top level of the SET,
1972 just scan the lower levels of the tree. */
1973
1974 if (src != var && dest != var)
1975 break;
1976
1977 /* We will need to rerecognize this insn. */
1978 INSN_CODE (insn) = -1;
1979
1980 #ifdef HAVE_insv
1981 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
1982 {
1983 /* Since this case will return, ensure we fixup all the
1984 operands here. */
1985 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
1986 insn, replacements);
1987 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
1988 insn, replacements);
1989 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
1990 insn, replacements);
1991
1992 tem = XEXP (outerdest, 0);
1993
1994 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
1995 that may appear inside a ZERO_EXTRACT.
1996 This was legitimate when the MEM was a REG. */
1997 if (GET_CODE (tem) == SUBREG
1998 && SUBREG_REG (tem) == var)
1999 tem = fixup_memory_subreg (tem, insn, 0);
2000 else
2001 tem = fixup_stack_1 (tem, insn);
2002
2003 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2004 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2005 && ! mode_dependent_address_p (XEXP (tem, 0))
2006 && ! MEM_VOLATILE_P (tem))
2007 {
2008 enum machine_mode wanted_mode
2009 = insn_operand_mode[(int) CODE_FOR_insv][0];
2010 enum machine_mode is_mode = GET_MODE (tem);
2011 int width = INTVAL (XEXP (outerdest, 1));
2012 int pos = INTVAL (XEXP (outerdest, 2));
2013
2014 /* If we have a narrower mode, we can do something. */
2015 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2016 {
2017 int offset = pos / BITS_PER_UNIT;
2018 rtx old_pos = XEXP (outerdest, 2);
2019 rtx newmem;
2020
2021 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2022 offset = (GET_MODE_SIZE (is_mode)
2023 - GET_MODE_SIZE (wanted_mode) - offset);
2024
2025 pos %= GET_MODE_BITSIZE (wanted_mode);
2026
2027 newmem = gen_rtx (MEM, wanted_mode,
2028 plus_constant (XEXP (tem, 0), offset));
2029 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2030 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2031 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2032
2033 /* Make the change and see if the insn remains valid. */
2034 INSN_CODE (insn) = -1;
2035 XEXP (outerdest, 0) = newmem;
2036 XEXP (outerdest, 2) = GEN_INT (pos);
2037
2038 if (recog_memoized (insn) >= 0)
2039 return;
2040
2041 /* Otherwise, restore old position. XEXP (x, 0) will be
2042 restored later. */
2043 XEXP (outerdest, 2) = old_pos;
2044 }
2045 }
2046
2047 /* If we get here, the bit-field store doesn't allow memory
2048 or isn't located at a constant position. Load the value into
2049 a register, do the store, and put it back into memory. */
2050
2051 tem1 = gen_reg_rtx (GET_MODE (tem));
2052 emit_insn_before (gen_move_insn (tem1, tem), insn);
2053 emit_insn_after (gen_move_insn (tem, tem1), insn);
2054 XEXP (outerdest, 0) = tem1;
2055 return;
2056 }
2057 #endif
2058
2059 /* STRICT_LOW_PART is a no-op on memory references
2060 and it can cause combinations to be unrecognizable,
2061 so eliminate it. */
2062
2063 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2064 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2065
2066 /* A valid insn to copy VAR into or out of a register
2067 must be left alone, to avoid an infinite loop here.
2068 If the reference to VAR is by a subreg, fix that up,
2069 since SUBREG is not valid for a memref.
2070 Also fix up the address of the stack slot.
2071
2072 Note that we must not try to recognize the insn until
2073 after we know that we have valid addresses and no
2074 (subreg (mem ...) ...) constructs, since these interfere
2075 with determining the validity of the insn. */
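
/* For instance (illustrative), if VAR's new slot already gives a
   valid address, (set (reg:SI 100) VAR) is simply a good load and
   must be left as is; rewriting it would have us emitting fixups
   for our own fixups forever.  */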
2076
2077 if ((SET_SRC (x) == var
2078 || (GET_CODE (SET_SRC (x)) == SUBREG
2079 && SUBREG_REG (SET_SRC (x)) == var))
2080 && (GET_CODE (SET_DEST (x)) == REG
2081 || (GET_CODE (SET_DEST (x)) == SUBREG
2082 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2083 && GET_MODE (var) == promoted_mode
2084 && x == single_set (insn))
2085 {
2086 rtx pat;
2087
2088 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2089 if (replacement->new)
2090 SET_SRC (x) = replacement->new;
2091 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2092 SET_SRC (x) = replacement->new
2093 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2094 else
2095 SET_SRC (x) = replacement->new
2096 = fixup_stack_1 (SET_SRC (x), insn);
2097
2098 if (recog_memoized (insn) >= 0)
2099 return;
2100
2101 /* INSN is not valid, but we know that we want to
2102 copy SET_SRC (x) to SET_DEST (x) in some way. So
2103 we generate the move and see whether it requires more
2104 than one insn. If it does, we emit those insns and
2105 delete INSN. Otherwise, we can just replace the pattern
2106 of INSN; we have already verified above that INSN has
2107 no other function than to do X. */
2108
2109 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2110 if (GET_CODE (pat) == SEQUENCE)
2111 {
2112 emit_insn_after (pat, insn);
2113 PUT_CODE (insn, NOTE);
2114 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2115 NOTE_SOURCE_FILE (insn) = 0;
2116 }
2117 else
2118 PATTERN (insn) = pat;
2119
2120 return;
2121 }
2122
2123 if ((SET_DEST (x) == var
2124 || (GET_CODE (SET_DEST (x)) == SUBREG
2125 && SUBREG_REG (SET_DEST (x)) == var))
2126 && (GET_CODE (SET_SRC (x)) == REG
2127 || (GET_CODE (SET_SRC (x)) == SUBREG
2128 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2129 && GET_MODE (var) == promoted_mode
2130 && x == single_set (insn))
2131 {
2132 rtx pat;
2133
2134 if (GET_CODE (SET_DEST (x)) == SUBREG)
2135 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2136 else
2137 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2138
2139 if (recog_memoized (insn) >= 0)
2140 return;
2141
2142 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2143 if (GET_CODE (pat) == SEQUENCE)
2144 {
2145 emit_insn_after (pat, insn);
2146 PUT_CODE (insn, NOTE);
2147 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2148 NOTE_SOURCE_FILE (insn) = 0;
2149 }
2150 else
2151 PATTERN (insn) = pat;
2152
2153 return;
2154 }
2155
2156 /* Otherwise, storing into VAR must be handled specially
2157 by storing into a temporary and copying that into VAR
2158 with a new insn after this one. Note that this case
2159 will be used when storing into a promoted scalar since
2160 the insn will now have different modes on the input
2161 and output and hence will be invalid (except for the case
2162 of setting it to a constant, which does not need any
2163 change if it is valid). We generate extra code in that case,
2164 but combine.c will eliminate it. */
2165
2166 if (dest == var)
2167 {
2168 rtx temp;
2169 rtx fixeddest = SET_DEST (x);
2170
2171 /* STRICT_LOW_PART can be discarded around a MEM. */
2172 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2173 fixeddest = XEXP (fixeddest, 0);
2174 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2175 if (GET_CODE (fixeddest) == SUBREG)
2176 {
2177 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2178 promoted_mode = GET_MODE (fixeddest);
2179 }
2180 else
2181 fixeddest = fixup_stack_1 (fixeddest, insn);
2182
2183 temp = gen_reg_rtx (promoted_mode);
2184
2185 emit_insn_after (gen_move_insn (fixeddest,
2186 gen_lowpart (GET_MODE (fixeddest),
2187 temp)),
2188 insn);
2189
2190 SET_DEST (x) = temp;
2191 }
2192 }
2193 }
2194
2195 /* Nothing special about this RTX; fix its operands. */
2196
2197 fmt = GET_RTX_FORMAT (code);
2198 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2199 {
2200 if (fmt[i] == 'e')
2201 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2202 if (fmt[i] == 'E')
2203 {
2204 register int j;
2205 for (j = 0; j < XVECLEN (x, i); j++)
2206 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2207 insn, replacements);
2208 }
2209 }
2210 }
2211 \f
2212 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2213 return an rtx (MEM:m1 newaddr) which is equivalent.
2214 If any insns must be emitted to compute NEWADDR, put them before INSN.
2215
2216 UNCRITICAL nonzero means accept paradoxical subregs.
2217 This is used for subregs found inside REG_NOTES. */
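
/* A made-up example, for a 32-bit big-endian target: here
     (subreg:HI (mem:SI addr) 0)
   becomes (mem:HI (plus addr 2)), the byte adjustment below picking
   out the two bytes the SUBREG actually referred to.  */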
2218
2219 static rtx
2220 fixup_memory_subreg (x, insn, uncritical)
2221 rtx x;
2222 rtx insn;
2223 int uncritical;
2224 {
2225 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2226 rtx addr = XEXP (SUBREG_REG (x), 0);
2227 enum machine_mode mode = GET_MODE (x);
2228 rtx saved, result;
2229
2230 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2231 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2232 && ! uncritical)
2233 abort ();
2234
2235 if (BYTES_BIG_ENDIAN)
2236 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2237 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2238 addr = plus_constant (addr, offset);
2239 if (!flag_force_addr && memory_address_p (mode, addr))
2240 /* Shortcut if no insns need be emitted. */
2241 return change_address (SUBREG_REG (x), mode, addr);
2242 start_sequence ();
2243 result = change_address (SUBREG_REG (x), mode, addr);
2244 emit_insn_before (gen_sequence (), insn);
2245 end_sequence ();
2246 return result;
2247 }
2248
2249 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2250 Replace subexpressions of X in place.
2251 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2252 Otherwise return X, with its contents possibly altered.
2253
2254 If any insns must be emitted to compute NEWADDR, put them before INSN.
2255
2256 UNCRITICAL is as in fixup_memory_subreg. */
2257
2258 static rtx
2259 walk_fixup_memory_subreg (x, insn, uncritical)
2260 register rtx x;
2261 rtx insn;
2262 int uncritical;
2263 {
2264 register enum rtx_code code;
2265 register char *fmt;
2266 register int i;
2267
2268 if (x == 0)
2269 return 0;
2270
2271 code = GET_CODE (x);
2272
2273 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2274 return fixup_memory_subreg (x, insn, uncritical);
2275
2276 /* Nothing special about this RTX; fix its operands. */
2277
2278 fmt = GET_RTX_FORMAT (code);
2279 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2280 {
2281 if (fmt[i] == 'e')
2282 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2283 if (fmt[i] == 'E')
2284 {
2285 register int j;
2286 for (j = 0; j < XVECLEN (x, i); j++)
2287 XVECEXP (x, i, j)
2288 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2289 }
2290 }
2291 return x;
2292 }
2293 \f
2294 /* For each memory ref within X, if it refers to a stack slot
2295 with an out-of-range displacement, put the address in a temp register
2296 (emitting new insns before INSN to load these registers)
2297 and alter the memory ref to use that register.
2298 Replace each such MEM rtx with a copy, to avoid clobberage. */
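
/* Illustrative case only: on a machine allowing, say, at most a
   12-bit displacement,
     (mem:SI (plus (reg virtual-stack-vars) (const_int 40000)))
   gets its PLUS computed into a temporary register before INSN, and
   the copied MEM then addresses through that register.  */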
2299
2300 static rtx
2301 fixup_stack_1 (x, insn)
2302 rtx x;
2303 rtx insn;
2304 {
2305 register int i;
2306 register RTX_CODE code = GET_CODE (x);
2307 register char *fmt;
2308
2309 if (code == MEM)
2310 {
2311 register rtx ad = XEXP (x, 0);
2312 /* If we have the address of a stack slot but it's not valid
2313 (displacement is too large), compute the sum in a register. */
2314 if (GET_CODE (ad) == PLUS
2315 && GET_CODE (XEXP (ad, 0)) == REG
2316 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2317 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2318 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2319 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2320 {
2321 rtx temp, seq;
2322 if (memory_address_p (GET_MODE (x), ad))
2323 return x;
2324
2325 start_sequence ();
2326 temp = copy_to_reg (ad);
2327 seq = gen_sequence ();
2328 end_sequence ();
2329 emit_insn_before (seq, insn);
2330 return change_address (x, VOIDmode, temp);
2331 }
2332 return x;
2333 }
2334
2335 fmt = GET_RTX_FORMAT (code);
2336 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2337 {
2338 if (fmt[i] == 'e')
2339 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2340 if (fmt[i] == 'E')
2341 {
2342 register int j;
2343 for (j = 0; j < XVECLEN (x, i); j++)
2344 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2345 }
2346 }
2347 return x;
2348 }
2349 \f
2350 /* Optimization: a bit-field instruction whose field
2351 happens to be a byte or halfword in memory
2352 can be changed to a move instruction.
2353
2354 We call here when INSN is an insn to examine or store into a bit-field.
2355 BODY is the SET-rtx to be altered.
2356
2357 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2358 (Currently this is called only from function.c, and EQUIV_MEM
2359 is always 0.) */
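
/* A sketch of the transformation, with made-up numbers:
     (set (zero_extract:SI (mem:SI A) (const_int 8) (const_int 8))
          (reg:SI R))
   names an aligned byte inside the word at A, so it can become a
   plain one-byte store
     (set (mem:QI A') R')
   where A' is A adjusted to that byte and R' is the low part of R.  */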
2360
2361 static void
2362 optimize_bit_field (body, insn, equiv_mem)
2363 rtx body;
2364 rtx insn;
2365 rtx *equiv_mem;
2366 {
2367 register rtx bitfield;
2368 int destflag;
2369 rtx seq = 0;
2370 enum machine_mode mode;
2371
2372 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2373 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2374 bitfield = SET_DEST (body), destflag = 1;
2375 else
2376 bitfield = SET_SRC (body), destflag = 0;
2377
2378 /* First check that the field being stored has constant size and position
2379 and is in fact a byte or halfword suitably aligned. */
2380
2381 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2382 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2383 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2384 != BLKmode)
2385 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2386 {
2387 register rtx memref = 0;
2388
2389 /* Now check that the containing word is memory, not a register,
2390 and that it is safe to change the machine mode. */
2391
2392 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2393 memref = XEXP (bitfield, 0);
2394 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2395 && equiv_mem != 0)
2396 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2397 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2398 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2399 memref = SUBREG_REG (XEXP (bitfield, 0));
2400 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2401 && equiv_mem != 0
2402 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2403 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2404
2405 if (memref
2406 && ! mode_dependent_address_p (XEXP (memref, 0))
2407 && ! MEM_VOLATILE_P (memref))
2408 {
2409 /* Now adjust the address, first for any subreg'ing
2410 that we are now getting rid of,
2411 and then for which byte of the word is wanted. */
2412
2413 register int offset = INTVAL (XEXP (bitfield, 2));
2414 rtx insns;
2415
2416 /* Adjust OFFSET to count bits from low-address byte. */
2417 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2418 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2419 - offset - INTVAL (XEXP (bitfield, 1)));
2420
2421 /* Adjust OFFSET to count bytes from low-address byte. */
2422 offset /= BITS_PER_UNIT;
2423 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2424 {
2425 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2426 if (BYTES_BIG_ENDIAN)
2427 offset -= (MIN (UNITS_PER_WORD,
2428 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2429 - MIN (UNITS_PER_WORD,
2430 GET_MODE_SIZE (GET_MODE (memref))));
2431 }
2432
2433 start_sequence ();
2434 memref = change_address (memref, mode,
2435 plus_constant (XEXP (memref, 0), offset));
2436 insns = get_insns ();
2437 end_sequence ();
2438 emit_insns_before (insns, insn);
2439
2440 /* Store this memory reference where
2441 we found the bit field reference. */
2442
2443 if (destflag)
2444 {
2445 validate_change (insn, &SET_DEST (body), memref, 1);
2446 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2447 {
2448 rtx src = SET_SRC (body);
2449 while (GET_CODE (src) == SUBREG
2450 && SUBREG_WORD (src) == 0)
2451 src = SUBREG_REG (src);
2452 if (GET_MODE (src) != GET_MODE (memref))
2453 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2454 validate_change (insn, &SET_SRC (body), src, 1);
2455 }
2456 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2457 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2458 /* This shouldn't happen because anything that didn't have
2459 one of these modes should have been converted explicitly
2460 and then referenced through a subreg.
2461 This is so because the original bit-field was
2462 handled by agg_mode and so its tree structure had
2463 the same mode that memref now has. */
2464 abort ();
2465 }
2466 else
2467 {
2468 rtx dest = SET_DEST (body);
2469
2470 while (GET_CODE (dest) == SUBREG
2471 && SUBREG_WORD (dest) == 0
2472 && (GET_MODE_CLASS (GET_MODE (dest))
2473 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2474 dest = SUBREG_REG (dest);
2475
2476 validate_change (insn, &SET_DEST (body), dest, 1);
2477
2478 if (GET_MODE (dest) == GET_MODE (memref))
2479 validate_change (insn, &SET_SRC (body), memref, 1);
2480 else
2481 {
2482 /* Convert the mem ref to the destination mode. */
2483 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2484
2485 start_sequence ();
2486 convert_move (newreg, memref,
2487 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2488 seq = get_insns ();
2489 end_sequence ();
2490
2491 validate_change (insn, &SET_SRC (body), newreg, 1);
2492 }
2493 }
2494
2495 /* See if we can convert this extraction or insertion into
2496 a simple move insn. We might not be able to do so if this
2497 was, for example, part of a PARALLEL.
2498
2499 If we succeed, write out any needed conversions. If we fail,
2500 it is hard to guess why we failed, so don't do anything
2501 special; just let the optimization be suppressed. */
2502
2503 if (apply_change_group () && seq)
2504 emit_insns_before (seq, insn);
2505 }
2506 }
2507 }
2508 \f
2509 /* These routines are responsible for converting virtual register references
2510 to the actual hard register references once RTL generation is complete.
2511
2512 The following four variables are used for communication between the
2513 routines. They contain the offsets of the virtual registers from their
2514 respective hard registers. */
2515
2516 static int in_arg_offset;
2517 static int var_offset;
2518 static int dynamic_offset;
2519 static int out_arg_offset;
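
/* For example, virtual_incoming_args_rtx is replaced by
   arg_pointer_rtx plus in_arg_offset, and virtual_stack_vars_rtx by
   frame_pointer_rtx plus var_offset; the offsets themselves come from
   target macros (FIRST_PARM_OFFSET, STARTING_FRAME_OFFSET, and the
   definitions below).  */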
2520
2521 /* In most machines, the stack pointer register is equivalent to the bottom
2522 of the stack. */
2523
2524 #ifndef STACK_POINTER_OFFSET
2525 #define STACK_POINTER_OFFSET 0
2526 #endif
2527
2528 /* If not defined, pick an appropriate default for the offset of dynamically
2529 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2530 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2531
2532 #ifndef STACK_DYNAMIC_OFFSET
2533
2534 #ifdef ACCUMULATE_OUTGOING_ARGS
2535 /* The bottom of the stack points to the actual arguments. If
2536 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2537 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2538 stack space for register parameters is not pushed by the caller, but
2539 rather part of the fixed stack areas and hence not included in
2540 `current_function_outgoing_args_size'. Nevertheless, we must allow
2541 for it when allocating stack dynamic objects. */
2542
2543 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2544 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2545 (current_function_outgoing_args_size \
2546 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2547
2548 #else
2549 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2550 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2551 #endif
2552
2553 #else
2554 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2555 #endif
2556 #endif
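
/* A hypothetical reading of the above: with ACCUMULATE_OUTGOING_ARGS
   defined, current_function_outgoing_args_size of 16, no
   REG_PARM_STACK_SPACE, and a STACK_POINTER_OFFSET of 0, dynamically
   allocated objects would sit at offset 16 from the stack pointer.  */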
2557
2558 /* Pass through the INSNS of function FNDECL and convert virtual register
2559 references to hard register references. */
2560
2561 void
2562 instantiate_virtual_regs (fndecl, insns)
2563 tree fndecl;
2564 rtx insns;
2565 {
2566 rtx insn;
2567
2568 /* Compute the offsets to use for this function. */
2569 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2570 var_offset = STARTING_FRAME_OFFSET;
2571 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2572 out_arg_offset = STACK_POINTER_OFFSET;
2573
2574 /* Scan all variables and parameters of this function. For each that is
2575 in memory, instantiate all virtual registers if the result is a valid
2576 address. If not, we do it later. That will handle most uses of virtual
2577 regs on many machines. */
2578 instantiate_decls (fndecl, 1);
2579
2580 /* Initialize recognition, indicating that volatile is OK. */
2581 init_recog ();
2582
2583 /* Scan through all the insns, instantiating every virtual register still
2584 present. */
2585 for (insn = insns; insn; insn = NEXT_INSN (insn))
2586 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2587 || GET_CODE (insn) == CALL_INSN)
2588 {
2589 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2590 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2591 }
2592
2593 /* Now instantiate the remaining register equivalences for debugging info.
2594 These will not be valid addresses. */
2595 instantiate_decls (fndecl, 0);
2596
2597 /* Indicate that, from now on, assign_stack_local should use
2598 frame_pointer_rtx. */
2599 virtuals_instantiated = 1;
2600 }
2601
2602 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2603 all virtual registers in their DECL_RTL's.
2604
2605 If VALID_ONLY, do this only if the resulting address is still valid.
2606 Otherwise, always do it. */
2607
2608 static void
2609 instantiate_decls (fndecl, valid_only)
2610 tree fndecl;
2611 int valid_only;
2612 {
2613 tree decl;
2614
2615 if (DECL_SAVED_INSNS (fndecl))
2616 /* When compiling an inline function, the obstack used for
2617 rtl allocation is the maybepermanent_obstack. Calling
2618 `resume_temporary_allocation' switches us back to that
2619 obstack while we process this function's parameters. */
2620 resume_temporary_allocation ();
2621
2622 /* Process all parameters of the function. */
2623 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2624 {
2625 instantiate_decl (DECL_RTL (decl), int_size_in_bytes (TREE_TYPE (decl)),
2626 valid_only);
2627 instantiate_decl (DECL_INCOMING_RTL (decl),
2628 int_size_in_bytes (TREE_TYPE (decl)), valid_only);
2629 }
2630
2631 /* Now process all variables defined in the function or its subblocks. */
2632 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2633
2634 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2635 {
2636 /* Save all rtl allocated for this function by raising the
2637 high-water mark on the maybepermanent_obstack. */
2638 preserve_data ();
2639 /* All further rtl allocation is now done in the current_obstack. */
2640 rtl_in_current_obstack ();
2641 }
2642 }
2643
2644 /* Subroutine of instantiate_decls: Process all decls in the given
2645 BLOCK node and all its subblocks. */
2646
2647 static void
2648 instantiate_decls_1 (let, valid_only)
2649 tree let;
2650 int valid_only;
2651 {
2652 tree t;
2653
2654 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2655 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2656 valid_only);
2657
2658 /* Process all subblocks. */
2659 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2660 instantiate_decls_1 (t, valid_only);
2661 }
2662
2663 /* Subroutine of the preceding procedures: Given RTL representing a
2664 decl and the size of the object, do any instantiation required.
2665
2666 If VALID_ONLY is non-zero, it means that the RTL should only be
2667 changed if the new address is valid. */
2668
2669 static void
2670 instantiate_decl (x, size, valid_only)
2671 rtx x;
2672 int size;
2673 int valid_only;
2674 {
2675 enum machine_mode mode;
2676 rtx addr;
2677
2678 /* If this is not a MEM, no need to do anything. Similarly if the
2679 address is a constant or a register that is not a virtual register. */
2680
2681 if (x == 0 || GET_CODE (x) != MEM)
2682 return;
2683
2684 addr = XEXP (x, 0);
2685 if (CONSTANT_P (addr)
2686 || (GET_CODE (addr) == REG
2687 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
2688 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
2689 return;
2690
2691 /* If we should only do this if the address is valid, copy the address.
2692 We need to do this so we can undo any changes that might make the
2693 address invalid. This copy is unfortunate, but probably can't be
2694 avoided. */
2695
2696 if (valid_only)
2697 addr = copy_rtx (addr);
2698
2699 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
2700
2701 if (valid_only)
2702 {
2703 /* Now verify that the resulting address is valid for every integer or
2704 floating-point mode up to and including SIZE bytes long. We do this
2705 since the object might be accessed in any mode and frame addresses
2706 are shared. */
2707
2708 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2709 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2710 mode = GET_MODE_WIDER_MODE (mode))
2711 if (! memory_address_p (mode, addr))
2712 return;
2713
2714 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
2715 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
2716 mode = GET_MODE_WIDER_MODE (mode))
2717 if (! memory_address_p (mode, addr))
2718 return;
2719 }
2720
2721 /* Put back the address now that we have updated it and we either know
2722 it is valid or we don't care whether it is valid. */
2723
2724 XEXP (x, 0) = addr;
2725 }
2726 \f
2727 /* Given a pointer to a piece of rtx and an optional pointer to the
2728 containing object, instantiate any virtual registers present in it.
2729
2730 If EXTRA_INSNS, we always do the replacement and generate
2731 any extra insns before OBJECT. If it is zero, we do nothing if the replacement
2732 is not valid.
2733
2734 Return 1 if we either had nothing to do or if we were able to do the
2735 needed replacement. Return 0 otherwise; we only return zero if
2736 EXTRA_INSNS is zero.
2737
2738 We first try some simple transformations to avoid the creation of extra
2739 pseudos. */
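
/* For example (offsets invented), with var_offset of -16,
     (plus:SI (reg virtual-stack-vars) (const_int 4))
   is first tried as (plus:SI (reg fp) (const_int -12)); only if that
   fails to validate do we compute the sum into a new pseudo emitted
   before OBJECT.  */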
2740
2741 static int
2742 instantiate_virtual_regs_1 (loc, object, extra_insns)
2743 rtx *loc;
2744 rtx object;
2745 int extra_insns;
2746 {
2747 rtx x;
2748 RTX_CODE code;
2749 rtx new = 0;
2750 int offset;
2751 rtx temp;
2752 rtx seq;
2753 int i, j;
2754 char *fmt;
2755
2756 /* Re-start here to avoid recursion in common cases. */
2757 restart:
2758
2759 x = *loc;
2760 if (x == 0)
2761 return 1;
2762
2763 code = GET_CODE (x);
2764
2765 /* Check for some special cases. */
2766 switch (code)
2767 {
2768 case CONST_INT:
2769 case CONST_DOUBLE:
2770 case CONST:
2771 case SYMBOL_REF:
2772 case CODE_LABEL:
2773 case PC:
2774 case CC0:
2775 case ASM_INPUT:
2776 case ADDR_VEC:
2777 case ADDR_DIFF_VEC:
2778 case RETURN:
2779 return 1;
2780
2781 case SET:
2782 /* We are allowed to set the virtual registers. This means
2783 that the actual register should receive the source minus the
2784 appropriate offset. This is used, for example, in the handling
2785 of non-local gotos. */
2786 if (SET_DEST (x) == virtual_incoming_args_rtx)
2787 new = arg_pointer_rtx, offset = - in_arg_offset;
2788 else if (SET_DEST (x) == virtual_stack_vars_rtx)
2789 new = frame_pointer_rtx, offset = - var_offset;
2790 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
2791 new = stack_pointer_rtx, offset = - dynamic_offset;
2792 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
2793 new = stack_pointer_rtx, offset = - out_arg_offset;
2794
2795 if (new)
2796 {
2797 /* The only valid sources here are PLUS or REG. Just do
2798 the simplest possible thing to handle them. */
2799 if (GET_CODE (SET_SRC (x)) != REG
2800 && GET_CODE (SET_SRC (x)) != PLUS)
2801 abort ();
2802
2803 start_sequence ();
2804 if (GET_CODE (SET_SRC (x)) != REG)
2805 temp = force_operand (SET_SRC (x), NULL_RTX);
2806 else
2807 temp = SET_SRC (x);
2808 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
2809 seq = get_insns ();
2810 end_sequence ();
2811
2812 emit_insns_before (seq, object);
2813 SET_DEST (x) = new;
2814
2815 if (!validate_change (object, &SET_SRC (x), temp, 0)
2816 || ! extra_insns)
2817 abort ();
2818
2819 return 1;
2820 }
2821
2822 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
2823 loc = &SET_SRC (x);
2824 goto restart;
2825
2826 case PLUS:
2827 /* Handle special case of virtual register plus constant. */
2828 if (CONSTANT_P (XEXP (x, 1)))
2829 {
2830 rtx old, new_offset;
2831
2832 /* Check for (plus (plus VIRT foo) (const_int)) first. */
2833 if (GET_CODE (XEXP (x, 0)) == PLUS)
2834 {
2835 rtx inner = XEXP (XEXP (x, 0), 0);
2836
2837 if (inner == virtual_incoming_args_rtx)
2838 new = arg_pointer_rtx, offset = in_arg_offset;
2839 else if (inner == virtual_stack_vars_rtx)
2840 new = frame_pointer_rtx, offset = var_offset;
2841 else if (inner == virtual_stack_dynamic_rtx)
2842 new = stack_pointer_rtx, offset = dynamic_offset;
2843 else if (inner == virtual_outgoing_args_rtx)
2844 new = stack_pointer_rtx, offset = out_arg_offset;
2845 else
2846 {
2847 loc = &XEXP (x, 0);
2848 goto restart;
2849 }
2850
2851 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
2852 extra_insns);
2853 new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
2854 }
2855
2856 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
2857 new = arg_pointer_rtx, offset = in_arg_offset;
2858 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
2859 new = frame_pointer_rtx, offset = var_offset;
2860 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
2861 new = stack_pointer_rtx, offset = dynamic_offset;
2862 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
2863 new = stack_pointer_rtx, offset = out_arg_offset;
2864 else
2865 {
2866 /* We know the second operand is a constant. Unless the
2867 first operand is a REG (which has already been checked),
2868 it needs to be checked. */
2869 if (GET_CODE (XEXP (x, 0)) != REG)
2870 {
2871 loc = &XEXP (x, 0);
2872 goto restart;
2873 }
2874 return 1;
2875 }
2876
2877 new_offset = plus_constant (XEXP (x, 1), offset);
2878
2879 /* If the new constant is zero, try to replace the sum with just
2880 the register. */
2881 if (new_offset == const0_rtx
2882 && validate_change (object, loc, new, 0))
2883 return 1;
2884
2885 /* Next try to replace the register and new offset.
2886 There are two changes to validate here, and we can't assume that,
2887 when the old offset equals the new one, just changing the register
2888 will yield a valid insn. In the interests of a little efficiency,
2889 however, we only call validate_change once (we don't queue up the
2890 changes and then call apply_change_group). */
2891
2892 old = XEXP (x, 0);
2893 if (offset == 0
2894 ? ! validate_change (object, &XEXP (x, 0), new, 0)
2895 : (XEXP (x, 0) = new,
2896 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
2897 {
2898 if (! extra_insns)
2899 {
2900 XEXP (x, 0) = old;
2901 return 0;
2902 }
2903
2904 /* Otherwise copy the new constant into a register and replace
2905 the constant with that register. */
2906 temp = gen_reg_rtx (Pmode);
2907 XEXP (x, 0) = new;
2908 if (validate_change (object, &XEXP (x, 1), temp, 0))
2909 emit_insn_before (gen_move_insn (temp, new_offset), object);
2910 else
2911 {
2912 /* If that didn't work, replace this expression with a
2913 register containing the sum. */
2914
2915 XEXP (x, 0) = old;
2916 new = gen_rtx (PLUS, Pmode, new, new_offset);
2917
2918 start_sequence ();
2919 temp = force_operand (new, NULL_RTX);
2920 seq = get_insns ();
2921 end_sequence ();
2922
2923 emit_insns_before (seq, object);
2924 if (! validate_change (object, loc, temp, 0)
2925 && ! validate_replace_rtx (x, temp, object))
2926 abort ();
2927 }
2928 }
2929
2930 return 1;
2931 }
2932
2933 /* Fall through to generic two-operand expression case. */
2934 case EXPR_LIST:
2935 case CALL:
2936 case COMPARE:
2937 case MINUS:
2938 case MULT:
2939 case DIV: case UDIV:
2940 case MOD: case UMOD:
2941 case AND: case IOR: case XOR:
2942 case ROTATERT: case ROTATE:
2943 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2944 case NE: case EQ:
2945 case GE: case GT: case GEU: case GTU:
2946 case LE: case LT: case LEU: case LTU:
2947 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
2948 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
2949 loc = &XEXP (x, 0);
2950 goto restart;
2951
2952 case MEM:
2953 /* Most cases of MEM that convert to valid addresses have already been
2954 handled by our scan of decls. The only special handling we
2955 need here is to make a copy of the rtx to ensure it isn't being
2956 shared if we have to change it to a pseudo.
2957
2958 If the rtx is a simple reference to an address via a virtual register,
2959 it can potentially be shared. In such cases, first try to make it
2960 a valid address, which can also be shared. Otherwise, copy it and
2961 proceed normally.
2962
2963 First check for common cases that need no processing. These are
2964 usually due to instantiation already being done on a previous instance
2965 of a shared rtx. */
2966
2967 temp = XEXP (x, 0);
2968 if (CONSTANT_ADDRESS_P (temp)
2969 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2970 || temp == arg_pointer_rtx
2971 #endif
2972 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2973 || temp == hard_frame_pointer_rtx
2974 #endif
2975 || temp == frame_pointer_rtx)
2976 return 1;
2977
2978 if (GET_CODE (temp) == PLUS
2979 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2980 && (XEXP (temp, 0) == frame_pointer_rtx
2981 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2982 || XEXP (temp, 0) == hard_frame_pointer_rtx
2983 #endif
2984 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2985 || XEXP (temp, 0) == arg_pointer_rtx
2986 #endif
2987 ))
2988 return 1;
2989
2990 if (temp == virtual_stack_vars_rtx
2991 || temp == virtual_incoming_args_rtx
2992 || (GET_CODE (temp) == PLUS
2993 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
2994 && (XEXP (temp, 0) == virtual_stack_vars_rtx
2995 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
2996 {
2997 /* This MEM may be shared. If the substitution can be done without
2998 the need to generate new pseudos, we want to do it in place
2999 so all copies of the shared rtx benefit. The call below will
3000 only make substitutions if the resulting address is still
3001 valid.
3002
3003 Note that we cannot pass X as the object in the recursive call
3004 since the insn being processed may not allow all valid
3005 addresses. However, if we were not passed an object, we can
3006 only modify X without copying it if X will have a valid
3007 address.
3008
3009 ??? Also note that this can still lose if OBJECT is an insn that
3010 has fewer restrictions on an address than some other insn.
3011 In that case, we will modify the shared address. This case
3012 doesn't seem very likely, though. One case where this could
3013 happen is in the case of a USE or CLOBBER reference, but we
3014 take care of that below. */
3015
3016 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3017 object ? object : x, 0))
3018 return 1;
3019
3020 /* Otherwise make a copy and process that copy. We copy the entire
3021 RTL expression since it might be a PLUS which could also be
3022 shared. */
3023 *loc = x = copy_rtx (x);
3024 }
3025
3026 /* Fall through to generic unary operation case. */
3027 case SUBREG:
3028 case STRICT_LOW_PART:
3029 case NEG: case NOT:
3030 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3031 case SIGN_EXTEND: case ZERO_EXTEND:
3032 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3033 case FLOAT: case FIX:
3034 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3035 case ABS:
3036 case SQRT:
3037 case FFS:
3038 /* These cases either have just one operand or we know that we need not
3039 check the rest of the operands. */
3040 loc = &XEXP (x, 0);
3041 goto restart;
3042
3043 case USE:
3044 case CLOBBER:
3045 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3046 go ahead and make the invalid one, but do it to a copy. For a REG,
3047 just make the recursive call, since there's no chance of a problem. */
3048
3049 if ((GET_CODE (XEXP (x, 0)) == MEM
3050 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3051 0))
3052 || (GET_CODE (XEXP (x, 0)) == REG
3053 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3054 return 1;
3055
3056 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3057 loc = &XEXP (x, 0);
3058 goto restart;
3059
3060 case REG:
3061 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3062 in front of this insn and substitute the temporary. */
3063 if (x == virtual_incoming_args_rtx)
3064 new = arg_pointer_rtx, offset = in_arg_offset;
3065 else if (x == virtual_stack_vars_rtx)
3066 new = frame_pointer_rtx, offset = var_offset;
3067 else if (x == virtual_stack_dynamic_rtx)
3068 new = stack_pointer_rtx, offset = dynamic_offset;
3069 else if (x == virtual_outgoing_args_rtx)
3070 new = stack_pointer_rtx, offset = out_arg_offset;
3071
3072 if (new)
3073 {
3074 temp = plus_constant (new, offset);
3075 if (!validate_change (object, loc, temp, 0))
3076 {
3077 if (! extra_insns)
3078 return 0;
3079
3080 start_sequence ();
3081 temp = force_operand (temp, NULL_RTX);
3082 seq = get_insns ();
3083 end_sequence ();
3084
3085 emit_insns_before (seq, object);
3086 if (! validate_change (object, loc, temp, 0)
3087 && ! validate_replace_rtx (x, temp, object))
3088 abort ();
3089 }
3090 }
3091
3092 return 1;
3093 }
3094
3095 /* Scan all subexpressions. */
3096 fmt = GET_RTX_FORMAT (code);
3097 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3098 if (*fmt == 'e')
3099 {
3100 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3101 return 0;
3102 }
3103 else if (*fmt == 'E')
3104 for (j = 0; j < XVECLEN (x, i); j++)
3105 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3106 extra_insns))
3107 return 0;
3108
3109 return 1;
3110 }
3111 \f
3112 /* Optimization: assuming this function does not receive nonlocal gotos,
3113 delete the handlers for such, as well as the insns to establish
3114 and disestablish them. */
3115
3116 static void
3117 delete_handlers ()
3118 {
3119 rtx insn;
3120 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3121 {
3122 /* Delete the handler by turning off the flag that would
3123 prevent jump_optimize from deleting it.
3124 Also permit deletion of the nonlocal labels themselves
3125 if nothing local refers to them. */
3126 if (GET_CODE (insn) == CODE_LABEL)
3127 {
3128 tree t, last_t;
3129
3130 LABEL_PRESERVE_P (insn) = 0;
3131
3132 /* Remove it from the nonlocal_label list, to avoid confusing
3133 flow. */
3134 for (t = nonlocal_labels, last_t = 0; t;
3135 last_t = t, t = TREE_CHAIN (t))
3136 if (DECL_RTL (TREE_VALUE (t)) == insn)
3137 break;
3138 if (t)
3139 {
3140 if (! last_t)
3141 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3142 else
3143 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3144 }
3145 }
3146 if (GET_CODE (insn) == INSN
3147 && ((nonlocal_goto_handler_slot != 0
3148 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3149 || (nonlocal_goto_stack_level != 0
3150 && reg_mentioned_p (nonlocal_goto_stack_level,
3151 PATTERN (insn)))))
3152 delete_insn (insn);
3153 }
3154 }
3155
3156 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3157 of the current function. */
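
/* E.g. for nonlocal labels L1 and L2, chained in that order, the
   result has the shape (expr_list L2 (expr_list L1 nil)), where L1
   and L2 stand for the labels' CODE_LABEL rtx's.  */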
3158
3159 rtx
3160 nonlocal_label_rtx_list ()
3161 {
3162 tree t;
3163 rtx x = 0;
3164
3165 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3166 x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
3167
3168 return x;
3169 }
3170 \f
3171 /* Output a USE for any register use in RTL.
3172 This is used with -noreg to mark the extent of lifespan
3173 of any registers used in a user-visible variable's DECL_RTL. */
3174
3175 void
3176 use_variable (rtl)
3177 rtx rtl;
3178 {
3179 if (GET_CODE (rtl) == REG)
3180 /* This is a register variable. */
3181 emit_insn (gen_rtx (USE, VOIDmode, rtl));
3182 else if (GET_CODE (rtl) == MEM
3183 && GET_CODE (XEXP (rtl, 0)) == REG
3184 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3185 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3186 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3187 /* This is a variable-sized structure. */
3188 emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
3189 }
3190
3191 /* Like use_variable except that it outputs the USEs after INSN
3192 instead of at the end of the insn-chain. */
3193
3194 void
3195 use_variable_after (rtl, insn)
3196 rtx rtl, insn;
3197 {
3198 if (GET_CODE (rtl) == REG)
3199 /* This is a register variable. */
3200 emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
3201 else if (GET_CODE (rtl) == MEM
3202 && GET_CODE (XEXP (rtl, 0)) == REG
3203 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3204 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3205 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3206 /* This is a variable-sized structure. */
3207 emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
3208 }
3209 \f
3210 int
3211 max_parm_reg_num ()
3212 {
3213 return max_parm_reg;
3214 }
3215
3216 /* Return the first insn following those generated by `assign_parms'. */
3217
3218 rtx
3219 get_first_nonparm_insn ()
3220 {
3221 if (last_parm_insn)
3222 return NEXT_INSN (last_parm_insn);
3223 return get_insns ();
3224 }
3225
3226 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3227 Crash if there is none. */
3228
3229 rtx
3230 get_first_block_beg ()
3231 {
3232 register rtx searcher;
3233 register rtx insn = get_first_nonparm_insn ();
3234
3235 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3236 if (GET_CODE (searcher) == NOTE
3237 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3238 return searcher;
3239
3240 abort (); /* Invalid call to this function. (See comments above.) */
3241 return NULL_RTX;
3242 }
3243
3244 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3245 This means a type for which function calls must pass an address to the
3246 function or get an address back from the function.
3247 EXP may be a type node or an expression (whose type is tested). */
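
/* As an illustration: a type declared TREE_ADDRESSABLE, any aggregate
   under -fpcc-struct-return, or a value whose return registers are
   not all call-clobbered answers 1 here; a plain int normally answers
   0.  Exactly which cases apply is target-dependent.  */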
3248
3249 int
3250 aggregate_value_p (exp)
3251 tree exp;
3252 {
3253 int i, regno, nregs;
3254 rtx reg;
3255 tree type;
3256 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3257 type = exp;
3258 else
3259 type = TREE_TYPE (exp);
3260
3261 if (RETURN_IN_MEMORY (type))
3262 return 1;
3263 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3264 and thus can't be returned in registers. */
3265 if (TREE_ADDRESSABLE (type))
3266 return 1;
3267 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3268 return 1;
3269 /* Make sure we have suitable call-clobbered regs to return
3270 the value in; if not, we must return it in memory. */
3271 reg = hard_function_value (type, 0);
3272
3273 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3274 it is OK. */
3275 if (GET_CODE (reg) != REG)
3276 return 0;
3277
3278 regno = REGNO (reg);
3279 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3280 for (i = 0; i < nregs; i++)
3281 if (! call_used_regs[regno + i])
3282 return 1;
3283 return 0;
3284 }
3285 \f
3286 /* Assign RTL expressions to the function's parameters.
3287 This may involve copying them into registers and using
3288 those registers as the RTL for them.
3289
3290 If SECOND_TIME is non-zero it means that this function is being
3291 called a second time. This is done by integrate.c when a function's
3292 compilation is deferred. We need to come back here in case the
3293 FUNCTION_ARG macro computes items needed for the rest of the compilation
3294 (such as changing which registers are fixed or caller-saved). But suppress
3295 writing any insns or setting DECL_RTL of anything in this case. */
3296
3297 void
3298 assign_parms (fndecl, second_time)
3299 tree fndecl;
3300 int second_time;
3301 {
3302 register tree parm;
3303 register rtx entry_parm = 0;
3304 register rtx stack_parm = 0;
3305 CUMULATIVE_ARGS args_so_far;
3306 enum machine_mode promoted_mode, passed_mode;
3307 enum machine_mode nominal_mode, promoted_nominal_mode;
3308 int unsignedp;
3309 /* Total space needed so far for args on the stack,
3310 given as a constant and a tree-expression. */
3311 struct args_size stack_args_size;
3312 tree fntype = TREE_TYPE (fndecl);
3313 tree fnargs = DECL_ARGUMENTS (fndecl);
3314 /* This is used for the arg pointer when referring to stack args. */
3315 rtx internal_arg_pointer;
3316 /* This is a dummy PARM_DECL that we use for the function result if
3317 the function returns a structure. */
3318 tree function_result_decl = 0;
3319 int nparmregs = list_length (fnargs) + LAST_VIRTUAL_REGISTER + 1;
3320 int varargs_setup = 0;
3321 rtx conversion_insns = 0;
3322
3323 /* Nonzero if the last arg is named `__builtin_va_alist',
3324 which is used on some machines for old-fashioned non-ANSI varargs.h;
3325 this should be stuck onto the stack as if it had arrived there. */
3326 int hide_last_arg
3327 = (current_function_varargs
3328 && fnargs
3329 && (parm = tree_last (fnargs)) != 0
3330 && DECL_NAME (parm)
3331 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3332 "__builtin_va_alist")));
3333
3334 /* Nonzero if function takes extra anonymous args.
3335 This means the last named arg must be on the stack
3336 right before the anonymous ones. */
3337 int stdarg
3338 = (TYPE_ARG_TYPES (fntype) != 0
3339 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3340 != void_type_node));
3341
3342 current_function_stdarg = stdarg;
3343
3344 /* If the reg that the virtual arg pointer will be translated into is
3345 not a fixed reg or is the stack pointer, make a copy of the virtual
3346 arg pointer, and address parms via the copy. The frame pointer is
3347 considered fixed even though it is not marked as such.
3348
3349 The second time through, simply use ap to avoid generating rtx. */
3350
3351 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3352 || ! (fixed_regs[ARG_POINTER_REGNUM]
3353 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3354 && ! second_time)
3355 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3356 else
3357 internal_arg_pointer = virtual_incoming_args_rtx;
3358 current_function_internal_arg_pointer = internal_arg_pointer;
3359
3360 stack_args_size.constant = 0;
3361 stack_args_size.var = 0;
3362
3363 /* If struct value address is treated as the first argument, make it so. */
3364 if (aggregate_value_p (DECL_RESULT (fndecl))
3365 && ! current_function_returns_pcc_struct
3366 && struct_value_incoming_rtx == 0)
3367 {
3368 tree type = build_pointer_type (TREE_TYPE (fntype));
3369
3370 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3371
3372 DECL_ARG_TYPE (function_result_decl) = type;
3373 TREE_CHAIN (function_result_decl) = fnargs;
3374 fnargs = function_result_decl;
3375 }
3376
3377 parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx));
3378 bzero ((char *) parm_reg_stack_loc, nparmregs * sizeof (rtx));
3379
3380 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3381 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3382 #else
3383 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3384 #endif
3385
3386 /* We haven't yet found an argument that we must push and pretend the
3387 caller did. */
3388 current_function_pretend_args_size = 0;
3389
3390 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3391 {
3392 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3393 struct args_size stack_offset;
3394 struct args_size arg_size;
3395 int passed_pointer = 0;
3396 int did_conversion = 0;
3397 tree passed_type = DECL_ARG_TYPE (parm);
3398 tree nominal_type = TREE_TYPE (parm);
3399
3400 /* Set LAST_NAMED if this is last named arg before some
3401 anonymous args. We treat it as if it were anonymous too. */
3402 int last_named = ((TREE_CHAIN (parm) == 0
3403 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3404 && (stdarg || current_function_varargs));
3405
3406 if (TREE_TYPE (parm) == error_mark_node
3407 /* This can happen after weird syntax errors
3408 or if an enum type is defined among the parms. */
3409 || TREE_CODE (parm) != PARM_DECL
3410 || passed_type == NULL)
3411 {
3412 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
3413 const0_rtx);
3414 TREE_USED (parm) = 1;
3415 continue;
3416 }
3417
3418       /* For a varargs.h function, save info about the regs and stack space
3419 	 used by the individual args, not including the va_alist arg. */
3420 if (hide_last_arg && last_named)
3421 current_function_args_info = args_so_far;
3422
3423 /* Find mode of arg as it is passed, and mode of arg
3424 as it should be during execution of this function. */
3425 passed_mode = TYPE_MODE (passed_type);
3426 nominal_mode = TYPE_MODE (nominal_type);
3427
3428       /* If the parm's mode is VOID, its value doesn't matter;
3429 	 avoid the usual things like emit_move_insn that could crash. */
3430 if (nominal_mode == VOIDmode)
3431 {
3432 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3433 continue;
3434 }
3435
3436 /* If the parm is to be passed as a transparent union, use the
3437 type of the first field for the tests below. We have already
3438 verified that the modes are the same. */
3439 if (DECL_TRANSPARENT_UNION (parm)
3440 || TYPE_TRANSPARENT_UNION (passed_type))
3441 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3442
3443 /* See if this arg was passed by invisible reference. It is if
3444 it is an object whose size depends on the contents of the
3445 object itself or if the machine requires these objects be passed
3446 that way. */
3447
3448 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3449 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3450 || TREE_ADDRESSABLE (passed_type)
3451 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3452 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3453 passed_type, ! last_named)
3454 #endif
3455 )
3456 {
3457 passed_type = nominal_type = build_pointer_type (passed_type);
3458 passed_pointer = 1;
3459 passed_mode = nominal_mode = Pmode;
3460 }
3461
3462 promoted_mode = passed_mode;
3463
3464 #ifdef PROMOTE_FUNCTION_ARGS
3465       /* Compute the mode to which the arg is actually extended. */
3466 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3467 #endif
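      /* E.g., on a machine defining PROMOTE_FUNCTION_ARGS that widens
	 sub-word integers, a `short' parm has PASSED_MODE == HImode but
	 PROMOTED_MODE == SImode, with UNSIGNEDP recording whether the
	 widening zero- or sign-extends.  */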
3468
3469 /* Let machine desc say which reg (if any) the parm arrives in.
3470 0 means it arrives on the stack. */
3471 #ifdef FUNCTION_INCOMING_ARG
3472 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3473 passed_type, ! last_named);
3474 #else
3475 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3476 passed_type, ! last_named);
3477 #endif
3478
3479 if (entry_parm == 0)
3480 promoted_mode = passed_mode;
3481
3482 #ifdef SETUP_INCOMING_VARARGS
3483 /* If this is the last named parameter, do any required setup for
3484 varargs or stdargs. We need to know about the case of this being an
3485 addressable type, in which case we skip the registers it
3486 would have arrived in.
3487
3488 For stdargs, LAST_NAMED will be set for two parameters, the one that
3489 is actually the last named, and the dummy parameter. We only
3490 want to do this action once.
3491
3492 Also, indicate when RTL generation is to be suppressed. */
3493 if (last_named && !varargs_setup)
3494 {
3495 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3496 current_function_pretend_args_size,
3497 second_time);
3498 varargs_setup = 1;
3499 }
3500 #endif
3501
3502 /* Determine parm's home in the stack,
3503 in case it arrives in the stack or we should pretend it did.
3504
3505 Compute the stack position and rtx where the argument arrives
3506 and its size.
3507
3508 There is one complexity here: If this was a parameter that would
3509 have been passed in registers, but wasn't only because it is
3510 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3511 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3512 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3513 0 as it was the previous time. */
3514
3515 locate_and_pad_parm (promoted_mode, passed_type,
3516 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3517 1,
3518 #else
3519 #ifdef FUNCTION_INCOMING_ARG
3520 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3521 passed_type,
3522 (! last_named
3523 || varargs_setup)) != 0,
3524 #else
3525 FUNCTION_ARG (args_so_far, promoted_mode,
3526 passed_type,
3527 ! last_named || varargs_setup) != 0,
3528 #endif
3529 #endif
3530 fndecl, &stack_args_size, &stack_offset, &arg_size);
3531
3532 if (! second_time)
3533 {
3534 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3535
3536 if (offset_rtx == const0_rtx)
3537 stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
3538 else
3539 stack_parm = gen_rtx (MEM, promoted_mode,
3540 gen_rtx (PLUS, Pmode,
3541 internal_arg_pointer, offset_rtx));
3542
3543 /* If this is a memory ref that contains aggregate components,
3544 mark it as such for cse and loop optimize. Likewise if it
3545 is readonly. */
3546 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3547 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3548 }
3549
3550 /* If this parameter was passed both in registers and in the stack,
3551 use the copy on the stack. */
3552 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3553 entry_parm = 0;
3554
3555 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3556 /* If this parm was passed part in regs and part in memory,
3557 pretend it arrived entirely in memory
3558 by pushing the register-part onto the stack.
3559
3560 In the special case of a DImode or DFmode that is split,
3561 we could put it together in a pseudoreg directly,
3562 but for now that's not worth bothering with. */
3563
3564 if (entry_parm)
3565 {
3566 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3567 passed_type, ! last_named);
3568
3569 if (nregs > 0)
3570 {
3571 current_function_pretend_args_size
3572 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3573 / (PARM_BOUNDARY / BITS_PER_UNIT)
3574 * (PARM_BOUNDARY / BITS_PER_UNIT));
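	      /* A worked example: assuming UNITS_PER_WORD == 4 and
		 PARM_BOUNDARY == 64, NREGS == 3 gives 12 bytes of
		 register-part, rounded up to the 8-byte parm boundary:
		 (12 + 8 - 1) / 8 * 8 == 16.  */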
3575
3576 if (! second_time)
3577 {
3578 /* Handle calls that pass values in multiple non-contiguous
3579 locations. The Irix 6 ABI has examples of this. */
3580 if (GET_CODE (entry_parm) == PARALLEL)
3581 emit_group_store (validize_mem (stack_parm),
3582 entry_parm);
3583 else
3584 move_block_from_reg (REGNO (entry_parm),
3585 validize_mem (stack_parm), nregs,
3586 int_size_in_bytes (TREE_TYPE (parm)));
3587 }
3588 entry_parm = stack_parm;
3589 }
3590 }
3591 #endif
3592
3593 /* If we didn't decide this parm came in a register,
3594 by default it came on the stack. */
3595 if (entry_parm == 0)
3596 entry_parm = stack_parm;
3597
3598 /* Record permanently how this parm was passed. */
3599 if (! second_time)
3600 DECL_INCOMING_RTL (parm) = entry_parm;
3601
3602 /* If there is actually space on the stack for this parm,
3603 count it in stack_args_size; otherwise set stack_parm to 0
3604 to indicate there is no preallocated stack slot for the parm. */
3605
3606 if (entry_parm == stack_parm
3607 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3608 /* On some machines, even if a parm value arrives in a register
3609 there is still an (uninitialized) stack slot allocated for it.
3610
3611 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3612 whether this parameter already has a stack slot allocated,
3613 because an arg block exists only if current_function_args_size
3614 is larger than some threshold, and we haven't calculated that
3615 yet. So, for now, we just assume that stack slots never exist
3616 in this case. */
3617 || REG_PARM_STACK_SPACE (fndecl) > 0
3618 #endif
3619 )
3620 {
3621 stack_args_size.constant += arg_size.constant;
3622 if (arg_size.var)
3623 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3624 }
3625 else
3626 /* No stack slot was pushed for this parm. */
3627 stack_parm = 0;
3628
3629 /* Update info on where next arg arrives in registers. */
3630
3631 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3632 passed_type, ! last_named);
3633
3634 /* If this is our second time through, we are done with this parm. */
3635 if (second_time)
3636 continue;
3637
3638 /* If we can't trust the parm stack slot to be aligned enough
3639 for its ultimate type, don't use that slot after entry.
3640 We'll make another stack slot, if we need one. */
3641 {
3642 int thisparm_boundary
3643 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
3644
3645 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
3646 stack_parm = 0;
3647 }
3648
3649 /* If parm was passed in memory, and we need to convert it on entry,
3650 don't store it back in that same slot. */
3651 if (entry_parm != 0
3652 && nominal_mode != BLKmode && nominal_mode != passed_mode)
3653 stack_parm = 0;
3654
3655 #if 0
3656 /* Now adjust STACK_PARM to the mode and precise location
3657 where this parameter should live during execution,
3658 if we discover that it must live in the stack during execution.
3659 To make debuggers happier on big-endian machines, we store
3660 the value in the last bytes of the space available. */
3661
3662 if (nominal_mode != BLKmode && nominal_mode != passed_mode
3663 && stack_parm != 0)
3664 {
3665 rtx offset_rtx;
3666
3667 if (BYTES_BIG_ENDIAN
3668 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
3669 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
3670 - GET_MODE_SIZE (nominal_mode));
3671
3672 offset_rtx = ARGS_SIZE_RTX (stack_offset);
3673 if (offset_rtx == const0_rtx)
3674 stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
3675 else
3676 stack_parm = gen_rtx (MEM, nominal_mode,
3677 gen_rtx (PLUS, Pmode,
3678 internal_arg_pointer, offset_rtx));
3679
3680 /* If this is a memory ref that contains aggregate components,
3681 mark it as such for cse and loop optimize. */
3682 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3683 }
3684 #endif /* 0 */
3685
3686 #ifdef STACK_REGS
3687 /* We need this "use" info, because the gcc-register->stack-register
3688 converter in reg-stack.c needs to know which registers are active
3689 at the start of the function call. The actual parameter loading
3690      instructions are not always available by then, since they might
3691      have been optimized away. */
3692
3693 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
3694 emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
3695 #endif
3696
3697 /* ENTRY_PARM is an RTX for the parameter as it arrives,
3698 in the mode in which it arrives.
3699 STACK_PARM is an RTX for a stack slot where the parameter can live
3700 during the function (in case we want to put it there).
3701 STACK_PARM is 0 if no stack slot was pushed for it.
3702
3703 Now output code if necessary to convert ENTRY_PARM to
3704 the type in which this function declares it,
3705 and store that result in an appropriate place,
3706 which may be a pseudo reg, may be STACK_PARM,
3707 or may be a local stack slot if STACK_PARM is 0.
3708
3709 Set DECL_RTL to that place. */
3710
3711 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
3712 {
3713 /* If a BLKmode arrives in registers, copy it to a stack slot.
3714 Handle calls that pass values in multiple non-contiguous
3715 locations. The Irix 6 ABI has examples of this. */
3716 if (GET_CODE (entry_parm) == REG
3717 || GET_CODE (entry_parm) == PARALLEL)
3718 {
3719 int size_stored
3720 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
3721 UNITS_PER_WORD);
3722
3723 /* Note that we will be storing an integral number of words.
3724 So we have to be careful to ensure that we allocate an
3725 integral number of words. We do this below in the
3726 assign_stack_local if space was not allocated in the argument
3727 list. If it was, this will not work if PARM_BOUNDARY is not
3728 a multiple of BITS_PER_WORD. It isn't clear how to fix this
3729 if it becomes a problem. */
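	    /* A worked example: assuming UNITS_PER_WORD == 4, a 6-byte
	       BLKmode parm gets SIZE_STORED == CEIL_ROUND (6, 4) == 8,
	       i.e. two full words.  */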
3730
3731 if (stack_parm == 0)
3732 {
3733 stack_parm
3734 = assign_stack_local (GET_MODE (entry_parm),
3735 size_stored, 0);
3736
3737 /* If this is a memory ref that contains aggregate
3738 components, mark it as such for cse and loop optimize. */
3739 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3740 }
3741
3742 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
3743 abort ();
3744
3745 if (TREE_READONLY (parm))
3746 RTX_UNCHANGING_P (stack_parm) = 1;
3747
3748 /* Handle calls that pass values in multiple non-contiguous
3749 locations. The Irix 6 ABI has examples of this. */
3750 if (GET_CODE (entry_parm) == PARALLEL)
3751 emit_group_store (validize_mem (stack_parm), entry_parm);
3752 else
3753 move_block_from_reg (REGNO (entry_parm),
3754 validize_mem (stack_parm),
3755 size_stored / UNITS_PER_WORD,
3756 int_size_in_bytes (TREE_TYPE (parm)));
3757 }
3758 DECL_RTL (parm) = stack_parm;
3759 }
3760 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
3761 && ! DECL_INLINE (fndecl))
3762 /* layout_decl may set this. */
3763 || TREE_ADDRESSABLE (parm)
3764 || TREE_SIDE_EFFECTS (parm)
3765 /* If -ffloat-store specified, don't put explicit
3766 float variables into registers. */
3767 || (flag_float_store
3768 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
3769 /* Always assign pseudo to structure return or item passed
3770 by invisible reference. */
3771 || passed_pointer || parm == function_result_decl)
3772 {
3773 /* Store the parm in a pseudoregister during the function, but we
3774 may need to do it in a wider mode. */
3775
3776 register rtx parmreg;
3777 int regno, regnoi, regnor;
3778
3779 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
3780
3781 promoted_nominal_mode
3782 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
3783
3784 parmreg = gen_reg_rtx (promoted_nominal_mode);
3785 mark_user_reg (parmreg);
3786
3787 /* If this was an item that we received a pointer to, set DECL_RTL
3788 appropriately. */
3789 if (passed_pointer)
3790 {
3791 DECL_RTL (parm)
3792 = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
3793 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
3794 }
3795 else
3796 DECL_RTL (parm) = parmreg;
3797
3798 /* Copy the value into the register. */
3799 if (nominal_mode != passed_mode
3800 || promoted_nominal_mode != promoted_mode)
3801 {
3802 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
3803 mode, by the caller. We now have to convert it to
3804 NOMINAL_MODE, if different. However, PARMREG may be in
3805 	     a different mode than NOMINAL_MODE if it is being stored
3806 promoted.
3807
3808 If ENTRY_PARM is a hard register, it might be in a register
3809 not valid for operating in its mode (e.g., an odd-numbered
3810 register for a DFmode). In that case, moves are the only
3811 thing valid, so we can't do a convert from there. This
3812 	     occurs when the calling sequence allows such misaligned
3813 usages.
3814
3815 In addition, the conversion may involve a call, which could
3816 clobber parameters which haven't been copied to pseudo
3817 registers yet. Therefore, we must first copy the parm to
3818 a pseudo reg here, and save the conversion until after all
3819 parameters have been moved. */
3820
3821 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
3822
3823 emit_move_insn (tempreg, validize_mem (entry_parm));
3824
3825 push_to_sequence (conversion_insns);
3826 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
3827
3828 expand_assignment (parm,
3829 make_tree (nominal_type, tempreg), 0, 0);
3830 conversion_insns = get_insns ();
3831 did_conversion = 1;
3832 end_sequence ();
3833 }
3834 else
3835 emit_move_insn (parmreg, validize_mem (entry_parm));
3836
3837 /* If we were passed a pointer but the actual value
3838 can safely live in a register, put it in one. */
3839 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
3840 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
3841 && ! DECL_INLINE (fndecl))
3842 /* layout_decl may set this. */
3843 || TREE_ADDRESSABLE (parm)
3844 || TREE_SIDE_EFFECTS (parm)
3845 /* If -ffloat-store specified, don't put explicit
3846 float variables into registers. */
3847 || (flag_float_store
3848 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
3849 {
3850 /* We can't use nominal_mode, because it will have been set to
3851 Pmode above. We must use the actual mode of the parm. */
3852 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
3853 mark_user_reg (parmreg);
3854 emit_move_insn (parmreg, DECL_RTL (parm));
3855 DECL_RTL (parm) = parmreg;
3856 /* STACK_PARM is the pointer, not the parm, and PARMREG is
3857 now the parm. */
3858 stack_parm = 0;
3859 }
3860 #ifdef FUNCTION_ARG_CALLEE_COPIES
3861 /* If we are passed an arg by reference and it is our responsibility
3862 to make a copy, do it now.
3863 	 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
3864 original argument, so we must recreate them in the call to
3865 FUNCTION_ARG_CALLEE_COPIES. */
3866       /* ??? Later add code to avoid the copy if the argument
3867 	 isn't modified. */
3868
3869 else if (passed_pointer
3870 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
3871 TYPE_MODE (DECL_ARG_TYPE (parm)),
3872 DECL_ARG_TYPE (parm),
3873 ! last_named)
3874 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
3875 {
3876 rtx copy;
3877 tree type = DECL_ARG_TYPE (parm);
3878
3879 /* This sequence may involve a library call perhaps clobbering
3880 registers that haven't been copied to pseudos yet. */
3881
3882 push_to_sequence (conversion_insns);
3883
3884 if (TYPE_SIZE (type) == 0
3885 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3886 /* This is a variable sized object. */
3887 copy = gen_rtx (MEM, BLKmode,
3888 allocate_dynamic_stack_space
3889 (expr_size (parm), NULL_RTX,
3890 TYPE_ALIGN (type)));
3891 else
3892 copy = assign_stack_temp (TYPE_MODE (type),
3893 int_size_in_bytes (type), 1);
3894 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
3895
3896 store_expr (parm, copy, 0);
3897 emit_move_insn (parmreg, XEXP (copy, 0));
3898 conversion_insns = get_insns ();
3899 did_conversion = 1;
3900 end_sequence ();
3901 }
3902 #endif /* FUNCTION_ARG_CALLEE_COPIES */
3903
3904 /* In any case, record the parm's desired stack location
3905 in case we later discover it must live in the stack.
3906
3907 If it is a COMPLEX value, store the stack location for both
3908 halves. */
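	  /* E.g., a complex parm whose PARMREG is a CONCAT of two DFmode
	     regs gets a separate stack location recorded below for the
	     real part and for the imaginary part.  */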
3909
3910 if (GET_CODE (parmreg) == CONCAT)
3911 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
3912 else
3913 regno = REGNO (parmreg);
3914
3915 if (regno >= nparmregs)
3916 {
3917 rtx *new;
3918 int old_nparmregs = nparmregs;
3919
3920 nparmregs = regno + 5;
3921 new = (rtx *) oballoc (nparmregs * sizeof (rtx));
3922 bcopy ((char *) parm_reg_stack_loc, (char *) new,
3923 old_nparmregs * sizeof (rtx));
3924 bzero ((char *) (new + old_nparmregs),
3925 (nparmregs - old_nparmregs) * sizeof (rtx));
3926 parm_reg_stack_loc = new;
3927 }
3928
3929 if (GET_CODE (parmreg) == CONCAT)
3930 {
3931 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
3932
3933 regnor = REGNO (gen_realpart (submode, parmreg));
3934 regnoi = REGNO (gen_imagpart (submode, parmreg));
3935
3936 if (stack_parm != 0)
3937 {
3938 parm_reg_stack_loc[regnor]
3939 = gen_realpart (submode, stack_parm);
3940 parm_reg_stack_loc[regnoi]
3941 = gen_imagpart (submode, stack_parm);
3942 }
3943 else
3944 {
3945 parm_reg_stack_loc[regnor] = 0;
3946 parm_reg_stack_loc[regnoi] = 0;
3947 }
3948 }
3949 else
3950 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
3951
3952 /* Mark the register as eliminable if we did no conversion
3953 and it was copied from memory at a fixed offset,
3954 and the arg pointer was not copied to a pseudo-reg.
3955 If the arg pointer is a pseudo reg or the offset formed
3956 an invalid address, such memory-equivalences
3957 as we make here would screw up life analysis for it. */
3958 if (nominal_mode == passed_mode
3959 && ! did_conversion
3960 && GET_CODE (entry_parm) == MEM
3961 && entry_parm == stack_parm
3962 && stack_offset.var == 0
3963 && reg_mentioned_p (virtual_incoming_args_rtx,
3964 XEXP (entry_parm, 0)))
3965 {
3966 rtx linsn = get_last_insn ();
3967 rtx sinsn, set;
3968
3969 /* Mark complex types separately. */
3970 if (GET_CODE (parmreg) == CONCAT)
3971 /* Scan backwards for the set of the real and
3972 imaginary parts. */
3973 for (sinsn = linsn; sinsn != 0;
3974 sinsn = prev_nonnote_insn (sinsn))
3975 {
3976 set = single_set (sinsn);
3977 if (set != 0
3978 && SET_DEST (set) == regno_reg_rtx [regnoi])
3979 REG_NOTES (sinsn)
3980 = gen_rtx (EXPR_LIST, REG_EQUIV,
3981 parm_reg_stack_loc[regnoi],
3982 REG_NOTES (sinsn));
3983 else if (set != 0
3984 && SET_DEST (set) == regno_reg_rtx [regnor])
3985 REG_NOTES (sinsn)
3986 = gen_rtx (EXPR_LIST, REG_EQUIV,
3987 parm_reg_stack_loc[regnor],
3988 REG_NOTES (sinsn));
3989 }
3990 else if ((set = single_set (linsn)) != 0
3991 && SET_DEST (set) == parmreg)
3992 REG_NOTES (linsn)
3993 = gen_rtx (EXPR_LIST, REG_EQUIV,
3994 entry_parm, REG_NOTES (linsn));
3995 }
3996
3997 /* For pointer data type, suggest pointer register. */
3998 if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
3999 mark_reg_pointer (parmreg,
4000 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4001 / BITS_PER_UNIT));
4002 }
4003 else
4004 {
4005 /* Value must be stored in the stack slot STACK_PARM
4006 during function execution. */
4007
4008 if (promoted_mode != nominal_mode)
4009 {
4010 /* Conversion is required. */
4011 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4012
4013 emit_move_insn (tempreg, validize_mem (entry_parm));
4014
4015 push_to_sequence (conversion_insns);
4016 entry_parm = convert_to_mode (nominal_mode, tempreg,
4017 TREE_UNSIGNED (TREE_TYPE (parm)));
4018 conversion_insns = get_insns ();
4019 did_conversion = 1;
4020 end_sequence ();
4021 }
4022
4023 if (entry_parm != stack_parm)
4024 {
4025 if (stack_parm == 0)
4026 {
4027 stack_parm
4028 = assign_stack_local (GET_MODE (entry_parm),
4029 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4030 /* If this is a memory ref that contains aggregate components,
4031 mark it as such for cse and loop optimize. */
4032 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4033 }
4034
4035 if (promoted_mode != nominal_mode)
4036 {
4037 push_to_sequence (conversion_insns);
4038 emit_move_insn (validize_mem (stack_parm),
4039 validize_mem (entry_parm));
4040 conversion_insns = get_insns ();
4041 end_sequence ();
4042 }
4043 else
4044 emit_move_insn (validize_mem (stack_parm),
4045 validize_mem (entry_parm));
4046 }
4047
4048 DECL_RTL (parm) = stack_parm;
4049 }
4050
4051 /* If this "parameter" was the place where we are receiving the
4052 function's incoming structure pointer, set up the result. */
4053 if (parm == function_result_decl)
4054 {
4055 tree result = DECL_RESULT (fndecl);
4056 tree restype = TREE_TYPE (result);
4057
4058 DECL_RTL (result)
4059 = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
4060
4061 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4062 }
4063
4064 if (TREE_THIS_VOLATILE (parm))
4065 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4066 if (TREE_READONLY (parm))
4067 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4068 }
4069
4070 /* Output all parameter conversion instructions (possibly including calls)
4071 now that all parameters have been copied out of hard registers. */
4072 emit_insns (conversion_insns);
4073
4074 max_parm_reg = max_reg_num ();
4075 last_parm_insn = get_last_insn ();
4076
4077 current_function_args_size = stack_args_size.constant;
4078
4079 /* Adjust function incoming argument size for alignment and
4080 minimum length. */
4081
4082 #ifdef REG_PARM_STACK_SPACE
4083 #ifndef MAYBE_REG_PARM_STACK_SPACE
4084 current_function_args_size = MAX (current_function_args_size,
4085 REG_PARM_STACK_SPACE (fndecl));
4086 #endif
4087 #endif
4088
4089 #ifdef STACK_BOUNDARY
4090 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4091
4092 current_function_args_size
4093 = ((current_function_args_size + STACK_BYTES - 1)
4094 / STACK_BYTES) * STACK_BYTES;
4095 #endif
4096
4097 #ifdef ARGS_GROW_DOWNWARD
4098 current_function_arg_offset_rtx
4099 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4100 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4101 size_int (-stack_args_size.constant)),
4102 NULL_RTX, VOIDmode, 0));
4103 #else
4104 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4105 #endif
4106
4107 /* See how many bytes, if any, of its args a function should try to pop
4108 on return. */
4109
4110 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4111 current_function_args_size);
4112
4113   /* For a stdarg.h function, save info about the
4114      regs and stack space used by the named args. */
4115
4116 if (!hide_last_arg)
4117 current_function_args_info = args_so_far;
4118
4119 /* Set the rtx used for the function return value. Put this in its
4120 own variable so any optimizers that need this information don't have
4121 to include tree.h. Do this here so it gets done when an inlined
4122 function gets output. */
4123
4124 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4125 }
4126 \f
4127 /* Indicate whether REGNO is an incoming argument to the current function
4128 that was promoted to a wider mode. If so, return the RTX for the
4129 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4130 that REGNO is promoted from and whether the promotion was signed or
4131 unsigned. */
4132
4133 #ifdef PROMOTE_FUNCTION_ARGS
4134
4135 rtx
4136 promoted_input_arg (regno, pmode, punsignedp)
4137 int regno;
4138 enum machine_mode *pmode;
4139 int *punsignedp;
4140 {
4141 tree arg;
4142
4143 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4144 arg = TREE_CHAIN (arg))
4145 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4146 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4147 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4148 {
4149 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4150 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4151
4152 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4153 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4154 && mode != DECL_MODE (arg))
4155 {
4156 *pmode = DECL_MODE (arg);
4157 *punsignedp = unsignedp;
4158 return DECL_INCOMING_RTL (arg);
4159 }
4160 }
4161
4162 return 0;
4163 }
4164
4165 #endif
4166 \f
4167 /* Compute the size and offset from the start of the stacked arguments for a
4168 parm passed in mode PASSED_MODE and with type TYPE.
4169
4170 INITIAL_OFFSET_PTR points to the current offset into the stacked
4171 arguments.
4172
4173 The starting offset and size for this parm are returned in *OFFSET_PTR
4174 and *ARG_SIZE_PTR, respectively.
4175
4176 IN_REGS is non-zero if the argument will be passed in registers. It will
4177 never be set if REG_PARM_STACK_SPACE is not defined.
4178
4179 FNDECL is the function in which the argument was defined.
4180
4181 There are two types of rounding that are done. The first, controlled by
4182 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4183 list to be aligned to the specific boundary (in bits). This rounding
4184 affects the initial and starting offsets, but not the argument size.
4185
4186 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4187 optionally rounds the size of the parm to PARM_BOUNDARY. The
4188 initial offset is not affected by this rounding, while the size always
4189 is and the starting offset may be. */
4190
4191 /* OFFSET_PTR will be negative in the ARGS_GROW_DOWNWARD case;
4192    INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
4193    callers pass in the total size of args so far as
4194    INITIAL_OFFSET_PTR.  ARG_SIZE_PTR is always positive.  */
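/* A worked example of the two roundings, assuming args grow upward: with
   FUNCTION_ARG_BOUNDARY == 64 and *INITIAL_OFFSET_PTR == {4, 0}, the
   offset is first padded to 8; a 10-byte BLKmode arg with PARM_BOUNDARY
   == 32 and padding enabled then gets *ARG_SIZE_PTR == 12, so the
   caller's running total advances from 8 to 20.  */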
4195
4196 void
4197 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4198 initial_offset_ptr, offset_ptr, arg_size_ptr)
4199 enum machine_mode passed_mode;
4200 tree type;
4201 int in_regs;
4202 tree fndecl;
4203 struct args_size *initial_offset_ptr;
4204 struct args_size *offset_ptr;
4205 struct args_size *arg_size_ptr;
4206 {
4207 tree sizetree
4208 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4209 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4210 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4211 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4212 int reg_parm_stack_space = 0;
4213
4214 #ifdef REG_PARM_STACK_SPACE
4215 /* If we have found a stack parm before we reach the end of the
4216 area reserved for registers, skip that area. */
4217 if (! in_regs)
4218 {
4219 #ifdef MAYBE_REG_PARM_STACK_SPACE
4220 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4221 #else
4222 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4223 #endif
4224 if (reg_parm_stack_space > 0)
4225 {
4226 if (initial_offset_ptr->var)
4227 {
4228 initial_offset_ptr->var
4229 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4230 size_int (reg_parm_stack_space));
4231 initial_offset_ptr->constant = 0;
4232 }
4233 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4234 initial_offset_ptr->constant = reg_parm_stack_space;
4235 }
4236 }
4237 #endif /* REG_PARM_STACK_SPACE */
4238
4239 arg_size_ptr->var = 0;
4240 arg_size_ptr->constant = 0;
4241
4242 #ifdef ARGS_GROW_DOWNWARD
4243 if (initial_offset_ptr->var)
4244 {
4245 offset_ptr->constant = 0;
4246 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4247 initial_offset_ptr->var);
4248 }
4249 else
4250 {
4251 offset_ptr->constant = - initial_offset_ptr->constant;
4252 offset_ptr->var = 0;
4253 }
4254 if (where_pad != none
4255 && (TREE_CODE (sizetree) != INTEGER_CST
4256 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4257 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4258 SUB_PARM_SIZE (*offset_ptr, sizetree);
4259 if (where_pad != downward)
4260 pad_to_arg_alignment (offset_ptr, boundary);
4261 if (initial_offset_ptr->var)
4262 {
4263 arg_size_ptr->var = size_binop (MINUS_EXPR,
4264 size_binop (MINUS_EXPR,
4265 integer_zero_node,
4266 initial_offset_ptr->var),
4267 offset_ptr->var);
4268 }
4269 else
4270 {
4271 arg_size_ptr->constant = (- initial_offset_ptr->constant -
4272 offset_ptr->constant);
4273 }
4274 #else /* !ARGS_GROW_DOWNWARD */
4275 pad_to_arg_alignment (initial_offset_ptr, boundary);
4276 *offset_ptr = *initial_offset_ptr;
4277
4278 #ifdef PUSH_ROUNDING
4279 if (passed_mode != BLKmode)
4280 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4281 #endif
4282
4283   /* Pad_below needs the pre-rounded size to know how much to pad below,
4284      so this must be done before rounding up. */
4285 if (where_pad == downward
4286 /* However, BLKmode args passed in regs have their padding done elsewhere.
4287 The stack slot must be able to hold the entire register. */
4288 && !(in_regs && passed_mode == BLKmode))
4289 pad_below (offset_ptr, passed_mode, sizetree);
4290
4291 if (where_pad != none
4292 && (TREE_CODE (sizetree) != INTEGER_CST
4293 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4294 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4295
4296 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4297 #endif /* ARGS_GROW_DOWNWARD */
4298 }
4299
4300 /* Round the stack offset in *OFFSET_PTR to a multiple of BOUNDARY (up, or
4301    down if ARGS_GROW_DOWNWARD).  BOUNDARY is in bits, a multiple of a storage unit. */
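/* E.g., with BOUNDARY == 64 bits (8 bytes), a constant offset of 12
   becomes CEIL_ROUND (12, 8) == 16 when args grow upward, and
   FLOOR_ROUND (-12, 8) == -16 when ARGS_GROW_DOWNWARD.  */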
4302
4303 static void
4304 pad_to_arg_alignment (offset_ptr, boundary)
4305 struct args_size *offset_ptr;
4306 int boundary;
4307 {
4308 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4309
4310 if (boundary > BITS_PER_UNIT)
4311 {
4312 if (offset_ptr->var)
4313 {
4314 offset_ptr->var =
4315 #ifdef ARGS_GROW_DOWNWARD
4316 round_down
4317 #else
4318 round_up
4319 #endif
4320 (ARGS_SIZE_TREE (*offset_ptr),
4321 boundary / BITS_PER_UNIT);
4322 offset_ptr->constant = 0; /*?*/
4323 }
4324 else
4325 offset_ptr->constant =
4326 #ifdef ARGS_GROW_DOWNWARD
4327 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4328 #else
4329 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4330 #endif
4331 }
4332 }
4333
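/* Adjust *OFFSET_PTR to account for padding below a downward-padded parm
   of mode PASSED_MODE (or, for BLKmode, of size SIZETREE), so that the
   offset points at the value itself rather than at the start of its slot.
   E.g., an HImode parm with PARM_BOUNDARY == 32 has its offset advanced
   by 4 - 2 == 2 bytes.  */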
4334 static void
4335 pad_below (offset_ptr, passed_mode, sizetree)
4336 struct args_size *offset_ptr;
4337 enum machine_mode passed_mode;
4338 tree sizetree;
4339 {
4340 if (passed_mode != BLKmode)
4341 {
4342 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4343 offset_ptr->constant
4344 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4345 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4346 - GET_MODE_SIZE (passed_mode));
4347 }
4348 else
4349 {
4350 if (TREE_CODE (sizetree) != INTEGER_CST
4351 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4352 {
4353 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4354 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4355 /* Add it in. */
4356 ADD_PARM_SIZE (*offset_ptr, s2);
4357 SUB_PARM_SIZE (*offset_ptr, sizetree);
4358 }
4359 }
4360 }
4361
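/* Round the tree VALUE down to a multiple of DIVISOR; e.g. a VALUE of 10
   rounded down to a multiple of 8 yields 8.  */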
4362 static tree
4363 round_down (value, divisor)
4364 tree value;
4365 int divisor;
4366 {
4367 return size_binop (MULT_EXPR,
4368 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4369 size_int (divisor));
4370 }
4371 \f
4372 /* Walk the tree of blocks describing the binding levels within a function
4373 and warn about uninitialized variables.
4374 This is done after calling flow_analysis and before global_alloc
4375 clobbers the pseudo-regs to hard regs. */
4376
4377 void
4378 uninitialized_vars_warning (block)
4379 tree block;
4380 {
4381 register tree decl, sub;
4382 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4383 {
4384 if (TREE_CODE (decl) == VAR_DECL
4385 	/* These warnings are unreliable for aggregates
4386 because assigning the fields one by one can fail to convince
4387 flow.c that the entire aggregate was initialized.
4388 Unions are troublesome because members may be shorter. */
4389 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4390 && DECL_RTL (decl) != 0
4391 && GET_CODE (DECL_RTL (decl)) == REG
4392 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4393 warning_with_decl (decl,
4394 "`%s' might be used uninitialized in this function");
4395 if (TREE_CODE (decl) == VAR_DECL
4396 && DECL_RTL (decl) != 0
4397 && GET_CODE (DECL_RTL (decl)) == REG
4398 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4399 warning_with_decl (decl,
4400 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4401 }
4402 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4403 uninitialized_vars_warning (sub);
4404 }
4405
4406 /* Do the appropriate part of uninitialized_vars_warning
4407 but for arguments instead of local variables. */
4408
4409 void
4410 setjmp_args_warning ()
4411 {
4412 register tree decl;
4413 for (decl = DECL_ARGUMENTS (current_function_decl);
4414 decl; decl = TREE_CHAIN (decl))
4415 if (DECL_RTL (decl) != 0
4416 && GET_CODE (DECL_RTL (decl)) == REG
4417 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4418 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4419 }
4420
4421 /* If this function calls setjmp, put all vars into the stack
4422 unless they were declared `register'. */
4423
4424 void
4425 setjmp_protect (block)
4426 tree block;
4427 {
4428 register tree decl, sub;
4429 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4430 if ((TREE_CODE (decl) == VAR_DECL
4431 || TREE_CODE (decl) == PARM_DECL)
4432 && DECL_RTL (decl) != 0
4433 && GET_CODE (DECL_RTL (decl)) == REG
4434 	/* If this variable came from an inline function, it must be
4435 	   that its life doesn't overlap the setjmp.  If there was a
4436 	   setjmp in the function, it would already be in memory.  We
4437 	   must exclude such variables because their DECL_RTL might be
4438 	   set to strange things such as virtual_stack_vars_rtx. */
4439 && ! DECL_FROM_INLINE (decl)
4440 && (
4441 #ifdef NON_SAVING_SETJMP
4442 /* If longjmp doesn't restore the registers,
4443 don't put anything in them. */
4444 NON_SAVING_SETJMP
4445 ||
4446 #endif
4447 ! DECL_REGISTER (decl)))
4448 put_var_into_stack (decl);
4449 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4450 setjmp_protect (sub);
4451 }
4452 \f
4453 /* Like the previous function, but for args instead of local variables. */
4454
4455 void
4456 setjmp_protect_args ()
4457 {
4458 register tree decl, sub;
4459 for (decl = DECL_ARGUMENTS (current_function_decl);
4460 decl; decl = TREE_CHAIN (decl))
4461 if ((TREE_CODE (decl) == VAR_DECL
4462 || TREE_CODE (decl) == PARM_DECL)
4463 && DECL_RTL (decl) != 0
4464 && GET_CODE (DECL_RTL (decl)) == REG
4465 && (
4466 /* If longjmp doesn't restore the registers,
4467 don't put anything in them. */
4468 #ifdef NON_SAVING_SETJMP
4469 NON_SAVING_SETJMP
4470 ||
4471 #endif
4472 ! DECL_REGISTER (decl)))
4473 put_var_into_stack (decl);
4474 }
4475 \f
4476 /* Return the context-pointer register corresponding to DECL,
4477 or 0 if it does not need one. */
4478
4479 rtx
4480 lookup_static_chain (decl)
4481 tree decl;
4482 {
4483 tree context = decl_function_context (decl);
4484 tree link;
4485
4486 if (context == 0
4487 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4488 return 0;
4489
4490 /* We treat inline_function_decl as an alias for the current function
4491 because that is the inline function whose vars, types, etc.
4492 are being merged into the current function.
4493 See expand_inline_function. */
4494 if (context == current_function_decl || context == inline_function_decl)
4495 return virtual_stack_vars_rtx;
4496
4497 for (link = context_display; link; link = TREE_CHAIN (link))
4498 if (TREE_PURPOSE (link) == context)
4499 return RTL_EXPR_RTL (TREE_VALUE (link));
4500
4501 abort ();
4502 }
4503 \f
4504 /* Convert a stack slot address ADDR for variable VAR
4505 (from a containing function)
4506 into an address valid in this function (using a static chain). */
4507
4508 rtx
4509 fix_lexical_addr (addr, var)
4510 rtx addr;
4511 tree var;
4512 {
4513 rtx basereg;
4514 int displacement;
4515 tree context = decl_function_context (var);
4516 struct function *fp;
4517 rtx base = 0;
4518
4519 /* If this is the present function, we need not do anything. */
4520 if (context == current_function_decl || context == inline_function_decl)
4521 return addr;
4522
4523 for (fp = outer_function_chain; fp; fp = fp->next)
4524 if (fp->decl == context)
4525 break;
4526
4527 if (fp == 0)
4528 abort ();
4529
4530 /* Decode given address as base reg plus displacement. */
4531 if (GET_CODE (addr) == REG)
4532 basereg = addr, displacement = 0;
4533 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4534 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4535 else
4536 abort ();
4537
4538 /* We accept vars reached via the containing function's
4539 incoming arg pointer and via its stack variables pointer. */
4540 if (basereg == fp->internal_arg_pointer)
4541 {
4542 /* If reached via arg pointer, get the arg pointer value
4543 out of that function's stack frame.
4544
4545 There are two cases: If a separate ap is needed, allocate a
4546 slot in the outer function for it and dereference it that way.
4547 This is correct even if the real ap is actually a pseudo.
4548 Otherwise, just adjust the offset from the frame pointer to
4549 compensate. */
4550
4551 #ifdef NEED_SEPARATE_AP
4552 rtx addr;
4553
4554 if (fp->arg_pointer_save_area == 0)
4555 fp->arg_pointer_save_area
4556 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4557
4558 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4559 addr = memory_address (Pmode, addr);
4560
4561 base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
4562 #else
4563 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4564 base = lookup_static_chain (var);
4565 #endif
4566 }
4567
4568 else if (basereg == virtual_stack_vars_rtx)
4569 {
4570 /* This is the same code as lookup_static_chain, duplicated here to
4571 avoid an extra call to decl_function_context. */
4572 tree link;
4573
4574 for (link = context_display; link; link = TREE_CHAIN (link))
4575 if (TREE_PURPOSE (link) == context)
4576 {
4577 base = RTL_EXPR_RTL (TREE_VALUE (link));
4578 break;
4579 }
4580 }
4581
4582 if (base == 0)
4583 abort ();
4584
4585 /* Use same offset, relative to appropriate static chain or argument
4586 pointer. */
4587 return plus_constant (base, displacement);
4588 }
4589 \f
4590 /* Return the address of the trampoline for entering nested fn FUNCTION.
4591 If necessary, allocate a trampoline (in the stack frame)
4592 and emit rtl to initialize its contents (at entry to this function). */
4593
4594 rtx
4595 trampoline_address (function)
4596 tree function;
4597 {
4598 tree link;
4599 tree rtlexp;
4600 rtx tramp;
4601 struct function *fp;
4602 tree fn_context;
4603
4604 /* Find an existing trampoline and return it. */
4605 for (link = trampoline_list; link; link = TREE_CHAIN (link))
4606 if (TREE_PURPOSE (link) == function)
4607 return
4608 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
4609
4610 for (fp = outer_function_chain; fp; fp = fp->next)
4611 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
4612 if (TREE_PURPOSE (link) == function)
4613 {
4614 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
4615 function);
4616 return round_trampoline_addr (tramp);
4617 }
4618
4619 /* None exists; we must make one. */
4620
4621 /* Find the `struct function' for the function containing FUNCTION. */
4622 fp = 0;
4623 fn_context = decl_function_context (function);
4624 if (fn_context != current_function_decl)
4625 for (fp = outer_function_chain; fp; fp = fp->next)
4626 if (fp->decl == fn_context)
4627 break;
4628
4629 /* Allocate run-time space for this trampoline
4630 (usually in the defining function's stack frame). */
4631 #ifdef ALLOCATE_TRAMPOLINE
4632 tramp = ALLOCATE_TRAMPOLINE (fp);
4633 #else
4634 /* If rounding needed, allocate extra space
4635 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
4636 #ifdef TRAMPOLINE_ALIGNMENT
4637 #define TRAMPOLINE_REAL_SIZE \
4638 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
4639 #else
4640 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
4641 #endif
4642 if (fp != 0)
4643 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
4644 else
4645 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
4646 #endif
4647
4648 /* Record the trampoline for reuse and note it for later initialization
4649 by expand_function_end. */
4650 if (fp != 0)
4651 {
4652 push_obstacks (fp->function_maybepermanent_obstack,
4653 fp->function_maybepermanent_obstack);
4654 rtlexp = make_node (RTL_EXPR);
4655 RTL_EXPR_RTL (rtlexp) = tramp;
4656 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
4657 pop_obstacks ();
4658 }
4659 else
4660 {
4661 /* Make the RTL_EXPR node temporary, not momentary, so that the
4662 trampoline_list doesn't become garbage. */
4663 int momentary = suspend_momentary ();
4664 rtlexp = make_node (RTL_EXPR);
4665 resume_momentary (momentary);
4666
4667 RTL_EXPR_RTL (rtlexp) = tramp;
4668 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
4669 }
4670
4671 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
4672 return round_trampoline_addr (tramp);
4673 }
4674
4675 /* Given a trampoline address,
4676 round it to multiple of TRAMPOLINE_ALIGNMENT. */
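/* E.g., with TRAMPOLINE_ALIGNMENT == 64 (8 bytes), an address of 0x1003
   is rounded via (0x1003 + 7) & -8 to 0x1008.  */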
4677
4678 static rtx
4679 round_trampoline_addr (tramp)
4680 rtx tramp;
4681 {
4682 #ifdef TRAMPOLINE_ALIGNMENT
4683 /* Round address up to desired boundary. */
4684 rtx temp = gen_reg_rtx (Pmode);
4685 temp = expand_binop (Pmode, add_optab, tramp,
4686 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
4687 temp, 0, OPTAB_LIB_WIDEN);
4688 tramp = expand_binop (Pmode, and_optab, temp,
4689 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
4690 temp, 0, OPTAB_LIB_WIDEN);
4691 #endif
4692 return tramp;
4693 }
4694 \f
4695 /* The functions identify_blocks and reorder_blocks provide a way to
4696 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
4697 duplicate portions of the RTL code. Call identify_blocks before
4698 changing the RTL, and call reorder_blocks after. */
4699
4700 /* Put all this function's BLOCK nodes including those that are chained
4701 onto the first block into a vector, and return it.
4702 Also store in each NOTE for the beginning or end of a block
4703 the index of that block in the vector.
4704 The arguments are BLOCK, the chain of top-level blocks of the function,
4705 and INSNS, the insn chain of the function. */
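/* For instance, for a body `{ int a; { int b; } }' with two BLOCKs, the
   outermost block sits at index 0 and has no notes of its own, while the
   nested block is at index 1 and its BLOCK_BEG/BLOCK_END notes get
   NOTE_BLOCK_NUMBER == 1.  */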
4706
4707 tree *
4708 identify_blocks (block, insns)
4709 tree block;
4710 rtx insns;
4711 {
4712 int n_blocks;
4713 tree *block_vector;
4714 int *block_stack;
4715 int depth = 0;
4716 int next_block_number = 1;
4717 int current_block_number = 1;
4718 rtx insn;
4719
4720 if (block == 0)
4721 return 0;
4722
4723 n_blocks = all_blocks (block, 0);
4724 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
4725 block_stack = (int *) alloca (n_blocks * sizeof (int));
4726
4727 all_blocks (block, block_vector);
4728
4729 for (insn = insns; insn; insn = NEXT_INSN (insn))
4730 if (GET_CODE (insn) == NOTE)
4731 {
4732 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4733 {
4734 block_stack[depth++] = current_block_number;
4735 current_block_number = next_block_number;
4736 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
4737 }
4738 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4739 {
4740 current_block_number = block_stack[--depth];
4741 NOTE_BLOCK_NUMBER (insn) = current_block_number;
4742 }
4743 }
4744
4745 if (n_blocks != next_block_number)
4746 abort ();
4747
4748 return block_vector;
4749 }
4750
4751 /* Given BLOCK_VECTOR which was returned by identify_blocks,
4752 and a revised instruction chain, rebuild the tree structure
4753 of BLOCK nodes to correspond to the new order of RTL.
4754 The new block tree is inserted below TOP_BLOCK.
4755 Returns the current top-level block. */
4756
4757 tree
4758 reorder_blocks (block_vector, block, insns)
4759 tree *block_vector;
4760 tree block;
4761 rtx insns;
4762 {
4763 tree current_block = block;
4764 rtx insn;
4765
4766 if (block_vector == 0)
4767 return block;
4768
4769   /* Prune the old tree away, so that it doesn't get in the way. */
4770 BLOCK_SUBBLOCKS (current_block) = 0;
4771 BLOCK_CHAIN (current_block) = 0;
4772
4773 for (insn = insns; insn; insn = NEXT_INSN (insn))
4774 if (GET_CODE (insn) == NOTE)
4775 {
4776 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
4777 {
4778 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
4779 /* If we have seen this block before, copy it. */
4780 if (TREE_ASM_WRITTEN (block))
4781 block = copy_node (block);
4782 BLOCK_SUBBLOCKS (block) = 0;
4783 TREE_ASM_WRITTEN (block) = 1;
4784 BLOCK_SUPERCONTEXT (block) = current_block;
4785 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
4786 BLOCK_SUBBLOCKS (current_block) = block;
4787 current_block = block;
4788 NOTE_SOURCE_FILE (insn) = 0;
4789 }
4790 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
4791 {
4792 BLOCK_SUBBLOCKS (current_block)
4793 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4794 current_block = BLOCK_SUPERCONTEXT (current_block);
4795 NOTE_SOURCE_FILE (insn) = 0;
4796 }
4797 }
4798
4799 BLOCK_SUBBLOCKS (current_block)
4800 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
4801 return current_block;
4802 }
4803
4804 /* Reverse the order of elements in the chain T of blocks,
4805 and return the new head of the chain (old last element). */
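/* E.g., a chain B1 -> B2 -> B3 becomes B3 -> B2 -> B1, and B3 is
   returned.  */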
4806
4807 static tree
4808 blocks_nreverse (t)
4809 tree t;
4810 {
4811 register tree prev = 0, decl, next;
4812 for (decl = t; decl; decl = next)
4813 {
4814 next = BLOCK_CHAIN (decl);
4815 BLOCK_CHAIN (decl) = prev;
4816 prev = decl;
4817 }
4818 return prev;
4819 }
4820
4821 /* Count the subblocks of the list starting with BLOCK, and list them
4822 all into the vector VECTOR. Also clear TREE_ASM_WRITTEN in all
4823 blocks. */
4824
4825 static int
4826 all_blocks (block, vector)
4827 tree block;
4828 tree *vector;
4829 {
4830 int n_blocks = 0;
4831
4832 while (block)
4833 {
4834 TREE_ASM_WRITTEN (block) = 0;
4835
4836 /* Record this block. */
4837 if (vector)
4838 vector[n_blocks] = block;
4839
4840 ++n_blocks;
4841
4842 /* Record the subblocks, and their subblocks... */
4843 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
4844 vector ? vector + n_blocks : 0);
4845 block = BLOCK_CHAIN (block);
4846 }
4847
4848 return n_blocks;
4849 }
4850 \f
4851 /* Build bytecode call descriptor for function SUBR. */
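/* The vector is laid out as: the argument count, then the return type's
   runtime type code and size, then each argument's type code and size in
   declaration order.  */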
4852
4853 rtx
4854 bc_build_calldesc (subr)
4855 tree subr;
4856 {
4857 tree calldesc = 0, arg;
4858 int nargs = 0;
4859
4860 /* Build the argument description vector in reverse order. */
4861 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4862 nargs = 0;
4863
4864 for (arg = DECL_ARGUMENTS (subr); arg; arg = TREE_CHAIN (arg))
4865 {
4866 ++nargs;
4867
4868 calldesc = tree_cons ((tree) 0, size_in_bytes (TREE_TYPE (arg)), calldesc);
4869 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (TREE_TYPE (arg)), calldesc);
4870 }
4871
4872 DECL_ARGUMENTS (subr) = nreverse (DECL_ARGUMENTS (subr));
4873
4874 /* Prepend the function's return type. */
4875 calldesc = tree_cons ((tree) 0,
4876 size_in_bytes (TREE_TYPE (TREE_TYPE (subr))),
4877 calldesc);
4878
4879 calldesc = tree_cons ((tree) 0,
4880 bc_runtime_type_code (TREE_TYPE (TREE_TYPE (subr))),
4881 calldesc);
4882
4883 /* Prepend the arg count. */
4884 calldesc = tree_cons ((tree) 0, build_int_2 (nargs, 0), calldesc);
4885
4886 /* Output the call description vector and get its address. */
4887 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
4888 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
4889 build_index_type (build_int_2 (nargs * 2, 0)));
4890
4891 return output_constant_def (calldesc);
4892 }
4893
4894
4895 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
4896 and initialize static variables for generating RTL for the statements
4897 of the function. */
4898
4899 void
4900 init_function_start (subr, filename, line)
4901 tree subr;
4902 char *filename;
4903 int line;
4904 {
4905 char *junk;
4906
4907 if (output_bytecode)
4908 {
4909 this_function_decl = subr;
4910 this_function_calldesc = bc_build_calldesc (subr);
4911 local_vars_size = 0;
4912 stack_depth = 0;
4913 max_stack_depth = 0;
4914 stmt_expr_depth = 0;
4915 return;
4916 }
4917
4918 init_stmt_for_function ();
4919
4920 cse_not_expected = ! optimize;
4921
4922 /* Caller save not needed yet. */
4923 caller_save_needed = 0;
4924
4925 /* No stack slots have been made yet. */
4926 stack_slot_list = 0;
4927
4928 /* There is no stack slot for handling nonlocal gotos. */
4929 nonlocal_goto_handler_slot = 0;
4930 nonlocal_goto_stack_level = 0;
4931
4932 /* No labels have been declared for nonlocal use. */
4933 nonlocal_labels = 0;
4934
4935 /* No function calls so far in this function. */
4936 function_call_count = 0;
4937
4938 /* No parm regs have been allocated.
4939 (This is important for output_inline_function.) */
4940 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4941
4942 /* Initialize the RTL mechanism. */
4943 init_emit ();
4944
4945 /* Initialize the queue of pending postincrement and postdecrements,
4946 and some other info in expr.c. */
4947 init_expr ();
4948
4949 /* We haven't done register allocation yet. */
4950 reg_renumber = 0;
4951
4952 init_const_rtx_hash_table ();
4953
4954 current_function_name = (*decl_printable_name) (subr, &junk);
4955
4956 /* Nonzero if this is a nested function that uses a static chain. */
4957
4958 current_function_needs_context
4959 = (decl_function_context (current_function_decl) != 0
4960 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
4961
4962 /* Set if a call to setjmp is seen. */
4963 current_function_calls_setjmp = 0;
4964
4965 /* Set if a call to longjmp is seen. */
4966 current_function_calls_longjmp = 0;
4967
4968 current_function_calls_alloca = 0;
4969 current_function_has_nonlocal_label = 0;
4970 current_function_has_nonlocal_goto = 0;
4971 current_function_contains_functions = 0;
4972
4973 current_function_returns_pcc_struct = 0;
4974 current_function_returns_struct = 0;
4975 current_function_epilogue_delay_list = 0;
4976 current_function_uses_const_pool = 0;
4977 current_function_uses_pic_offset_table = 0;
4978
4979 /* We have not yet needed to make a label to jump to for tail-recursion. */
4980 tail_recursion_label = 0;
4981
4982 /* We haven't had a need to make a save area for ap yet. */
4983
4984 arg_pointer_save_area = 0;
4985
4986 /* No stack slots allocated yet. */
4987 frame_offset = 0;
4988
4989 /* No SAVE_EXPRs in this function yet. */
4990 save_expr_regs = 0;
4991
4992 /* No RTL_EXPRs in this function yet. */
4993 rtl_expr_chain = 0;
4994
4995 /* Set up to allocate temporaries. */
4996 init_temp_slots ();
4997
4998   /* Within function body, compute a type's size as soon as it is laid out. */
4999 immediate_size_expand++;
5000
5001 /* We haven't made any trampolines for this function yet. */
5002 trampoline_list = 0;
5003
5004 init_pending_stack_adjust ();
5005 inhibit_defer_pop = 0;
5006
5007 current_function_outgoing_args_size = 0;
5008
5009 /* Prevent ever trying to delete the first instruction of a function.
5010 Also tell final how to output a linenum before the function prologue. */
5011 emit_line_note (filename, line);
5012
5013 /* Make sure first insn is a note even if we don't want linenums.
5014 This makes sure the first insn will never be deleted.
5015 Also, final expects a note to appear there. */
5016 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5017
5018 /* Set flags used by final.c. */
5019 if (aggregate_value_p (DECL_RESULT (subr)))
5020 {
5021 #ifdef PCC_STATIC_STRUCT_RETURN
5022 current_function_returns_pcc_struct = 1;
5023 #endif
5024 current_function_returns_struct = 1;
5025 }
5026
5027 /* Warn if this value is an aggregate type,
5028 regardless of which calling convention we are using for it. */
5029 if (warn_aggregate_return
5030 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5031 warning ("function returns an aggregate");
5032
5033 current_function_returns_pointer
5034 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5035
5036 /* Indicate that we need to distinguish between the return value of the
5037 present function and the return value of a function being called. */
5038 rtx_equal_function_value_matters = 1;
5039
5040 /* Indicate that we have not instantiated virtual registers yet. */
5041 virtuals_instantiated = 0;
5042
5043 /* Indicate we have no need of a frame pointer yet. */
5044 frame_pointer_needed = 0;
5045
5046 /* By default assume not varargs or stdarg. */
5047 current_function_varargs = 0;
5048 current_function_stdarg = 0;
5049 }
5050
5051 /* Indicate that the current function uses extra args
5052 not explicitly mentioned in the argument list in any fashion. */
5053
5054 void
5055 mark_varargs ()
5056 {
5057 current_function_varargs = 1;
5058 }
5059
5060 /* Expand a call to __main at the beginning of a possible main function. */
5061
5062 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5063 #undef HAS_INIT_SECTION
5064 #define HAS_INIT_SECTION
5065 #endif
5066
5067 void
5068 expand_main_function ()
5069 {
5070 if (!output_bytecode)
5071 {
5072 /* The zero below avoids a possible parse error */
5073 0;
5074 #if !defined (HAS_INIT_SECTION)
5075 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
5076 VOIDmode, 0);
5077 #endif /* not HAS_INIT_SECTION */
5078 }
5079 }
5080 \f
5081 extern struct obstack permanent_obstack;
5082
5083 /* Expand start of bytecode function. See comment at
5084 expand_function_start below for details. */
5085
5086 void
5087 bc_expand_function_start (subr, parms_have_cleanups)
5088 tree subr;
5089 int parms_have_cleanups;
5090 {
5091 char label[20], *name;
5092 static int nlab;
5093 tree thisarg;
5094 int argsz;
5095
5096 if (TREE_PUBLIC (subr))
5097 bc_globalize_label (IDENTIFIER_POINTER (DECL_NAME (subr)));
5098
5099 #ifdef DEBUG_PRINT_CODE
5100 fprintf (stderr, "\n<func %s>\n", IDENTIFIER_POINTER (DECL_NAME (subr)));
5101 #endif
5102
5103 for (argsz = 0, thisarg = DECL_ARGUMENTS (subr); thisarg; thisarg = TREE_CHAIN (thisarg))
5104 {
5105 if (DECL_RTL (thisarg))
5106 abort (); /* Should be NULL here I think. */
5107 else if (TREE_CONSTANT (DECL_SIZE (thisarg)))
5108 {
5109 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5110 argsz += TREE_INT_CST_LOW (DECL_SIZE (thisarg));
5111 }
5112 else
5113 {
5114 /* Variable-sized objects are pointers to their storage. */
5115 DECL_RTL (thisarg) = bc_gen_rtx ((char *) 0, argsz, (struct bc_label *) 0);
5116 argsz += POINTER_SIZE;
5117 }
5118 }
5119
5120 bc_begin_function (xstrdup (IDENTIFIER_POINTER (DECL_NAME (subr))));
5121
5122 ASM_GENERATE_INTERNAL_LABEL (label, "LX", nlab);
5123
5124 ++nlab;
5125 name = (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
5126 this_function_callinfo = bc_gen_rtx (name, 0, (struct bc_label *) 0);
5127 this_function_bytecode =
5128 bc_emit_trampoline (BYTECODE_LABEL (this_function_callinfo));
5129 }
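     /* The callinfo label made above is only a forward reference here; the
        record it names is laid out and emitted by bc_expand_function_end,
        below.  */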
5130
5131
5132 /* Expand the end of a bytecode function.  See the comment at
5133    expand_function_end, below, for details.  */
5134
5135 void
5136 bc_expand_function_end ()
5137 {
5138 char *ptrconsts;
5139
5140 expand_null_return ();
5141
5142   /* Emit any fixup code.  This must be done before the call
5143      to bc_end_function (), since that will cause the bytecode
5144      segment to be finished off and closed.  */
5145
5146 expand_fixups (NULL_RTX);
5147
5148 ptrconsts = bc_end_function ();
5149
5150 bc_align_const (2 /* INT_ALIGN */);
5151
5152   /* If this changes, also make sure to change bc-interp.h!  */
5153
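       /* The callinfo record emitted here is: the maximum stack depth and
          the local variable size for this function, followed by labelrefs
          to its bytecode, its pointer constants, and its call description
          (this_function_calldesc).  */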
5154 bc_emit_const_labeldef (BYTECODE_LABEL (this_function_callinfo));
5155 bc_emit_const ((char *) &max_stack_depth, sizeof max_stack_depth);
5156 bc_emit_const ((char *) &local_vars_size, sizeof local_vars_size);
5157 bc_emit_const_labelref (this_function_bytecode, 0);
5158 bc_emit_const_labelref (ptrconsts, 0);
5159 bc_emit_const_labelref (BYTECODE_LABEL (this_function_calldesc), 0);
5160 }
5161
5162
5163 /* Start the RTL for a new function, and set variables used for
5164 emitting RTL.
5165 SUBR is the FUNCTION_DECL node.
5166 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5167 the function's parameters, which must be run at any return statement. */
5168
5169 void
5170 expand_function_start (subr, parms_have_cleanups)
5171 tree subr;
5172 int parms_have_cleanups;
5173 {
5174 register int i;
5175 tree tem;
5176 rtx last_ptr;
5177
5178 if (output_bytecode)
5179 {
5180 bc_expand_function_start (subr, parms_have_cleanups);
5181 return;
5182 }
5183
5184 /* Make sure volatile mem refs aren't considered
5185 valid operands of arithmetic insns. */
5186 init_recog_no_volatile ();
5187
5188 /* If function gets a static chain arg, store it in the stack frame.
5189 Do this first, so it gets the first stack slot offset. */
5190 if (current_function_needs_context)
5191 {
5192 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5193
5194 #ifdef SMALL_REGISTER_CLASSES
5195 /* Delay copying static chain if it is not a register to avoid
5196 conflicts with regs used for parameters. */
5197 if (! SMALL_REGISTER_CLASSES
5198 || GET_CODE (static_chain_incoming_rtx) == REG)
5199 #endif
5200 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5201 }
5202
5203 /* If the parameters of this function need cleaning up, get a label
5204 for the beginning of the code which executes those cleanups. This must
5205 be done before doing anything with return_label. */
5206 if (parms_have_cleanups)
5207 cleanup_label = gen_label_rtx ();
5208 else
5209 cleanup_label = 0;
5210
5211   /* Make the label that return statements jump to, if this machine
5212      does not have a one-instruction return and uses an epilogue,
5213      or if the function returns a structure or has parm cleanups.  */
5214 #ifdef HAVE_return
5215 if (cleanup_label == 0 && HAVE_return
5216 && ! current_function_returns_pcc_struct
5217 && ! (current_function_returns_struct && ! optimize))
5218 return_label = 0;
5219 else
5220 return_label = gen_label_rtx ();
5221 #else
5222 return_label = gen_label_rtx ();
5223 #endif
5224
5225 /* Initialize rtx used to return the value. */
5226 /* Do this before assign_parms so that we copy the struct value address
5227 before any library calls that assign parms might generate. */
5228
5229 /* Decide whether to return the value in memory or in a register. */
5230 if (aggregate_value_p (DECL_RESULT (subr)))
5231 {
5232 /* Returning something that won't go in a register. */
5233 register rtx value_address = 0;
5234
5235 #ifdef PCC_STATIC_STRUCT_RETURN
5236 if (current_function_returns_pcc_struct)
5237 {
5238 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5239 value_address = assemble_static_space (size);
5240 }
5241 else
5242 #endif
5243 {
5244 /* Expect to be passed the address of a place to store the value.
5245 If it is passed as an argument, assign_parms will take care of
5246 it. */
5247 if (struct_value_incoming_rtx)
5248 {
5249 value_address = gen_reg_rtx (Pmode);
5250 emit_move_insn (value_address, struct_value_incoming_rtx);
5251 }
5252 }
5253 if (value_address)
5254 {
5255 DECL_RTL (DECL_RESULT (subr))
5256 = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
5257 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5258 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5259 }
5260 }
5261 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5262 /* If return mode is void, this decl rtl should not be used. */
5263 DECL_RTL (DECL_RESULT (subr)) = 0;
5264 else if (parms_have_cleanups)
5265 {
5266       /* If the function will end with cleanup code for parms,
5267 	 compute the return value into a pseudo reg,
5268 	 which we will copy into the true return register
5269 	 after the cleanups are done.  */
5270
5271 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5272
5273 #ifdef PROMOTE_FUNCTION_RETURN
5274 tree type = TREE_TYPE (DECL_RESULT (subr));
5275 int unsignedp = TREE_UNSIGNED (type);
5276
5277 mode = promote_mode (type, mode, &unsignedp, 1);
5278 #endif
5279
5280 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5281 }
5282 else
5283 /* Scalar, returned in a register. */
5284 {
5285 #ifdef FUNCTION_OUTGOING_VALUE
5286 DECL_RTL (DECL_RESULT (subr))
5287 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5288 #else
5289 DECL_RTL (DECL_RESULT (subr))
5290 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5291 #endif
5292
5293 /* Mark this reg as the function's return value. */
5294 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5295 {
5296 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5297 /* Needed because we may need to move this to memory
5298 in case it's a named return value whose address is taken. */
5299 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5300 }
5301 }
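       /* To summarize the cases above: the result's DECL_RTL is now a MEM
          at the returned value's address when the value is returned in
          memory, zero when the return type is void, a fresh pseudo when
          parm cleanups must run before the value is copied out, and
          otherwise the hard function value register itself.  */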
5302
5303 /* Initialize rtx for parameters and local variables.
5304 In some cases this requires emitting insns. */
5305
5306 assign_parms (subr, 0);
5307
5308 #ifdef SMALL_REGISTER_CLASSES
5309 /* Copy the static chain now if it wasn't a register. The delay is to
5310 avoid conflicts with the parameter passing registers. */
5311
5312 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5313 if (GET_CODE (static_chain_incoming_rtx) != REG)
5314 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5315 #endif
5316
5317 /* The following was moved from init_function_start.
5318 The move is supposed to make sdb output more accurate. */
5319 /* Indicate the beginning of the function body,
5320 as opposed to parm setup. */
5321 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5322
5323 /* If doing stupid allocation, mark parms as born here. */
5324
5325 if (GET_CODE (get_last_insn ()) != NOTE)
5326 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5327 parm_birth_insn = get_last_insn ();
5328
5329 if (obey_regdecls)
5330 {
5331 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5332 use_variable (regno_reg_rtx[i]);
5333
5334 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5335 use_variable (current_function_internal_arg_pointer);
5336 }
5337
5338 context_display = 0;
5339 if (current_function_needs_context)
5340 {
5341 /* Fetch static chain values for containing functions. */
5342 tem = decl_function_context (current_function_decl);
5343       /* If not doing stupid register allocation, copy the static chain
5344 pointer into a pseudo. If we have small register classes, copy
5345 the value from memory if static_chain_incoming_rtx is a REG. If
5346 we do stupid register allocation, we use the stack address
5347 generated above. */
5348 if (tem && ! obey_regdecls)
5349 {
5350 #ifdef SMALL_REGISTER_CLASSES
5351 /* If the static chain originally came in a register, put it back
5352 there, then move it out in the next insn. The reason for
5353 this peculiar code is to satisfy function integration. */
5354 if (SMALL_REGISTER_CLASSES
5355 && GET_CODE (static_chain_incoming_rtx) == REG)
5356 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5357 #endif
5358
5359 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5360 }
5361
5362 while (tem)
5363 {
5364 tree rtlexp = make_node (RTL_EXPR);
5365
5366 RTL_EXPR_RTL (rtlexp) = last_ptr;
5367 context_display = tree_cons (tem, rtlexp, context_display);
5368 tem = decl_function_context (tem);
5369 if (tem == 0)
5370 break;
5371 	  /* Chain through stack frames, assuming the pointer to the next
5372 	     lexical frame is found at the place we always store it.  */
5373 #ifdef FRAME_GROWS_DOWNWARD
5374 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5375 #endif
5376 last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
5377 memory_address (Pmode, last_ptr)));
5378
5379 /* If we are not optimizing, ensure that we know that this
5380 piece of context is live over the entire function. */
5381 if (! optimize)
5382 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
5383 save_expr_regs);
5384 }
5385 }
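       /* context_display now pairs each containing function with an rtx
          holding the pointer through which that function's frame is
          addressed; each successive pointer was found by loading the
          saved static chain word from the enclosing frame.  */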
5386
5387   /* The tail-recursion label, if we end up needing one, goes after the
5388      display initializations.  Ensure we have a NOTE here, since some
5389      things (like trampolines) get placed before this point.  */
5390 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5391
5392   /* Now evaluate the sizes of any types declared among the arguments.  */
5393 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5394 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
5395
5396 /* Make sure there is a line number after the function entry setup code. */
5397 force_next_line_note ();
5398 }
5399 \f
5400 /* Generate RTL for the end of the current function.
5401 FILENAME and LINE are the current position in the source file.
5402
5403    It is up to language-specific callers to do cleanups for parameters;
5404    otherwise, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */
5405
5406 void
5407 expand_function_end (filename, line, end_bindings)
5408 char *filename;
5409 int line;
5410 int end_bindings;
5411 {
5412 register int i;
5413 tree link;
5414
5415 #ifdef TRAMPOLINE_TEMPLATE
5416 static rtx initial_trampoline;
5417 #endif
5418
5419 if (output_bytecode)
5420 {
5421 bc_expand_function_end ();
5422 return;
5423 }
5424
5425 #ifdef NON_SAVING_SETJMP
5426 /* Don't put any variables in registers if we call setjmp
5427 on a machine that fails to restore the registers. */
5428 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5429 {
5430 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5431 setjmp_protect (DECL_INITIAL (current_function_decl));
5432
5433 setjmp_protect_args ();
5434 }
5435 #endif
5436
5437 /* Save the argument pointer if a save area was made for it. */
5438 if (arg_pointer_save_area)
5439 {
5440 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5441 emit_insn_before (x, tail_recursion_reentry);
5442 }
5443
5444 /* Initialize any trampolines required by this function. */
5445 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5446 {
5447 tree function = TREE_PURPOSE (link);
5448 rtx context = lookup_static_chain (function);
5449 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5450 rtx blktramp;
5451 rtx seq;
5452
5453 #ifdef TRAMPOLINE_TEMPLATE
5454 /* First make sure this compilation has a template for
5455 initializing trampolines. */
5456 if (initial_trampoline == 0)
5457 {
5458 end_temporary_allocation ();
5459 initial_trampoline
5460 = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
5461 resume_temporary_allocation ();
5462 }
5463 #endif
5464
5465 /* Generate insns to initialize the trampoline. */
5466 start_sequence ();
5467 tramp = round_trampoline_addr (XEXP (tramp, 0));
5468 #ifdef TRAMPOLINE_TEMPLATE
5469 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5470 emit_block_move (blktramp, initial_trampoline,
5471 GEN_INT (TRAMPOLINE_SIZE),
5472 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5473 #endif
5474 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5475 seq = get_insns ();
5476 end_sequence ();
5477
5478 /* Put those insns at entry to the containing function (this one). */
5479 emit_insns_before (seq, tail_recursion_reentry);
5480 }
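       /* Every trampoline now has initialization insns at the entry of
          this function: the template (if any) is block-copied to the
          trampoline's rounded address, and INITIALIZE_TRAMPOLINE patches
          in the nested function's entry point and its static chain.  */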
5481
5482 /* Warn about unused parms if extra warnings were specified. */
5483 if (warn_unused && extra_warnings)
5484 {
5485 tree decl;
5486
5487 for (decl = DECL_ARGUMENTS (current_function_decl);
5488 decl; decl = TREE_CHAIN (decl))
5489 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5490 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5491 warning_with_decl (decl, "unused parameter `%s'");
5492 }
5493
5494 /* Delete handlers for nonlocal gotos if nothing uses them. */
5495 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5496 delete_handlers ();
5497
5498 /* End any sequences that failed to be closed due to syntax errors. */
5499 while (in_sequence_p ())
5500 end_sequence ();
5501
5502   /* Outside a function body, we can't compute a type's actual size
5503      until the next function's body starts.  */
5504 immediate_size_expand--;
5505
5506 /* If doing stupid register allocation,
5507 mark register parms as dying here. */
5508
5509 if (obey_regdecls)
5510 {
5511 rtx tem;
5512 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5513 use_variable (regno_reg_rtx[i]);
5514
5515 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5516
5517 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5518 {
5519 use_variable (XEXP (tem, 0));
5520 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5521 }
5522
5523 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5524 use_variable (current_function_internal_arg_pointer);
5525 }
5526
5527 clear_pending_stack_adjust ();
5528 do_pending_stack_adjust ();
5529
5530 /* Mark the end of the function body.
5531 If control reaches this insn, the function can drop through
5532 without returning a value. */
5533 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5534
5535 /* Output a linenumber for the end of the function.
5536 SDB depends on this. */
5537 emit_line_note_force (filename, line);
5538
5539 /* Output the label for the actual return from the function,
5540 if one is expected. This happens either because a function epilogue
5541 is used instead of a return instruction, or because a return was done
5542 with a goto in order to run local cleanups, or because of pcc-style
5543 structure returning. */
5544
5545 if (return_label)
5546 emit_label (return_label);
5547
5548 /* C++ uses this. */
5549 if (end_bindings)
5550 expand_end_bindings (0, 0, 0);
5551
5552 /* If we had calls to alloca, and this machine needs
5553 an accurate stack pointer to exit the function,
5554 insert some code to save and restore the stack pointer. */
5555 #ifdef EXIT_IGNORE_STACK
5556 if (! EXIT_IGNORE_STACK)
5557 #endif
5558 if (current_function_calls_alloca)
5559 {
5560 rtx tem = 0;
5561
5562 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5563 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5564 }
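       /* Note that the save insn was emitted just after parm_birth_insn,
          near function entry, while the matching restore is emitted here,
          so the exit code sees the stack pointer it started with.  */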
5565
5566 /* If scalar return value was computed in a pseudo-reg,
5567 copy that to the hard return register. */
5568 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5569 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5570 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5571 >= FIRST_PSEUDO_REGISTER))
5572 {
5573 rtx real_decl_result;
5574
5575 #ifdef FUNCTION_OUTGOING_VALUE
5576 real_decl_result
5577 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5578 current_function_decl);
5579 #else
5580 real_decl_result
5581 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5582 current_function_decl);
5583 #endif
5584 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5585 /* If this is a BLKmode structure being returned in registers, then use
5586 the mode computed in expand_return. */
5587 if (GET_MODE (real_decl_result) == BLKmode)
5588 PUT_MODE (real_decl_result,
5589 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
5590 emit_move_insn (real_decl_result,
5591 DECL_RTL (DECL_RESULT (current_function_decl)));
5592 emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
5593 }
5594
5595 /* If returning a structure, arrange to return the address of the value
5596 in a place where debuggers expect to find it.
5597
5598 If returning a structure PCC style,
5599 the caller also depends on this value.
5600 And current_function_returns_pcc_struct is not necessarily set. */
5601 if (current_function_returns_struct
5602 || current_function_returns_pcc_struct)
5603 {
5604 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5605 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5606 #ifdef FUNCTION_OUTGOING_VALUE
5607 rtx outgoing
5608 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5609 current_function_decl);
5610 #else
5611 rtx outgoing
5612 = FUNCTION_VALUE (build_pointer_type (type),
5613 current_function_decl);
5614 #endif
5615
5616 /* Mark this as a function return value so integrate will delete the
5617 assignment and USE below when inlining this function. */
5618 REG_FUNCTION_VALUE_P (outgoing) = 1;
5619
5620 emit_move_insn (outgoing, value_address);
5621 use_variable (outgoing);
5622 }
5623
5624 /* Output a return insn if we are using one.
5625 Otherwise, let the rtl chain end here, to drop through
5626 into the epilogue. */
5627
5628 #ifdef HAVE_return
5629 if (HAVE_return)
5630 {
5631 emit_jump_insn (gen_return ());
5632 emit_barrier ();
5633 }
5634 #endif
5635
5636 /* Fix up any gotos that jumped out to the outermost
5637 binding level of the function.
5638 Must follow emitting RETURN_LABEL. */
5639
5640 /* If you have any cleanups to do at this point,
5641 and they need to create temporary variables,
5642 then you will lose. */
5643 expand_fixups (get_insns ());
5644 }
5645 \f
5646 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5647
5648 static int *prologue;
5649 static int *epilogue;
5650
5651 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5652 or a single insn). */
5653
5654 static int *
5655 record_insns (insns)
5656 rtx insns;
5657 {
5658 int *vec;
5659
5660 if (GET_CODE (insns) == SEQUENCE)
5661 {
5662 int len = XVECLEN (insns, 0);
5663 vec = (int *) oballoc ((len + 1) * sizeof (int));
5664 vec[len] = 0;
5665 while (--len >= 0)
5666 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5667 }
5668 else
5669 {
5670 vec = (int *) oballoc (2 * sizeof (int));
5671 vec[0] = INSN_UID (insns);
5672 vec[1] = 0;
5673 }
5674 return vec;
5675 }
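     /* For example, a SEQUENCE of three insns with UIDs 10, 11 and 12
        yields the vector {10, 11, 12, 0}; the trailing zero marks the
        end of the list.  */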
5676
5677 /* Determine how many INSN_UIDs in VEC are part of INSN. */
5678
5679 static int
5680 contains (insn, vec)
5681 rtx insn;
5682 int *vec;
5683 {
5684 register int i, j;
5685
5686 if (GET_CODE (insn) == INSN
5687 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5688 {
5689 int count = 0;
5690 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5691 for (j = 0; vec[j]; j++)
5692 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
5693 count++;
5694 return count;
5695 }
5696 else
5697 {
5698 for (j = 0; vec[j]; j++)
5699 if (INSN_UID (insn) == vec[j])
5700 return 1;
5701 }
5702 return 0;
5703 }
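     /* An insn whose pattern is a SEQUENCE (e.g. a filled delay slot)
        can match several of the UIDs in VEC at once; that is why the
        result is a count rather than just zero or one.  */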
5704
5705 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5706 this into place with notes indicating where the prologue ends and where
5707 the epilogue begins. Update the basic block information when possible. */
5708
5709 void
5710 thread_prologue_and_epilogue_insns (f)
5711 rtx f;
5712 {
5713 #ifdef HAVE_prologue
5714 if (HAVE_prologue)
5715 {
5716       rtx head, seq;
5717
5718 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
5719 prologue insns and a NOTE_INSN_PROLOGUE_END. */
5720 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
5721 seq = gen_prologue ();
5722 head = emit_insn_after (seq, f);
5723
5724 /* Include the new prologue insns in the first block. Ignore them
5725 if they form a basic block unto themselves. */
5726 if (basic_block_head && n_basic_blocks
5727 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
5728 basic_block_head[0] = NEXT_INSN (f);
5729
5730 /* Retain a map of the prologue insns. */
5731 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
5732 }
5733 else
5734 #endif
5735 prologue = 0;
5736
5737 #ifdef HAVE_epilogue
5738 if (HAVE_epilogue)
5739 {
5740 rtx insn = get_last_insn ();
5741 rtx prev = prev_nonnote_insn (insn);
5742
5743 /* If we end with a BARRIER, we don't need an epilogue. */
5744 if (! (prev && GET_CODE (prev) == BARRIER))
5745 {
5746 rtx tail, seq, tem;
5747 rtx first_use = 0;
5748 rtx last_use = 0;
5749
5750 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
5751 epilogue insns, the USE insns at the end of a function,
5752 the jump insn that returns, and then a BARRIER. */
5753
5754 /* Move the USE insns at the end of a function onto a list. */
5755 while (prev
5756 && GET_CODE (prev) == INSN
5757 && GET_CODE (PATTERN (prev)) == USE)
5758 {
5759 tem = prev;
5760 prev = prev_nonnote_insn (prev);
5761
5762 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
5763 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
5764 if (first_use)
5765 {
5766 NEXT_INSN (tem) = first_use;
5767 PREV_INSN (first_use) = tem;
5768 }
5769 first_use = tem;
5770 if (!last_use)
5771 last_use = tem;
5772 }
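           /* Because we walked backwards and prepended, first_use now
              heads the detached USE insns in their original order and
              last_use is the final one; they are spliced back in below,
              just before the return insn.  */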
5773
5774 emit_barrier_after (insn);
5775
5776 seq = gen_epilogue ();
5777 tail = emit_jump_insn_after (seq, insn);
5778
5779 	  /* Insert the USE insns immediately before the return insn, which
5780 	     must be the insn immediately preceding the final barrier.  */
5781 if (first_use)
5782 {
5783 tem = prev_nonnote_insn (get_last_insn ());
5784 NEXT_INSN (PREV_INSN (tem)) = first_use;
5785 PREV_INSN (first_use) = PREV_INSN (tem);
5786 PREV_INSN (tem) = last_use;
5787 NEXT_INSN (last_use) = tem;
5788 }
5789
5790 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
5791
5792 /* Include the new epilogue insns in the last block. Ignore
5793 them if they form a basic block unto themselves. */
5794 if (basic_block_end && n_basic_blocks
5795 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
5796 basic_block_end[n_basic_blocks - 1] = tail;
5797
5798 /* Retain a map of the epilogue insns. */
5799 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
5800 return;
5801 }
5802 }
5803 #endif
5804 epilogue = 0;
5805 }
5806
5807 /* Reposition the prologue-end and epilogue-begin notes after instruction
5808 scheduling and delayed branch scheduling. */
5809
5810 void
5811 reposition_prologue_and_epilogue_notes (f)
5812 rtx f;
5813 {
5814 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5815 /* Reposition the prologue and epilogue notes. */
5816 if (n_basic_blocks)
5817 {
5818 rtx next, prev;
5819 int len;
5820
5821 if (prologue)
5822 {
5823 register rtx insn, note = 0;
5824
5825 /* Scan from the beginning until we reach the last prologue insn.
5826 We apparently can't depend on basic_block_{head,end} after
5827 reorg has run. */
5828 for (len = 0; prologue[len]; len++)
5829 ;
5830 for (insn = f; len && insn; insn = NEXT_INSN (insn))
5831 {
5832 if (GET_CODE (insn) == NOTE)
5833 {
5834 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5835 note = insn;
5836 }
5837 else if ((len -= contains (insn, prologue)) == 0)
5838 {
5839 /* Find the prologue-end note if we haven't already, and
5840 move it to just after the last prologue insn. */
5841 if (note == 0)
5842 {
5843 for (note = insn; note = NEXT_INSN (note);)
5844 if (GET_CODE (note) == NOTE
5845 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5846 break;
5847 }
5848 next = NEXT_INSN (note);
5849 prev = PREV_INSN (note);
5850 if (prev)
5851 NEXT_INSN (prev) = next;
5852 if (next)
5853 PREV_INSN (next) = prev;
5854 add_insn_after (note, insn);
5855 }
5856 }
5857 }
5858
5859 if (epilogue)
5860 {
5861 register rtx insn, note = 0;
5862
5863 /* Scan from the end until we reach the first epilogue insn.
5864 We apparently can't depend on basic_block_{head,end} after
5865 reorg has run. */
5866 for (len = 0; epilogue[len]; len++)
5867 ;
5868 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
5869 {
5870 if (GET_CODE (insn) == NOTE)
5871 {
5872 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5873 note = insn;
5874 }
5875 else if ((len -= contains (insn, epilogue)) == 0)
5876 {
5877 /* Find the epilogue-begin note if we haven't already, and
5878 move it to just before the first epilogue insn. */
5879 if (note == 0)
5880 {
5881 for (note = insn; note = PREV_INSN (note);)
5882 if (GET_CODE (note) == NOTE
5883 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5884 break;
5885 }
5886 next = NEXT_INSN (note);
5887 prev = PREV_INSN (note);
5888 if (prev)
5889 NEXT_INSN (prev) = next;
5890 if (next)
5891 PREV_INSN (next) = prev;
5892 add_insn_after (note, PREV_INSN (insn));
5893 }
5894 }
5895 }
5896 }
5897 #endif /* HAVE_prologue or HAVE_epilogue */
5898 }