/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */
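
/* A hedged illustration, not part of the original sources: a front end
   typically drives this file roughly as

       expand_function_start (fndecl, 0);
       ... expand the body's statements; if `&x' is seen for a local `x'
	   that lives in a pseudo-register, call put_var_into_stack on
	   its decl ...
       expand_function_end (input_filename, lineno, 0);

   The exact expand_function_* signatures vary across releases, so treat
   this only as a sketch of the calling protocol described above.  */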

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in
   those cases, define the macro NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest such multiple that is not less
   than the value.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
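
/* Worked examples, assuming two's complement arithmetic:
   FLOOR_ROUND (-5, 8) == (-5 & ~7) == -8, and
   CEIL_ROUND (5, 8) == ((5 + 7) & ~7) == 8.  */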

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if this function has a computed goto.

   It is computed during find_basic_blocks or during stupid life
   analysis.  */

int current_function_has_computed_jump;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

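/* For instance (a sketch with hypothetical names, not from the original
   sources), in

       int i = ({ struct S s = make_s (); s.field; });

   the temporary that carries the grouping's result must survive the end
   of the inner statement, so it is preserved by moving it to the previous
   nesting level, as described above.  */
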
struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */
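
/* For example (a sketch): if an insn's constraints require operand 1 to
   match operand 0, and both operands were VAR, then both occurrences must
   be rewritten to the same replacement rtx.  Remembering each replacement
   in this list is what guarantees that.  */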

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
                                            int, struct function *));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */
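
/* A hedged usage note, not from the original sources: a front end would
   typically bracket compilation of a nested function with

       push_function_context ();
       ... compile the nested function ...
       pop_function_context ();

   saving and restoring its own language-specific state around the pair.  */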

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */
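
/* For example (illustrative only), a caller wanting one word of frame
   space aligned according to its mode might write

       rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   while passing align == -1 would use BIGGEST_ALIGNMENT and round the
   size up to a multiple of it.  */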

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).  */
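
/* For example (illustrative only), a scratch slot to be released by the
   next call to free_temp_slots might be requested as

       rtx slot = assign_stack_temp (DImode, GET_MODE_SIZE (DImode), 0);

   whereas passing keep == 1 would retain it for the enclosing block.  */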

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     pick the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes.  */
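
/* For example (illustrative only), a caller expanding a call whose value
   is returned in memory might obtain its scratch area with

       rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);

   forcing addressable stack memory and suppressing promotion.  */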

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */
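
/* Concretely (a sketch): a free BLKmode slot with base_offset 0 and
   full_size 16, followed by a free one with base_offset 16, is adjacent
   to it, so the two merge into a single slot of full_size 32 that later
   requests can reuse.  */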

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless we are
     at high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.  Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl)
                            || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (flag_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of struct fixup_replacement entries,
   and X is some part of an insn.  Return the struct fixup_replacement whose
   OLD value is equal to X.  Allocate a new structure if no such entry
   exists.  */
1613
1614 static struct fixup_replacement *
1615 find_fixup_replacement (replacements, x)
1616 struct fixup_replacement **replacements;
1617 rtx x;
1618 {
1619 struct fixup_replacement *p;
1620
1621 /* See if we have already replaced this. */
1622 for (p = *replacements; p && p->old != x; p = p->next)
1623 ;
1624
1625 if (p == 0)
1626 {
1627 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1628 p->old = x;
1629 p->new = 0;
1630 p->next = *replacements;
1631 *replacements = p;
1632 }
1633
1634 return p;
1635 }
1636
1637 /* Scan the insn-chain starting with INSN for refs to VAR
1638 and fix them up. TOPLEVEL is nonzero if this chain is the
1639 main chain of insns for the current function. */
1640
1641 static void
1642 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1643 rtx var;
1644 enum machine_mode promoted_mode;
1645 int unsignedp;
1646 rtx insn;
1647 int toplevel;
1648 {
1649 rtx call_dest = 0;
1650
1651 while (insn)
1652 {
1653 rtx next = NEXT_INSN (insn);
1654 rtx set, prev, prev_set;
1655 rtx note;
1656
1657 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1658 {
1659 /* If this is a CLOBBER of VAR, delete it.
1660
1661 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1662 and REG_RETVAL notes too. */
1663 if (GET_CODE (PATTERN (insn)) == CLOBBER
1664 && XEXP (PATTERN (insn), 0) == var)
1665 {
1666 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1667 /* The REG_LIBCALL note will go away since we are going to
1668 turn INSN into a NOTE, so just delete the
1669 corresponding REG_RETVAL note. */
1670 remove_note (XEXP (note, 0),
1671 find_reg_note (XEXP (note, 0), REG_RETVAL,
1672 NULL_RTX));
1673
1674 /* In unoptimized compilation, we shouldn't call delete_insn
1675 except in jump.c doing warnings. */
1676 PUT_CODE (insn, NOTE);
1677 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1678 NOTE_SOURCE_FILE (insn) = 0;
1679 }
1680
1681 /* The insn to load VAR from a home in the arglist
1682 is now a no-op. When we see it, just delete it.
1683 Similarly if this is storing VAR from a register from which
1684 it was loaded in the previous insn. This will occur
1685 when an ADDRESSOF was made for an arglist slot. */
1686 else if (toplevel
1687 && (set = single_set (insn)) != 0
1688 && SET_DEST (set) == var
1689 /* If this represents the result of an insn group,
1690 don't delete the insn. */
1691 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1692 && (rtx_equal_p (SET_SRC (set), var)
1693 || (GET_CODE (SET_SRC (set)) == REG
1694 && (prev = prev_nonnote_insn (insn)) != 0
1695 && (prev_set = single_set (prev)) != 0
1696 && SET_DEST (prev_set) == SET_SRC (set)
1697 && rtx_equal_p (SET_SRC (prev_set), var))))
1698 {
1699 /* In unoptimized compilation, we shouldn't call delete_insn
1700 except in jump.c when doing warnings. */
1701 PUT_CODE (insn, NOTE);
1702 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1703 NOTE_SOURCE_FILE (insn) = 0;
1704 if (insn == last_parm_insn)
1705 last_parm_insn = PREV_INSN (next);
1706 }
1707 else
1708 {
1709 struct fixup_replacement *replacements = 0;
1710 rtx next_insn = NEXT_INSN (insn);
1711
1712 if (SMALL_REGISTER_CLASSES)
1713 {
1714 /* If the insn that copies the results of a CALL_INSN
1715 into a pseudo now references VAR, we have to use an
1716 intermediate pseudo since we want the life of the
1717 return value register to be only a single insn.
1718
1719 If we don't use an intermediate pseudo, such things as
1720 address computations (needed to make the address of VAR
1721 valid, if it is not already) can be placed between the CALL_INSN and INSN.
1722
1723 To make sure this doesn't happen, we record the destination
1724 of the CALL_INSN and see if the next insn uses both that
1725 and VAR. */
1726
1727 if (call_dest != 0 && GET_CODE (insn) == INSN
1728 && reg_mentioned_p (var, PATTERN (insn))
1729 && reg_mentioned_p (call_dest, PATTERN (insn)))
1730 {
1731 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1732
1733 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1734
1735 PATTERN (insn) = replace_rtx (PATTERN (insn),
1736 call_dest, temp);
1737 }
1738
1739 if (GET_CODE (insn) == CALL_INSN
1740 && GET_CODE (PATTERN (insn)) == SET)
1741 call_dest = SET_DEST (PATTERN (insn));
1742 else if (GET_CODE (insn) == CALL_INSN
1743 && GET_CODE (PATTERN (insn)) == PARALLEL
1744 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1745 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1746 else
1747 call_dest = 0;
1748 }
1749
1750 /* See if we have to do anything to INSN now that VAR is in
1751 memory. If it needs to be loaded into a pseudo, use a single
1752 pseudo for the entire insn in case there is a MATCH_DUP
1753 between two operands. We pass a pointer to the head of
1754 a list of struct fixup_replacements. If fixup_var_refs_1
1755 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1756 it will record them in this list.
1757
1758 If it allocated a pseudo for any replacement, we copy into
1759 it here. */
1760
1761 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1762 &replacements);
1763
1764 /* If this is last_parm_insn, and any instructions were output
1765 after it to fix it up, then we must set last_parm_insn to
1766 the last such instruction emitted. */
1767 if (insn == last_parm_insn)
1768 last_parm_insn = PREV_INSN (next_insn);
1769
1770 while (replacements)
1771 {
1772 if (GET_CODE (replacements->new) == REG)
1773 {
1774 rtx insert_before;
1775 rtx seq;
1776
1777 /* OLD might be a (subreg (mem)). */
1778 if (GET_CODE (replacements->old) == SUBREG)
1779 replacements->old
1780 = fixup_memory_subreg (replacements->old, insn, 0);
1781 else
1782 replacements->old
1783 = fixup_stack_1 (replacements->old, insn);
1784
1785 insert_before = insn;
1786
1787 /* If we are changing the mode, do a conversion.
1788 This might be wasteful, but combine.c will
1789 eliminate much of the waste. */
1790
1791 if (GET_MODE (replacements->new)
1792 != GET_MODE (replacements->old))
1793 {
1794 start_sequence ();
1795 convert_move (replacements->new,
1796 replacements->old, unsignedp);
1797 seq = gen_sequence ();
1798 end_sequence ();
1799 }
1800 else
1801 seq = gen_move_insn (replacements->new,
1802 replacements->old);
1803
1804 emit_insn_before (seq, insert_before);
1805 }
1806
1807 replacements = replacements->next;
1808 }
1809 }
1810
1811 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1812 But don't touch other insns referred to by reg-notes;
1813 we will get them elsewhere. */
1814 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1815 if (GET_CODE (note) != INSN_LIST)
1816 XEXP (note, 0)
1817 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1818 }
1819 insn = next;
1820 }
1821 }
1822 \f
1823 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1824 See if the rtx expression at *LOC in INSN needs to be changed.
1825
1826 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1827 contain a list of original rtx's and replacements. If we find that we need
1828 to modify this insn by replacing a memory reference with a pseudo or by
1829 making a new MEM to implement a SUBREG, we consult that list to see if
1830 we have already chosen a replacement. If none has already been allocated,
1831 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1832 or the SUBREG, as appropriate, to the pseudo. */
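/* As an illustration (all register numbers and offsets here are
   hypothetical): if VAR is (mem:SI (plus:SI (reg fp) (const_int -20)))
   and an insn is

	(set (reg:SI 117) (plus:SI VAR VAR))

   with the two operands tied by a MATCH_DUP, both occurrences must be
   replaced by one and the same pseudo, say (reg:SI 118); the copy from
   VAR into (reg:SI 118) is then emitted just before the insn.  */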
1833
1834 static void
1835 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1836 register rtx var;
1837 enum machine_mode promoted_mode;
1838 register rtx *loc;
1839 rtx insn;
1840 struct fixup_replacement **replacements;
1841 {
1842 register int i;
1843 register rtx x = *loc;
1844 RTX_CODE code = GET_CODE (x);
1845 register char *fmt;
1846 register rtx tem, tem1;
1847 struct fixup_replacement *replacement;
1848
1849 switch (code)
1850 {
1851 case ADDRESSOF:
1852 if (XEXP (x, 0) == var)
1853 {
1854 /* Prevent sharing of rtl that might lose. */
1855 rtx sub = copy_rtx (XEXP (var, 0));
1856
1857 start_sequence ();
1858
1859 if (! validate_change (insn, loc, sub, 0))
1860 {
1861 rtx y = force_operand (sub, NULL_RTX);
1862
1863 if (! validate_change (insn, loc, y, 0))
1864 *loc = copy_to_reg (y);
1865 }
1866
1867 emit_insn_before (gen_sequence (), insn);
1868 end_sequence ();
1869 }
1870 return;
1871
1872 case MEM:
1873 if (var == x)
1874 {
1875 /* If we already have a replacement, use it. Otherwise,
1876 try to fix up this address in case it is invalid. */
1877
1878 replacement = find_fixup_replacement (replacements, var);
1879 if (replacement->new)
1880 {
1881 *loc = replacement->new;
1882 return;
1883 }
1884
1885 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1886
1887 /* Unless we are forcing memory to register or we changed the mode,
1888 we can leave things the way they are if the insn is valid. */
1889
1890 INSN_CODE (insn) = -1;
1891 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1892 && recog_memoized (insn) >= 0)
1893 return;
1894
1895 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1896 return;
1897 }
1898
1899 /* If X contains VAR, we need to unshare it here so that we update
1900 each occurrence separately. But all identical MEMs in one insn
1901 must be replaced with the same rtx because of the possibility of
1902 MATCH_DUPs. */
1903
1904 if (reg_mentioned_p (var, x))
1905 {
1906 replacement = find_fixup_replacement (replacements, x);
1907 if (replacement->new == 0)
1908 replacement->new = copy_most_rtx (x, var);
1909
1910 *loc = x = replacement->new;
1911 }
1912 break;
1913
1914 case REG:
1915 case CC0:
1916 case PC:
1917 case CONST_INT:
1918 case CONST:
1919 case SYMBOL_REF:
1920 case LABEL_REF:
1921 case CONST_DOUBLE:
1922 return;
1923
1924 case SIGN_EXTRACT:
1925 case ZERO_EXTRACT:
1926 /* Note that in some cases those types of expressions are altered
1927 by optimize_bit_field, and do not survive to get here. */
1928 if (XEXP (x, 0) == var
1929 || (GET_CODE (XEXP (x, 0)) == SUBREG
1930 && SUBREG_REG (XEXP (x, 0)) == var))
1931 {
1932 /* Get TEM as a valid MEM in the mode presently in the insn.
1933
1934 We don't worry about the possibility of MATCH_DUP here; it
1935 is highly unlikely and would be tricky to handle. */
1936
1937 tem = XEXP (x, 0);
1938 if (GET_CODE (tem) == SUBREG)
1939 {
1940 if (GET_MODE_BITSIZE (GET_MODE (tem))
1941 > GET_MODE_BITSIZE (GET_MODE (var)))
1942 {
1943 replacement = find_fixup_replacement (replacements, var);
1944 if (replacement->new == 0)
1945 replacement->new = gen_reg_rtx (GET_MODE (var));
1946 SUBREG_REG (tem) = replacement->new;
1947 }
1948 else
1949 tem = fixup_memory_subreg (tem, insn, 0);
1950 }
1951 else
1952 tem = fixup_stack_1 (tem, insn);
1953
1954 /* Unless we want to load from memory, get TEM into the proper mode
1955 for an extract from memory. This can only be done if the
1956 extract is at a constant position and length. */
1957
1958 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1959 && GET_CODE (XEXP (x, 2)) == CONST_INT
1960 && ! mode_dependent_address_p (XEXP (tem, 0))
1961 && ! MEM_VOLATILE_P (tem))
1962 {
1963 enum machine_mode wanted_mode = VOIDmode;
1964 enum machine_mode is_mode = GET_MODE (tem);
1965 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1966
1967 #ifdef HAVE_extzv
1968 if (GET_CODE (x) == ZERO_EXTRACT)
1969 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1970 #endif
1971 #ifdef HAVE_extv
1972 if (GET_CODE (x) == SIGN_EXTRACT)
1973 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1974 #endif
1975 /* If we have a narrower mode, we can do something. */
1976 if (wanted_mode != VOIDmode
1977 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1978 {
1979 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1980 rtx old_pos = XEXP (x, 2);
1981 rtx newmem;
1982
1983 /* If the bytes and bits are counted differently, we
1984 must adjust the offset. */
1985 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1986 offset = (GET_MODE_SIZE (is_mode)
1987 - GET_MODE_SIZE (wanted_mode) - offset);
1988
1989 pos %= GET_MODE_BITSIZE (wanted_mode);
1990
1991 newmem = gen_rtx_MEM (wanted_mode,
1992 plus_constant (XEXP (tem, 0), offset));
1993 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1994 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1995 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1996
1997 /* Make the change and see if the insn remains valid. */
1998 INSN_CODE (insn) = -1;
1999 XEXP (x, 0) = newmem;
2000 XEXP (x, 2) = GEN_INT (pos);
2001
2002 if (recog_memoized (insn) >= 0)
2003 return;
2004
2005 /* Otherwise, restore old position. XEXP (x, 0) will be
2006 restored later. */
2007 XEXP (x, 2) = old_pos;
2008 }
2009 }
2010
2011 /* If we get here, the bitfield extract insn can't accept a memory
2012 reference. Copy the input into a register. */
2013
2014 tem1 = gen_reg_rtx (GET_MODE (tem));
2015 emit_insn_before (gen_move_insn (tem1, tem), insn);
2016 XEXP (x, 0) = tem1;
2017 return;
2018 }
2019 break;
2020
2021 case SUBREG:
2022 if (SUBREG_REG (x) == var)
2023 {
2024 /* If this is a special SUBREG made because VAR was promoted
2025 from a wider mode, replace it with VAR and call ourself
2026 recursively, this time saying that the object previously
2027 had its current mode (by virtue of the SUBREG). */
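	  /* For instance, a char variable promoted to SImode is referenced
	     as (subreg:QI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set.
	     Once the variable lives in memory, the QImode MEM can be used
	     directly, so the whole SUBREG is replaced by VAR itself.  */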
2028
2029 if (SUBREG_PROMOTED_VAR_P (x))
2030 {
2031 *loc = var;
2032 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2033 return;
2034 }
2035
2036 /* If this SUBREG makes VAR wider, it has become a paradoxical
2037 SUBREG with VAR in memory, but these aren't allowed at this
2038 stage of the compilation. So load VAR into a pseudo and take
2039 a SUBREG of that pseudo. */
2040 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2041 {
2042 replacement = find_fixup_replacement (replacements, var);
2043 if (replacement->new == 0)
2044 replacement->new = gen_reg_rtx (GET_MODE (var));
2045 SUBREG_REG (x) = replacement->new;
2046 return;
2047 }
2048
2049 /* See if we have already found a replacement for this SUBREG.
2050 If so, use it. Otherwise, make a MEM and see if the insn
2051 is recognized. If not, or if we should force MEM into a register,
2052 make a pseudo for this SUBREG. */
2053 replacement = find_fixup_replacement (replacements, x);
2054 if (replacement->new)
2055 {
2056 *loc = replacement->new;
2057 return;
2058 }
2059
2060 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2061
2062 INSN_CODE (insn) = -1;
2063 if (! flag_force_mem && recog_memoized (insn) >= 0)
2064 return;
2065
2066 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2067 return;
2068 }
2069 break;
2070
2071 case SET:
2072 /* First do special simplification of bit-field references. */
2073 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2074 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2075 optimize_bit_field (x, insn, 0);
2076 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2077 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2078 optimize_bit_field (x, insn, NULL_PTR);
2079
2080 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2081 into a register and then store it back out. */
2082 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2083 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2084 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2085 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2086 > GET_MODE_SIZE (GET_MODE (var))))
2087 {
2088 replacement = find_fixup_replacement (replacements, var);
2089 if (replacement->new == 0)
2090 replacement->new = gen_reg_rtx (GET_MODE (var));
2091
2092 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2093 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2094 }
2095
2096 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2097 insn into a pseudo and store the low part of the pseudo into VAR. */
2098 if (GET_CODE (SET_DEST (x)) == SUBREG
2099 && SUBREG_REG (SET_DEST (x)) == var
2100 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2101 > GET_MODE_SIZE (GET_MODE (var))))
2102 {
2103 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2104 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2105 tem)),
2106 insn);
2107 break;
2108 }
2109
2110 {
2111 rtx dest = SET_DEST (x);
2112 rtx src = SET_SRC (x);
2113 #ifdef HAVE_insv
2114 rtx outerdest = dest;
2115 #endif
2116
2117 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2118 || GET_CODE (dest) == SIGN_EXTRACT
2119 || GET_CODE (dest) == ZERO_EXTRACT)
2120 dest = XEXP (dest, 0);
2121
2122 if (GET_CODE (src) == SUBREG)
2123 src = XEXP (src, 0);
2124
2125 /* If VAR does not appear at the top level of the SET,
2126 just scan the lower levels of the tree. */
2127
2128 if (src != var && dest != var)
2129 break;
2130
2131 /* We will need to rerecognize this insn. */
2132 INSN_CODE (insn) = -1;
2133
2134 #ifdef HAVE_insv
2135 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2136 {
2137 /* Since this case will return, ensure we fix up all the
2138 operands here. */
2139 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2140 insn, replacements);
2141 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2142 insn, replacements);
2143 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2144 insn, replacements);
2145
2146 tem = XEXP (outerdest, 0);
2147
2148 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2149 that may appear inside a ZERO_EXTRACT.
2150 This was legitimate when the MEM was a REG. */
2151 if (GET_CODE (tem) == SUBREG
2152 && SUBREG_REG (tem) == var)
2153 tem = fixup_memory_subreg (tem, insn, 0);
2154 else
2155 tem = fixup_stack_1 (tem, insn);
2156
2157 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2158 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2159 && ! mode_dependent_address_p (XEXP (tem, 0))
2160 && ! MEM_VOLATILE_P (tem))
2161 {
2162 enum machine_mode wanted_mode
2163 = insn_operand_mode[(int) CODE_FOR_insv][0];
2164 enum machine_mode is_mode = GET_MODE (tem);
2165 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2166
2167 /* If we have a narrower mode, we can do something. */
2168 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2169 {
2170 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2171 rtx old_pos = XEXP (outerdest, 2);
2172 rtx newmem;
2173
2174 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2175 offset = (GET_MODE_SIZE (is_mode)
2176 - GET_MODE_SIZE (wanted_mode) - offset);
2177
2178 pos %= GET_MODE_BITSIZE (wanted_mode);
2179
2180 newmem = gen_rtx_MEM (wanted_mode,
2181 plus_constant (XEXP (tem, 0), offset));
2182 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2183 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2184 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2185
2186 /* Make the change and see if the insn remains valid. */
2187 INSN_CODE (insn) = -1;
2188 XEXP (outerdest, 0) = newmem;
2189 XEXP (outerdest, 2) = GEN_INT (pos);
2190
2191 if (recog_memoized (insn) >= 0)
2192 return;
2193
2194 /* Otherwise, restore the old position. XEXP (outerdest, 0) will be
2195 overwritten below in any case. */
2196 XEXP (outerdest, 2) = old_pos;
2197 }
2198 }
2199
2200 /* If we get here, the bit-field store doesn't allow memory
2201 or isn't located at a constant position. Load the value into
2202 a register, do the store, and put it back into memory. */
2203
2204 tem1 = gen_reg_rtx (GET_MODE (tem));
2205 emit_insn_before (gen_move_insn (tem1, tem), insn);
2206 emit_insn_after (gen_move_insn (tem, tem1), insn);
2207 XEXP (outerdest, 0) = tem1;
2208 return;
2209 }
2210 #endif
2211
2212 /* STRICT_LOW_PART is a no-op on memory references
2213 and it can cause combinations to be unrecognizable,
2214 so eliminate it. */
2215
2216 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2217 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2218
2219 /* A valid insn to copy VAR into or out of a register
2220 must be left alone, to avoid an infinite loop here.
2221 If the reference to VAR is by a subreg, fix that up,
2222 since SUBREG is not valid for a memref.
2223 Also fix up the address of the stack slot.
2224
2225 Note that we must not try to recognize the insn until
2226 after we know that we have valid addresses and no
2227 (subreg (mem ...) ...) constructs, since these interfere
2228 with determining the validity of the insn. */
2229
2230 if ((SET_SRC (x) == var
2231 || (GET_CODE (SET_SRC (x)) == SUBREG
2232 && SUBREG_REG (SET_SRC (x)) == var))
2233 && (GET_CODE (SET_DEST (x)) == REG
2234 || (GET_CODE (SET_DEST (x)) == SUBREG
2235 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2236 && GET_MODE (var) == promoted_mode
2237 && x == single_set (insn))
2238 {
2239 rtx pat;
2240
2241 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2242 if (replacement->new)
2243 SET_SRC (x) = replacement->new;
2244 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2245 SET_SRC (x) = replacement->new
2246 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2247 else
2248 SET_SRC (x) = replacement->new
2249 = fixup_stack_1 (SET_SRC (x), insn);
2250
2251 if (recog_memoized (insn) >= 0)
2252 return;
2253
2254 /* INSN is not valid, but we know that we want to
2255 copy SET_SRC (x) to SET_DEST (x) in some way. So
2256 we generate the move and see whether it requires more
2257 than one insn. If it does, we emit those insns and
2258 delete INSN. Otherwise, we can just replace the pattern
2259 of INSN; we have already verified above that INSN has
2260 no other function than to do X. */
2261
2262 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2263 if (GET_CODE (pat) == SEQUENCE)
2264 {
2265 emit_insn_after (pat, insn);
2266 PUT_CODE (insn, NOTE);
2267 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2268 NOTE_SOURCE_FILE (insn) = 0;
2269 }
2270 else
2271 PATTERN (insn) = pat;
2272
2273 return;
2274 }
2275
2276 if ((SET_DEST (x) == var
2277 || (GET_CODE (SET_DEST (x)) == SUBREG
2278 && SUBREG_REG (SET_DEST (x)) == var))
2279 && (GET_CODE (SET_SRC (x)) == REG
2280 || (GET_CODE (SET_SRC (x)) == SUBREG
2281 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2282 && GET_MODE (var) == promoted_mode
2283 && x == single_set (insn))
2284 {
2285 rtx pat;
2286
2287 if (GET_CODE (SET_DEST (x)) == SUBREG)
2288 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2289 else
2290 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2291
2292 if (recog_memoized (insn) >= 0)
2293 return;
2294
2295 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2296 if (GET_CODE (pat) == SEQUENCE)
2297 {
2298 emit_insn_after (pat, insn);
2299 PUT_CODE (insn, NOTE);
2300 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2301 NOTE_SOURCE_FILE (insn) = 0;
2302 }
2303 else
2304 PATTERN (insn) = pat;
2305
2306 return;
2307 }
2308
2309 /* Otherwise, storing into VAR must be handled specially
2310 by storing into a temporary and copying that into VAR
2311 with a new insn after this one. Note that this case
2312 will be used when storing into a promoted scalar since
2313 the insn will now have different modes on the input
2314 and output and hence will be invalid (except for the case
2315 of setting it to a constant, which does not need any
2316 change if it is valid). We generate extra code in that case,
2317 but combine.c will eliminate it. */
2318
2319 if (dest == var)
2320 {
2321 rtx temp;
2322 rtx fixeddest = SET_DEST (x);
2323
2324 /* A STRICT_LOW_PART around a MEM can be discarded. */
2325 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2326 fixeddest = XEXP (fixeddest, 0);
2327 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2328 if (GET_CODE (fixeddest) == SUBREG)
2329 {
2330 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2331 promoted_mode = GET_MODE (fixeddest);
2332 }
2333 else
2334 fixeddest = fixup_stack_1 (fixeddest, insn);
2335
2336 temp = gen_reg_rtx (promoted_mode);
2337
2338 emit_insn_after (gen_move_insn (fixeddest,
2339 gen_lowpart (GET_MODE (fixeddest),
2340 temp)),
2341 insn);
2342
2343 SET_DEST (x) = temp;
2344 }
2345 }
2346
2347 default:
2348 break;
2349 }
2350
2351 /* Nothing special about this RTX; fix its operands. */
2352
2353 fmt = GET_RTX_FORMAT (code);
2354 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2355 {
2356 if (fmt[i] == 'e')
2357 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2358 if (fmt[i] == 'E')
2359 {
2360 register int j;
2361 for (j = 0; j < XVECLEN (x, i); j++)
2362 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2363 insn, replacements);
2364 }
2365 }
2366 }
2367 \f
2368 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2369 return an rtx (MEM:m1 newaddr) which is equivalent.
2370 If any insns must be emitted to compute NEWADDR, put them before INSN.
2371
2372 UNCRITICAL nonzero means accept paradoxical subregs.
2373 This is used for subregs found inside REG_NOTES. */
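/* For example, on a hypothetical little-endian target with 4-byte words,
   (subreg:SI (mem:DI addr) 1) would become
   (mem:SI (plus addr (const_int 4))), since word 1 of the DImode value
   lives 4 bytes beyond ADDR.  */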
2374
2375 static rtx
2376 fixup_memory_subreg (x, insn, uncritical)
2377 rtx x;
2378 rtx insn;
2379 int uncritical;
2380 {
2381 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2382 rtx addr = XEXP (SUBREG_REG (x), 0);
2383 enum machine_mode mode = GET_MODE (x);
2384 rtx result;
2385
2386 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2387 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2388 && ! uncritical)
2389 abort ();
2390
2391 if (BYTES_BIG_ENDIAN)
2392 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2393 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2394 addr = plus_constant (addr, offset);
2395 if (!flag_force_addr && memory_address_p (mode, addr))
2396 /* Shortcut if no insns need be emitted. */
2397 return change_address (SUBREG_REG (x), mode, addr);
2398 start_sequence ();
2399 result = change_address (SUBREG_REG (x), mode, addr);
2400 emit_insn_before (gen_sequence (), insn);
2401 end_sequence ();
2402 return result;
2403 }
2404
2405 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2406 Replace subexpressions of X in place.
2407 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2408 Otherwise return X, with its contents possibly altered.
2409
2410 If any insns must be emitted to compute the new addresses, put them before INSN.
2411
2412 UNCRITICAL is as in fixup_memory_subreg. */
2413
2414 static rtx
2415 walk_fixup_memory_subreg (x, insn, uncritical)
2416 register rtx x;
2417 rtx insn;
2418 int uncritical;
2419 {
2420 register enum rtx_code code;
2421 register char *fmt;
2422 register int i;
2423
2424 if (x == 0)
2425 return 0;
2426
2427 code = GET_CODE (x);
2428
2429 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2430 return fixup_memory_subreg (x, insn, uncritical);
2431
2432 /* Nothing special about this RTX; fix its operands. */
2433
2434 fmt = GET_RTX_FORMAT (code);
2435 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2436 {
2437 if (fmt[i] == 'e')
2438 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2439 if (fmt[i] == 'E')
2440 {
2441 register int j;
2442 for (j = 0; j < XVECLEN (x, i); j++)
2443 XVECEXP (x, i, j)
2444 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2445 }
2446 }
2447 return x;
2448 }
2449 \f
2450 /* For each memory ref within X, if it refers to a stack slot
2451 with an out of range displacement, put the address in a temp register
2452 (emitting new insns before INSN to load these registers)
2453 and alter the memory ref to use that register.
2454 Replace each such MEM rtx with a copy, to avoid clobberage. */
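/* For instance, on a hypothetical machine whose displacements are limited
   to 13 bits, (mem:SI (plus (reg fp) (const_int 40000))) is invalid; it
   would be rewritten as (mem:SI (reg:SI 103)) after emitting
   (set (reg:SI 103) (plus (reg fp) (const_int 40000))) before INSN.  */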
2455
2456 static rtx
2457 fixup_stack_1 (x, insn)
2458 rtx x;
2459 rtx insn;
2460 {
2461 register int i;
2462 register RTX_CODE code = GET_CODE (x);
2463 register char *fmt;
2464
2465 if (code == MEM)
2466 {
2467 register rtx ad = XEXP (x, 0);
2468 /* If we have the address of a stack slot but it's not valid
2469 (displacement is too large), compute the sum in a register. */
2470 if (GET_CODE (ad) == PLUS
2471 && GET_CODE (XEXP (ad, 0)) == REG
2472 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2473 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2474 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2475 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2476 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2477 #endif
2478 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2479 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2480 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2481 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2482 {
2483 rtx temp, seq;
2484 if (memory_address_p (GET_MODE (x), ad))
2485 return x;
2486
2487 start_sequence ();
2488 temp = copy_to_reg (ad);
2489 seq = gen_sequence ();
2490 end_sequence ();
2491 emit_insn_before (seq, insn);
2492 return change_address (x, VOIDmode, temp);
2493 }
2494 return x;
2495 }
2496
2497 fmt = GET_RTX_FORMAT (code);
2498 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2499 {
2500 if (fmt[i] == 'e')
2501 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2502 if (fmt[i] == 'E')
2503 {
2504 register int j;
2505 for (j = 0; j < XVECLEN (x, i); j++)
2506 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2507 }
2508 }
2509 return x;
2510 }
2511 \f
2512 /* Optimization: a bit-field instruction whose field
2513 happens to be a byte or halfword in memory
2514 can be changed to a move instruction.
2515
2516 We call here when INSN is an insn to examine or store into a bit-field.
2517 BODY is the SET-rtx to be altered.
2518
2519 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2520 (Currently this is called only from function.c, and EQUIV_MEM
2521 is always 0.) */
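/* A sketch of the transformation, for a hypothetical insn on a target
   where BYTES_BIG_ENDIAN and BITS_BIG_ENDIAN are both zero:

	(set (reg:SI 99)
	     (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8)))

   extracts a byte-sized, byte-aligned field, so it can be rewritten to
   load (mem:QI (plus addr (const_int 1))) and zero-extend it into
   (reg:SI 99).  */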
2522
2523 static void
2524 optimize_bit_field (body, insn, equiv_mem)
2525 rtx body;
2526 rtx insn;
2527 rtx *equiv_mem;
2528 {
2529 register rtx bitfield;
2530 int destflag;
2531 rtx seq = 0;
2532 enum machine_mode mode;
2533
2534 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2535 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2536 bitfield = SET_DEST (body), destflag = 1;
2537 else
2538 bitfield = SET_SRC (body), destflag = 0;
2539
2540 /* First check that the field being stored has constant size and position
2541 and is in fact a byte or halfword suitably aligned. */
2542
2543 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2544 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2545 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2546 != BLKmode)
2547 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2548 {
2549 register rtx memref = 0;
2550
2551 /* Now check that the containing word is memory, not a register,
2552 and that it is safe to change the machine mode. */
2553
2554 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2555 memref = XEXP (bitfield, 0);
2556 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2557 && equiv_mem != 0)
2558 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2559 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2560 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2561 memref = SUBREG_REG (XEXP (bitfield, 0));
2562 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2563 && equiv_mem != 0
2564 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2565 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2566
2567 if (memref
2568 && ! mode_dependent_address_p (XEXP (memref, 0))
2569 && ! MEM_VOLATILE_P (memref))
2570 {
2571 /* Now adjust the address, first for any subreg'ing
2572 that we are now getting rid of,
2573 and then for which byte of the word is wanted. */
2574
2575 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2576 rtx insns;
2577
2578 /* Adjust OFFSET to count bits from low-address byte. */
2579 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2580 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2581 - offset - INTVAL (XEXP (bitfield, 1)));
2582
2583 /* Adjust OFFSET to count bytes from low-address byte. */
2584 offset /= BITS_PER_UNIT;
2585 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2586 {
2587 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2588 if (BYTES_BIG_ENDIAN)
2589 offset -= (MIN (UNITS_PER_WORD,
2590 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2591 - MIN (UNITS_PER_WORD,
2592 GET_MODE_SIZE (GET_MODE (memref))));
2593 }
2594
2595 start_sequence ();
2596 memref = change_address (memref, mode,
2597 plus_constant (XEXP (memref, 0), offset));
2598 insns = get_insns ();
2599 end_sequence ();
2600 emit_insns_before (insns, insn);
2601
2602 /* Store this memory reference where
2603 we found the bit field reference. */
2604
2605 if (destflag)
2606 {
2607 validate_change (insn, &SET_DEST (body), memref, 1);
2608 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2609 {
2610 rtx src = SET_SRC (body);
2611 while (GET_CODE (src) == SUBREG
2612 && SUBREG_WORD (src) == 0)
2613 src = SUBREG_REG (src);
2614 if (GET_MODE (src) != GET_MODE (memref))
2615 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2616 validate_change (insn, &SET_SRC (body), src, 1);
2617 }
2618 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2619 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2620 /* This shouldn't happen because anything that didn't have
2621 one of these modes should have been converted explicitly
2622 and then referenced through a subreg.
2623 This is so because the original bit-field was
2624 handled by agg_mode and so its tree structure had
2625 the same mode as memref now has. */
2626 abort ();
2627 }
2628 else
2629 {
2630 rtx dest = SET_DEST (body);
2631
2632 while (GET_CODE (dest) == SUBREG
2633 && SUBREG_WORD (dest) == 0
2634 && (GET_MODE_CLASS (GET_MODE (dest))
2635 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2636 dest = SUBREG_REG (dest);
2637
2638 validate_change (insn, &SET_DEST (body), dest, 1);
2639
2640 if (GET_MODE (dest) == GET_MODE (memref))
2641 validate_change (insn, &SET_SRC (body), memref, 1);
2642 else
2643 {
2644 /* Convert the mem ref to the destination mode. */
2645 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2646
2647 start_sequence ();
2648 convert_move (newreg, memref,
2649 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2650 seq = get_insns ();
2651 end_sequence ();
2652
2653 validate_change (insn, &SET_SRC (body), newreg, 1);
2654 }
2655 }
2656
2657 /* See if we can convert this extraction or insertion into
2658 a simple move insn. We might not be able to do so if this
2659 was, for example, part of a PARALLEL.
2660
2661 If we succeed, write out any needed conversions. If we fail,
2662 it is hard to guess why we failed, so don't do anything
2663 special; just let the optimization be suppressed. */
2664
2665 if (apply_change_group () && seq)
2666 emit_insns_before (seq, insn);
2667 }
2668 }
2669 }
2670 \f
2671 /* These routines are responsible for converting virtual register references
2672 to the actual hard register references once RTL generation is complete.
2673
2674 The following four variables are used for communication between the
2675 routines. They contain the offsets of the virtual registers from their
2676 respective hard registers. */
2677
2678 static int in_arg_offset;
2679 static int var_offset;
2680 static int dynamic_offset;
2681 static int out_arg_offset;
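/* For example, if STARTING_FRAME_OFFSET were -16 on some target,
   var_offset would be -16, and an address such as
   (plus virtual_stack_vars_rtx (const_int 8)) would be instantiated
   as (plus frame_pointer_rtx (const_int -8)).  */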
2682
2683 /* On most machines, the stack pointer register is equivalent to the bottom
2684 of the stack. */
2685
2686 #ifndef STACK_POINTER_OFFSET
2687 #define STACK_POINTER_OFFSET 0
2688 #endif
2689
2690 /* If not defined, pick an appropriate default for the offset of dynamically
2691 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2692 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2693
2694 #ifndef STACK_DYNAMIC_OFFSET
2695
2696 #ifdef ACCUMULATE_OUTGOING_ARGS
2697 /* The bottom of the stack points to the actual arguments. If
2698 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2699 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2700 stack space for register parameters is not pushed by the caller, but
2701 rather is part of the fixed stack areas and hence not included in
2702 `current_function_outgoing_args_size'. Nevertheless, we must allow
2703 for it when allocating dynamic stack objects. */
2704
2705 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2706 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2707 (current_function_outgoing_args_size \
2708 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2709
2710 #else
2711 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2712 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2713 #endif
2714
2715 #else
2716 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2717 #endif
2718 #endif
2719
2720 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2721 its address taken. DECL is the decl for the object stored in the
2722 register, for later use if we do need to force REG into the stack.
2723 REG is overwritten by the MEM like in put_reg_into_stack. */
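/* For example (hypothetical register numbers, 32-bit target): if REG is
   (reg:SI 101), it is rewritten in place as

	(mem:SI (addressof:SI (reg:SI 102) 101))

   where (reg:SI 102) is a fresh pseudo. The original register number is
   kept in the ADDRESSOF so that put_addressof_into_stack can still force
   the variable into a stack slot if its address really escapes.  */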
2724
2725 rtx
2726 gen_mem_addressof (reg, decl)
2727 rtx reg;
2728 tree decl;
2729 {
2730 tree type = TREE_TYPE (decl);
2731
2732 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2733 SET_ADDRESSOF_DECL (r, decl);
2734
2735 XEXP (reg, 0) = r;
2736 PUT_CODE (reg, MEM);
2737 PUT_MODE (reg, DECL_MODE (decl));
2738 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2739 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2740 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2741
2742 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2743 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2744
2745 return reg;
2746 }
2747
2748 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2749
2750 void
2751 flush_addressof (decl)
2752 tree decl;
2753 {
2754 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2755 && DECL_RTL (decl) != 0
2756 && GET_CODE (DECL_RTL (decl)) == MEM
2757 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2758 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2759 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2760 }
2761
2762 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2763
2764 static void
2765 put_addressof_into_stack (r)
2766 rtx r;
2767 {
2768 tree decl = ADDRESSOF_DECL (r);
2769 rtx reg = XEXP (r, 0);
2770
2771 if (GET_CODE (reg) != REG)
2772 abort ();
2773
2774 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2775 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2776 ADDRESSOF_REGNO (r),
2777 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2778 }
2779
2780 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2781 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2782 the stack. */
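/* For instance (hypothetical register numbers): a reference to a variable
   whose address was taken looks like
   (mem:SI (addressof:SI (reg:SI 102) 101)). If the address never truly
   escapes, this collapses back to the plain (reg:SI 102); a BLKmode or
   volatile reference instead forces the register into a stack slot.  */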
2783
2784 static void
2785 purge_addressof_1 (loc, insn, force)
2786 rtx *loc;
2787 rtx insn;
2788 int force;
2789 {
2790 rtx x;
2791 RTX_CODE code;
2792 int i, j;
2793 char *fmt;
2794
2795 /* Re-start here to avoid recursion in common cases. */
2796 restart:
2797
2798 x = *loc;
2799 if (x == 0)
2800 return;
2801
2802 code = GET_CODE (x);
2803
2804 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2805 {
2806 rtx insns;
2807 /* We must create a copy of the rtx because it was created by
2808 overwriting a REG rtx which is always shared. */
2809 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2810
2811 if (validate_change (insn, loc, sub, 0))
2812 return;
2813
2814 start_sequence ();
2815 if (! validate_change (insn, loc,
2816 force_operand (sub, NULL_RTX),
2817 0))
2818 abort ();
2819
2820 insns = get_insns ();
2821 end_sequence ();
2822 emit_insns_before (insns, insn);
2823 return;
2824 }
2825 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2826 {
2827 rtx sub = XEXP (XEXP (x, 0), 0);
2828
2829 if (GET_CODE (sub) == MEM)
2830 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2831
2832 if (GET_CODE (sub) == REG
2833 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2834 {
2835 put_addressof_into_stack (XEXP (x, 0));
2836 return;
2837 }
2838 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2839 {
2840 if (! BYTES_BIG_ENDIAN && ! WORDS_BIG_ENDIAN)
2841 {
2842 rtx sub2 = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
2843 if (validate_change (insn, loc, sub2, 0))
2844 goto restart;
2845 }
2846 }
2847 else if (validate_change (insn, loc, sub, 0))
2848 goto restart;
2849 /* else give up and put it into the stack */
2850 }
2851 else if (code == ADDRESSOF)
2852 {
2853 put_addressof_into_stack (x);
2854 return;
2855 }
2856
2857 /* Scan all subexpressions. */
2858 fmt = GET_RTX_FORMAT (code);
2859 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2860 {
2861 if (*fmt == 'e')
2862 purge_addressof_1 (&XEXP (x, i), insn, force);
2863 else if (*fmt == 'E')
2864 for (j = 0; j < XVECLEN (x, i); j++)
2865 purge_addressof_1 (&XVECEXP (x, i, j), insn, force);
2866 }
2867 }
2868
2869 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2870 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2871 stack. */
2872
2873 void
2874 purge_addressof (insns)
2875 rtx insns;
2876 {
2877 rtx insn;
2878 for (insn = insns; insn; insn = NEXT_INSN (insn))
2879 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2880 || GET_CODE (insn) == CALL_INSN)
2881 {
2882 purge_addressof_1 (&PATTERN (insn), insn,
2883 asm_noperands (PATTERN (insn)) > 0);
2884 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0);
2885 }
2886 }
2887 \f
2888 /* Pass through the INSNS of function FNDECL and convert virtual register
2889 references to hard register references. */
2890
2891 void
2892 instantiate_virtual_regs (fndecl, insns)
2893 tree fndecl;
2894 rtx insns;
2895 {
2896 rtx insn;
2897 int i;
2898
2899 /* Compute the offsets to use for this function. */
2900 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2901 var_offset = STARTING_FRAME_OFFSET;
2902 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2903 out_arg_offset = STACK_POINTER_OFFSET;
2904
2905 /* Scan all variables and parameters of this function. For each that is
2906 in memory, instantiate all virtual registers if the result is a valid
2907 address. If not, we do it later. That will handle most uses of virtual
2908 regs on many machines. */
2909 instantiate_decls (fndecl, 1);
2910
2911 /* Initialize recognition, indicating that volatile is OK. */
2912 init_recog ();
2913
2914 /* Scan through all the insns, instantiating every virtual register still
2915 present. */
2916 for (insn = insns; insn; insn = NEXT_INSN (insn))
2917 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2918 || GET_CODE (insn) == CALL_INSN)
2919 {
2920 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2921 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2922 }
2923
2924 /* Instantiate the stack slots for the parm registers, for later use in
2925 addressof elimination. */
2926 for (i = 0; i < max_parm_reg; ++i)
2927 if (parm_reg_stack_loc[i])
2928 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
2929
2930 /* Now instantiate the remaining register equivalences for debugging info.
2931 These will not be valid addresses. */
2932 instantiate_decls (fndecl, 0);
2933
2934 /* Indicate that, from now on, assign_stack_local should use
2935 frame_pointer_rtx. */
2936 virtuals_instantiated = 1;
2937 }
2938
2939 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2940 all virtual registers in their DECL_RTL's.
2941
2942 If VALID_ONLY, do this only if the resulting address is still valid.
2943 Otherwise, always do it. */
2944
2945 static void
2946 instantiate_decls (fndecl, valid_only)
2947 tree fndecl;
2948 int valid_only;
2949 {
2950 tree decl;
2951
2952 if (DECL_SAVED_INSNS (fndecl))
2953 /* When compiling an inline function, the obstack used for
2954 rtl allocation is the maybepermanent_obstack. Calling
2955 `resume_temporary_allocation' switches us back to that
2956 obstack while we process this function's parameters. */
2957 resume_temporary_allocation ();
2958
2959 /* Process all parameters of the function. */
2960 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2961 {
2962 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
2963
2964 instantiate_decl (DECL_RTL (decl), size, valid_only);
2965
2966 /* If the parameter was promoted, then the incoming RTL mode may be
2967 larger than the declared type size. We must use the larger of
2968 the two sizes. */
2969 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
2970 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
2971 }
2972
2973 /* Now process all variables defined in the function or its subblocks. */
2974 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2975
2976 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2977 {
2978 /* Save all rtl allocated for this function by raising the
2979 high-water mark on the maybepermanent_obstack. */
2980 preserve_data ();
2981 /* All further rtl allocation is now done in the current_obstack. */
2982 rtl_in_current_obstack ();
2983 }
2984 }
2985
2986 /* Subroutine of instantiate_decls: Process all decls in the given
2987 BLOCK node and all its subblocks. */
2988
2989 static void
2990 instantiate_decls_1 (let, valid_only)
2991 tree let;
2992 int valid_only;
2993 {
2994 tree t;
2995
2996 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2997 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2998 valid_only);
2999
3000 /* Process all subblocks. */
3001 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3002 instantiate_decls_1 (t, valid_only);
3003 }
3004
3005 /* Subroutine of the preceding procedures: Given RTL representing a
3006 decl and the size of the object, do any instantiation required.
3007
3008 If VALID_ONLY is non-zero, it means that the RTL should only be
3009 changed if the new address is valid. */
3010
3011 static void
3012 instantiate_decl (x, size, valid_only)
3013 rtx x;
3014 int size;
3015 int valid_only;
3016 {
3017 enum machine_mode mode;
3018 rtx addr;
3019
3020 /* If this is not a MEM, no need to do anything. Similarly if the
3021 address is a constant or a register that is not a virtual register. */
3022
3023 if (x == 0 || GET_CODE (x) != MEM)
3024 return;
3025
3026 addr = XEXP (x, 0);
3027 if (CONSTANT_P (addr)
3028 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3029 || (GET_CODE (addr) == REG
3030 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3031 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3032 return;
3033
3034 /* If we should only do this if the address is valid, copy the address.
3035 We need to do this so we can undo any changes that might make the
3036 address invalid. This copy is unfortunate, but probably can't be
3037 avoided. */
3038
3039 if (valid_only)
3040 addr = copy_rtx (addr);
3041
3042 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3043
3044 if (valid_only)
3045 {
3046 /* Now verify that the resulting address is valid for every integer or
3047 floating-point mode up to and including SIZE bytes long. We do this
3048 since the object might be accessed in any mode and frame addresses
3049 are shared. */
3050
3051 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3052 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3053 mode = GET_MODE_WIDER_MODE (mode))
3054 if (! memory_address_p (mode, addr))
3055 return;
3056
3057 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3058 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3059 mode = GET_MODE_WIDER_MODE (mode))
3060 if (! memory_address_p (mode, addr))
3061 return;
3062 }
3063
3064 /* Put back the address now that we have updated it and we either know
3065 it is valid or we don't care whether it is valid. */
3066
3067 XEXP (x, 0) = addr;
3068 }
3069 \f
3070 /* Given a pointer to a piece of rtx and an optional pointer to the
3071 containing object, instantiate any virtual registers present in it.
3072
3073 If EXTRA_INSNS, we always do the replacement and generate
3074 any extra insns before OBJECT. If it is zero, we do nothing if the replacement
3075 is not valid.
3076
3077 Return 1 if we either had nothing to do or if we were able to do the
3078 needed replacement. Return 0 otherwise; we only return zero if
3079 EXTRA_INSNS is zero.
3080
3081 We first try some simple transformations to avoid the creation of extra
3082 pseudos. */
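/* The simplest such transformation (hypothetical offset): with
   var_offset == 16, the address
   (plus:SI virtual_stack_vars_rtx (const_int 8)) is rewritten in place
   as (plus:SI frame_pointer_rtx (const_int 24)), creating no new
   pseudos and no new insns.  */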
3083
3084 static int
3085 instantiate_virtual_regs_1 (loc, object, extra_insns)
3086 rtx *loc;
3087 rtx object;
3088 int extra_insns;
3089 {
3090 rtx x;
3091 RTX_CODE code;
3092 rtx new = 0;
3093 HOST_WIDE_INT offset;
3094 rtx temp;
3095 rtx seq;
3096 int i, j;
3097 char *fmt;
3098
3099 /* Re-start here to avoid recursion in common cases. */
3100 restart:
3101
3102 x = *loc;
3103 if (x == 0)
3104 return 1;
3105
3106 code = GET_CODE (x);
3107
3108 /* Check for some special cases. */
3109 switch (code)
3110 {
3111 case CONST_INT:
3112 case CONST_DOUBLE:
3113 case CONST:
3114 case SYMBOL_REF:
3115 case CODE_LABEL:
3116 case PC:
3117 case CC0:
3118 case ASM_INPUT:
3119 case ADDR_VEC:
3120 case ADDR_DIFF_VEC:
3121 case RETURN:
3122 return 1;
3123
3124 case SET:
3125 /* We are allowed to set the virtual registers. This means that
3126 the actual register should receive the source minus the
3127 appropriate offset. This is used, for example, in the handling
3128 of non-local gotos. */
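	  /* E.g. (set virtual_stack_vars_rtx X), as generated for a
	     nonlocal goto, would become (set frame_pointer_rtx X')
	     where X' computes X minus var_offset (however force_operand
	     chooses to emit that sum).  */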
3129 if (SET_DEST (x) == virtual_incoming_args_rtx)
3130 new = arg_pointer_rtx, offset = - in_arg_offset;
3131 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3132 new = frame_pointer_rtx, offset = - var_offset;
3133 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3134 new = stack_pointer_rtx, offset = - dynamic_offset;
3135 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3136 new = stack_pointer_rtx, offset = - out_arg_offset;
3137
3138 if (new)
3139 {
3140 /* The only valid sources here are PLUS or REG. Just do
3141 the simplest possible thing to handle them. */
3142 if (GET_CODE (SET_SRC (x)) != REG
3143 && GET_CODE (SET_SRC (x)) != PLUS)
3144 abort ();
3145
3146 start_sequence ();
3147 if (GET_CODE (SET_SRC (x)) != REG)
3148 temp = force_operand (SET_SRC (x), NULL_RTX);
3149 else
3150 temp = SET_SRC (x);
3151 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3152 seq = get_insns ();
3153 end_sequence ();
3154
3155 emit_insns_before (seq, object);
3156 SET_DEST (x) = new;
3157
3158 if (! validate_change (object, &SET_SRC (x), temp, 0)
3159 || ! extra_insns)
3160 abort ();
3161
3162 return 1;
3163 }
3164
3165 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3166 loc = &SET_SRC (x);
3167 goto restart;
3168
3169 case PLUS:
3170 /* Handle special case of virtual register plus constant. */
3171 if (CONSTANT_P (XEXP (x, 1)))
3172 {
3173 rtx old, new_offset;
3174
3175 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3176 if (GET_CODE (XEXP (x, 0)) == PLUS)
3177 {
3178 rtx inner = XEXP (XEXP (x, 0), 0);
3179
3180 if (inner == virtual_incoming_args_rtx)
3181 new = arg_pointer_rtx, offset = in_arg_offset;
3182 else if (inner == virtual_stack_vars_rtx)
3183 new = frame_pointer_rtx, offset = var_offset;
3184 else if (inner == virtual_stack_dynamic_rtx)
3185 new = stack_pointer_rtx, offset = dynamic_offset;
3186 else if (inner == virtual_outgoing_args_rtx)
3187 new = stack_pointer_rtx, offset = out_arg_offset;
3188 else
3189 {
3190 loc = &XEXP (x, 0);
3191 goto restart;
3192 }
3193
3194 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3195 extra_insns);
3196 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3197 }
3198
3199 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3200 new = arg_pointer_rtx, offset = in_arg_offset;
3201 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3202 new = frame_pointer_rtx, offset = var_offset;
3203 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3204 new = stack_pointer_rtx, offset = dynamic_offset;
3205 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3206 new = stack_pointer_rtx, offset = out_arg_offset;
3207 else
3208 {
3209 /* We know the second operand is a constant. Unless the
3210 first operand is a REG (which has already been checked),
3211 it needs to be checked. */
3212 if (GET_CODE (XEXP (x, 0)) != REG)
3213 {
3214 loc = &XEXP (x, 0);
3215 goto restart;
3216 }
3217 return 1;
3218 }
3219
3220 new_offset = plus_constant (XEXP (x, 1), offset);
3221
3222 /* If the new constant is zero, try to replace the sum with just
3223 the register. */
3224 if (new_offset == const0_rtx
3225 && validate_change (object, loc, new, 0))
3226 return 1;
3227
3228 /* Next try to replace the register and new offset.
3229 There are two changes to validate here and we can't assume that,
3230 when the old offset equals the new one, just changing the register
3231 will yield a valid insn. In the interests of a little efficiency,
3232 however, we only call validate_change once (we don't queue up the
3233 changes and then call apply_change_group). */
3234
3235 old = XEXP (x, 0);
3236 if (offset == 0
3237 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3238 : (XEXP (x, 0) = new,
3239 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3240 {
3241 if (! extra_insns)
3242 {
3243 XEXP (x, 0) = old;
3244 return 0;
3245 }
3246
3247 /* Otherwise copy the new constant into a register and replace
3248 the constant with that register. */
3249 temp = gen_reg_rtx (Pmode);
3250 XEXP (x, 0) = new;
3251 if (validate_change (object, &XEXP (x, 1), temp, 0))
3252 emit_insn_before (gen_move_insn (temp, new_offset), object);
3253 else
3254 {
3255 /* If that didn't work, replace this expression with a
3256 register containing the sum. */
3257
3258 XEXP (x, 0) = old;
3259 new = gen_rtx_PLUS (Pmode, new, new_offset);
3260
3261 start_sequence ();
3262 temp = force_operand (new, NULL_RTX);
3263 seq = get_insns ();
3264 end_sequence ();
3265
3266 emit_insns_before (seq, object);
3267 if (! validate_change (object, loc, temp, 0)
3268 && ! validate_replace_rtx (x, temp, object))
3269 abort ();
3270 }
3271 }
3272
3273 return 1;
3274 }
3275
3276 /* Fall through to generic two-operand expression case. */
3277 case EXPR_LIST:
3278 case CALL:
3279 case COMPARE:
3280 case MINUS:
3281 case MULT:
3282 case DIV: case UDIV:
3283 case MOD: case UMOD:
3284 case AND: case IOR: case XOR:
3285 case ROTATERT: case ROTATE:
3286 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3287 case NE: case EQ:
3288 case GE: case GT: case GEU: case GTU:
3289 case LE: case LT: case LEU: case LTU:
3290 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3291 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3292 loc = &XEXP (x, 0);
3293 goto restart;
3294
3295 case MEM:
3296 /* Most cases of MEM that convert to valid addresses have already been
3297 handled by our scan of decls. The only special handling we
3298 need here is to make a copy of the rtx to ensure it isn't being
3299 shared if we have to change it to a pseudo.
3300
3301 If the rtx is a simple reference to an address via a virtual register,
3302 it can potentially be shared. In such cases, first try to make it
3303 a valid address, which can also be shared. Otherwise, copy it and
3304 proceed normally.
3305
3306 First check for common cases that need no processing. These are
3307 usually due to instantiation already being done on a previous instance
3308 of a shared rtx. */
3309
3310 temp = XEXP (x, 0);
3311 if (CONSTANT_ADDRESS_P (temp)
3312 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3313 || temp == arg_pointer_rtx
3314 #endif
3315 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3316 || temp == hard_frame_pointer_rtx
3317 #endif
3318 || temp == frame_pointer_rtx)
3319 return 1;
3320
3321 if (GET_CODE (temp) == PLUS
3322 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3323 && (XEXP (temp, 0) == frame_pointer_rtx
3324 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3325 || XEXP (temp, 0) == hard_frame_pointer_rtx
3326 #endif
3327 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3328 || XEXP (temp, 0) == arg_pointer_rtx
3329 #endif
3330 ))
3331 return 1;
3332
3333 if (temp == virtual_stack_vars_rtx
3334 || temp == virtual_incoming_args_rtx
3335 || (GET_CODE (temp) == PLUS
3336 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3337 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3338 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3339 {
3340 /* This MEM may be shared. If the substitution can be done without
3341 the need to generate new pseudos, we want to do it in place
3342 so all copies of the shared rtx benefit. The call below will
3343 only make substitutions if the resulting address is still
3344 valid.
3345
3346 Note that we cannot pass X as the object in the recursive call
3347 since the insn being processed may not allow all valid
3348 addresses. However, if we were not passed on object, we can
3349 only modify X without copying it if X will have a valid
3350 address.
3351
3352 ??? Also note that this can still lose if OBJECT is an insn that
3353 has less restrictions on an address that some other insn.
3354 In that case, we will modify the shared address. This case
3355 doesn't seem very likely, though. One case where this could
3356 happen is in the case of a USE or CLOBBER reference, but we
3357 take care of that below. */
3358
3359 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3360 object ? object : x, 0))
3361 return 1;
3362
3363 /* Otherwise make a copy and process that copy. We copy the entire
3364 RTL expression since it might be a PLUS which could also be
3365 shared. */
3366 *loc = x = copy_rtx (x);
3367 }
3368
3369 /* Fall through to generic unary operation case. */
3370 case SUBREG:
3371 case STRICT_LOW_PART:
3372 case NEG: case NOT:
3373 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3374 case SIGN_EXTEND: case ZERO_EXTEND:
3375 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3376 case FLOAT: case FIX:
3377 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3378 case ABS:
3379 case SQRT:
3380 case FFS:
3381 /* These cases either have just one operand or we know that we need not
3382 check the rest of the operands. */
3383 loc = &XEXP (x, 0);
3384 goto restart;
3385
3386 case USE:
3387 case CLOBBER:
3388 /* If the operand is a MEM, see if the change yields a valid MEM. If not,
3389 go ahead and make the invalid change, but do it to a copy. For a REG,
3390 just make the recursive call, since there's no chance of a problem. */
3391
3392 if ((GET_CODE (XEXP (x, 0)) == MEM
3393 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3394 0))
3395 || (GET_CODE (XEXP (x, 0)) == REG
3396 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3397 return 1;
3398
3399 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3400 loc = &XEXP (x, 0);
3401 goto restart;
3402
3403 case REG:
3404 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3405 in front of this insn and substitute the temporary. */
3406 if (x == virtual_incoming_args_rtx)
3407 new = arg_pointer_rtx, offset = in_arg_offset;
3408 else if (x == virtual_stack_vars_rtx)
3409 new = frame_pointer_rtx, offset = var_offset;
3410 else if (x == virtual_stack_dynamic_rtx)
3411 new = stack_pointer_rtx, offset = dynamic_offset;
3412 else if (x == virtual_outgoing_args_rtx)
3413 new = stack_pointer_rtx, offset = out_arg_offset;
3414
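      /* Illustration (hypothetical offsets): (reg virtual_stack_vars)
	 itself becomes (plus (reg frame_pointer) (const_int var_offset));
	 validate_change below tells us whether the insn accepts that
	 form directly.  */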
3415 if (new)
3416 {
3417 temp = plus_constant (new, offset);
3418 if (!validate_change (object, loc, temp, 0))
3419 {
3420 if (! extra_insns)
3421 return 0;
3422
3423 start_sequence ();
3424 temp = force_operand (temp, NULL_RTX);
3425 seq = get_insns ();
3426 end_sequence ();
3427
3428 emit_insns_before (seq, object);
3429 if (! validate_change (object, loc, temp, 0)
3430 && ! validate_replace_rtx (x, temp, object))
3431 abort ();
3432 }
3433 }
3434
3435 return 1;
3436
3437 case ADDRESSOF:
3438 if (GET_CODE (XEXP (x, 0)) == REG)
3439 return 1;
3440
3441 else if (GET_CODE (XEXP (x, 0)) == MEM)
3442 {
3443 /* If we have a (addressof (mem ..)), do any instantiation inside
3444 since we know we'll be making the inside valid when we finally
3445 remove the ADDRESSOF. */
3446 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3447 return 1;
3448 }
3449 break;
3450
3451 default:
3452 break;
3453 }
3454
3455 /* Scan all subexpressions. */
3456 fmt = GET_RTX_FORMAT (code);
3457 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3458 if (*fmt == 'e')
3459 {
3460 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3461 return 0;
3462 }
3463 else if (*fmt == 'E')
3464 for (j = 0; j < XVECLEN (x, i); j++)
3465 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3466 extra_insns))
3467 return 0;
3468
3469 return 1;
3470 }
3471 \f
3472 /* Optimization: assuming this function does not receive nonlocal gotos,
3473 delete the handlers for such, as well as the insns to establish
3474 and disestablish them. */
3475
3476 static void
3477 delete_handlers ()
3478 {
3479 rtx insn;
3480 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3481 {
3482 /* Delete the handler by turning off the flag that would
3483 prevent jump_optimize from deleting it.
3484 Also permit deletion of the nonlocal labels themselves
3485 if nothing local refers to them. */
3486 if (GET_CODE (insn) == CODE_LABEL)
3487 {
3488 tree t, last_t;
3489
3490 LABEL_PRESERVE_P (insn) = 0;
3491
3492 /* Remove it from the nonlocal_label list, to avoid confusing
3493 flow. */
3494 for (t = nonlocal_labels, last_t = 0; t;
3495 last_t = t, t = TREE_CHAIN (t))
3496 if (DECL_RTL (TREE_VALUE (t)) == insn)
3497 break;
3498 if (t)
3499 {
3500 if (! last_t)
3501 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3502 else
3503 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3504 }
3505 }
3506 if (GET_CODE (insn) == INSN
3507 && ((nonlocal_goto_handler_slot != 0
3508 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3509 || (nonlocal_goto_stack_level != 0
3510 && reg_mentioned_p (nonlocal_goto_stack_level,
3511 PATTERN (insn)))))
3512 delete_insn (insn);
3513 }
3514 }
3515
3516 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3517 of the current function. */
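/* For example (an illustration only): with nonlocal labels L1 then L2 on
   the list, the result is (expr_list L2 (expr_list L1 (nil))), since each
   label is consed onto the front of the chain.  */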
3518
3519 rtx
3520 nonlocal_label_rtx_list ()
3521 {
3522 tree t;
3523 rtx x = 0;
3524
3525 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3526 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3527
3528 return x;
3529 }
3530 \f
3531 /* Output a USE for any register use in RTL.
3532    This is used with -noreg to mark the extent of the lifespan
3533 of any registers used in a user-visible variable's DECL_RTL. */
3534
3535 void
3536 use_variable (rtl)
3537 rtx rtl;
3538 {
3539 if (GET_CODE (rtl) == REG)
3540 /* This is a register variable. */
3541 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3542 else if (GET_CODE (rtl) == MEM
3543 && GET_CODE (XEXP (rtl, 0)) == REG
3544 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3545 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3546 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3547 /* This is a variable-sized structure. */
3548 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3549 }
3550
3551 /* Like use_variable except that it outputs the USEs after INSN
3552 instead of at the end of the insn-chain. */
3553
3554 void
3555 use_variable_after (rtl, insn)
3556 rtx rtl, insn;
3557 {
3558 if (GET_CODE (rtl) == REG)
3559 /* This is a register variable. */
3560 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3561 else if (GET_CODE (rtl) == MEM
3562 && GET_CODE (XEXP (rtl, 0)) == REG
3563 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3564 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3565 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3566 /* This is a variable-sized structure. */
3567 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3568 }
3569 \f
3570 int
3571 max_parm_reg_num ()
3572 {
3573 return max_parm_reg;
3574 }
3575
3576 /* Return the first insn following those generated by `assign_parms'. */
3577
3578 rtx
3579 get_first_nonparm_insn ()
3580 {
3581 if (last_parm_insn)
3582 return NEXT_INSN (last_parm_insn);
3583 return get_insns ();
3584 }
3585
3586 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3587 Crash if there is none. */
3588
3589 rtx
3590 get_first_block_beg ()
3591 {
3592 register rtx searcher;
3593 register rtx insn = get_first_nonparm_insn ();
3594
3595 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3596 if (GET_CODE (searcher) == NOTE
3597 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3598 return searcher;
3599
3600 abort (); /* Invalid call to this function. (See comments above.) */
3601 return NULL_RTX;
3602 }
3603
3604 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3605 This means a type for which function calls must pass an address to the
3606 function or get an address back from the function.
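   For instance (an illustration, not an exhaustive rule): with
   -fpcc-struct-return every aggregate is passed back this way, as is any
   value whose return registers are not all call-clobbered.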
3607 EXP may be a type node or an expression (whose type is tested). */
3608
3609 int
3610 aggregate_value_p (exp)
3611 tree exp;
3612 {
3613 int i, regno, nregs;
3614 rtx reg;
3615 tree type;
3616 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3617 type = exp;
3618 else
3619 type = TREE_TYPE (exp);
3620
3621 if (RETURN_IN_MEMORY (type))
3622 return 1;
3623 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3624 and thus can't be returned in registers. */
3625 if (TREE_ADDRESSABLE (type))
3626 return 1;
3627 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3628 return 1;
3629 /* Make sure we have suitable call-clobbered regs to return
3630 the value in; if not, we must return it in memory. */
3631 reg = hard_function_value (type, 0);
3632
3633 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3634 it is OK. */
3635 if (GET_CODE (reg) != REG)
3636 return 0;
3637
3638 regno = REGNO (reg);
3639 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3640 for (i = 0; i < nregs; i++)
3641 if (! call_used_regs[regno + i])
3642 return 1;
3643 return 0;
3644 }
3645 \f
3646 /* Assign RTL expressions to the function's parameters.
3647 This may involve copying them into registers and using
3648 those registers as the RTL for them.
3649
3650 If SECOND_TIME is non-zero it means that this function is being
3651 called a second time. This is done by integrate.c when a function's
3652 compilation is deferred. We need to come back here in case the
3653 FUNCTION_ARG macro computes items needed for the rest of the compilation
3654 (such as changing which registers are fixed or caller-saved). But suppress
3655 writing any insns or setting DECL_RTL of anything in this case. */
3656
3657 void
3658 assign_parms (fndecl, second_time)
3659 tree fndecl;
3660 int second_time;
3661 {
3662 register tree parm;
3663 register rtx entry_parm = 0;
3664 register rtx stack_parm = 0;
3665 CUMULATIVE_ARGS args_so_far;
3666 enum machine_mode promoted_mode, passed_mode;
3667 enum machine_mode nominal_mode, promoted_nominal_mode;
3668 int unsignedp;
3669 /* Total space needed so far for args on the stack,
3670 given as a constant and a tree-expression. */
3671 struct args_size stack_args_size;
3672 tree fntype = TREE_TYPE (fndecl);
3673 tree fnargs = DECL_ARGUMENTS (fndecl);
3674 /* This is used for the arg pointer when referring to stack args. */
3675 rtx internal_arg_pointer;
3676 /* This is a dummy PARM_DECL that we used for the function result if
3677 the function returns a structure. */
3678 tree function_result_decl = 0;
3679 int varargs_setup = 0;
3680 rtx conversion_insns = 0;
3681
3682 /* Nonzero if the last arg is named `__builtin_va_alist',
3683 which is used on some machines for old-fashioned non-ANSI varargs.h;
3684 this should be stuck onto the stack as if it had arrived there. */
3685 int hide_last_arg
3686 = (current_function_varargs
3687 && fnargs
3688 && (parm = tree_last (fnargs)) != 0
3689 && DECL_NAME (parm)
3690 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3691 "__builtin_va_alist")));
3692
3693 /* Nonzero if function takes extra anonymous args.
3694 This means the last named arg must be on the stack
3695 right before the anonymous ones. */
3696 int stdarg
3697 = (TYPE_ARG_TYPES (fntype) != 0
3698 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3699 != void_type_node));
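  /* Illustration: for `int f (int a, ...)' the arg-type list does not end
     in void_type_node, so STDARG is 1; for `int f (int a)' it does, and
     STDARG is 0.  */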
3700
3701 current_function_stdarg = stdarg;
3702
3703 /* If the reg that the virtual arg pointer will be translated into is
3704 not a fixed reg or is the stack pointer, make a copy of the virtual
3705 arg pointer, and address parms via the copy. The frame pointer is
3706 considered fixed even though it is not marked as such.
3707
3708 The second time through, simply use ap to avoid generating rtx. */
3709
3710 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3711 || ! (fixed_regs[ARG_POINTER_REGNUM]
3712 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3713 && ! second_time)
3714 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3715 else
3716 internal_arg_pointer = virtual_incoming_args_rtx;
3717 current_function_internal_arg_pointer = internal_arg_pointer;
3718
3719 stack_args_size.constant = 0;
3720 stack_args_size.var = 0;
3721
3722 /* If struct value address is treated as the first argument, make it so. */
3723 if (aggregate_value_p (DECL_RESULT (fndecl))
3724 && ! current_function_returns_pcc_struct
3725 && struct_value_incoming_rtx == 0)
3726 {
3727 tree type = build_pointer_type (TREE_TYPE (fntype));
3728
3729 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3730
3731 DECL_ARG_TYPE (function_result_decl) = type;
3732 TREE_CHAIN (function_result_decl) = fnargs;
3733 fnargs = function_result_decl;
3734 }
3735
3736 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3737 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3738 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3739
3740 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3741 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3742 #else
3743 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3744 #endif
3745
3746 /* We haven't yet found an argument that we must push and pretend the
3747 caller did. */
3748 current_function_pretend_args_size = 0;
3749
3750 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3751 {
3752 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3753 struct args_size stack_offset;
3754 struct args_size arg_size;
3755 int passed_pointer = 0;
3756 int did_conversion = 0;
3757 tree passed_type = DECL_ARG_TYPE (parm);
3758 tree nominal_type = TREE_TYPE (parm);
3759
3760 /* Set LAST_NAMED if this is last named arg before some
3761 anonymous args. */
3762 int last_named = ((TREE_CHAIN (parm) == 0
3763 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3764 && (stdarg || current_function_varargs));
3765 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3766 most machines, if this is a varargs/stdarg function, then we treat
3767 the last named arg as if it were anonymous too. */
3768 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3769
3770 if (TREE_TYPE (parm) == error_mark_node
3771 /* This can happen after weird syntax errors
3772 or if an enum type is defined among the parms. */
3773 || TREE_CODE (parm) != PARM_DECL
3774 || passed_type == NULL)
3775 {
3776 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3777 = gen_rtx_MEM (BLKmode, const0_rtx);
3778 TREE_USED (parm) = 1;
3779 continue;
3780 }
3781
3782       /* For a varargs.h function, save info about regs and stack space
3783 used by the individual args, not including the va_alist arg. */
3784 if (hide_last_arg && last_named)
3785 current_function_args_info = args_so_far;
3786
3787 /* Find mode of arg as it is passed, and mode of arg
3788 as it should be during execution of this function. */
3789 passed_mode = TYPE_MODE (passed_type);
3790 nominal_mode = TYPE_MODE (nominal_type);
3791
3792 /* If the parm's mode is VOID, its value doesn't matter,
3793 	 so avoid the usual things like emit_move_insn that could crash. */
3794 if (nominal_mode == VOIDmode)
3795 {
3796 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3797 continue;
3798 }
3799
3800 /* If the parm is to be passed as a transparent union, use the
3801 type of the first field for the tests below. We have already
3802 verified that the modes are the same. */
3803 if (DECL_TRANSPARENT_UNION (parm)
3804 || TYPE_TRANSPARENT_UNION (passed_type))
3805 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3806
3807 /* See if this arg was passed by invisible reference. It is if
3808 it is an object whose size depends on the contents of the
3809 object itself or if the machine requires these objects be passed
3810 that way. */
3811
3812 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3813 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3814 || TREE_ADDRESSABLE (passed_type)
3815 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3816 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3817 passed_type, named_arg)
3818 #endif
3819 )
3820 {
3821 passed_type = nominal_type = build_pointer_type (passed_type);
3822 passed_pointer = 1;
3823 passed_mode = nominal_mode = Pmode;
3824 }
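      /* E.g. (a hedged illustration): a type whose size involves a
	 PLACEHOLDER_EXPR, or any TREE_ADDRESSABLE type, is treated from
	 here on as a pointer parameter of mode Pmode.  */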
3825
3826 promoted_mode = passed_mode;
3827
3828 #ifdef PROMOTE_FUNCTION_ARGS
3829 /* Compute the mode to which the arg is actually extended. */
3830 unsignedp = TREE_UNSIGNED (passed_type);
3831 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3832 #endif
3833
3834 /* Let machine desc say which reg (if any) the parm arrives in.
3835 0 means it arrives on the stack. */
3836 #ifdef FUNCTION_INCOMING_ARG
3837 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3838 passed_type, named_arg);
3839 #else
3840 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3841 passed_type, named_arg);
3842 #endif
3843
3844 if (entry_parm == 0)
3845 promoted_mode = passed_mode;
3846
3847 #ifdef SETUP_INCOMING_VARARGS
3848 /* If this is the last named parameter, do any required setup for
3849 varargs or stdargs. We need to know about the case of this being an
3850 addressable type, in which case we skip the registers it
3851 would have arrived in.
3852
3853 For stdargs, LAST_NAMED will be set for two parameters, the one that
3854 is actually the last named, and the dummy parameter. We only
3855 want to do this action once.
3856
3857 Also, indicate when RTL generation is to be suppressed. */
3858 if (last_named && !varargs_setup)
3859 {
3860 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3861 current_function_pretend_args_size,
3862 second_time);
3863 varargs_setup = 1;
3864 }
3865 #endif
3866
3867 /* Determine parm's home in the stack,
3868 in case it arrives in the stack or we should pretend it did.
3869
3870 Compute the stack position and rtx where the argument arrives
3871 and its size.
3872
3873 There is one complexity here: If this was a parameter that would
3874 have been passed in registers, but wasn't only because it is
3875 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3876 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3877 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3878 0 as it was the previous time. */
3879
3880 locate_and_pad_parm (promoted_mode, passed_type,
3881 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3882 1,
3883 #else
3884 #ifdef FUNCTION_INCOMING_ARG
3885 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3886 passed_type,
3887 (named_arg
3888 || varargs_setup)) != 0,
3889 #else
3890 FUNCTION_ARG (args_so_far, promoted_mode,
3891 passed_type,
3892 named_arg || varargs_setup) != 0,
3893 #endif
3894 #endif
3895 fndecl, &stack_args_size, &stack_offset, &arg_size);
3896
3897 if (! second_time)
3898 {
3899 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3900
3901 if (offset_rtx == const0_rtx)
3902 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
3903 else
3904 stack_parm = gen_rtx_MEM (promoted_mode,
3905 gen_rtx_PLUS (Pmode,
3906 internal_arg_pointer,
3907 offset_rtx));
3908
3909 /* If this is a memory ref that contains aggregate components,
3910 mark it as such for cse and loop optimize. Likewise if it
3911 is readonly. */
3912 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3913 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3914 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
3915 }
3916
3917 /* If this parameter was passed both in registers and in the stack,
3918 use the copy on the stack. */
3919 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3920 entry_parm = 0;
3921
3922 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3923 /* If this parm was passed part in regs and part in memory,
3924 pretend it arrived entirely in memory
3925 by pushing the register-part onto the stack.
3926
3927 In the special case of a DImode or DFmode that is split,
3928 we could put it together in a pseudoreg directly,
3929 but for now that's not worth bothering with. */
3930
3931 if (entry_parm)
3932 {
3933 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3934 passed_type, named_arg);
3935
3936 if (nregs > 0)
3937 {
3938 current_function_pretend_args_size
3939 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3940 / (PARM_BOUNDARY / BITS_PER_UNIT)
3941 * (PARM_BOUNDARY / BITS_PER_UNIT));
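	      /* Worked example (illustrative values): with NREGS == 3,
		 UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 bits, the 12
		 bytes of register-passed data round up to a pretend size
		 of 16 bytes.  */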
3942
3943 if (! second_time)
3944 {
3945 /* Handle calls that pass values in multiple non-contiguous
3946 locations. The Irix 6 ABI has examples of this. */
3947 if (GET_CODE (entry_parm) == PARALLEL)
3948 emit_group_store (validize_mem (stack_parm),
3949 entry_parm);
3950 else
3951 move_block_from_reg (REGNO (entry_parm),
3952 validize_mem (stack_parm), nregs,
3953 int_size_in_bytes (TREE_TYPE (parm)));
3954 }
3955 entry_parm = stack_parm;
3956 }
3957 }
3958 #endif
3959
3960 /* If we didn't decide this parm came in a register,
3961 by default it came on the stack. */
3962 if (entry_parm == 0)
3963 entry_parm = stack_parm;
3964
3965 /* Record permanently how this parm was passed. */
3966 if (! second_time)
3967 DECL_INCOMING_RTL (parm) = entry_parm;
3968
3969 /* If there is actually space on the stack for this parm,
3970 count it in stack_args_size; otherwise set stack_parm to 0
3971 to indicate there is no preallocated stack slot for the parm. */
3972
3973 if (entry_parm == stack_parm
3974 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3975 /* On some machines, even if a parm value arrives in a register
3976 there is still an (uninitialized) stack slot allocated for it.
3977
3978 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3979 whether this parameter already has a stack slot allocated,
3980 because an arg block exists only if current_function_args_size
3981 is larger than some threshold, and we haven't calculated that
3982 yet. So, for now, we just assume that stack slots never exist
3983 in this case. */
3984 || REG_PARM_STACK_SPACE (fndecl) > 0
3985 #endif
3986 )
3987 {
3988 stack_args_size.constant += arg_size.constant;
3989 if (arg_size.var)
3990 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3991 }
3992 else
3993 /* No stack slot was pushed for this parm. */
3994 stack_parm = 0;
3995
3996 /* Update info on where next arg arrives in registers. */
3997
3998 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3999 passed_type, named_arg);
4000
4001 /* If this is our second time through, we are done with this parm. */
4002 if (second_time)
4003 continue;
4004
4005 /* If we can't trust the parm stack slot to be aligned enough
4006 for its ultimate type, don't use that slot after entry.
4007 We'll make another stack slot, if we need one. */
4008 {
4009 int thisparm_boundary
4010 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4011
4012 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4013 stack_parm = 0;
4014 }
4015
4016 /* If parm was passed in memory, and we need to convert it on entry,
4017 don't store it back in that same slot. */
4018 if (entry_parm != 0
4019 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4020 stack_parm = 0;
4021
4022 #if 0
4023 /* Now adjust STACK_PARM to the mode and precise location
4024 where this parameter should live during execution,
4025 if we discover that it must live in the stack during execution.
4026 To make debuggers happier on big-endian machines, we store
4027 the value in the last bytes of the space available. */
4028
4029 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4030 && stack_parm != 0)
4031 {
4032 rtx offset_rtx;
4033
4034 if (BYTES_BIG_ENDIAN
4035 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4036 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4037 - GET_MODE_SIZE (nominal_mode));
4038
4039 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4040 if (offset_rtx == const0_rtx)
4041 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4042 else
4043 stack_parm = gen_rtx_MEM (nominal_mode,
4044 gen_rtx_PLUS (Pmode,
4045 internal_arg_pointer,
4046 offset_rtx));
4047
4048 /* If this is a memory ref that contains aggregate components,
4049 mark it as such for cse and loop optimize. */
4050 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4051 }
4052 #endif /* 0 */
4053
4054 #ifdef STACK_REGS
4055 /* We need this "use" info, because the gcc-register->stack-register
4056 converter in reg-stack.c needs to know which registers are active
4057 at the start of the function call. The actual parameter loading
4058 instructions are not always available then anymore, since they might
4059      have been optimized away. */
4060
4061 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4062 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4063 #endif
4064
4065 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4066 in the mode in which it arrives.
4067 STACK_PARM is an RTX for a stack slot where the parameter can live
4068 during the function (in case we want to put it there).
4069 STACK_PARM is 0 if no stack slot was pushed for it.
4070
4071 Now output code if necessary to convert ENTRY_PARM to
4072 the type in which this function declares it,
4073 and store that result in an appropriate place,
4074 which may be a pseudo reg, may be STACK_PARM,
4075 or may be a local stack slot if STACK_PARM is 0.
4076
4077 Set DECL_RTL to that place. */
4078
4079 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4080 {
4081 /* If a BLKmode arrives in registers, copy it to a stack slot.
4082 Handle calls that pass values in multiple non-contiguous
4083 locations. The Irix 6 ABI has examples of this. */
4084 if (GET_CODE (entry_parm) == REG
4085 || GET_CODE (entry_parm) == PARALLEL)
4086 {
4087 int size_stored
4088 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4089 UNITS_PER_WORD);
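	      /* For instance (illustrative numbers): a 10-byte BLKmode
		 struct with UNITS_PER_WORD == 4 is rounded up to 12 bytes,
		 i.e. three full words.  */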
4090
4091 /* Note that we will be storing an integral number of words.
4092 So we have to be careful to ensure that we allocate an
4093 integral number of words. We do this below in the
4094 assign_stack_local if space was not allocated in the argument
4095 list. If it was, this will not work if PARM_BOUNDARY is not
4096 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4097 if it becomes a problem. */
4098
4099 if (stack_parm == 0)
4100 {
4101 stack_parm
4102 = assign_stack_local (GET_MODE (entry_parm),
4103 size_stored, 0);
4104
4105 /* If this is a memory ref that contains aggregate
4106 components, mark it as such for cse and loop optimize. */
4107 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4108 }
4109
4110 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4111 abort ();
4112
4113 if (TREE_READONLY (parm))
4114 RTX_UNCHANGING_P (stack_parm) = 1;
4115
4116 /* Handle calls that pass values in multiple non-contiguous
4117 locations. The Irix 6 ABI has examples of this. */
4118 if (GET_CODE (entry_parm) == PARALLEL)
4119 emit_group_store (validize_mem (stack_parm), entry_parm);
4120 else
4121 move_block_from_reg (REGNO (entry_parm),
4122 validize_mem (stack_parm),
4123 size_stored / UNITS_PER_WORD,
4124 int_size_in_bytes (TREE_TYPE (parm)));
4125 }
4126 DECL_RTL (parm) = stack_parm;
4127 }
4128 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4129 && ! DECL_INLINE (fndecl))
4130 /* layout_decl may set this. */
4131 || TREE_ADDRESSABLE (parm)
4132 || TREE_SIDE_EFFECTS (parm)
4133 /* If -ffloat-store specified, don't put explicit
4134 float variables into registers. */
4135 || (flag_float_store
4136 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4137 /* Always assign pseudo to structure return or item passed
4138 by invisible reference. */
4139 || passed_pointer || parm == function_result_decl)
4140 {
4141 /* Store the parm in a pseudoregister during the function, but we
4142 may need to do it in a wider mode. */
4143
4144 register rtx parmreg;
4145 int regno, regnoi = 0, regnor = 0;
4146
4147 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4148
4149 promoted_nominal_mode
4150 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4151
4152 parmreg = gen_reg_rtx (promoted_nominal_mode);
4153 mark_user_reg (parmreg);
4154
4155 /* If this was an item that we received a pointer to, set DECL_RTL
4156 appropriately. */
4157 if (passed_pointer)
4158 {
4159 DECL_RTL (parm)
4160 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4161 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4162 }
4163 else
4164 DECL_RTL (parm) = parmreg;
4165
4166 /* Copy the value into the register. */
4167 if (nominal_mode != passed_mode
4168 || promoted_nominal_mode != promoted_mode)
4169 {
4170 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4171 mode, by the caller. We now have to convert it to
4172 NOMINAL_MODE, if different. However, PARMREG may be in
4173 a different mode than NOMINAL_MODE if it is being stored
4174 promoted.
4175
4176 If ENTRY_PARM is a hard register, it might be in a register
4177 not valid for operating in its mode (e.g., an odd-numbered
4178 register for a DFmode). In that case, moves are the only
4179 thing valid, so we can't do a convert from there. This
4180 	 occurs when the calling sequence allows such misaligned
4181 usages.
4182
4183 In addition, the conversion may involve a call, which could
4184 clobber parameters which haven't been copied to pseudo
4185 registers yet. Therefore, we must first copy the parm to
4186 a pseudo reg here, and save the conversion until after all
4187 parameters have been moved. */
4188
4189 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4190
4191 emit_move_insn (tempreg, validize_mem (entry_parm));
4192
4193 push_to_sequence (conversion_insns);
4194 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4195
4196 expand_assignment (parm,
4197 make_tree (nominal_type, tempreg), 0, 0);
4198 conversion_insns = get_insns ();
4199 did_conversion = 1;
4200 end_sequence ();
4201 }
4202 else
4203 emit_move_insn (parmreg, validize_mem (entry_parm));
4204
4205 /* If we were passed a pointer but the actual value
4206 can safely live in a register, put it in one. */
4207 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4208 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4209 && ! DECL_INLINE (fndecl))
4210 /* layout_decl may set this. */
4211 || TREE_ADDRESSABLE (parm)
4212 || TREE_SIDE_EFFECTS (parm)
4213 /* If -ffloat-store specified, don't put explicit
4214 float variables into registers. */
4215 || (flag_float_store
4216 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4217 {
4218 /* We can't use nominal_mode, because it will have been set to
4219 Pmode above. We must use the actual mode of the parm. */
4220 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4221 mark_user_reg (parmreg);
4222 emit_move_insn (parmreg, DECL_RTL (parm));
4223 DECL_RTL (parm) = parmreg;
4224 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4225 now the parm. */
4226 stack_parm = 0;
4227 }
4228 #ifdef FUNCTION_ARG_CALLEE_COPIES
4229 /* If we are passed an arg by reference and it is our responsibility
4230 to make a copy, do it now.
4231 	 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4232 original argument, so we must recreate them in the call to
4233 FUNCTION_ARG_CALLEE_COPIES. */
4234       /* ??? Later add code to skip the copy when the argument isn't
4235 	 modified. */
4236
4237 else if (passed_pointer
4238 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4239 TYPE_MODE (DECL_ARG_TYPE (parm)),
4240 DECL_ARG_TYPE (parm),
4241 named_arg)
4242 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4243 {
4244 rtx copy;
4245 tree type = DECL_ARG_TYPE (parm);
4246
4247 /* This sequence may involve a library call perhaps clobbering
4248 registers that haven't been copied to pseudos yet. */
4249
4250 push_to_sequence (conversion_insns);
4251
4252 if (TYPE_SIZE (type) == 0
4253 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4254 /* This is a variable sized object. */
4255 copy = gen_rtx_MEM (BLKmode,
4256 allocate_dynamic_stack_space
4257 (expr_size (parm), NULL_RTX,
4258 TYPE_ALIGN (type)));
4259 else
4260 copy = assign_stack_temp (TYPE_MODE (type),
4261 int_size_in_bytes (type), 1);
4262 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4263 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4264
4265 store_expr (parm, copy, 0);
4266 emit_move_insn (parmreg, XEXP (copy, 0));
4267 if (flag_check_memory_usage)
4268 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4269 XEXP (copy, 0), ptr_mode,
4270 GEN_INT (int_size_in_bytes (type)),
4271 TYPE_MODE (sizetype),
4272 GEN_INT (MEMORY_USE_RW),
4273 TYPE_MODE (integer_type_node));
4274 conversion_insns = get_insns ();
4275 did_conversion = 1;
4276 end_sequence ();
4277 }
4278 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4279
4280 /* In any case, record the parm's desired stack location
4281 in case we later discover it must live in the stack.
4282
4283 If it is a COMPLEX value, store the stack location for both
4284 halves. */
4285
4286 if (GET_CODE (parmreg) == CONCAT)
4287 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4288 else
4289 regno = REGNO (parmreg);
4290
4291 if (regno >= max_parm_reg)
4292 {
4293 rtx *new;
4294 int old_max_parm_reg = max_parm_reg;
4295
4296 /* It's slow to expand this one register at a time,
4297 but it's also rare and we need max_parm_reg to be
4298 precisely correct. */
4299 max_parm_reg = regno + 1;
4300 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4301 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4302 old_max_parm_reg * sizeof (rtx));
4303 bzero ((char *) (new + old_max_parm_reg),
4304 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4305 parm_reg_stack_loc = new;
4306 }
4307
4308 if (GET_CODE (parmreg) == CONCAT)
4309 {
4310 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4311
4312 regnor = REGNO (gen_realpart (submode, parmreg));
4313 regnoi = REGNO (gen_imagpart (submode, parmreg));
4314
4315 if (stack_parm != 0)
4316 {
4317 parm_reg_stack_loc[regnor]
4318 = gen_realpart (submode, stack_parm);
4319 parm_reg_stack_loc[regnoi]
4320 = gen_imagpart (submode, stack_parm);
4321 }
4322 else
4323 {
4324 parm_reg_stack_loc[regnor] = 0;
4325 parm_reg_stack_loc[regnoi] = 0;
4326 }
4327 }
4328 else
4329 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4330
4331 /* Mark the register as eliminable if we did no conversion
4332 and it was copied from memory at a fixed offset,
4333 and the arg pointer was not copied to a pseudo-reg.
4334 If the arg pointer is a pseudo reg or the offset formed
4335 an invalid address, such memory-equivalences
4336 as we make here would screw up life analysis for it. */
4337 if (nominal_mode == passed_mode
4338 && ! did_conversion
4339 && stack_parm != 0
4340 && GET_CODE (stack_parm) == MEM
4341 && stack_offset.var == 0
4342 && reg_mentioned_p (virtual_incoming_args_rtx,
4343 XEXP (stack_parm, 0)))
4344 {
4345 rtx linsn = get_last_insn ();
4346 rtx sinsn, set;
4347
4348 /* Mark complex types separately. */
4349 if (GET_CODE (parmreg) == CONCAT)
4350 /* Scan backwards for the set of the real and
4351 imaginary parts. */
4352 for (sinsn = linsn; sinsn != 0;
4353 sinsn = prev_nonnote_insn (sinsn))
4354 {
4355 set = single_set (sinsn);
4356 if (set != 0
4357 && SET_DEST (set) == regno_reg_rtx [regnoi])
4358 REG_NOTES (sinsn)
4359 = gen_rtx_EXPR_LIST (REG_EQUIV,
4360 parm_reg_stack_loc[regnoi],
4361 REG_NOTES (sinsn));
4362 else if (set != 0
4363 && SET_DEST (set) == regno_reg_rtx [regnor])
4364 REG_NOTES (sinsn)
4365 = gen_rtx_EXPR_LIST (REG_EQUIV,
4366 parm_reg_stack_loc[regnor],
4367 REG_NOTES (sinsn));
4368 }
4369 else if ((set = single_set (linsn)) != 0
4370 && SET_DEST (set) == parmreg)
4371 REG_NOTES (linsn)
4372 = gen_rtx_EXPR_LIST (REG_EQUIV,
4373 stack_parm, REG_NOTES (linsn));
4374 }
4375
4376 /* For pointer data type, suggest pointer register. */
4377 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4378 mark_reg_pointer (parmreg,
4379 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4380 / BITS_PER_UNIT));
4381 }
4382 else
4383 {
4384 /* Value must be stored in the stack slot STACK_PARM
4385 during function execution. */
4386
4387 if (promoted_mode != nominal_mode)
4388 {
4389 /* Conversion is required. */
4390 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4391
4392 emit_move_insn (tempreg, validize_mem (entry_parm));
4393
4394 push_to_sequence (conversion_insns);
4395 entry_parm = convert_to_mode (nominal_mode, tempreg,
4396 TREE_UNSIGNED (TREE_TYPE (parm)));
4397 if (stack_parm)
4398 {
4399 /* ??? This may need a big-endian conversion on sparc64. */
4400 stack_parm = change_address (stack_parm, nominal_mode,
4401 NULL_RTX);
4402 }
4403 conversion_insns = get_insns ();
4404 did_conversion = 1;
4405 end_sequence ();
4406 }
4407
4408 if (entry_parm != stack_parm)
4409 {
4410 if (stack_parm == 0)
4411 {
4412 stack_parm
4413 = assign_stack_local (GET_MODE (entry_parm),
4414 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4415 /* If this is a memory ref that contains aggregate components,
4416 mark it as such for cse and loop optimize. */
4417 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4418 }
4419
4420 if (promoted_mode != nominal_mode)
4421 {
4422 push_to_sequence (conversion_insns);
4423 emit_move_insn (validize_mem (stack_parm),
4424 validize_mem (entry_parm));
4425 conversion_insns = get_insns ();
4426 end_sequence ();
4427 }
4428 else
4429 emit_move_insn (validize_mem (stack_parm),
4430 validize_mem (entry_parm));
4431 }
4432 if (flag_check_memory_usage)
4433 {
4434 push_to_sequence (conversion_insns);
4435 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4436 XEXP (stack_parm, 0), ptr_mode,
4437 GEN_INT (GET_MODE_SIZE (GET_MODE
4438 (entry_parm))),
4439 TYPE_MODE (sizetype),
4440 GEN_INT (MEMORY_USE_RW),
4441 TYPE_MODE (integer_type_node));
4442
4443 conversion_insns = get_insns ();
4444 end_sequence ();
4445 }
4446 DECL_RTL (parm) = stack_parm;
4447 }
4448
4449 /* If this "parameter" was the place where we are receiving the
4450 function's incoming structure pointer, set up the result. */
4451 if (parm == function_result_decl)
4452 {
4453 tree result = DECL_RESULT (fndecl);
4454 tree restype = TREE_TYPE (result);
4455
4456 DECL_RTL (result)
4457 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4458
4459 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4460 }
4461
4462 if (TREE_THIS_VOLATILE (parm))
4463 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4464 if (TREE_READONLY (parm))
4465 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4466 }
4467
4468 /* Output all parameter conversion instructions (possibly including calls)
4469 now that all parameters have been copied out of hard registers. */
4470 emit_insns (conversion_insns);
4471
4472 last_parm_insn = get_last_insn ();
4473
4474 current_function_args_size = stack_args_size.constant;
4475
4476 /* Adjust function incoming argument size for alignment and
4477 minimum length. */
4478
4479 #ifdef REG_PARM_STACK_SPACE
4480 #ifndef MAYBE_REG_PARM_STACK_SPACE
4481 current_function_args_size = MAX (current_function_args_size,
4482 REG_PARM_STACK_SPACE (fndecl));
4483 #endif
4484 #endif
4485
4486 #ifdef STACK_BOUNDARY
4487 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4488
4489 current_function_args_size
4490 = ((current_function_args_size + STACK_BYTES - 1)
4491 / STACK_BYTES) * STACK_BYTES;
4492 #endif
4493
4494 #ifdef ARGS_GROW_DOWNWARD
4495 current_function_arg_offset_rtx
4496 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4497 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4498 size_int (-stack_args_size.constant)),
4499 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4500 #else
4501 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4502 #endif
4503
4504 /* See how many bytes, if any, of its args a function should try to pop
4505 on return. */
4506
4507 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4508 current_function_args_size);
4509
4510   /* For a stdarg.h function, save info about
4511 regs and stack space used by the named args. */
4512
4513 if (!hide_last_arg)
4514 current_function_args_info = args_so_far;
4515
4516 /* Set the rtx used for the function return value. Put this in its
4517 own variable so any optimizers that need this information don't have
4518 to include tree.h. Do this here so it gets done when an inlined
4519 function gets output. */
4520
4521 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4522 }
4523 \f
4524 /* Indicate whether REGNO is an incoming argument to the current function
4525 that was promoted to a wider mode. If so, return the RTX for the
4526 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4527 that REGNO is promoted from and whether the promotion was signed or
4528 unsigned. */
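/* E.g. (an illustration): if PROMOTE_FUNCTION_ARGS widens an HImode arg
   to SImode and it arrives in a hard register, this returns that SImode
   REG with *PMODE set to HImode.  */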
4529
4530 #ifdef PROMOTE_FUNCTION_ARGS
4531
4532 rtx
4533 promoted_input_arg (regno, pmode, punsignedp)
4534 int regno;
4535 enum machine_mode *pmode;
4536 int *punsignedp;
4537 {
4538 tree arg;
4539
4540 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4541 arg = TREE_CHAIN (arg))
4542 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4543 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4544 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4545 {
4546 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4547 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4548
4549 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4550 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4551 && mode != DECL_MODE (arg))
4552 {
4553 *pmode = DECL_MODE (arg);
4554 *punsignedp = unsignedp;
4555 return DECL_INCOMING_RTL (arg);
4556 }
4557 }
4558
4559 return 0;
4560 }
4561
4562 #endif
4563 \f
4564 /* Compute the size and offset from the start of the stacked arguments for a
4565 parm passed in mode PASSED_MODE and with type TYPE.
4566
4567 INITIAL_OFFSET_PTR points to the current offset into the stacked
4568 arguments.
4569
4570 The starting offset and size for this parm are returned in *OFFSET_PTR
4571 and *ARG_SIZE_PTR, respectively.
4572
4573 IN_REGS is non-zero if the argument will be passed in registers. It will
4574 never be set if REG_PARM_STACK_SPACE is not defined.
4575
4576 FNDECL is the function in which the argument was defined.
4577
4578 There are two types of rounding that are done. The first, controlled by
4579 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4580 list to be aligned to the specific boundary (in bits). This rounding
4581 affects the initial and starting offsets, but not the argument size.
4582
4583 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4584 optionally rounds the size of the parm to PARM_BOUNDARY. The
4585 initial offset is not affected by this rounding, while the size always
4586 is and the starting offset may be. */
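/* Worked example (illustrative values): a 6-byte BLKmode arg at initial
   offset 2, with FUNCTION_ARG_BOUNDARY == 32 and PARM_BOUNDARY == 32,
   gets a starting offset of 4 (first rounding) and a size of 8 bytes
   (second rounding).  */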
4587
4588 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4589    initial_offset_ptr is positive because locate_and_pad_parm's
4590    callers pass in the total size of args so far as
4591    initial_offset_ptr.  arg_size_ptr is always positive. */
4592
4593 void
4594 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4595 initial_offset_ptr, offset_ptr, arg_size_ptr)
4596 enum machine_mode passed_mode;
4597 tree type;
4598 int in_regs;
4599 tree fndecl;
4600 struct args_size *initial_offset_ptr;
4601 struct args_size *offset_ptr;
4602 struct args_size *arg_size_ptr;
4603 {
4604 tree sizetree
4605 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4606 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4607 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4608
4609 #ifdef REG_PARM_STACK_SPACE
4610 /* If we have found a stack parm before we reach the end of the
4611 area reserved for registers, skip that area. */
4612 if (! in_regs)
4613 {
4614 int reg_parm_stack_space = 0;
4615
4616 #ifdef MAYBE_REG_PARM_STACK_SPACE
4617 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4618 #else
4619 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4620 #endif
4621 if (reg_parm_stack_space > 0)
4622 {
4623 if (initial_offset_ptr->var)
4624 {
4625 initial_offset_ptr->var
4626 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4627 size_int (reg_parm_stack_space));
4628 initial_offset_ptr->constant = 0;
4629 }
4630 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4631 initial_offset_ptr->constant = reg_parm_stack_space;
4632 }
4633 }
4634 #endif /* REG_PARM_STACK_SPACE */
4635
4636 arg_size_ptr->var = 0;
4637 arg_size_ptr->constant = 0;
4638
4639 #ifdef ARGS_GROW_DOWNWARD
4640 if (initial_offset_ptr->var)
4641 {
4642 offset_ptr->constant = 0;
4643 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4644 initial_offset_ptr->var);
4645 }
4646 else
4647 {
4648 offset_ptr->constant = - initial_offset_ptr->constant;
4649 offset_ptr->var = 0;
4650 }
4651 if (where_pad != none
4652 && (TREE_CODE (sizetree) != INTEGER_CST
4653 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4654 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4655 SUB_PARM_SIZE (*offset_ptr, sizetree);
4656 if (where_pad != downward)
4657 pad_to_arg_alignment (offset_ptr, boundary);
4658 if (initial_offset_ptr->var)
4659 {
4660 arg_size_ptr->var = size_binop (MINUS_EXPR,
4661 size_binop (MINUS_EXPR,
4662 integer_zero_node,
4663 initial_offset_ptr->var),
4664 offset_ptr->var);
4665 }
4666 else
4667 {
4668 arg_size_ptr->constant = (- initial_offset_ptr->constant
4669 - offset_ptr->constant);
4670 }
4671 #else /* !ARGS_GROW_DOWNWARD */
4672 pad_to_arg_alignment (initial_offset_ptr, boundary);
4673 *offset_ptr = *initial_offset_ptr;
4674
4675 #ifdef PUSH_ROUNDING
4676 if (passed_mode != BLKmode)
4677 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4678 #endif
4679
4680   /* pad_below needs the pre-rounded size to know how much to pad below,
4681      so this must be done before rounding up. */
4682 if (where_pad == downward
4683 /* However, BLKmode args passed in regs have their padding done elsewhere.
4684 The stack slot must be able to hold the entire register. */
4685 && !(in_regs && passed_mode == BLKmode))
4686 pad_below (offset_ptr, passed_mode, sizetree);
4687
4688 if (where_pad != none
4689 && (TREE_CODE (sizetree) != INTEGER_CST
4690 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4691 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4692
4693 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4694 #endif /* ARGS_GROW_DOWNWARD */
4695 }
4696
4697 /* Round the stack offset in *OFFSET_PTR to a multiple of BOUNDARY: up,
4698    or down when ARGS_GROW_DOWNWARD.  BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
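/* E.g. (an illustration): with BOUNDARY == 64 bits, a constant offset of
   20 becomes 24 when args grow upward (CEIL_ROUND) and 16 when they grow
   downward (FLOOR_ROUND).  */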
4699
4700 static void
4701 pad_to_arg_alignment (offset_ptr, boundary)
4702 struct args_size *offset_ptr;
4703 int boundary;
4704 {
4705 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4706
4707 if (boundary > BITS_PER_UNIT)
4708 {
4709 if (offset_ptr->var)
4710 {
4711 offset_ptr->var =
4712 #ifdef ARGS_GROW_DOWNWARD
4713 round_down
4714 #else
4715 round_up
4716 #endif
4717 (ARGS_SIZE_TREE (*offset_ptr),
4718 boundary / BITS_PER_UNIT);
4719 offset_ptr->constant = 0; /*?*/
4720 }
4721 else
4722 offset_ptr->constant =
4723 #ifdef ARGS_GROW_DOWNWARD
4724 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4725 #else
4726 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4727 #endif
4728 }
4729 }
4730
4731 #ifndef ARGS_GROW_DOWNWARD
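/* Skip to the end of any padding that goes below the argument: advance
   *OFFSET_PTR so that an argument of mode PASSED_MODE (or of size
   SIZETREE, for BLKmode) that is padded downward ends on a PARM_BOUNDARY
   boundary.  */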
4732 static void
4733 pad_below (offset_ptr, passed_mode, sizetree)
4734 struct args_size *offset_ptr;
4735 enum machine_mode passed_mode;
4736 tree sizetree;
4737 {
4738 if (passed_mode != BLKmode)
4739 {
4740 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4741 offset_ptr->constant
4742 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4743 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4744 - GET_MODE_SIZE (passed_mode));
4745 }
4746 else
4747 {
4748 if (TREE_CODE (sizetree) != INTEGER_CST
4749 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4750 {
4751 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4752 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4753 /* Add it in. */
4754 ADD_PARM_SIZE (*offset_ptr, s2);
4755 SUB_PARM_SIZE (*offset_ptr, sizetree);
4756 }
4757 }
4758 }
4759 #endif
4760
4761 #ifdef ARGS_GROW_DOWNWARD
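/* Round the tree VALUE down to a multiple of DIVISOR; e.g. a VALUE of 22
   with DIVISOR 8 yields 16.  (Counterpart of round_up, used above.)  */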
4762 static tree
4763 round_down (value, divisor)
4764 tree value;
4765 int divisor;
4766 {
4767 return size_binop (MULT_EXPR,
4768 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4769 size_int (divisor));
4770 }
4771 #endif
4772 \f
4773 /* Walk the tree of blocks describing the binding levels within a function
4774 and warn about uninitialized variables.
4775 This is done after calling flow_analysis and before global_alloc
4776    maps the pseudo-regs to hard regs. */
4777
4778 void
4779 uninitialized_vars_warning (block)
4780 tree block;
4781 {
4782 register tree decl, sub;
4783 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4784 {
4785 if (TREE_CODE (decl) == VAR_DECL
4786 	 /* These warnings are unreliable for aggregates
4787 because assigning the fields one by one can fail to convince
4788 flow.c that the entire aggregate was initialized.
4789 Unions are troublesome because members may be shorter. */
4790 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4791 && DECL_RTL (decl) != 0
4792 && GET_CODE (DECL_RTL (decl)) == REG
4793 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4794 warning_with_decl (decl,
4795 "`%s' might be used uninitialized in this function");
4796 if (TREE_CODE (decl) == VAR_DECL
4797 && DECL_RTL (decl) != 0
4798 && GET_CODE (DECL_RTL (decl)) == REG
4799 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4800 warning_with_decl (decl,
4801 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4802 }
4803 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4804 uninitialized_vars_warning (sub);
4805 }
4806
4807 /* Do the appropriate part of uninitialized_vars_warning
4808 but for arguments instead of local variables. */
4809
4810 void
4811 setjmp_args_warning ()
4812 {
4813 register tree decl;
4814 for (decl = DECL_ARGUMENTS (current_function_decl);
4815 decl; decl = TREE_CHAIN (decl))
4816 if (DECL_RTL (decl) != 0
4817 && GET_CODE (DECL_RTL (decl)) == REG
4818 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4819 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4820 }
4821
4822 /* If this function calls setjmp, put all vars into the stack
4823 unless they were declared `register'. */
4824
4825 void
4826 setjmp_protect (block)
4827 tree block;
4828 {
4829 register tree decl, sub;
4830 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4831 if ((TREE_CODE (decl) == VAR_DECL
4832 || TREE_CODE (decl) == PARM_DECL)
4833 && DECL_RTL (decl) != 0
4834 && (GET_CODE (DECL_RTL (decl)) == REG
4835 || (GET_CODE (DECL_RTL (decl)) == MEM
4836 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4837 /* If this variable came from an inline function, it must be
4838 that its life doesn't overlap the setjmp. If there was a
4839 setjmp in the function, it would already be in memory. We
4840 	 must exclude such variables because their DECL_RTL might be
4841 set to strange things such as virtual_stack_vars_rtx. */
4842 && ! DECL_FROM_INLINE (decl)
4843 && (
4844 #ifdef NON_SAVING_SETJMP
4845 /* If longjmp doesn't restore the registers,
4846 don't put anything in them. */
4847 NON_SAVING_SETJMP
4848 ||
4849 #endif
4850 ! DECL_REGISTER (decl)))
4851 put_var_into_stack (decl);
4852 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4853 setjmp_protect (sub);
4854 }
4855 \f
4856 /* Like the previous function, but for args instead of local variables. */
4857
4858 void
4859 setjmp_protect_args ()
4860 {
4861 register tree decl;
4862 for (decl = DECL_ARGUMENTS (current_function_decl);
4863 decl; decl = TREE_CHAIN (decl))
4864 if ((TREE_CODE (decl) == VAR_DECL
4865 || TREE_CODE (decl) == PARM_DECL)
4866 && DECL_RTL (decl) != 0
4867 && (GET_CODE (DECL_RTL (decl)) == REG
4868 || (GET_CODE (DECL_RTL (decl)) == MEM
4869 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4870 && (
4871 /* If longjmp doesn't restore the registers,
4872 don't put anything in them. */
4873 #ifdef NON_SAVING_SETJMP
4874 NON_SAVING_SETJMP
4875 ||
4876 #endif
4877 ! DECL_REGISTER (decl)))
4878 put_var_into_stack (decl);
4879 }
4880 \f
4881 /* Return the context-pointer register corresponding to DECL,
4882 or 0 if it does not need one. */
4883
4884 rtx
4885 lookup_static_chain (decl)
4886 tree decl;
4887 {
4888 tree context = decl_function_context (decl);
4889 tree link;
4890
4891 if (context == 0
4892 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4893 return 0;
4894
4895 /* We treat inline_function_decl as an alias for the current function
4896 because that is the inline function whose vars, types, etc.
4897 are being merged into the current function.
4898 See expand_inline_function. */
4899 if (context == current_function_decl || context == inline_function_decl)
4900 return virtual_stack_vars_rtx;
4901
4902 for (link = context_display; link; link = TREE_CHAIN (link))
4903 if (TREE_PURPOSE (link) == context)
4904 return RTL_EXPR_RTL (TREE_VALUE (link));
4905
4906 abort ();
4907 }
4908 \f
4909 /* Convert a stack slot address ADDR for variable VAR
4910 (from a containing function)
4911 into an address valid in this function (using a static chain). */
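/* Illustration (hypothetical offsets): if VAR lives at (plus fp -12) in
   its defining function, the nested function gets (plus base -12), where
   BASE is fetched through the static chain / context display.  */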
4912
4913 rtx
4914 fix_lexical_addr (addr, var)
4915 rtx addr;
4916 tree var;
4917 {
4918 rtx basereg;
4919 HOST_WIDE_INT displacement;
4920 tree context = decl_function_context (var);
4921 struct function *fp;
4922 rtx base = 0;
4923
4924 /* If this is the present function, we need not do anything. */
4925 if (context == current_function_decl || context == inline_function_decl)
4926 return addr;
4927
4928 for (fp = outer_function_chain; fp; fp = fp->next)
4929 if (fp->decl == context)
4930 break;
4931
4932 if (fp == 0)
4933 abort ();
4934
4935 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
4936 addr = XEXP (XEXP (addr, 0), 0);
4937
4938 /* Decode given address as base reg plus displacement. */
4939 if (GET_CODE (addr) == REG)
4940 basereg = addr, displacement = 0;
4941 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4942 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4943 else
4944 abort ();
4945
4946 /* We accept vars reached via the containing function's
4947 incoming arg pointer and via its stack variables pointer. */
4948 if (basereg == fp->internal_arg_pointer)
4949 {
4950 /* If reached via arg pointer, get the arg pointer value
4951 out of that function's stack frame.
4952
4953 There are two cases: If a separate ap is needed, allocate a
4954 slot in the outer function for it and dereference it that way.
4955 This is correct even if the real ap is actually a pseudo.
4956 Otherwise, just adjust the offset from the frame pointer to
4957 compensate. */
4958
4959 #ifdef NEED_SEPARATE_AP
4960 rtx addr;
4961
4962 if (fp->arg_pointer_save_area == 0)
4963 fp->arg_pointer_save_area
4964 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4965
4966 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4967 addr = memory_address (Pmode, addr);
4968
4969 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
4970 #else
4971 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4972 base = lookup_static_chain (var);
4973 #endif
4974 }
4975
4976 else if (basereg == virtual_stack_vars_rtx)
4977 {
4978 /* This is the same code as lookup_static_chain, duplicated here to
4979 avoid an extra call to decl_function_context. */
4980 tree link;
4981
4982 for (link = context_display; link; link = TREE_CHAIN (link))
4983 if (TREE_PURPOSE (link) == context)
4984 {
4985 base = RTL_EXPR_RTL (TREE_VALUE (link));
4986 break;
4987 }
4988 }
4989
4990 if (base == 0)
4991 abort ();
4992
4993 /* Use same offset, relative to appropriate static chain or argument
4994 pointer. */
4995 return plus_constant (base, displacement);
4996 }
4997 \f
4998 /* Return the address of the trampoline for entering nested fn FUNCTION.
4999 If necessary, allocate a trampoline (in the stack frame)
5000 and emit rtl to initialize its contents (at entry to this function). */
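/* For instance (an illustration): taking the address of a nested function
   that references its parent's locals yields a trampoline address here;
   a call through it loads the static chain before jumping to FUNCTION.  */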
5001
5002 rtx
5003 trampoline_address (function)
5004 tree function;
5005 {
5006 tree link;
5007 tree rtlexp;
5008 rtx tramp;
5009 struct function *fp;
5010 tree fn_context;
5011
5012 /* Find an existing trampoline and return it. */
5013 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5014 if (TREE_PURPOSE (link) == function)
5015 return
5016 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5017
5018 for (fp = outer_function_chain; fp; fp = fp->next)
5019 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5020 if (TREE_PURPOSE (link) == function)
5021 {
5022 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5023 function);
5024 return round_trampoline_addr (tramp);
5025 }
5026
5027 /* None exists; we must make one. */
5028
5029 /* Find the `struct function' for the function containing FUNCTION. */
5030 fp = 0;
5031 fn_context = decl_function_context (function);
5032 if (fn_context != current_function_decl
5033 && fn_context != inline_function_decl)
5034 for (fp = outer_function_chain; fp; fp = fp->next)
5035 if (fp->decl == fn_context)
5036 break;
5037
5038 /* Allocate run-time space for this trampoline
5039 (usually in the defining function's stack frame). */
5040 #ifdef ALLOCATE_TRAMPOLINE
5041 tramp = ALLOCATE_TRAMPOLINE (fp);
5042 #else
5043 /* If rounding is needed, allocate extra space
5044 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5045 #ifdef TRAMPOLINE_ALIGNMENT
5046 #define TRAMPOLINE_REAL_SIZE \
5047 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5048 #else
5049 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5050 #endif
5051 if (fp != 0)
5052 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5053 else
5054 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5055 #endif
5056
5057 /* Record the trampoline for reuse and note it for later initialization
5058 by expand_function_end. */
5059 if (fp != 0)
5060 {
5061 push_obstacks (fp->function_maybepermanent_obstack,
5062 fp->function_maybepermanent_obstack);
5063 rtlexp = make_node (RTL_EXPR);
5064 RTL_EXPR_RTL (rtlexp) = tramp;
5065 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5066 pop_obstacks ();
5067 }
5068 else
5069 {
5070 /* Make the RTL_EXPR node temporary, not momentary, so that the
5071 trampoline_list doesn't become garbage. */
5072 int momentary = suspend_momentary ();
5073 rtlexp = make_node (RTL_EXPR);
5074 resume_momentary (momentary);
5075
5076 RTL_EXPR_RTL (rtlexp) = tramp;
5077 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5078 }
5079
5080 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5081 return round_trampoline_addr (tramp);
5082 }
5083
5084 /* Given a trampoline address,
5085    round it to a multiple of TRAMPOLINE_ALIGNMENT. */
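/* E.g. (an illustration): with TRAMPOLINE_ALIGNMENT == 64 bits, an
   address of 0x1003 is rounded to (0x1003 + 7) & -8 == 0x1008.  */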
5086
5087 static rtx
5088 round_trampoline_addr (tramp)
5089 rtx tramp;
5090 {
5091 #ifdef TRAMPOLINE_ALIGNMENT
5092 /* Round address up to desired boundary. */
5093 rtx temp = gen_reg_rtx (Pmode);
5094 temp = expand_binop (Pmode, add_optab, tramp,
5095 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5096 temp, 0, OPTAB_LIB_WIDEN);
5097 tramp = expand_binop (Pmode, and_optab, temp,
5098 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5099 temp, 0, OPTAB_LIB_WIDEN);
5100 #endif
5101 return tramp;
5102 }
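/* A worked example of the rounding above, assuming TRAMPOLINE_ALIGNMENT is
   32 and BITS_PER_UNIT is 8: the byte alignment is 4, so an address of
   0x1001 is rounded to (0x1001 + 3) & ~3 = 0x1004, while 0x1004 stays put.  */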
5103 \f
5104 /* The functions identify_blocks and reorder_blocks provide a way to
5105 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5106 duplicate portions of the RTL code. Call identify_blocks before
5107 changing the RTL, and call reorder_blocks after. */
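/* A minimal sketch of that calling pattern, assuming a pass that rewrites
   the insn chain of a function FNDECL (the names here are hypothetical):

     tree *vec = identify_blocks (DECL_INITIAL (fndecl), insns);
     ... reshuffle or duplicate insns, including block notes ...
     DECL_INITIAL (fndecl) = reorder_blocks (vec, DECL_INITIAL (fndecl),
                                             insns);
     free (vec);

   The vector is allocated with xmalloc, so the caller frees it.  */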
5108
5109 /* Put all this function's BLOCK nodes, including those that are chained
5110 onto the first block, into a vector, and return it.
5111 Also store in each NOTE for the beginning or end of a block
5112 the index of that block in the vector.
5113 The arguments are BLOCK, the chain of top-level blocks of the function,
5114 and INSNS, the insn chain of the function. */
5115
5116 tree *
5117 identify_blocks (block, insns)
5118 tree block;
5119 rtx insns;
5120 {
5121 int n_blocks;
5122 tree *block_vector;
5123 int *block_stack;
5124 int depth = 0;
5125 int next_block_number = 1;
5126 int current_block_number = 1;
5127 rtx insn;
5128
5129 if (block == 0)
5130 return 0;
5131
5132 n_blocks = all_blocks (block, 0);
5133 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5134 block_stack = (int *) alloca (n_blocks * sizeof (int));
5135
5136 all_blocks (block, block_vector);
5137
5138 for (insn = insns; insn; insn = NEXT_INSN (insn))
5139 if (GET_CODE (insn) == NOTE)
5140 {
5141 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5142 {
5143 block_stack[depth++] = current_block_number;
5144 current_block_number = next_block_number;
5145 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5146 }
5147 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5148 {
5149 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5150 current_block_number = block_stack[--depth];
5151 }
5152 }
5153
5154 if (n_blocks != next_block_number)
5155 abort ();
5156
5157 return block_vector;
5158 }
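/* For example, with source nesting { A { B } { C } } the BLOCK_BEG notes
   are numbered 1 (A), 2 (B) and 3 (C) in chain order, each BLOCK_END note
   gets the number of its matching BLOCK_BEG, and index 0 is left for the
   top-level block stored in block_vector[0].  */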
5159
5160 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5161 and a revised instruction chain, rebuild the tree structure
5162 of BLOCK nodes to correspond to the new order of RTL.
5163 The new block tree is inserted below BLOCK.
5164 Returns the current top-level block. */
5165
5166 tree
5167 reorder_blocks (block_vector, block, insns)
5168 tree *block_vector;
5169 tree block;
5170 rtx insns;
5171 {
5172 tree current_block = block;
5173 rtx insn;
5174
5175 if (block_vector == 0)
5176 return block;
5177
5178 /* Prune the old trees away, so that they don't get in the way. */
5179 BLOCK_SUBBLOCKS (current_block) = 0;
5180 BLOCK_CHAIN (current_block) = 0;
5181
5182 for (insn = insns; insn; insn = NEXT_INSN (insn))
5183 if (GET_CODE (insn) == NOTE)
5184 {
5185 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5186 {
5187 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5188 /* If we have seen this block before, copy it. */
5189 if (TREE_ASM_WRITTEN (block))
5190 block = copy_node (block);
5191 BLOCK_SUBBLOCKS (block) = 0;
5192 TREE_ASM_WRITTEN (block) = 1;
5193 BLOCK_SUPERCONTEXT (block) = current_block;
5194 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5195 BLOCK_SUBBLOCKS (current_block) = block;
5196 current_block = block;
5197 NOTE_SOURCE_FILE (insn) = 0;
5198 }
5199 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5200 {
5201 BLOCK_SUBBLOCKS (current_block)
5202 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5203 current_block = BLOCK_SUPERCONTEXT (current_block);
5204 NOTE_SOURCE_FILE (insn) = 0;
5205 }
5206 }
5207
5208 BLOCK_SUBBLOCKS (current_block)
5209 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5210 return current_block;
5211 }
5212
5213 /* Reverse the order of elements in the chain T of blocks,
5214 and return the new head of the chain (old last element). */
5215
5216 static tree
5217 blocks_nreverse (t)
5218 tree t;
5219 {
5220 register tree prev = 0, decl, next;
5221 for (decl = t; decl; decl = next)
5222 {
5223 next = BLOCK_CHAIN (decl);
5224 BLOCK_CHAIN (decl) = prev;
5225 prev = decl;
5226 }
5227 return prev;
5228 }
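/* E.g. a chain A -> B -> C comes back as C -> B -> A; only the BLOCK_CHAIN
   links are rewritten, no nodes are copied.  */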
5229
5230 /* Count the BLOCK nodes in the chain starting with BLOCK, including all
5231 their subblocks, and store them into the vector VECTOR if it is nonzero.
5232 Also clear TREE_ASM_WRITTEN in all the blocks. */
5233
5234 static int
5235 all_blocks (block, vector)
5236 tree block;
5237 tree *vector;
5238 {
5239 int n_blocks = 0;
5240
5241 while (block)
5242 {
5243 TREE_ASM_WRITTEN (block) = 0;
5244
5245 /* Record this block. */
5246 if (vector)
5247 vector[n_blocks] = block;
5248
5249 ++n_blocks;
5250
5251 /* Record the subblocks, and their subblocks... */
5252 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5253 vector ? vector + n_blocks : 0);
5254 block = BLOCK_CHAIN (block);
5255 }
5256
5257 return n_blocks;
5258 }
5259 \f
5260 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5261 and initialize static variables for generating RTL for the statements
5262 of the function. */
5263
5264 void
5265 init_function_start (subr, filename, line)
5266 tree subr;
5267 char *filename;
5268 int line;
5269 {
5270 init_stmt_for_function ();
5271
5272 cse_not_expected = ! optimize;
5273
5274 /* Caller save not needed yet. */
5275 caller_save_needed = 0;
5276
5277 /* No stack slots have been made yet. */
5278 stack_slot_list = 0;
5279
5280 /* There is no stack slot for handling nonlocal gotos. */
5281 nonlocal_goto_handler_slot = 0;
5282 nonlocal_goto_stack_level = 0;
5283
5284 /* No labels have been declared for nonlocal use. */
5285 nonlocal_labels = 0;
5286
5287 /* No function calls so far in this function. */
5288 function_call_count = 0;
5289
5290 /* No parm regs have been allocated.
5291 (This is important for output_inline_function.) */
5292 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5293
5294 /* Initialize the RTL mechanism. */
5295 init_emit ();
5296
5297 /* Initialize the queue of pending postincrements and postdecrements,
5298 and some other info in expr.c. */
5299 init_expr ();
5300
5301 /* We haven't done register allocation yet. */
5302 reg_renumber = 0;
5303
5304 init_const_rtx_hash_table ();
5305
5306 current_function_name = (*decl_printable_name) (subr, 2);
5307
5308 /* Nonzero if this is a nested function that uses a static chain. */
5309
5310 current_function_needs_context
5311 = (decl_function_context (current_function_decl) != 0
5312 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5313
5314 /* Set if a call to setjmp is seen. */
5315 current_function_calls_setjmp = 0;
5316
5317 /* Set if a call to longjmp is seen. */
5318 current_function_calls_longjmp = 0;
5319
5320 current_function_calls_alloca = 0;
5321 current_function_has_nonlocal_label = 0;
5322 current_function_has_nonlocal_goto = 0;
5323 current_function_contains_functions = 0;
5324 current_function_is_thunk = 0;
5325
5326 current_function_returns_pcc_struct = 0;
5327 current_function_returns_struct = 0;
5328 current_function_epilogue_delay_list = 0;
5329 current_function_uses_const_pool = 0;
5330 current_function_uses_pic_offset_table = 0;
5331 current_function_cannot_inline = 0;
5332
5333 /* We have not yet needed to make a label to jump to for tail-recursion. */
5334 tail_recursion_label = 0;
5335
5336 /* We haven't had a need to make a save area for ap yet. */
5337
5338 arg_pointer_save_area = 0;
5339
5340 /* No stack slots allocated yet. */
5341 frame_offset = 0;
5342
5343 /* No SAVE_EXPRs in this function yet. */
5344 save_expr_regs = 0;
5345
5346 /* No RTL_EXPRs in this function yet. */
5347 rtl_expr_chain = 0;
5348
5349 /* Set up to allocate temporaries. */
5350 init_temp_slots ();
5351
5352 /* Within function body, compute a type's size as soon as it is laid out. */
5353 immediate_size_expand++;
5354
5355 /* We haven't made any trampolines for this function yet. */
5356 trampoline_list = 0;
5357
5358 init_pending_stack_adjust ();
5359 inhibit_defer_pop = 0;
5360
5361 current_function_outgoing_args_size = 0;
5362
5363 /* Prevent ever trying to delete the first instruction of a function.
5364 Also tell final how to output a linenum before the function prologue.
5365 Note linenums could be missing, e.g. when compiling a Java .class file. */
5366 if (line > 0)
5367 emit_line_note (filename, line);
5368
5369 /* Make sure first insn is a note even if we don't want linenums.
5370 This makes sure the first insn will never be deleted.
5371 Also, final expects a note to appear there. */
5372 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5373
5374 /* Set flags used by final.c. */
5375 if (aggregate_value_p (DECL_RESULT (subr)))
5376 {
5377 #ifdef PCC_STATIC_STRUCT_RETURN
5378 current_function_returns_pcc_struct = 1;
5379 #endif
5380 current_function_returns_struct = 1;
5381 }
5382
5383 /* Warn if the return value is an aggregate type,
5384 regardless of which calling convention we are using for it. */
5385 if (warn_aggregate_return
5386 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5387 warning ("function returns an aggregate");
5388
5389 current_function_returns_pointer
5390 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5391
5392 /* Indicate that we need to distinguish between the return value of the
5393 present function and the return value of a function being called. */
5394 rtx_equal_function_value_matters = 1;
5395
5396 /* Indicate that we have not instantiated virtual registers yet. */
5397 virtuals_instantiated = 0;
5398
5399 /* Indicate we have no need of a frame pointer yet. */
5400 frame_pointer_needed = 0;
5401
5402 /* By default assume not varargs or stdarg. */
5403 current_function_varargs = 0;
5404 current_function_stdarg = 0;
5405 }
5406
5407 /* Indicate that the current function uses extra args
5408 not explicitly mentioned in the argument list in any fashion. */
5409
5410 void
5411 mark_varargs ()
5412 {
5413 current_function_varargs = 1;
5414 }
5415
5416 /* Expand a call to __main at the beginning of a possible main function. */
5417
5418 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5419 #undef HAS_INIT_SECTION
5420 #define HAS_INIT_SECTION
5421 #endif
5422
5423 void
5424 expand_main_function ()
5425 {
5426 #if !defined (HAS_INIT_SECTION)
5427 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5428 VOIDmode, 0);
5429 #endif /* not HAS_INIT_SECTION */
5430 }
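/* On targets without an init section the effect is as if the user's main
   began with a call to __main (assuming the default NAME__MAIN above):

     int main (void)
     {
       __main ();
       ... body of main ...
     }

   where __main conventionally runs the global constructors.  */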
5431 \f
5432 extern struct obstack permanent_obstack;
5433
5434 /* Start the RTL for a new function, and set variables used for
5435 emitting RTL.
5436 SUBR is the FUNCTION_DECL node.
5437 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5438 the function's parameters, which must be run at any return statement. */
5439
5440 void
5441 expand_function_start (subr, parms_have_cleanups)
5442 tree subr;
5443 int parms_have_cleanups;
5444 {
5445 register int i;
5446 tree tem;
5447 rtx last_ptr = NULL_RTX;
5448
5449 /* Make sure volatile mem refs aren't considered
5450 valid operands of arithmetic insns. */
5451 init_recog_no_volatile ();
5452
5453 /* If function gets a static chain arg, store it in the stack frame.
5454 Do this first, so it gets the first stack slot offset. */
5455 if (current_function_needs_context)
5456 {
5457 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5458
5459 /* Delay copying the static chain if it is not a register, to avoid
5460 conflicts with regs used for parameters. */
5461 if (! SMALL_REGISTER_CLASSES
5462 || GET_CODE (static_chain_incoming_rtx) == REG)
5463 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5464 }
5465
5466 /* If the parameters of this function need cleaning up, get a label
5467 for the beginning of the code which executes those cleanups. This must
5468 be done before doing anything with return_label. */
5469 if (parms_have_cleanups)
5470 cleanup_label = gen_label_rtx ();
5471 else
5472 cleanup_label = 0;
5473
5474 /* Make the label for return statements to jump to, if this machine
5475 does not have a one-instruction return and uses an epilogue,
5476 or if it returns a structure, or if it has parm cleanups. */
5477 #ifdef HAVE_return
5478 if (cleanup_label == 0 && HAVE_return
5479 && ! current_function_returns_pcc_struct
5480 && ! (current_function_returns_struct && ! optimize))
5481 return_label = 0;
5482 else
5483 return_label = gen_label_rtx ();
5484 #else
5485 return_label = gen_label_rtx ();
5486 #endif
5487
5488 /* Initialize rtx used to return the value. */
5489 /* Do this before assign_parms so that we copy the struct value address
5490 before any library calls that assign parms might generate. */
5491
5492 /* Decide whether to return the value in memory or in a register. */
5493 if (aggregate_value_p (DECL_RESULT (subr)))
5494 {
5495 /* Returning something that won't go in a register. */
5496 register rtx value_address = 0;
5497
5498 #ifdef PCC_STATIC_STRUCT_RETURN
5499 if (current_function_returns_pcc_struct)
5500 {
5501 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5502 value_address = assemble_static_space (size);
5503 }
5504 else
5505 #endif
5506 {
5507 /* Expect to be passed the address of a place to store the value.
5508 If it is passed as an argument, assign_parms will take care of
5509 it. */
5510 if (struct_value_incoming_rtx)
5511 {
5512 value_address = gen_reg_rtx (Pmode);
5513 emit_move_insn (value_address, struct_value_incoming_rtx);
5514 }
5515 }
5516 if (value_address)
5517 {
5518 DECL_RTL (DECL_RESULT (subr))
5519 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5520 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5521 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5522 }
5523 }
5524 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5525 /* If return mode is void, this decl rtl should not be used. */
5526 DECL_RTL (DECL_RESULT (subr)) = 0;
5527 else if (parms_have_cleanups)
5528 {
5529 /* If the function will end with cleanup code for parms,
5530 compute the return value into a pseudo reg,
5531 which we will copy into the true return register
5532 after the cleanups are done. */
5533
5534 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5535
5536 #ifdef PROMOTE_FUNCTION_RETURN
5537 tree type = TREE_TYPE (DECL_RESULT (subr));
5538 int unsignedp = TREE_UNSIGNED (type);
5539
5540 mode = promote_mode (type, mode, &unsignedp, 1);
5541 #endif
5542
5543 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5544 }
5545 else
5546 /* Scalar, returned in a register. */
5547 {
5548 #ifdef FUNCTION_OUTGOING_VALUE
5549 DECL_RTL (DECL_RESULT (subr))
5550 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5551 #else
5552 DECL_RTL (DECL_RESULT (subr))
5553 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5554 #endif
5555
5556 /* Mark this reg as the function's return value. */
5557 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5558 {
5559 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5560 /* Needed because we may need to move this to memory
5561 in case it's a named return value whose address is taken. */
5562 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5563 }
5564 }
5565
5566 /* Initialize rtx for parameters and local variables.
5567 In some cases this requires emitting insns. */
5568
5569 assign_parms (subr, 0);
5570
5571 /* Copy the static chain now if it wasn't a register. The delay is to
5572 avoid conflicts with the parameter passing registers. */
5573
5574 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5575 if (GET_CODE (static_chain_incoming_rtx) != REG)
5576 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5577
5578 /* The following was moved from init_function_start.
5579 The move is supposed to make sdb output more accurate. */
5580 /* Indicate the beginning of the function body,
5581 as opposed to parm setup. */
5582 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5583
5584 /* If doing stupid allocation, mark parms as born here. */
5585
5586 if (GET_CODE (get_last_insn ()) != NOTE)
5587 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5588 parm_birth_insn = get_last_insn ();
5589
5590 if (obey_regdecls)
5591 {
5592 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5593 use_variable (regno_reg_rtx[i]);
5594
5595 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5596 use_variable (current_function_internal_arg_pointer);
5597 }
5598
5599 context_display = 0;
5600 if (current_function_needs_context)
5601 {
5602 /* Fetch static chain values for containing functions. */
5603 tem = decl_function_context (current_function_decl);
5604 /* If not doing stupid register allocation, copy the static chain
5605 pointer into a pseudo. If we have small register classes, copy
5606 the value from memory if static_chain_incoming_rtx is a REG. If
5607 we do stupid register allocation, we use the stack address
5608 generated above. */
5609 if (tem && ! obey_regdecls)
5610 {
5611 /* If the static chain originally came in a register, put it back
5612 there, then move it out in the next insn. The reason for
5613 this peculiar code is to satisfy function integration. */
5614 if (SMALL_REGISTER_CLASSES
5615 && GET_CODE (static_chain_incoming_rtx) == REG)
5616 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5617 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5618 }
5619
5620 while (tem)
5621 {
5622 tree rtlexp = make_node (RTL_EXPR);
5623
5624 RTL_EXPR_RTL (rtlexp) = last_ptr;
5625 context_display = tree_cons (tem, rtlexp, context_display);
5626 tem = decl_function_context (tem);
5627 if (tem == 0)
5628 break;
5629 /* Chain thru stack frames, assuming pointer to next lexical frame
5630 is found at the place we always store it. */
5631 #ifdef FRAME_GROWS_DOWNWARD
5632 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5633 #endif
5634 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5635 memory_address (Pmode, last_ptr)));
5636
5637 /* If we are not optimizing, ensure that we know that this
5638 piece of context is live over the entire function. */
5639 if (! optimize)
5640 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5641 save_expr_regs);
5642 }
5643 }
5644
5645 /* After the display initializations is where the tail-recursion label
5646 should go, if we end up needing one. Ensure we have a NOTE here
5647 since some things (like trampolines) get placed before this. */
5648 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5649
5650 /* Evaluate now the sizes of any types declared among the arguments. */
5651 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5652 {
5653 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5654 EXPAND_MEMORY_USE_BAD);
5655 /* Flush the queue in case this parameter declaration has
5656 side-effects. */
5657 emit_queue ();
5658 }
5659
5660 /* Make sure there is a line number after the function entry setup code. */
5661 force_next_line_note ();
5662 }
5663 \f
5664 /* Generate RTL for the end of the current function.
5665 FILENAME and LINE are the current position in the source file.
5666
5667 It is up to language-specific callers to do cleanups for parameters--
5668 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5669
5670 void
5671 expand_function_end (filename, line, end_bindings)
5672 char *filename;
5673 int line;
5674 int end_bindings;
5675 {
5676 register int i;
5677 tree link;
5678
5679 #ifdef TRAMPOLINE_TEMPLATE
5680 static rtx initial_trampoline;
5681 #endif
5682
5683 #ifdef NON_SAVING_SETJMP
5684 /* Don't put any variables in registers if we call setjmp
5685 on a machine that fails to restore the registers. */
5686 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5687 {
5688 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5689 setjmp_protect (DECL_INITIAL (current_function_decl));
5690
5691 setjmp_protect_args ();
5692 }
5693 #endif
5694
5695 /* Save the argument pointer if a save area was made for it. */
5696 if (arg_pointer_save_area)
5697 {
5698 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5699 emit_insn_before (x, tail_recursion_reentry);
5700 }
5701
5702 /* Initialize any trampolines required by this function. */
5703 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5704 {
5705 tree function = TREE_PURPOSE (link);
5706 rtx context = lookup_static_chain (function);
5707 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5708 #ifdef TRAMPOLINE_TEMPLATE
5709 rtx blktramp;
5710 #endif
5711 rtx seq;
5712
5713 #ifdef TRAMPOLINE_TEMPLATE
5714 /* First make sure this compilation has a template for
5715 initializing trampolines. */
5716 if (initial_trampoline == 0)
5717 {
5718 end_temporary_allocation ();
5719 initial_trampoline
5720 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5721 resume_temporary_allocation ();
5722 }
5723 #endif
5724
5725 /* Generate insns to initialize the trampoline. */
5726 start_sequence ();
5727 tramp = round_trampoline_addr (XEXP (tramp, 0));
5728 #ifdef TRAMPOLINE_TEMPLATE
5729 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5730 emit_block_move (blktramp, initial_trampoline,
5731 GEN_INT (TRAMPOLINE_SIZE),
5732 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5733 #endif
5734 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5735 seq = get_insns ();
5736 end_sequence ();
5737
5738 /* Put those insns at entry to the containing function (this one). */
5739 emit_insns_before (seq, tail_recursion_reentry);
5740 }
5741
5742 /* If we are doing stack checking and this function makes calls,
5743 do a stack probe at the start of the function to ensure we have enough
5744 space for another stack frame. */
5745 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5746 {
5747 rtx insn, seq;
5748
5749 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5750 if (GET_CODE (insn) == CALL_INSN)
5751 {
5752 start_sequence ();
5753 probe_stack_range (STACK_CHECK_PROTECT,
5754 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5755 seq = get_insns ();
5756 end_sequence ();
5757 emit_insns_before (seq, tail_recursion_reentry);
5758 break;
5759 }
5760 }
5761
5762 /* Warn about unused parms if extra warnings were specified. */
5763 if (warn_unused && extra_warnings)
5764 {
5765 tree decl;
5766
5767 for (decl = DECL_ARGUMENTS (current_function_decl);
5768 decl; decl = TREE_CHAIN (decl))
5769 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5770 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5771 warning_with_decl (decl, "unused parameter `%s'");
5772 }
5773
5774 /* Delete handlers for nonlocal gotos if nothing uses them. */
5775 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5776 delete_handlers ();
5777
5778 /* End any sequences that failed to be closed due to syntax errors. */
5779 while (in_sequence_p ())
5780 end_sequence ();
5781
5782 /* Outside function body, can't compute type's actual size
5783 until next function's body starts. */
5784 immediate_size_expand--;
5785
5786 /* If doing stupid register allocation,
5787 mark register parms as dying here. */
5788
5789 if (obey_regdecls)
5790 {
5791 rtx tem;
5792 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5793 use_variable (regno_reg_rtx[i]);
5794
5795 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5796
5797 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5798 {
5799 use_variable (XEXP (tem, 0));
5800 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5801 }
5802
5803 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5804 use_variable (current_function_internal_arg_pointer);
5805 }
5806
5807 clear_pending_stack_adjust ();
5808 do_pending_stack_adjust ();
5809
5810 /* Mark the end of the function body.
5811 If control reaches this insn, the function can drop through
5812 without returning a value. */
5813 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5814
5815 /* Must mark the last line number note in the function, so that the test
5816 coverage code can avoid counting the last line twice. This just tells
5817 the code to ignore the immediately following line note, since there
5818 already exists a copy of this note somewhere above. This line number
5819 note is still needed for debugging though, so we can't delete it. */
5820 if (flag_test_coverage)
5821 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
5822
5823 /* Output a linenumber for the end of the function.
5824 SDB depends on this. */
5825 emit_line_note_force (filename, line);
5826
5827 /* Output the label for the actual return from the function,
5828 if one is expected. This happens either because a function epilogue
5829 is used instead of a return instruction, or because a return was done
5830 with a goto in order to run local cleanups, or because of pcc-style
5831 structure returning. */
5832
5833 if (return_label)
5834 emit_label (return_label);
5835
5836 /* C++ uses this. */
5837 if (end_bindings)
5838 expand_end_bindings (0, 0, 0);
5839
5840 /* Now handle any leftover exception regions that may have been
5841 created for the parameters. */
5842 {
5843 rtx last = get_last_insn ();
5844 rtx label;
5845
5846 expand_leftover_cleanups ();
5847
5848 /* If the above emitted any code, make sure we jump around it. */
5849 if (last != get_last_insn ())
5850 {
5851 label = gen_label_rtx ();
5852 last = emit_jump_insn_after (gen_jump (label), last);
5853 last = emit_barrier_after (last);
5854 emit_label (label);
5855 }
5856 }
5857
5858 /* If we had calls to alloca, and this machine needs
5859 an accurate stack pointer to exit the function,
5860 insert some code to save and restore the stack pointer. */
5861 #ifdef EXIT_IGNORE_STACK
5862 if (! EXIT_IGNORE_STACK)
5863 #endif
5864 if (current_function_calls_alloca)
5865 {
5866 rtx tem = 0;
5867
5868 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5869 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5870 }
5871
5872 /* If scalar return value was computed in a pseudo-reg,
5873 copy that to the hard return register. */
5874 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5875 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5876 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5877 >= FIRST_PSEUDO_REGISTER))
5878 {
5879 rtx real_decl_result;
5880
5881 #ifdef FUNCTION_OUTGOING_VALUE
5882 real_decl_result
5883 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5884 current_function_decl);
5885 #else
5886 real_decl_result
5887 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5888 current_function_decl);
5889 #endif
5890 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5891 /* If this is a BLKmode structure being returned in registers, then use
5892 the mode computed in expand_return. */
5893 if (GET_MODE (real_decl_result) == BLKmode)
5894 PUT_MODE (real_decl_result,
5895 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
5896 emit_move_insn (real_decl_result,
5897 DECL_RTL (DECL_RESULT (current_function_decl)));
5898 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
5899
5900 /* The delay slot scheduler assumes that current_function_return_rtx
5901 holds the hard register containing the return value, not a temporary
5902 pseudo. */
5903 current_function_return_rtx = real_decl_result;
5904 }
5905
5906 /* If returning a structure, arrange to return the address of the value
5907 in a place where debuggers expect to find it.
5908
5909 If returning a structure PCC style,
5910 the caller also depends on this value.
5911 And current_function_returns_pcc_struct is not necessarily set. */
5912 if (current_function_returns_struct
5913 || current_function_returns_pcc_struct)
5914 {
5915 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5916 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5917 #ifdef FUNCTION_OUTGOING_VALUE
5918 rtx outgoing
5919 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5920 current_function_decl);
5921 #else
5922 rtx outgoing
5923 = FUNCTION_VALUE (build_pointer_type (type),
5924 current_function_decl);
5925 #endif
5926
5927 /* Mark this as a function return value so integrate will delete the
5928 assignment and USE below when inlining this function. */
5929 REG_FUNCTION_VALUE_P (outgoing) = 1;
5930
5931 emit_move_insn (outgoing, value_address);
5932 use_variable (outgoing);
5933 }
5934
5935 /* Output a return insn if we are using one.
5936 Otherwise, let the rtl chain end here, to drop through
5937 into the epilogue. */
5938
5939 #ifdef HAVE_return
5940 if (HAVE_return)
5941 {
5942 emit_jump_insn (gen_return ());
5943 emit_barrier ();
5944 }
5945 #endif
5946
5947 /* Fix up any gotos that jumped out to the outermost
5948 binding level of the function.
5949 Must follow emitting RETURN_LABEL. */
5950
5951 /* If you have any cleanups to do at this point,
5952 and they need to create temporary variables,
5953 then you will lose. */
5954 expand_fixups (get_insns ());
5955 }
5956 \f
5957 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5958
5959 static int *prologue;
5960 static int *epilogue;
5961
5962 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5963 or a single insn). */
5964
5965 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5966 static int *
5967 record_insns (insns)
5968 rtx insns;
5969 {
5970 int *vec;
5971
5972 if (GET_CODE (insns) == SEQUENCE)
5973 {
5974 int len = XVECLEN (insns, 0);
5975 vec = (int *) oballoc ((len + 1) * sizeof (int));
5976 vec[len] = 0;
5977 while (--len >= 0)
5978 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5979 }
5980 else
5981 {
5982 vec = (int *) oballoc (2 * sizeof (int));
5983 vec[0] = INSN_UID (insns);
5984 vec[1] = 0;
5985 }
5986 return vec;
5987 }
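/* The vectors built above are zero-terminated: recording a SEQUENCE whose
   insns have UIDs 12, 15 and 17 yields {12, 15, 17, 0}, which is how
   contains, below, knows where a vector ends.  */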
5988
5989 /* Determine how many INSN_UIDs in VEC are part of INSN. */
5990
5991 static int
5992 contains (insn, vec)
5993 rtx insn;
5994 int *vec;
5995 {
5996 register int i, j;
5997
5998 if (GET_CODE (insn) == INSN
5999 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6000 {
6001 int count = 0;
6002 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6003 for (j = 0; vec[j]; j++)
6004 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6005 count++;
6006 return count;
6007 }
6008 else
6009 {
6010 for (j = 0; vec[j]; j++)
6011 if (INSN_UID (insn) == vec[j])
6012 return 1;
6013 }
6014 return 0;
6015 }
6016 #endif /* HAVE_prologue || HAVE_epilogue */
6017
6018 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6019 this into place with notes indicating where the prologue ends and where
6020 the epilogue begins. Update the basic block information when possible. */
6021
6022 void
6023 thread_prologue_and_epilogue_insns (f)
6024 rtx f;
6025 {
6026 #ifdef HAVE_prologue
6027 if (HAVE_prologue)
6028 {
6029 rtx head, seq;
6030
6031 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6032 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6033 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6034 seq = gen_prologue ();
6035 head = emit_insn_after (seq, f);
6036
6037 /* Include the new prologue insns in the first block. Ignore them
6038 if they form a basic block unto themselves. */
6039 if (basic_block_head && n_basic_blocks
6040 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
6041 basic_block_head[0] = NEXT_INSN (f);
6042
6043 /* Retain a map of the prologue insns. */
6044 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6045 }
6046 else
6047 #endif
6048 prologue = 0;
6049
6050 #ifdef HAVE_epilogue
6051 if (HAVE_epilogue)
6052 {
6053 rtx insn = get_last_insn ();
6054 rtx prev = prev_nonnote_insn (insn);
6055
6056 /* If we end with a BARRIER, we don't need an epilogue. */
6057 if (! (prev && GET_CODE (prev) == BARRIER))
6058 {
6059 rtx tail, seq, tem;
6060 rtx first_use = 0;
6061 rtx last_use = 0;
6062
6063 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6064 epilogue insns, the USE insns at the end of a function,
6065 the jump insn that returns, and then a BARRIER. */
6066
6067 /* Move the USE insns at the end of a function onto a list. */
6068 while (prev
6069 && GET_CODE (prev) == INSN
6070 && GET_CODE (PATTERN (prev)) == USE)
6071 {
6072 tem = prev;
6073 prev = prev_nonnote_insn (prev);
6074
6075 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6076 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6077 if (first_use)
6078 {
6079 NEXT_INSN (tem) = first_use;
6080 PREV_INSN (first_use) = tem;
6081 }
6082 first_use = tem;
6083 if (!last_use)
6084 last_use = tem;
6085 }
6086
6087 emit_barrier_after (insn);
6088
6089 seq = gen_epilogue ();
6090 tail = emit_jump_insn_after (seq, insn);
6091
6092 /* Insert the USE insns immediately before the return insn, which
6093 must be the last instruction before the final barrier. */
6094 if (first_use)
6095 {
6096 tem = prev_nonnote_insn (get_last_insn ());
6097 NEXT_INSN (PREV_INSN (tem)) = first_use;
6098 PREV_INSN (first_use) = PREV_INSN (tem);
6099 PREV_INSN (tem) = last_use;
6100 NEXT_INSN (last_use) = tem;
6101 }
6102
6103 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6104
6105 /* Include the new epilogue insns in the last block. Ignore
6106 them if they form a basic block unto themselves. */
6107 if (basic_block_end && n_basic_blocks
6108 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
6109 basic_block_end[n_basic_blocks - 1] = tail;
6110
6111 /* Retain a map of the epilogue insns. */
6112 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6113 return;
6114 }
6115 }
6116 #endif
6117 epilogue = 0;
6118 }
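/* After threading, the insn chain is laid out roughly as follows
   (illustrative; the USE insns and the return jump appear only when the
   target provides an epilogue):

     NOTE_INSN_DELETED
     prologue insns
     NOTE_INSN_PROLOGUE_END
     ... function body ...
     NOTE_INSN_EPILOGUE_BEG
     epilogue insns
     USE insns
     return jump
     BARRIER  */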
6119
6120 /* Reposition the prologue-end and epilogue-begin notes after instruction
6121 scheduling and delayed branch scheduling. */
6122
6123 void
6124 reposition_prologue_and_epilogue_notes (f)
6125 rtx f;
6126 {
6127 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6128 /* Reposition the prologue and epilogue notes. */
6129 if (n_basic_blocks)
6130 {
6131 rtx next, prev;
6132 int len;
6133
6134 if (prologue)
6135 {
6136 register rtx insn, note = 0;
6137
6138 /* Scan from the beginning until we reach the last prologue insn.
6139 We apparently can't depend on basic_block_{head,end} after
6140 reorg has run. */
6141 for (len = 0; prologue[len]; len++)
6142 ;
6143 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6144 {
6145 if (GET_CODE (insn) == NOTE)
6146 {
6147 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6148 note = insn;
6149 }
6150 else if ((len -= contains (insn, prologue)) == 0)
6151 {
6152 /* Find the prologue-end note if we haven't already, and
6153 move it to just after the last prologue insn. */
6154 if (note == 0)
6155 {
6156 for (note = insn; (note = NEXT_INSN (note));)
6157 if (GET_CODE (note) == NOTE
6158 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6159 break;
6160 }
6161 next = NEXT_INSN (note);
6162 prev = PREV_INSN (note);
6163 if (prev)
6164 NEXT_INSN (prev) = next;
6165 if (next)
6166 PREV_INSN (next) = prev;
6167 add_insn_after (note, insn);
6168 }
6169 }
6170 }
6171
6172 if (epilogue)
6173 {
6174 register rtx insn, note = 0;
6175
6176 /* Scan from the end until we reach the first epilogue insn.
6177 We apparently can't depend on basic_block_{head,end} after
6178 reorg has run. */
6179 for (len = 0; epilogue[len]; len++)
6180 ;
6181 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6182 {
6183 if (GET_CODE (insn) == NOTE)
6184 {
6185 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6186 note = insn;
6187 }
6188 else if ((len -= contains (insn, epilogue)) == 0)
6189 {
6190 /* Find the epilogue-begin note if we haven't already, and
6191 move it to just before the first epilogue insn. */
6192 if (note == 0)
6193 {
6194 for (note = insn; (note = PREV_INSN (note));)
6195 if (GET_CODE (note) == NOTE
6196 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6197 break;
6198 }
6199 next = NEXT_INSN (note);
6200 prev = PREV_INSN (note);
6201 if (prev)
6202 NEXT_INSN (prev) = next;
6203 if (next)
6204 PREV_INSN (next) = prev;
6205 add_insn_after (note, PREV_INSN (insn));
6206 }
6207 }
6208 }
6209 }
6210 #endif /* HAVE_prologue or HAVE_epilogue */
6211 }