function.c: Define current_function_cannot_inline.
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

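/* Illustrative summary (an editorial sketch, not part of the original
   source): a front end drives this file roughly in the order described
   above:

     expand_function_start (...);   -- before the function body is parsed
     ... expand each statement; assign_stack_local allocates stack slots,
         and put_var_into_stack demotes a pseudo that turns out to need
         a stack home ...
     expand_function_end (...);     -- after parsing the body

   The exact argument lists are front-end specific and elided here.  */
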
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc; in
   these cases, use the macros NAME__MAIN to give a quoted symbol and
   SYMBOL__MAIN to give the same symbol without quotes for an alternative
   entry point.  You must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the smallest multiple of the alignment that
   is not less than the value.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
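
/* Worked example (editorial, not in the original source), assuming a
   power-of-two ALIGN: FLOOR_ROUND (37, 8) == 32 and CEIL_ROUND (37, 8)
   == 40, while an already-aligned value is unchanged, e.g.
   CEIL_ROUND (32, 8) == 32.  The bit masking also handles negative
   values in two's complement, e.g. FLOOR_ROUND (-3, 8) == -8, which is
   why division is avoided.  */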

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation.  If they are different register numbers, this is
   always true.  It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation.  See fix_lexical_addr for details.  */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp.  */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions.  */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function.  */

int current_function_has_nonlocal_goto;

/* Nonzero if this function has a computed goto.

   It is computed during find_basic_blocks or during stupid life
   analysis.  */

int current_function_has_computed_jump;

/* Nonzero if function being compiled contains nested functions.  */

int current_function_contains_functions;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can.  */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here.  */

rtx current_function_epilogue_delay_list;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue.  */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h.  */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h.  */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args.  */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled.  */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result.  If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result.  */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool.  */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx.  */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied.  */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline.  */
char *current_function_cannot_inline;

/* The FUNCTION_DECL for an inline function currently being expanded.  */
tree inline_function_decl;

/* Number of function calls seen so far in current function.  */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function.  */

tree nonlocal_labels;

/* RTX for stack slot that holds the current handler for nonlocal gotos.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_handler_slot;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels.  */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them.  */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines.  */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE.  */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function.  */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack.  The highest
   element in this vector is one less than MAX_PARM_REG, above.  */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.  */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context.  */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function.  This helps
   integrate.c.  */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level.  When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created.  However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved.  If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in.  If we cannot determine which temporary may contain the
   result, all temporaries are preserved.  A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined.  They are marked as "kept" so that
   free_temp_slots will not free them.  */

struct temp_slot
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */
  char in_use;
  /* Non-zero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use.  */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries.  */

int temp_slot_level;

/* Current nesting level for variables in a block.  */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement.  */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations.  */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
                                            int, struct function *));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int));
\f
/* Pointer to chain of `struct function' for containing functions.  */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it.  */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables.  */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slot = nonlocal_goto_handler_slot;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code.  */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slot = p->nonlocal_goto_handler_slot;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function.  */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation.  */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
\f
/* Allocate fixed slots in the stack frame of the current function.  */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here.  */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like.  So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous.  */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
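
/* Usage sketch (editorial, not in the original source): a caller that
   wants a naturally aligned slot for a known mode would write, e.g.,

     rtx slot = assign_stack_local (DImode, GET_MODE_SIZE (DImode), 0);

   Before virtual register instantiation the returned MEM's address is
   an offset from virtual_stack_vars_rtx; afterwards it is relative to
   frame_pointer_rtx, as the code above shows.  */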

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in.  */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning.  */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.  */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required.  We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots.  Automatic variables for a block are allocated
   with this flag.  KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR).  */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require.  */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want.  We
     find the smallest such.  */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted.  Do this only
         for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed.  */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary.  Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows.  We include the extra space if and only if it
         is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before.  */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  return p->slot;
}
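
/* Usage sketch (editorial, not in the original source): a transient
   temporary for one statement is typically

     rtx t = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);

   and becomes reusable at the next free_temp_slots call, whereas a slot
   requested with KEEP == 1 survives until its nesting level is popped.  */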
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in registers
   to wider modes.  */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries.  However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_IN_STRUCT_P (tmp) = AGGREGATE_TYPE_P (type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
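
/* Usage sketch (editorial, not in the original source): assign_temp is
   the type-level entry point, e.g.

     rtx t = assign_temp (type, 1, 1, 0);

   forces an addressable stack slot for TYPE, while passing
   memory_required == 0 for a scalar type may instead return a
   (possibly promoted) pseudo register.  */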
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space.  This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case.  */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P.  */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q.  */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it.  */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it.  */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
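
/* Worked example (editorial, not in the original source): two free
   BLKmode slots with (base_offset, full_size) of (16, 16) and (32, 16)
   satisfy p->base_offset + p->full_size == q->base_offset, so the loop
   above merges them into one free slot covering (16, 32).  */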
\f
/* Find the temp slot corresponding to the object at address X.  */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD.  */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return.  Else add NEW as an alias.  */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken.  */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to one level higher than the current level.  If X matched
   one of our slots, just mark that one.  Otherwise, we can't easily
   predict which it is, so upgrade all of them.  Kept slots need not
   be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}

/* X is the result of an RTL_EXPR.  If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR.  */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level.  */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far.  This is normally called at the end
   of generating code for a statement.  Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile.  */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node.  */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited.  */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries.  */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level.  All slots in use in the current level
   are freed.  */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots.  */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet.  */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
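
/* Usage sketch (editorial, not in the original source): statement
   expansion brackets temporary lifetimes like so,

     push_temp_slots ();
     ... expand one statement, allocating temporaries ...
     free_temp_slots ();
     pop_temp_slots ();

   so slots created at the inner level are recycled on exit unless
   preserved or marked "kept".  */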
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen.  */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack if the var is nonlocal.  */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address.  */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl)
                            || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.  */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));

      /* The two parts are in memory order already.
         Use the lower part's address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (flag_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
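
/* Effect sketch (editorial, not in the original source): in the REG
   case the decl's rtl is rewritten in place, e.g. from (reg:SI 42) to
   a (mem:SI ...) whose address is an offset from virtual_stack_vars_rtx,
   and insns already emitted against the pseudo are then corrected by
   fixup_var_refs via put_reg_into_stack below.  */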

/* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   TYPE is the user-level data type of the variable.
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn.  */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize.  If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons.  For instance, it is set for
     __builtin_va_alist.  */
  MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type) | MEM_IN_STRUCT_P (new);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again.  */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function.  */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on saveable obstack.  */
      temp
        = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
      temp->modified = reg;
      temp->promoted_mode = promoted_mode;
      temp->unsignedp = TREE_UNSIGNED (type);
      temp->next = function->fixup_var_refs_queue;
      function->fixup_var_refs_queue = temp;
      pop_obstacks ();
    }
  else if (used_p)
    /* Variable is local; fix it up now.  */
    fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
}
\f
static void
fixup_var_refs (var, promoted_mode, unsignedp)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = sequence_stack;
  tree rtl_exps = rtl_expr_chain;

  /* Must scan all insns for stack-refs that exceed the limit.  */
  fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_sequence (stack->first);
      fixup_var_refs_insns (var, promoted_mode, unsignedp,
                            stack->first, stack->next != 0);
      /* Update remembered end of sequence
         in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      if (seq != const0_rtx && seq != 0)
        {
          push_to_sequence (seq);
          fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
          end_sequence ();
        }
    }
}
\f
/* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
   some part of an insn.  Return a struct fixup_replacement whose OLD
   value is equal to X.  Allocate a new structure if no such entry exists.  */

static struct fixup_replacement *
find_fixup_replacement (replacements, x)
     struct fixup_replacement **replacements;
     rtx x;
{
  struct fixup_replacement *p;

  /* See if we have already replaced this.  */
  for (p = *replacements; p && p->old != x; p = p->next)
    ;

  if (p == 0)
    {
      p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
      p->old = x;
      p->new = 0;
      p->next = *replacements;
      *replacements = p;
    }

  return p;
}
1634
1635 /* Scan the insn-chain starting with INSN for refs to VAR
1636 and fix them up. TOPLEVEL is nonzero if this chain is the
1637 main chain of insns for the current function. */
1638
1639 static void
1640 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1641 rtx var;
1642 enum machine_mode promoted_mode;
1643 int unsignedp;
1644 rtx insn;
1645 int toplevel;
1646 {
1647 rtx call_dest = 0;
1648
1649 while (insn)
1650 {
1651 rtx next = NEXT_INSN (insn);
1652 rtx set, prev, prev_set;
1653 rtx note;
1654
1655 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1656 {
1657 /* If this is a CLOBBER of VAR, delete it.
1658
1659 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1660 and REG_RETVAL notes too. */
1661 if (GET_CODE (PATTERN (insn)) == CLOBBER
1662 && XEXP (PATTERN (insn), 0) == var)
1663 {
1664 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1665 /* The REG_LIBCALL note will go away since we are going to
1666 turn INSN into a NOTE, so just delete the
1667 corresponding REG_RETVAL note. */
1668 remove_note (XEXP (note, 0),
1669 find_reg_note (XEXP (note, 0), REG_RETVAL,
1670 NULL_RTX));
1671
1672 /* In unoptimized compilation, we shouldn't call delete_insn
1673 except in jump.c doing warnings. */
1674 PUT_CODE (insn, NOTE);
1675 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1676 NOTE_SOURCE_FILE (insn) = 0;
1677 }
1678
1679 /* The insn to load VAR from a home in the arglist
1680 is now a no-op. When we see it, just delete it.
1681 Similarly if this is storing VAR from a register from which
1682 it was loaded in the previous insn. This will occur
1683 when an ADDRESSOF was made for an arglist slot. */
1684 else if (toplevel
1685 && (set = single_set (insn)) != 0
1686 && SET_DEST (set) == var
1687 /* If this represents the result of an insn group,
1688 don't delete the insn. */
1689 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1690 && (rtx_equal_p (SET_SRC (set), var)
1691 || (GET_CODE (SET_SRC (set)) == REG
1692 && (prev = prev_nonnote_insn (insn)) != 0
1693 && (prev_set = single_set (prev)) != 0
1694 && SET_DEST (prev_set) == SET_SRC (set)
1695 && rtx_equal_p (SET_SRC (prev_set), var))))
1696 {
 1697               /* In unoptimized compilation, we shouldn't call delete_insn
 1698                  except from jump.c when doing warnings.  */
1699 PUT_CODE (insn, NOTE);
1700 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1701 NOTE_SOURCE_FILE (insn) = 0;
1702 if (insn == last_parm_insn)
1703 last_parm_insn = PREV_INSN (next);
1704 }
1705 else
1706 {
1707 struct fixup_replacement *replacements = 0;
1708 rtx next_insn = NEXT_INSN (insn);
1709
1710 if (SMALL_REGISTER_CLASSES)
1711 {
1712 /* If the insn that copies the results of a CALL_INSN
1713 into a pseudo now references VAR, we have to use an
1714 intermediate pseudo since we want the life of the
1715 return value register to be only a single insn.
1716
1717 If we don't use an intermediate pseudo, such things as
 1718                      address computations (to make the address of VAR valid
 1719                      if it is not) can be placed between the CALL_INSN and INSN.
1720
1721 To make sure this doesn't happen, we record the destination
1722 of the CALL_INSN and see if the next insn uses both that
1723 and VAR. */
1724
1725 if (call_dest != 0 && GET_CODE (insn) == INSN
1726 && reg_mentioned_p (var, PATTERN (insn))
1727 && reg_mentioned_p (call_dest, PATTERN (insn)))
1728 {
1729 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1730
1731 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1732
1733 PATTERN (insn) = replace_rtx (PATTERN (insn),
1734 call_dest, temp);
1735 }
1736
1737 if (GET_CODE (insn) == CALL_INSN
1738 && GET_CODE (PATTERN (insn)) == SET)
1739 call_dest = SET_DEST (PATTERN (insn));
1740 else if (GET_CODE (insn) == CALL_INSN
1741 && GET_CODE (PATTERN (insn)) == PARALLEL
1742 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1743 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1744 else
1745 call_dest = 0;
1746 }
1747
1748 /* See if we have to do anything to INSN now that VAR is in
1749 memory. If it needs to be loaded into a pseudo, use a single
1750 pseudo for the entire insn in case there is a MATCH_DUP
1751 between two operands. We pass a pointer to the head of
1752 a list of struct fixup_replacements. If fixup_var_refs_1
1753 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1754 it will record them in this list.
1755
1756 If it allocated a pseudo for any replacement, we copy into
1757 it here. */
1758
1759 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1760 &replacements);
1761
1762 /* If this is last_parm_insn, and any instructions were output
1763 after it to fix it up, then we must set last_parm_insn to
1764 the last such instruction emitted. */
1765 if (insn == last_parm_insn)
1766 last_parm_insn = PREV_INSN (next_insn);
1767
1768 while (replacements)
1769 {
1770 if (GET_CODE (replacements->new) == REG)
1771 {
1772 rtx insert_before;
1773 rtx seq;
1774
1775 /* OLD might be a (subreg (mem)). */
1776 if (GET_CODE (replacements->old) == SUBREG)
1777 replacements->old
1778 = fixup_memory_subreg (replacements->old, insn, 0);
1779 else
1780 replacements->old
1781 = fixup_stack_1 (replacements->old, insn);
1782
1783 insert_before = insn;
1784
1785 /* If we are changing the mode, do a conversion.
1786 This might be wasteful, but combine.c will
1787 eliminate much of the waste. */
1788
1789 if (GET_MODE (replacements->new)
1790 != GET_MODE (replacements->old))
1791 {
1792 start_sequence ();
1793 convert_move (replacements->new,
1794 replacements->old, unsignedp);
1795 seq = gen_sequence ();
1796 end_sequence ();
1797 }
1798 else
1799 seq = gen_move_insn (replacements->new,
1800 replacements->old);
1801
1802 emit_insn_before (seq, insert_before);
1803 }
1804
1805 replacements = replacements->next;
1806 }
1807 }
1808
1809 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1810 But don't touch other insns referred to by reg-notes;
1811 we will get them elsewhere. */
1812 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1813 if (GET_CODE (note) != INSN_LIST)
1814 XEXP (note, 0)
1815 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1816 }
1817 insn = next;
1818 }
1819 }
1820 \f
1821 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1822 See if the rtx expression at *LOC in INSN needs to be changed.
1823
1824 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1825 contain a list of original rtx's and replacements. If we find that we need
1826 to modify this insn by replacing a memory reference with a pseudo or by
1827 making a new MEM to implement a SUBREG, we consult that list to see if
1828 we have already chosen a replacement. If none has already been allocated,
1829 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1830 or the SUBREG, as appropriate, to the pseudo. */
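
/* For example (a sketch with invented register numbers): if VAR, once
   (reg:SI 45), is now (mem:SI (plus:SI (reg fp) (const_int -12))) and an
   insn mentions VAR twice, perhaps through a MATCH_DUP, both occurrences
   are replaced by the same fresh pseudo, say (reg:SI 73), and
   fixup_var_refs_insns emits a single load
	(set (reg:SI 73) (mem:SI (plus:SI (reg fp) (const_int -12))))
   just before the insn.  */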
1831
1832 static void
1833 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1834 register rtx var;
1835 enum machine_mode promoted_mode;
1836 register rtx *loc;
1837 rtx insn;
1838 struct fixup_replacement **replacements;
1839 {
1840 register int i;
1841 register rtx x = *loc;
1842 RTX_CODE code = GET_CODE (x);
1843 register char *fmt;
1844 register rtx tem, tem1;
1845 struct fixup_replacement *replacement;
1846
1847 switch (code)
1848 {
1849 case ADDRESSOF:
1850 if (XEXP (x, 0) == var)
1851 {
1852 /* Prevent sharing of rtl that might lose. */
1853 rtx sub = copy_rtx (XEXP (var, 0));
1854
1855 start_sequence ();
1856
1857 if (! validate_change (insn, loc, sub, 0))
1858 {
1859 rtx y = force_operand (sub, NULL_RTX);
1860
1861 if (! validate_change (insn, loc, y, 0))
1862 *loc = copy_to_reg (y);
1863 }
1864
1865 emit_insn_before (gen_sequence (), insn);
1866 end_sequence ();
1867 }
1868 return;
1869
1870 case MEM:
1871 if (var == x)
1872 {
1873 /* If we already have a replacement, use it. Otherwise,
1874 try to fix up this address in case it is invalid. */
1875
1876 replacement = find_fixup_replacement (replacements, var);
1877 if (replacement->new)
1878 {
1879 *loc = replacement->new;
1880 return;
1881 }
1882
1883 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1884
1885 /* Unless we are forcing memory to register or we changed the mode,
1886 we can leave things the way they are if the insn is valid. */
1887
1888 INSN_CODE (insn) = -1;
1889 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1890 && recog_memoized (insn) >= 0)
1891 return;
1892
1893 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1894 return;
1895 }
1896
1897 /* If X contains VAR, we need to unshare it here so that we update
1898 each occurrence separately. But all identical MEMs in one insn
1899 must be replaced with the same rtx because of the possibility of
1900 MATCH_DUPs. */
1901
1902 if (reg_mentioned_p (var, x))
1903 {
1904 replacement = find_fixup_replacement (replacements, x);
1905 if (replacement->new == 0)
1906 replacement->new = copy_most_rtx (x, var);
1907
1908 *loc = x = replacement->new;
1909 }
1910 break;
1911
1912 case REG:
1913 case CC0:
1914 case PC:
1915 case CONST_INT:
1916 case CONST:
1917 case SYMBOL_REF:
1918 case LABEL_REF:
1919 case CONST_DOUBLE:
1920 return;
1921
1922 case SIGN_EXTRACT:
1923 case ZERO_EXTRACT:
1924 /* Note that in some cases those types of expressions are altered
1925 by optimize_bit_field, and do not survive to get here. */
1926 if (XEXP (x, 0) == var
1927 || (GET_CODE (XEXP (x, 0)) == SUBREG
1928 && SUBREG_REG (XEXP (x, 0)) == var))
1929 {
1930 /* Get TEM as a valid MEM in the mode presently in the insn.
1931
1932 We don't worry about the possibility of MATCH_DUP here; it
1933 is highly unlikely and would be tricky to handle. */
1934
1935 tem = XEXP (x, 0);
1936 if (GET_CODE (tem) == SUBREG)
1937 {
1938 if (GET_MODE_BITSIZE (GET_MODE (tem))
1939 > GET_MODE_BITSIZE (GET_MODE (var)))
1940 {
1941 replacement = find_fixup_replacement (replacements, var);
1942 if (replacement->new == 0)
1943 replacement->new = gen_reg_rtx (GET_MODE (var));
1944 SUBREG_REG (tem) = replacement->new;
1945 }
1946 else
1947 tem = fixup_memory_subreg (tem, insn, 0);
1948 }
1949 else
1950 tem = fixup_stack_1 (tem, insn);
1951
1952 /* Unless we want to load from memory, get TEM into the proper mode
1953 for an extract from memory. This can only be done if the
1954 extract is at a constant position and length. */
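          /* For instance (a sketch assuming a little-endian machine): if
             the extv pattern wants a QImode operand but TEM is
             (mem:SI addr) with the field at bit 8, the extraction can be
             redone at bit 0 of (mem:QI (plus addr 1)); the endianness
             adjustment below handles the other layouts.  */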
1955
1956 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1957 && GET_CODE (XEXP (x, 2)) == CONST_INT
1958 && ! mode_dependent_address_p (XEXP (tem, 0))
1959 && ! MEM_VOLATILE_P (tem))
1960 {
1961 enum machine_mode wanted_mode = VOIDmode;
1962 enum machine_mode is_mode = GET_MODE (tem);
1963 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1964
1965 #ifdef HAVE_extzv
1966 if (GET_CODE (x) == ZERO_EXTRACT)
1967 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
1968 #endif
1969 #ifdef HAVE_extv
1970 if (GET_CODE (x) == SIGN_EXTRACT)
1971 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
1972 #endif
1973 /* If we have a narrower mode, we can do something. */
1974 if (wanted_mode != VOIDmode
1975 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
1976 {
1977 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
1978 rtx old_pos = XEXP (x, 2);
1979 rtx newmem;
1980
1981 /* If the bytes and bits are counted differently, we
1982 must adjust the offset. */
1983 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
1984 offset = (GET_MODE_SIZE (is_mode)
1985 - GET_MODE_SIZE (wanted_mode) - offset);
1986
1987 pos %= GET_MODE_BITSIZE (wanted_mode);
1988
1989 newmem = gen_rtx_MEM (wanted_mode,
1990 plus_constant (XEXP (tem, 0), offset));
1991 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
1992 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
1993 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
1994
1995 /* Make the change and see if the insn remains valid. */
1996 INSN_CODE (insn) = -1;
1997 XEXP (x, 0) = newmem;
1998 XEXP (x, 2) = GEN_INT (pos);
1999
2000 if (recog_memoized (insn) >= 0)
2001 return;
2002
2003 /* Otherwise, restore old position. XEXP (x, 0) will be
2004 restored later. */
2005 XEXP (x, 2) = old_pos;
2006 }
2007 }
2008
2009 /* If we get here, the bitfield extract insn can't accept a memory
2010 reference. Copy the input into a register. */
2011
2012 tem1 = gen_reg_rtx (GET_MODE (tem));
2013 emit_insn_before (gen_move_insn (tem1, tem), insn);
2014 XEXP (x, 0) = tem1;
2015 return;
2016 }
2017 break;
2018
2019 case SUBREG:
2020 if (SUBREG_REG (x) == var)
2021 {
2022 /* If this is a special SUBREG made because VAR was promoted
2023 from a wider mode, replace it with VAR and call ourself
2024 recursively, this time saying that the object previously
2025 had its current mode (by virtue of the SUBREG). */
2026
2027 if (SUBREG_PROMOTED_VAR_P (x))
2028 {
2029 *loc = var;
2030 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2031 return;
2032 }
2033
2034 /* If this SUBREG makes VAR wider, it has become a paradoxical
2035 SUBREG with VAR in memory, but these aren't allowed at this
2036 stage of the compilation. So load VAR into a pseudo and take
2037 a SUBREG of that pseudo. */
2038 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2039 {
2040 replacement = find_fixup_replacement (replacements, var);
2041 if (replacement->new == 0)
2042 replacement->new = gen_reg_rtx (GET_MODE (var));
2043 SUBREG_REG (x) = replacement->new;
2044 return;
2045 }
2046
2047 /* See if we have already found a replacement for this SUBREG.
2048 If so, use it. Otherwise, make a MEM and see if the insn
2049 is recognized. If not, or if we should force MEM into a register,
2050 make a pseudo for this SUBREG. */
2051 replacement = find_fixup_replacement (replacements, x);
2052 if (replacement->new)
2053 {
2054 *loc = replacement->new;
2055 return;
2056 }
2057
2058 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2059
2060 INSN_CODE (insn) = -1;
2061 if (! flag_force_mem && recog_memoized (insn) >= 0)
2062 return;
2063
2064 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2065 return;
2066 }
2067 break;
2068
2069 case SET:
2070 /* First do special simplification of bit-field references. */
2071 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2072 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
 2073         optimize_bit_field (x, insn, NULL_PTR);
2074 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2075 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2076 optimize_bit_field (x, insn, NULL_PTR);
2077
2078 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2079 into a register and then store it back out. */
2080 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2081 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2082 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2083 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2084 > GET_MODE_SIZE (GET_MODE (var))))
2085 {
2086 replacement = find_fixup_replacement (replacements, var);
2087 if (replacement->new == 0)
2088 replacement->new = gen_reg_rtx (GET_MODE (var));
2089
2090 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2091 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2092 }
2093
2094 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2095 insn into a pseudo and store the low part of the pseudo into VAR. */
2096 if (GET_CODE (SET_DEST (x)) == SUBREG
2097 && SUBREG_REG (SET_DEST (x)) == var
2098 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2099 > GET_MODE_SIZE (GET_MODE (var))))
2100 {
2101 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2102 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2103 tem)),
2104 insn);
2105 break;
2106 }
2107
2108 {
2109 rtx dest = SET_DEST (x);
2110 rtx src = SET_SRC (x);
2111 #ifdef HAVE_insv
2112 rtx outerdest = dest;
2113 #endif
2114
2115 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2116 || GET_CODE (dest) == SIGN_EXTRACT
2117 || GET_CODE (dest) == ZERO_EXTRACT)
2118 dest = XEXP (dest, 0);
2119
2120 if (GET_CODE (src) == SUBREG)
2121 src = XEXP (src, 0);
2122
2123 /* If VAR does not appear at the top level of the SET
2124 just scan the lower levels of the tree. */
2125
2126 if (src != var && dest != var)
2127 break;
2128
2129 /* We will need to rerecognize this insn. */
2130 INSN_CODE (insn) = -1;
2131
2132 #ifdef HAVE_insv
2133 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2134 {
2135 /* Since this case will return, ensure we fixup all the
2136 operands here. */
2137 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2138 insn, replacements);
2139 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2140 insn, replacements);
2141 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2142 insn, replacements);
2143
2144 tem = XEXP (outerdest, 0);
2145
2146 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2147 that may appear inside a ZERO_EXTRACT.
2148 This was legitimate when the MEM was a REG. */
2149 if (GET_CODE (tem) == SUBREG
2150 && SUBREG_REG (tem) == var)
2151 tem = fixup_memory_subreg (tem, insn, 0);
2152 else
2153 tem = fixup_stack_1 (tem, insn);
2154
2155 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2156 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2157 && ! mode_dependent_address_p (XEXP (tem, 0))
2158 && ! MEM_VOLATILE_P (tem))
2159 {
2160 enum machine_mode wanted_mode
2161 = insn_operand_mode[(int) CODE_FOR_insv][0];
2162 enum machine_mode is_mode = GET_MODE (tem);
2163 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2164
2165 /* If we have a narrower mode, we can do something. */
2166 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2167 {
2168 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2169 rtx old_pos = XEXP (outerdest, 2);
2170 rtx newmem;
2171
2172 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2173 offset = (GET_MODE_SIZE (is_mode)
2174 - GET_MODE_SIZE (wanted_mode) - offset);
2175
2176 pos %= GET_MODE_BITSIZE (wanted_mode);
2177
2178 newmem = gen_rtx_MEM (wanted_mode,
2179 plus_constant (XEXP (tem, 0), offset));
2180 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2181 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
2182 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
2183
2184 /* Make the change and see if the insn remains valid. */
2185 INSN_CODE (insn) = -1;
2186 XEXP (outerdest, 0) = newmem;
2187 XEXP (outerdest, 2) = GEN_INT (pos);
2188
2189 if (recog_memoized (insn) >= 0)
2190 return;
2191
 2192                   /* Otherwise, restore the old position.  XEXP (outerdest, 0)
 2193                      will be restored later.  */
2194 XEXP (outerdest, 2) = old_pos;
2195 }
2196 }
2197
2198 /* If we get here, the bit-field store doesn't allow memory
2199 or isn't located at a constant position. Load the value into
2200 a register, do the store, and put it back into memory. */
2201
2202 tem1 = gen_reg_rtx (GET_MODE (tem));
2203 emit_insn_before (gen_move_insn (tem1, tem), insn);
2204 emit_insn_after (gen_move_insn (tem, tem1), insn);
2205 XEXP (outerdest, 0) = tem1;
2206 return;
2207 }
2208 #endif
2209
2210 /* STRICT_LOW_PART is a no-op on memory references
2211 and it can cause combinations to be unrecognizable,
2212 so eliminate it. */
2213
2214 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2215 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2216
2217 /* A valid insn to copy VAR into or out of a register
2218 must be left alone, to avoid an infinite loop here.
2219 If the reference to VAR is by a subreg, fix that up,
2220 since SUBREG is not valid for a memref.
2221 Also fix up the address of the stack slot.
2222
2223 Note that we must not try to recognize the insn until
2224 after we know that we have valid addresses and no
2225 (subreg (mem ...) ...) constructs, since these interfere
2226 with determining the validity of the insn. */
2227
2228 if ((SET_SRC (x) == var
2229 || (GET_CODE (SET_SRC (x)) == SUBREG
2230 && SUBREG_REG (SET_SRC (x)) == var))
2231 && (GET_CODE (SET_DEST (x)) == REG
2232 || (GET_CODE (SET_DEST (x)) == SUBREG
2233 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2234 && GET_MODE (var) == promoted_mode
2235 && x == single_set (insn))
2236 {
2237 rtx pat;
2238
2239 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2240 if (replacement->new)
2241 SET_SRC (x) = replacement->new;
2242 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2243 SET_SRC (x) = replacement->new
2244 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2245 else
2246 SET_SRC (x) = replacement->new
2247 = fixup_stack_1 (SET_SRC (x), insn);
2248
2249 if (recog_memoized (insn) >= 0)
2250 return;
2251
2252 /* INSN is not valid, but we know that we want to
2253 copy SET_SRC (x) to SET_DEST (x) in some way. So
2254 we generate the move and see whether it requires more
2255 than one insn. If it does, we emit those insns and
 2256                 delete INSN.  Otherwise, we can just replace the pattern
 2257                 of INSN; we have already verified above that INSN has
 2258                 no other function than to do X.  */
2259
2260 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2261 if (GET_CODE (pat) == SEQUENCE)
2262 {
2263 emit_insn_after (pat, insn);
2264 PUT_CODE (insn, NOTE);
2265 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2266 NOTE_SOURCE_FILE (insn) = 0;
2267 }
2268 else
2269 PATTERN (insn) = pat;
2270
2271 return;
2272 }
2273
2274 if ((SET_DEST (x) == var
2275 || (GET_CODE (SET_DEST (x)) == SUBREG
2276 && SUBREG_REG (SET_DEST (x)) == var))
2277 && (GET_CODE (SET_SRC (x)) == REG
2278 || (GET_CODE (SET_SRC (x)) == SUBREG
2279 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2280 && GET_MODE (var) == promoted_mode
2281 && x == single_set (insn))
2282 {
2283 rtx pat;
2284
2285 if (GET_CODE (SET_DEST (x)) == SUBREG)
2286 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2287 else
2288 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2289
2290 if (recog_memoized (insn) >= 0)
2291 return;
2292
2293 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2294 if (GET_CODE (pat) == SEQUENCE)
2295 {
2296 emit_insn_after (pat, insn);
2297 PUT_CODE (insn, NOTE);
2298 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2299 NOTE_SOURCE_FILE (insn) = 0;
2300 }
2301 else
2302 PATTERN (insn) = pat;
2303
2304 return;
2305 }
2306
2307 /* Otherwise, storing into VAR must be handled specially
2308 by storing into a temporary and copying that into VAR
2309 with a new insn after this one. Note that this case
2310 will be used when storing into a promoted scalar since
2311 the insn will now have different modes on the input
2312 and output and hence will be invalid (except for the case
2313 of setting it to a constant, which does not need any
2314 change if it is valid). We generate extra code in that case,
2315 but combine.c will eliminate it. */
2316
2317 if (dest == var)
2318 {
2319 rtx temp;
2320 rtx fixeddest = SET_DEST (x);
2321
 2322             /* STRICT_LOW_PART can be discarded around a MEM.  */
2323 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2324 fixeddest = XEXP (fixeddest, 0);
2325 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2326 if (GET_CODE (fixeddest) == SUBREG)
2327 {
2328 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2329 promoted_mode = GET_MODE (fixeddest);
2330 }
2331 else
2332 fixeddest = fixup_stack_1 (fixeddest, insn);
2333
2334 temp = gen_reg_rtx (promoted_mode);
2335
2336 emit_insn_after (gen_move_insn (fixeddest,
2337 gen_lowpart (GET_MODE (fixeddest),
2338 temp)),
2339 insn);
2340
2341 SET_DEST (x) = temp;
2342 }
2343 }
2344
2345 default:
2346 break;
2347 }
2348
2349 /* Nothing special about this RTX; fix its operands. */
2350
2351 fmt = GET_RTX_FORMAT (code);
2352 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2353 {
2354 if (fmt[i] == 'e')
2355 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2356 if (fmt[i] == 'E')
2357 {
2358 register int j;
2359 for (j = 0; j < XVECLEN (x, i); j++)
2360 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2361 insn, replacements);
2362 }
2363 }
2364 }
2365 \f
2366 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2367 return an rtx (MEM:m1 newaddr) which is equivalent.
2368 If any insns must be emitted to compute NEWADDR, put them before INSN.
2369
2370 UNCRITICAL nonzero means accept paradoxical subregs.
2371 This is used for subregs found inside REG_NOTES. */
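
/* For instance (illustrative only; the offset depends on UNITS_PER_WORD
   and on endianness): on a 32-bit little-endian target,
	(subreg:SI (mem:DI (reg fp)) 1)
   denotes word 1 of the DImode value and is rewritten as
	(mem:SI (plus (reg fp) (const_int 4))).  */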
2372
2373 static rtx
2374 fixup_memory_subreg (x, insn, uncritical)
2375 rtx x;
2376 rtx insn;
2377 int uncritical;
2378 {
2379 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2380 rtx addr = XEXP (SUBREG_REG (x), 0);
2381 enum machine_mode mode = GET_MODE (x);
2382 rtx result;
2383
2384 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2385 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2386 && ! uncritical)
2387 abort ();
2388
2389 if (BYTES_BIG_ENDIAN)
2390 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2391 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2392 addr = plus_constant (addr, offset);
2393 if (!flag_force_addr && memory_address_p (mode, addr))
2394 /* Shortcut if no insns need be emitted. */
2395 return change_address (SUBREG_REG (x), mode, addr);
2396 start_sequence ();
2397 result = change_address (SUBREG_REG (x), mode, addr);
2398 emit_insn_before (gen_sequence (), insn);
2399 end_sequence ();
2400 return result;
2401 }
2402
2403 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2404 Replace subexpressions of X in place.
2405 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2406 Otherwise return X, with its contents possibly altered.
2407
2408 If any insns must be emitted to compute NEWADDR, put them before INSN.
2409
2410 UNCRITICAL is as in fixup_memory_subreg. */
2411
2412 static rtx
2413 walk_fixup_memory_subreg (x, insn, uncritical)
2414 register rtx x;
2415 rtx insn;
2416 int uncritical;
2417 {
2418 register enum rtx_code code;
2419 register char *fmt;
2420 register int i;
2421
2422 if (x == 0)
2423 return 0;
2424
2425 code = GET_CODE (x);
2426
2427 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2428 return fixup_memory_subreg (x, insn, uncritical);
2429
2430 /* Nothing special about this RTX; fix its operands. */
2431
2432 fmt = GET_RTX_FORMAT (code);
2433 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2434 {
2435 if (fmt[i] == 'e')
2436 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2437 if (fmt[i] == 'E')
2438 {
2439 register int j;
2440 for (j = 0; j < XVECLEN (x, i); j++)
2441 XVECEXP (x, i, j)
2442 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2443 }
2444 }
2445 return x;
2446 }
2447 \f
2448 /* For each memory ref within X, if it refers to a stack slot
2449 with an out of range displacement, put the address in a temp register
2450 (emitting new insns before INSN to load these registers)
2451 and alter the memory ref to use that register.
2452 Replace each such MEM rtx with a copy, to avoid clobberage. */
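
/* A sketch, with invented numbers: on a machine whose displacement field
   only reaches 12 bits, (mem:SI (plus (reg fp) (const_int 40000))) is not
   a valid address, so we emit roughly
	(set (reg:SI 80) (plus (reg fp) (const_int 40000)))
   before INSN and replace the reference with (mem:SI (reg:SI 80)).  */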
2453
2454 static rtx
2455 fixup_stack_1 (x, insn)
2456 rtx x;
2457 rtx insn;
2458 {
2459 register int i;
2460 register RTX_CODE code = GET_CODE (x);
2461 register char *fmt;
2462
2463 if (code == MEM)
2464 {
2465 register rtx ad = XEXP (x, 0);
 2466       /* If we have the address of a stack slot but it's not valid
2467 (displacement is too large), compute the sum in a register. */
2468 if (GET_CODE (ad) == PLUS
2469 && GET_CODE (XEXP (ad, 0)) == REG
2470 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2471 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2472 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2473 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2474 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2475 #endif
2476 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2477 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2478 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2479 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2480 {
2481 rtx temp, seq;
2482 if (memory_address_p (GET_MODE (x), ad))
2483 return x;
2484
2485 start_sequence ();
2486 temp = copy_to_reg (ad);
2487 seq = gen_sequence ();
2488 end_sequence ();
2489 emit_insn_before (seq, insn);
2490 return change_address (x, VOIDmode, temp);
2491 }
2492 return x;
2493 }
2494
2495 fmt = GET_RTX_FORMAT (code);
2496 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2497 {
2498 if (fmt[i] == 'e')
2499 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2500 if (fmt[i] == 'E')
2501 {
2502 register int j;
2503 for (j = 0; j < XVECLEN (x, i); j++)
2504 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2505 }
2506 }
2507 return x;
2508 }
2509 \f
2510 /* Optimization: a bit-field instruction whose field
2511 happens to be a byte or halfword in memory
2512 can be changed to a move instruction.
2513
2514 We call here when INSN is an insn to examine or store into a bit-field.
2515 BODY is the SET-rtx to be altered.
2516
2517 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2518 (Currently this is called only from function.c, and EQUIV_MEM
2519 is always 0.) */
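
/* As a sketch of the transformation (byte offsets assume a little-endian
   machine; register numbers are invented):
	(set (zero_extract:SI (mem:SI (reg X)) (const_int 8) (const_int 8))
	     (reg:SI 61))
   stores an aligned byte, so it can become the simple move
	(set (mem:QI (plus (reg X) (const_int 1)))
	     (subreg:QI (reg:SI 61) 0)).  */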
2520
2521 static void
2522 optimize_bit_field (body, insn, equiv_mem)
2523 rtx body;
2524 rtx insn;
2525 rtx *equiv_mem;
2526 {
2527 register rtx bitfield;
2528 int destflag;
2529 rtx seq = 0;
2530 enum machine_mode mode;
2531
2532 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2533 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2534 bitfield = SET_DEST (body), destflag = 1;
2535 else
2536 bitfield = SET_SRC (body), destflag = 0;
2537
2538 /* First check that the field being stored has constant size and position
2539 and is in fact a byte or halfword suitably aligned. */
2540
2541 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2542 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2543 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2544 != BLKmode)
2545 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2546 {
2547 register rtx memref = 0;
2548
2549 /* Now check that the containing word is memory, not a register,
2550 and that it is safe to change the machine mode. */
2551
2552 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2553 memref = XEXP (bitfield, 0);
2554 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2555 && equiv_mem != 0)
2556 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2557 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2558 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2559 memref = SUBREG_REG (XEXP (bitfield, 0));
2560 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2561 && equiv_mem != 0
2562 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2563 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2564
2565 if (memref
2566 && ! mode_dependent_address_p (XEXP (memref, 0))
2567 && ! MEM_VOLATILE_P (memref))
2568 {
2569 /* Now adjust the address, first for any subreg'ing
2570 that we are now getting rid of,
2571 and then for which byte of the word is wanted. */
2572
2573 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2574 rtx insns;
2575
2576 /* Adjust OFFSET to count bits from low-address byte. */
2577 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2578 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2579 - offset - INTVAL (XEXP (bitfield, 1)));
2580
2581 /* Adjust OFFSET to count bytes from low-address byte. */
2582 offset /= BITS_PER_UNIT;
2583 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2584 {
2585 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2586 if (BYTES_BIG_ENDIAN)
2587 offset -= (MIN (UNITS_PER_WORD,
2588 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2589 - MIN (UNITS_PER_WORD,
2590 GET_MODE_SIZE (GET_MODE (memref))));
2591 }
2592
2593 start_sequence ();
2594 memref = change_address (memref, mode,
2595 plus_constant (XEXP (memref, 0), offset));
2596 insns = get_insns ();
2597 end_sequence ();
2598 emit_insns_before (insns, insn);
2599
2600 /* Store this memory reference where
2601 we found the bit field reference. */
2602
2603 if (destflag)
2604 {
2605 validate_change (insn, &SET_DEST (body), memref, 1);
2606 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2607 {
2608 rtx src = SET_SRC (body);
2609 while (GET_CODE (src) == SUBREG
2610 && SUBREG_WORD (src) == 0)
2611 src = SUBREG_REG (src);
2612 if (GET_MODE (src) != GET_MODE (memref))
2613 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2614 validate_change (insn, &SET_SRC (body), src, 1);
2615 }
2616 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2617 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2618 /* This shouldn't happen because anything that didn't have
2619 one of these modes should have got converted explicitly
2620 and then referenced through a subreg.
2621 This is so because the original bit-field was
2622 handled by agg_mode and so its tree structure had
2623 the same mode that memref now has. */
2624 abort ();
2625 }
2626 else
2627 {
2628 rtx dest = SET_DEST (body);
2629
2630 while (GET_CODE (dest) == SUBREG
2631 && SUBREG_WORD (dest) == 0
2632 && (GET_MODE_CLASS (GET_MODE (dest))
2633 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2634 dest = SUBREG_REG (dest);
2635
2636 validate_change (insn, &SET_DEST (body), dest, 1);
2637
2638 if (GET_MODE (dest) == GET_MODE (memref))
2639 validate_change (insn, &SET_SRC (body), memref, 1);
2640 else
2641 {
2642 /* Convert the mem ref to the destination mode. */
2643 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2644
2645 start_sequence ();
2646 convert_move (newreg, memref,
2647 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2648 seq = get_insns ();
2649 end_sequence ();
2650
2651 validate_change (insn, &SET_SRC (body), newreg, 1);
2652 }
2653 }
2654
2655 /* See if we can convert this extraction or insertion into
2656 a simple move insn. We might not be able to do so if this
2657 was, for example, part of a PARALLEL.
2658
2659 If we succeed, write out any needed conversions. If we fail,
2660 it is hard to guess why we failed, so don't do anything
2661 special; just let the optimization be suppressed. */
2662
2663 if (apply_change_group () && seq)
2664 emit_insns_before (seq, insn);
2665 }
2666 }
2667 }
2668 \f
2669 /* These routines are responsible for converting virtual register references
2670 to the actual hard register references once RTL generation is complete.
2671
2672 The following four variables are used for communication between the
2673 routines. They contain the offsets of the virtual registers from their
2674 respective hard registers. */
2675
2676 static int in_arg_offset;
2677 static int var_offset;
2678 static int dynamic_offset;
2679 static int out_arg_offset;
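
/* The correspondence, made concrete in instantiate_virtual_regs_1, is:
	virtual_incoming_args_rtx -> arg_pointer_rtx   + in_arg_offset
	virtual_stack_vars_rtx    -> frame_pointer_rtx + var_offset
	virtual_stack_dynamic_rtx -> stack_pointer_rtx + dynamic_offset
	virtual_outgoing_args_rtx -> stack_pointer_rtx + out_arg_offset  */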
2680
 2681 /* On most machines, the stack pointer register is equivalent to the bottom
2682 of the stack. */
2683
2684 #ifndef STACK_POINTER_OFFSET
2685 #define STACK_POINTER_OFFSET 0
2686 #endif
2687
2688 /* If not defined, pick an appropriate default for the offset of dynamically
2689 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2690 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2691
2692 #ifndef STACK_DYNAMIC_OFFSET
2693
2694 #ifdef ACCUMULATE_OUTGOING_ARGS
2695 /* The bottom of the stack points to the actual arguments. If
2696 REG_PARM_STACK_SPACE is defined, this includes the space for the register
 2697    parameters.  However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2698 stack space for register parameters is not pushed by the caller, but
2699 rather part of the fixed stack areas and hence not included in
2700 `current_function_outgoing_args_size'. Nevertheless, we must allow
2701 for it when allocating stack dynamic objects. */
2702
2703 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2704 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2705 (current_function_outgoing_args_size \
2706 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2707
2708 #else
2709 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2710 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2711 #endif
2712
2713 #else
2714 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2715 #endif
2716 #endif
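
/* For example, under ACCUMULATE_OUTGOING_ARGS with REG_PARM_STACK_SPACE
   defined but OUTGOING_REG_PARM_STACK_SPACE not, a function with 16 bytes
   of outgoing arguments and 8 bytes of register-parameter space gets
   STACK_DYNAMIC_OFFSET = 16 + 8 + STACK_POINTER_OFFSET (the 16 and 8 are
   purely illustrative).  */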
2717
2718 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2719 its address taken. DECL is the decl for the object stored in the
2720 register, for later use if we do need to force REG into the stack.
 2721    REG is overwritten by the MEM, as in put_reg_into_stack.  */
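
/* Concretely (register numbers invented, and assuming Pmode is SImode):
   if DECL's rtl was (reg:SI 52), it is rewritten in place as
	(mem:SI (addressof:SI (reg:SI 60) 52))
   where (reg:SI 60) is a fresh pseudo, kept so that the variable can
   later be forced into the stack if that proves necessary.  */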
2722
2723 rtx
2724 gen_mem_addressof (reg, decl)
2725 rtx reg;
2726 tree decl;
2727 {
2728 tree type = TREE_TYPE (decl);
2729
2730 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2731 SET_ADDRESSOF_DECL (r, decl);
2732
2733 XEXP (reg, 0) = r;
2734 PUT_CODE (reg, MEM);
2735 PUT_MODE (reg, DECL_MODE (decl));
2736 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2737 MEM_IN_STRUCT_P (reg) = AGGREGATE_TYPE_P (type);
2738
2739 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2740 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2741
2742 return reg;
2743 }
2744
2745 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2746
2747 void
2748 flush_addressof (decl)
2749 tree decl;
2750 {
2751 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2752 && DECL_RTL (decl) != 0
2753 && GET_CODE (DECL_RTL (decl)) == MEM
2754 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2755 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2756 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2757 }
2758
2759 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2760
2761 static void
2762 put_addressof_into_stack (r)
2763 rtx r;
2764 {
2765 tree decl = ADDRESSOF_DECL (r);
2766 rtx reg = XEXP (r, 0);
2767
2768 if (GET_CODE (reg) != REG)
2769 abort ();
2770
2771 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2772 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2773 ADDRESSOF_REGNO (r),
2774 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2775 }
2776
2777 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2778 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2779 the stack. */
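
/* Roughly, the common case is that the ADDRESSOF disappears again: a use
   such as (mem:SI (addressof:SI (reg:SI 60) 52)) whose mode matches the
   register's collapses back to (reg:SI 60).  Only when that cannot be
   validated (a mode clash, or a volatile or BLKmode reference) does the
   register finally get pushed into the stack.  */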
2780
2781 static void
2782 purge_addressof_1 (loc, insn, force)
2783 rtx *loc;
2784 rtx insn;
2785 int force;
2786 {
2787 rtx x;
2788 RTX_CODE code;
2789 int i, j;
2790 char *fmt;
2791
2792 /* Re-start here to avoid recursion in common cases. */
2793 restart:
2794
2795 x = *loc;
2796 if (x == 0)
2797 return;
2798
2799 code = GET_CODE (x);
2800
2801 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2802 {
2803 rtx insns;
2804 /* We must create a copy of the rtx because it was created by
2805 overwriting a REG rtx which is always shared. */
2806 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2807
2808 if (validate_change (insn, loc, sub, 0))
2809 return;
2810
2811 start_sequence ();
2812 if (! validate_change (insn, loc,
2813 force_operand (sub, NULL_RTX),
2814 0))
2815 abort ();
2816
2817 insns = get_insns ();
2818 end_sequence ();
2819 emit_insns_before (insns, insn);
2820 return;
2821 }
2822 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2823 {
2824 rtx sub = XEXP (XEXP (x, 0), 0);
2825
2826 if (GET_CODE (sub) == MEM)
2827 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2828
2829 if (GET_CODE (sub) == REG
2830 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2831 {
2832 put_addressof_into_stack (XEXP (x, 0));
2833 return;
2834 }
2835 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2836 {
2837 if (! BYTES_BIG_ENDIAN && ! WORDS_BIG_ENDIAN)
2838 {
2839 rtx sub2 = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
2840 if (validate_change (insn, loc, sub2, 0))
2841 goto restart;
2842 }
2843 }
2844 else if (validate_change (insn, loc, sub, 0))
2845 goto restart;
 2846       /* Else give up and put it into the stack.  */
2847 }
2848 else if (code == ADDRESSOF)
2849 {
2850 put_addressof_into_stack (x);
2851 return;
2852 }
2853
2854 /* Scan all subexpressions. */
2855 fmt = GET_RTX_FORMAT (code);
2856 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2857 {
2858 if (*fmt == 'e')
2859 purge_addressof_1 (&XEXP (x, i), insn, force);
2860 else if (*fmt == 'E')
2861 for (j = 0; j < XVECLEN (x, i); j++)
2862 purge_addressof_1 (&XVECEXP (x, i, j), insn, force);
2863 }
2864 }
2865
2866 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
2867 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
2868 stack. */
2869
2870 void
2871 purge_addressof (insns)
2872 rtx insns;
2873 {
2874 rtx insn;
2875 for (insn = insns; insn; insn = NEXT_INSN (insn))
2876 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2877 || GET_CODE (insn) == CALL_INSN)
2878 {
2879 purge_addressof_1 (&PATTERN (insn), insn,
2880 asm_noperands (PATTERN (insn)) > 0);
2881 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0);
2882 }
2883 }
2884 \f
2885 /* Pass through the INSNS of function FNDECL and convert virtual register
2886 references to hard register references. */
2887
2888 void
2889 instantiate_virtual_regs (fndecl, insns)
2890 tree fndecl;
2891 rtx insns;
2892 {
2893 rtx insn;
2894 int i;
2895
2896 /* Compute the offsets to use for this function. */
2897 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
2898 var_offset = STARTING_FRAME_OFFSET;
2899 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
2900 out_arg_offset = STACK_POINTER_OFFSET;
2901
2902 /* Scan all variables and parameters of this function. For each that is
2903 in memory, instantiate all virtual registers if the result is a valid
2904 address. If not, we do it later. That will handle most uses of virtual
2905 regs on many machines. */
2906 instantiate_decls (fndecl, 1);
2907
2908 /* Initialize recognition, indicating that volatile is OK. */
2909 init_recog ();
2910
2911 /* Scan through all the insns, instantiating every virtual register still
2912 present. */
2913 for (insn = insns; insn; insn = NEXT_INSN (insn))
2914 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2915 || GET_CODE (insn) == CALL_INSN)
2916 {
2917 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
2918 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
2919 }
2920
2921 /* Instantiate the stack slots for the parm registers, for later use in
2922 addressof elimination. */
2923 for (i = 0; i < max_parm_reg; ++i)
2924 if (parm_reg_stack_loc[i])
2925 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
2926
2927 /* Now instantiate the remaining register equivalences for debugging info.
2928 These will not be valid addresses. */
2929 instantiate_decls (fndecl, 0);
2930
2931 /* Indicate that, from now on, assign_stack_local should use
2932 frame_pointer_rtx. */
2933 virtuals_instantiated = 1;
2934 }
2935
2936 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
2937 all virtual registers in their DECL_RTL's.
2938
2939 If VALID_ONLY, do this only if the resulting address is still valid.
2940 Otherwise, always do it. */
2941
2942 static void
2943 instantiate_decls (fndecl, valid_only)
2944 tree fndecl;
2945 int valid_only;
2946 {
2947 tree decl;
2948
2949 if (DECL_SAVED_INSNS (fndecl))
2950 /* When compiling an inline function, the obstack used for
2951 rtl allocation is the maybepermanent_obstack. Calling
2952 `resume_temporary_allocation' switches us back to that
2953 obstack while we process this function's parameters. */
2954 resume_temporary_allocation ();
2955
2956 /* Process all parameters of the function. */
2957 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2958 {
2959 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
2960
2961 instantiate_decl (DECL_RTL (decl), size, valid_only);
2962
2963 /* If the parameter was promoted, then the incoming RTL mode may be
2964 larger than the declared type size. We must use the larger of
2965 the two sizes. */
2966 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
2967 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
2968 }
2969
2970 /* Now process all variables defined in the function or its subblocks. */
2971 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
2972
2973 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
2974 {
2975 /* Save all rtl allocated for this function by raising the
2976 high-water mark on the maybepermanent_obstack. */
2977 preserve_data ();
2978 /* All further rtl allocation is now done in the current_obstack. */
2979 rtl_in_current_obstack ();
2980 }
2981 }
2982
2983 /* Subroutine of instantiate_decls: Process all decls in the given
2984 BLOCK node and all its subblocks. */
2985
2986 static void
2987 instantiate_decls_1 (let, valid_only)
2988 tree let;
2989 int valid_only;
2990 {
2991 tree t;
2992
2993 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
2994 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
2995 valid_only);
2996
2997 /* Process all subblocks. */
2998 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2999 instantiate_decls_1 (t, valid_only);
3000 }
3001
3002 /* Subroutine of the preceding procedures: Given RTL representing a
3003 decl and the size of the object, do any instantiation required.
3004
3005 If VALID_ONLY is non-zero, it means that the RTL should only be
3006 changed if the new address is valid. */
3007
3008 static void
3009 instantiate_decl (x, size, valid_only)
3010 rtx x;
3011 int size;
3012 int valid_only;
3013 {
3014 enum machine_mode mode;
3015 rtx addr;
3016
3017 /* If this is not a MEM, no need to do anything. Similarly if the
3018 address is a constant or a register that is not a virtual register. */
3019
3020 if (x == 0 || GET_CODE (x) != MEM)
3021 return;
3022
3023 addr = XEXP (x, 0);
3024 if (CONSTANT_P (addr)
3025 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3026 || (GET_CODE (addr) == REG
3027 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3028 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3029 return;
3030
3031 /* If we should only do this if the address is valid, copy the address.
3032 We need to do this so we can undo any changes that might make the
3033 address invalid. This copy is unfortunate, but probably can't be
3034 avoided. */
3035
3036 if (valid_only)
3037 addr = copy_rtx (addr);
3038
3039 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3040
3041 if (valid_only)
3042 {
3043 /* Now verify that the resulting address is valid for every integer or
3044 floating-point mode up to and including SIZE bytes long. We do this
3045 since the object might be accessed in any mode and frame addresses
3046 are shared. */
3047
3048 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3049 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3050 mode = GET_MODE_WIDER_MODE (mode))
3051 if (! memory_address_p (mode, addr))
3052 return;
3053
3054 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3055 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3056 mode = GET_MODE_WIDER_MODE (mode))
3057 if (! memory_address_p (mode, addr))
3058 return;
3059 }
3060
3061 /* Put back the address now that we have updated it and we either know
3062 it is valid or we don't care whether it is valid. */
3063
3064 XEXP (x, 0) = addr;
3065 }
3066 \f
3067 /* Given a pointer to a piece of rtx and an optional pointer to the
3068 containing object, instantiate any virtual registers present in it.
3069
3070 If EXTRA_INSNS, we always do the replacement and generate
 3071    any extra insns before OBJECT.  If it is zero, we do nothing if replacement
3072 is not valid.
3073
3074 Return 1 if we either had nothing to do or if we were able to do the
3075 needed replacement. Return 0 otherwise; we only return zero if
3076 EXTRA_INSNS is zero.
3077
3078 We first try some simple transformations to avoid the creation of extra
3079 pseudos. */
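
/* A typical instance, with an invented offset: if var_offset is -16, the
   address (plus:SI (reg virtual-stack-vars) (const_int 20)) becomes
   (plus:SI (reg frame-pointer) (const_int 4)); only if the insn will not
   accept that form do we fall back to computing the sum into a fresh
   pseudo before OBJECT.  */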
3080
3081 static int
3082 instantiate_virtual_regs_1 (loc, object, extra_insns)
3083 rtx *loc;
3084 rtx object;
3085 int extra_insns;
3086 {
3087 rtx x;
3088 RTX_CODE code;
3089 rtx new = 0;
3090 HOST_WIDE_INT offset;
3091 rtx temp;
3092 rtx seq;
3093 int i, j;
3094 char *fmt;
3095
3096 /* Re-start here to avoid recursion in common cases. */
3097 restart:
3098
3099 x = *loc;
3100 if (x == 0)
3101 return 1;
3102
3103 code = GET_CODE (x);
3104
3105 /* Check for some special cases. */
3106 switch (code)
3107 {
3108 case CONST_INT:
3109 case CONST_DOUBLE:
3110 case CONST:
3111 case SYMBOL_REF:
3112 case CODE_LABEL:
3113 case PC:
3114 case CC0:
3115 case ASM_INPUT:
3116 case ADDR_VEC:
3117 case ADDR_DIFF_VEC:
3118 case RETURN:
3119 return 1;
3120
3121 case SET:
3122 /* We are allowed to set the virtual registers. This means that
3123 the actual register should receive the source minus the
3124 appropriate offset. This is used, for example, in the handling
3125 of non-local gotos. */
3126 if (SET_DEST (x) == virtual_incoming_args_rtx)
3127 new = arg_pointer_rtx, offset = - in_arg_offset;
3128 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3129 new = frame_pointer_rtx, offset = - var_offset;
3130 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3131 new = stack_pointer_rtx, offset = - dynamic_offset;
3132 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3133 new = stack_pointer_rtx, offset = - out_arg_offset;
3134
3135 if (new)
3136 {
3137 /* The only valid sources here are PLUS or REG. Just do
3138 the simplest possible thing to handle them. */
3139 if (GET_CODE (SET_SRC (x)) != REG
3140 && GET_CODE (SET_SRC (x)) != PLUS)
3141 abort ();
3142
3143 start_sequence ();
3144 if (GET_CODE (SET_SRC (x)) != REG)
3145 temp = force_operand (SET_SRC (x), NULL_RTX);
3146 else
3147 temp = SET_SRC (x);
3148 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3149 seq = get_insns ();
3150 end_sequence ();
3151
3152 emit_insns_before (seq, object);
3153 SET_DEST (x) = new;
3154
3155 if (! validate_change (object, &SET_SRC (x), temp, 0)
3156 || ! extra_insns)
3157 abort ();
3158
3159 return 1;
3160 }
3161
3162 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3163 loc = &SET_SRC (x);
3164 goto restart;
3165
3166 case PLUS:
3167 /* Handle special case of virtual register plus constant. */
3168 if (CONSTANT_P (XEXP (x, 1)))
3169 {
3170 rtx old, new_offset;
3171
3172 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3173 if (GET_CODE (XEXP (x, 0)) == PLUS)
3174 {
3175 rtx inner = XEXP (XEXP (x, 0), 0);
3176
3177 if (inner == virtual_incoming_args_rtx)
3178 new = arg_pointer_rtx, offset = in_arg_offset;
3179 else if (inner == virtual_stack_vars_rtx)
3180 new = frame_pointer_rtx, offset = var_offset;
3181 else if (inner == virtual_stack_dynamic_rtx)
3182 new = stack_pointer_rtx, offset = dynamic_offset;
3183 else if (inner == virtual_outgoing_args_rtx)
3184 new = stack_pointer_rtx, offset = out_arg_offset;
3185 else
3186 {
3187 loc = &XEXP (x, 0);
3188 goto restart;
3189 }
3190
3191 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3192 extra_insns);
3193 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3194 }
3195
3196 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3197 new = arg_pointer_rtx, offset = in_arg_offset;
3198 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3199 new = frame_pointer_rtx, offset = var_offset;
3200 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3201 new = stack_pointer_rtx, offset = dynamic_offset;
3202 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3203 new = stack_pointer_rtx, offset = out_arg_offset;
3204 else
3205 {
3206 /* We know the second operand is a constant. Unless the
 3207              first operand is a REG (which has already been checked),
3208 it needs to be checked. */
3209 if (GET_CODE (XEXP (x, 0)) != REG)
3210 {
3211 loc = &XEXP (x, 0);
3212 goto restart;
3213 }
3214 return 1;
3215 }
3216
3217 new_offset = plus_constant (XEXP (x, 1), offset);
3218
3219 /* If the new constant is zero, try to replace the sum with just
3220 the register. */
3221 if (new_offset == const0_rtx
3222 && validate_change (object, loc, new, 0))
3223 return 1;
3224
 3225           /* Next try to replace the register and new offset.
 3226              There are two changes to validate here, and we can't assume
 3227              that, when the old offset equals the new one, just changing
 3228              the register will yield a valid insn.  In the interests of a
 3229              little efficiency, however, we only call validate_change once
 3230              (we don't queue up the changes and then call apply_change_group).  */
3231
3232 old = XEXP (x, 0);
3233 if (offset == 0
3234 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3235 : (XEXP (x, 0) = new,
3236 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3237 {
3238 if (! extra_insns)
3239 {
3240 XEXP (x, 0) = old;
3241 return 0;
3242 }
3243
 3244              /* Otherwise copy the new constant into a register and replace
 3245                 the constant with that register.  */
3246 temp = gen_reg_rtx (Pmode);
3247 XEXP (x, 0) = new;
3248 if (validate_change (object, &XEXP (x, 1), temp, 0))
3249 emit_insn_before (gen_move_insn (temp, new_offset), object);
3250 else
3251 {
3252 /* If that didn't work, replace this expression with a
3253 register containing the sum. */
3254
3255 XEXP (x, 0) = old;
3256 new = gen_rtx_PLUS (Pmode, new, new_offset);
3257
3258 start_sequence ();
3259 temp = force_operand (new, NULL_RTX);
3260 seq = get_insns ();
3261 end_sequence ();
3262
3263 emit_insns_before (seq, object);
3264 if (! validate_change (object, loc, temp, 0)
3265 && ! validate_replace_rtx (x, temp, object))
3266 abort ();
3267 }
3268 }
3269
3270 return 1;
3271 }
3272
3273 /* Fall through to generic two-operand expression case. */
3274 case EXPR_LIST:
3275 case CALL:
3276 case COMPARE:
3277 case MINUS:
3278 case MULT:
3279 case DIV: case UDIV:
3280 case MOD: case UMOD:
3281 case AND: case IOR: case XOR:
3282 case ROTATERT: case ROTATE:
3283 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3284 case NE: case EQ:
3285 case GE: case GT: case GEU: case GTU:
3286 case LE: case LT: case LEU: case LTU:
3287 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3288 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3289 loc = &XEXP (x, 0);
3290 goto restart;
3291
3292 case MEM:
3293 /* Most cases of MEM that convert to valid addresses have already been
3294 handled by our scan of decls. The only special handling we
3295 need here is to make a copy of the rtx to ensure it isn't being
3296 shared if we have to change it to a pseudo.
3297
3298 If the rtx is a simple reference to an address via a virtual register,
3299 it can potentially be shared. In such cases, first try to make it
3300 a valid address, which can also be shared. Otherwise, copy it and
3301 proceed normally.
3302
3303 First check for common cases that need no processing. These are
3304 usually due to instantiation already being done on a previous instance
3305 of a shared rtx. */
3306
3307 temp = XEXP (x, 0);
3308 if (CONSTANT_ADDRESS_P (temp)
3309 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3310 || temp == arg_pointer_rtx
3311 #endif
3312 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3313 || temp == hard_frame_pointer_rtx
3314 #endif
3315 || temp == frame_pointer_rtx)
3316 return 1;
3317
3318 if (GET_CODE (temp) == PLUS
3319 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3320 && (XEXP (temp, 0) == frame_pointer_rtx
3321 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3322 || XEXP (temp, 0) == hard_frame_pointer_rtx
3323 #endif
3324 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3325 || XEXP (temp, 0) == arg_pointer_rtx
3326 #endif
3327 ))
3328 return 1;
3329
3330 if (temp == virtual_stack_vars_rtx
3331 || temp == virtual_incoming_args_rtx
3332 || (GET_CODE (temp) == PLUS
3333 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3334 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3335 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3336 {
3337 /* This MEM may be shared. If the substitution can be done without
3338 the need to generate new pseudos, we want to do it in place
3339 so all copies of the shared rtx benefit. The call below will
3340 only make substitutions if the resulting address is still
3341 valid.
3342
3343 Note that we cannot pass X as the object in the recursive call
3344 since the insn being processed may not allow all valid
 3345              addresses.  However, if we were not passed an object, we can
3346 only modify X without copying it if X will have a valid
3347 address.
3348
3349 ??? Also note that this can still lose if OBJECT is an insn that
 3350              has fewer restrictions on an address than some other insn.
3351 In that case, we will modify the shared address. This case
3352 doesn't seem very likely, though. One case where this could
 3353              happen is with a USE or CLOBBER reference, but we
3354 take care of that below. */
3355
3356 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3357 object ? object : x, 0))
3358 return 1;
3359
3360 /* Otherwise make a copy and process that copy. We copy the entire
3361 RTL expression since it might be a PLUS which could also be
3362 shared. */
3363 *loc = x = copy_rtx (x);
3364 }
3365
3366 /* Fall through to generic unary operation case. */
3367 case SUBREG:
3368 case STRICT_LOW_PART:
3369 case NEG: case NOT:
3370 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3371 case SIGN_EXTEND: case ZERO_EXTEND:
3372 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3373 case FLOAT: case FIX:
3374 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3375 case ABS:
3376 case SQRT:
3377 case FFS:
 3378       /* These cases either have just one operand or we know that we need not
3379 check the rest of the operands. */
3380 loc = &XEXP (x, 0);
3381 goto restart;
3382
3383 case USE:
3384 case CLOBBER:
3385 /* If the operand is a MEM, see if the change is a valid MEM. If not,
3386 go ahead and make the invalid one, but do it to a copy. For a REG,
3387 just make the recursive call, since there's no chance of a problem. */
3388
3389 if ((GET_CODE (XEXP (x, 0)) == MEM
3390 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3391 0))
3392 || (GET_CODE (XEXP (x, 0)) == REG
3393 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3394 return 1;
3395
3396 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3397 loc = &XEXP (x, 0);
3398 goto restart;
3399
3400 case REG:
3401 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3402 in front of this insn and substitute the temporary. */
3403 if (x == virtual_incoming_args_rtx)
3404 new = arg_pointer_rtx, offset = in_arg_offset;
3405 else if (x == virtual_stack_vars_rtx)
3406 new = frame_pointer_rtx, offset = var_offset;
3407 else if (x == virtual_stack_dynamic_rtx)
3408 new = stack_pointer_rtx, offset = dynamic_offset;
3409 else if (x == virtual_outgoing_args_rtx)
3410 new = stack_pointer_rtx, offset = out_arg_offset;
3411
3412 if (new)
3413 {
3414 temp = plus_constant (new, offset);
3415 if (!validate_change (object, loc, temp, 0))
3416 {
3417 if (! extra_insns)
3418 return 0;
3419
3420 start_sequence ();
3421 temp = force_operand (temp, NULL_RTX);
3422 seq = get_insns ();
3423 end_sequence ();
3424
3425 emit_insns_before (seq, object);
3426 if (! validate_change (object, loc, temp, 0)
3427 && ! validate_replace_rtx (x, temp, object))
3428 abort ();
3429 }
3430 }
3431
3432 return 1;
3433
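      /* To illustrate the REG case (the numbers are hypothetical): if X is
	 (reg virtual-incoming-args) and in_arg_offset is 16, we first try
	 to substitute (plus (reg ap) (const_int 16)) directly.  If the
	 insn rejects that form and EXTRA_INSNS is nonzero, force_operand
	 computes the sum into a fresh pseudo ahead of OBJECT and that
	 pseudo is substituted instead.  */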
3434 case ADDRESSOF:
3435 if (GET_CODE (XEXP (x, 0)) == REG)
3436 return 1;
3437
3438 else if (GET_CODE (XEXP (x, 0)) == MEM)
3439 {
3440 /* If we have a (addressof (mem ..)), do any instantiation inside
3441 since we know we'll be making the inside valid when we finally
3442 remove the ADDRESSOF. */
3443 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3444 return 1;
3445 }
3446 break;
3447
3448 default:
3449 break;
3450 }
3451
3452 /* Scan all subexpressions. */
3453 fmt = GET_RTX_FORMAT (code);
3454 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3455 if (*fmt == 'e')
3456 {
3457 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3458 return 0;
3459 }
3460 else if (*fmt == 'E')
3461 for (j = 0; j < XVECLEN (x, i); j++)
3462 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3463 extra_insns))
3464 return 0;
3465
3466 return 1;
3467 }
3468 \f
3469 /* Optimization: assuming this function does not receive nonlocal gotos,
3470 delete the handlers for such, as well as the insns to establish
3471 and disestablish them. */
3472
3473 static void
3474 delete_handlers ()
3475 {
3476 rtx insn;
3477 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3478 {
3479 /* Delete the handler by turning off the flag that would
3480 prevent jump_optimize from deleting it.
3481 Also permit deletion of the nonlocal labels themselves
3482 if nothing local refers to them. */
3483 if (GET_CODE (insn) == CODE_LABEL)
3484 {
3485 tree t, last_t;
3486
3487 LABEL_PRESERVE_P (insn) = 0;
3488
3489 /* Remove it from the nonlocal_labels list, to avoid confusing
3490 flow. */
3491 for (t = nonlocal_labels, last_t = 0; t;
3492 last_t = t, t = TREE_CHAIN (t))
3493 if (DECL_RTL (TREE_VALUE (t)) == insn)
3494 break;
3495 if (t)
3496 {
3497 if (! last_t)
3498 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3499 else
3500 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3501 }
3502 }
3503 if (GET_CODE (insn) == INSN
3504 && ((nonlocal_goto_handler_slot != 0
3505 && reg_mentioned_p (nonlocal_goto_handler_slot, PATTERN (insn)))
3506 || (nonlocal_goto_stack_level != 0
3507 && reg_mentioned_p (nonlocal_goto_stack_level,
3508 PATTERN (insn)))))
3509 delete_insn (insn);
3510 }
3511 }
3512
3513 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3514 of the current function. */
3515
3516 rtx
3517 nonlocal_label_rtx_list ()
3518 {
3519 tree t;
3520 rtx x = 0;
3521
3522 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3523 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3524
3525 return x;
3526 }
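/* Note the resulting order: since each label is consed onto the front,
   nonlocal labels L1 and L2 recorded in that order come back as
   (expr_list L2 (expr_list L1 nil)), i.e. the reverse of the
   nonlocal_labels chain.  */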
3527 \f
3528 /* Output a USE for any register use in RTL.
3529 This is used with -noreg to mark the extent of the lifespan
3530 of any registers used in a user-visible variable's DECL_RTL. */
3531
3532 void
3533 use_variable (rtl)
3534 rtx rtl;
3535 {
3536 if (GET_CODE (rtl) == REG)
3537 /* This is a register variable. */
3538 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3539 else if (GET_CODE (rtl) == MEM
3540 && GET_CODE (XEXP (rtl, 0)) == REG
3541 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3542 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3543 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3544 /* This is a variable-sized structure. */
3545 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3546 }
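/* For example (register numbers invented): a register variable living in
   pseudo 42 gets (use (reg 42)); a variable-sized structure whose
   DECL_RTL is (mem (reg 43)) gets (use (reg 43)), keeping the address
   register live to the end of the insn chain.  */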
3547
3548 /* Like use_variable except that it outputs the USEs after INSN
3549 instead of at the end of the insn-chain. */
3550
3551 void
3552 use_variable_after (rtl, insn)
3553 rtx rtl, insn;
3554 {
3555 if (GET_CODE (rtl) == REG)
3556 /* This is a register variable. */
3557 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3558 else if (GET_CODE (rtl) == MEM
3559 && GET_CODE (XEXP (rtl, 0)) == REG
3560 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3561 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3562 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3563 /* This is a variable-sized structure. */
3564 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3565 }
3566 \f
3567 int
3568 max_parm_reg_num ()
3569 {
3570 return max_parm_reg;
3571 }
3572
3573 /* Return the first insn following those generated by `assign_parms'. */
3574
3575 rtx
3576 get_first_nonparm_insn ()
3577 {
3578 if (last_parm_insn)
3579 return NEXT_INSN (last_parm_insn);
3580 return get_insns ();
3581 }
3582
3583 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3584 Crash if there is none. */
3585
3586 rtx
3587 get_first_block_beg ()
3588 {
3589 register rtx searcher;
3590 register rtx insn = get_first_nonparm_insn ();
3591
3592 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3593 if (GET_CODE (searcher) == NOTE
3594 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3595 return searcher;
3596
3597 abort (); /* Invalid call to this function. (See comments above.) */
3598 return NULL_RTX;
3599 }
3600
3601 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3602 This means a type for which function calls must pass an address to the
3603 function or get an address back from the function.
3604 EXP may be a type node or an expression (whose type is tested). */
3605
3606 int
3607 aggregate_value_p (exp)
3608 tree exp;
3609 {
3610 int i, regno, nregs;
3611 rtx reg;
3612 tree type;
3613 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3614 type = exp;
3615 else
3616 type = TREE_TYPE (exp);
3617
3618 if (RETURN_IN_MEMORY (type))
3619 return 1;
3620 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3621 and thus can't be returned in registers. */
3622 if (TREE_ADDRESSABLE (type))
3623 return 1;
3624 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3625 return 1;
3626 /* Make sure we have suitable call-clobbered regs to return
3627 the value in; if not, we must return it in memory. */
3628 reg = hard_function_value (type, 0);
3629
3630 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3631 it is OK. */
3632 if (GET_CODE (reg) != REG)
3633 return 0;
3634
3635 regno = REGNO (reg);
3636 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3637 for (i = 0; i < nregs; i++)
3638 if (! call_used_regs[regno + i])
3639 return 1;
3640 return 0;
3641 }
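/* A worked illustration, necessarily target-dependent: with
   -fpcc-struct-return, `struct { int a, b; }' is caught by the
   AGGREGATE_TYPE_P test above and returned in memory; without it, the
   same struct may still come back in registers, provided
   hard_function_value yields a REG whose words are all call-clobbered.  */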
3642 \f
3643 /* Assign RTL expressions to the function's parameters.
3644 This may involve copying them into registers and using
3645 those registers as the RTL for them.
3646
3647 If SECOND_TIME is non-zero it means that this function is being
3648 called a second time. This is done by integrate.c when a function's
3649 compilation is deferred. We need to come back here in case the
3650 FUNCTION_ARG macro computes items needed for the rest of the compilation
3651 (such as changing which registers are fixed or caller-saved). But suppress
3652 writing any insns or setting DECL_RTL of anything in this case. */
3653
3654 void
3655 assign_parms (fndecl, second_time)
3656 tree fndecl;
3657 int second_time;
3658 {
3659 register tree parm;
3660 register rtx entry_parm = 0;
3661 register rtx stack_parm = 0;
3662 CUMULATIVE_ARGS args_so_far;
3663 enum machine_mode promoted_mode, passed_mode;
3664 enum machine_mode nominal_mode, promoted_nominal_mode;
3665 int unsignedp;
3666 /* Total space needed so far for args on the stack,
3667 given as a constant and a tree-expression. */
3668 struct args_size stack_args_size;
3669 tree fntype = TREE_TYPE (fndecl);
3670 tree fnargs = DECL_ARGUMENTS (fndecl);
3671 /* This is used for the arg pointer when referring to stack args. */
3672 rtx internal_arg_pointer;
3673 /* This is a dummy PARM_DECL that we used for the function result if
3674 the function returns a structure. */
3675 tree function_result_decl = 0;
3676 int varargs_setup = 0;
3677 rtx conversion_insns = 0;
3678
3679 /* Nonzero if the last arg is named `__builtin_va_alist',
3680 which is used on some machines for old-fashioned non-ANSI varargs.h;
3681 this should be stuck onto the stack as if it had arrived there. */
3682 int hide_last_arg
3683 = (current_function_varargs
3684 && fnargs
3685 && (parm = tree_last (fnargs)) != 0
3686 && DECL_NAME (parm)
3687 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3688 "__builtin_va_alist")));
3689
3690 /* Nonzero if function takes extra anonymous args.
3691 This means the last named arg must be on the stack
3692 right before the anonymous ones. */
3693 int stdarg
3694 = (TYPE_ARG_TYPES (fntype) != 0
3695 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3696 != void_type_node));
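  /* Sketch: for `int f (int a, ...)' the TYPE_ARG_TYPES chain ends with
     the type of A rather than void_type_node, so STDARG is 1; for the
     fully prototyped `int f (int a)' it ends with void_type_node and
     STDARG is 0.  (An unprototyped function has no TYPE_ARG_TYPES at all,
     so it is not stdarg either.)  */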
3697
3698 current_function_stdarg = stdarg;
3699
3700 /* If the reg that the virtual arg pointer will be translated into is
3701 not a fixed reg or is the stack pointer, make a copy of the virtual
3702 arg pointer, and address parms via the copy. The frame pointer is
3703 considered fixed even though it is not marked as such.
3704
3705 The second time through, simply use ap to avoid generating rtx. */
3706
3707 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3708 || ! (fixed_regs[ARG_POINTER_REGNUM]
3709 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3710 && ! second_time)
3711 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3712 else
3713 internal_arg_pointer = virtual_incoming_args_rtx;
3714 current_function_internal_arg_pointer = internal_arg_pointer;
3715
3716 stack_args_size.constant = 0;
3717 stack_args_size.var = 0;
3718
3719 /* If struct value address is treated as the first argument, make it so. */
3720 if (aggregate_value_p (DECL_RESULT (fndecl))
3721 && ! current_function_returns_pcc_struct
3722 && struct_value_incoming_rtx == 0)
3723 {
3724 tree type = build_pointer_type (TREE_TYPE (fntype));
3725
3726 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3727
3728 DECL_ARG_TYPE (function_result_decl) = type;
3729 TREE_CHAIN (function_result_decl) = fnargs;
3730 fnargs = function_result_decl;
3731 }
3732
3733 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3734 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3735 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3736
3737 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3738 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3739 #else
3740 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3741 #endif
3742
3743 /* We haven't yet found an argument that we must push and pretend the
3744 caller did. */
3745 current_function_pretend_args_size = 0;
3746
3747 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3748 {
3749 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3750 struct args_size stack_offset;
3751 struct args_size arg_size;
3752 int passed_pointer = 0;
3753 int did_conversion = 0;
3754 tree passed_type = DECL_ARG_TYPE (parm);
3755 tree nominal_type = TREE_TYPE (parm);
3756
3757 /* Set LAST_NAMED if this is last named arg before some
3758 anonymous args. */
3759 int last_named = ((TREE_CHAIN (parm) == 0
3760 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3761 && (stdarg || current_function_varargs));
3762 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3763 most machines, if this is a varargs/stdarg function, then we treat
3764 the last named arg as if it were anonymous too. */
3765 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3766
3767 if (TREE_TYPE (parm) == error_mark_node
3768 /* This can happen after weird syntax errors
3769 or if an enum type is defined among the parms. */
3770 || TREE_CODE (parm) != PARM_DECL
3771 || passed_type == NULL)
3772 {
3773 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3774 = gen_rtx_MEM (BLKmode, const0_rtx);
3775 TREE_USED (parm) = 1;
3776 continue;
3777 }
3778
3779 /* For a varargs.h function, save info about regs and stack space
3780 used by the individual args, not including the va_alist arg. */
3781 if (hide_last_arg && last_named)
3782 current_function_args_info = args_so_far;
3783
3784 /* Find mode of arg as it is passed, and mode of arg
3785 as it should be during execution of this function. */
3786 passed_mode = TYPE_MODE (passed_type);
3787 nominal_mode = TYPE_MODE (nominal_type);
3788
3789 /* If the parm's mode is VOID, its value doesn't matter,
3790 so avoid the usual things like emit_move_insn that could crash. */
3791 if (nominal_mode == VOIDmode)
3792 {
3793 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
3794 continue;
3795 }
3796
3797 /* If the parm is to be passed as a transparent union, use the
3798 type of the first field for the tests below. We have already
3799 verified that the modes are the same. */
3800 if (DECL_TRANSPARENT_UNION (parm)
3801 || TYPE_TRANSPARENT_UNION (passed_type))
3802 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
3803
3804 /* See if this arg was passed by invisible reference. It is if
3805 it is an object whose size depends on the contents of the
3806 object itself or if the machine requires these objects be passed
3807 that way. */
3808
3809 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
3810 && contains_placeholder_p (TYPE_SIZE (passed_type)))
3811 || TREE_ADDRESSABLE (passed_type)
3812 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3813 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
3814 passed_type, named_arg)
3815 #endif
3816 )
3817 {
3818 passed_type = nominal_type = build_pointer_type (passed_type);
3819 passed_pointer = 1;
3820 passed_mode = nominal_mode = Pmode;
3821 }
3822
3823 promoted_mode = passed_mode;
3824
3825 #ifdef PROMOTE_FUNCTION_ARGS
3826 /* Compute the mode to which the arg is actually extended. */
3827 unsignedp = TREE_UNSIGNED (passed_type);
3828 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
3829 #endif
3830
3831 /* Let machine desc say which reg (if any) the parm arrives in.
3832 0 means it arrives on the stack. */
3833 #ifdef FUNCTION_INCOMING_ARG
3834 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3835 passed_type, named_arg);
3836 #else
3837 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
3838 passed_type, named_arg);
3839 #endif
3840
3841 if (entry_parm == 0)
3842 promoted_mode = passed_mode;
3843
3844 #ifdef SETUP_INCOMING_VARARGS
3845 /* If this is the last named parameter, do any required setup for
3846 varargs or stdargs. We need to know about the case of this being an
3847 addressable type, in which case we skip the registers it
3848 would have arrived in.
3849
3850 For stdargs, LAST_NAMED will be set for two parameters, the one that
3851 is actually the last named, and the dummy parameter. We only
3852 want to do this action once.
3853
3854 Also, indicate when RTL generation is to be suppressed. */
3855 if (last_named && !varargs_setup)
3856 {
3857 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
3858 current_function_pretend_args_size,
3859 second_time);
3860 varargs_setup = 1;
3861 }
3862 #endif
3863
3864 /* Determine parm's home in the stack,
3865 in case it arrives in the stack or we should pretend it did.
3866
3867 Compute the stack position and rtx where the argument arrives
3868 and its size.
3869
3870 There is one complexity here: If this was a parameter that would
3871 have been passed in registers, but wasn't, only because it is
3872 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
3873 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
3874 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
3875 0 as it was the previous time. */
3876
3877 locate_and_pad_parm (promoted_mode, passed_type,
3878 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3879 1,
3880 #else
3881 #ifdef FUNCTION_INCOMING_ARG
3882 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
3883 passed_type,
3884 (named_arg
3885 || varargs_setup)) != 0,
3886 #else
3887 FUNCTION_ARG (args_so_far, promoted_mode,
3888 passed_type,
3889 named_arg || varargs_setup) != 0,
3890 #endif
3891 #endif
3892 fndecl, &stack_args_size, &stack_offset, &arg_size);
3893
3894 if (! second_time)
3895 {
3896 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
3897
3898 if (offset_rtx == const0_rtx)
3899 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
3900 else
3901 stack_parm = gen_rtx_MEM (promoted_mode,
3902 gen_rtx_PLUS (Pmode,
3903 internal_arg_pointer,
3904 offset_rtx));
3905
3906 /* If this is a memory ref that contains aggregate components,
3907 mark it as such for cse and loop optimize. Likewise if it
3908 is readonly. */
3909 MEM_IN_STRUCT_P (stack_parm) = aggregate;
3910 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
3911 }
3912
3913 /* If this parameter was passed both in registers and in the stack,
3914 use the copy on the stack. */
3915 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
3916 entry_parm = 0;
3917
3918 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3919 /* If this parm was passed part in regs and part in memory,
3920 pretend it arrived entirely in memory
3921 by pushing the register-part onto the stack.
3922
3923 In the special case of a DImode or DFmode that is split,
3924 we could put it together in a pseudoreg directly,
3925 but for now that's not worth bothering with. */
3926
3927 if (entry_parm)
3928 {
3929 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
3930 passed_type, named_arg);
3931
3932 if (nregs > 0)
3933 {
3934 current_function_pretend_args_size
3935 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
3936 / (PARM_BOUNDARY / BITS_PER_UNIT)
3937 * (PARM_BOUNDARY / BITS_PER_UNIT));
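	      /* Worked example with invented parameters: nregs == 3,
		 UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64 give
		 (12 + 8 - 1) / 8 * 8 == 16 bytes, i.e. the register part
		 rounded up to a multiple of the parameter boundary.  */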
3938
3939 if (! second_time)
3940 {
3941 /* Handle calls that pass values in multiple non-contiguous
3942 locations. The Irix 6 ABI has examples of this. */
3943 if (GET_CODE (entry_parm) == PARALLEL)
3944 emit_group_store (validize_mem (stack_parm),
3945 entry_parm);
3946 else
3947 move_block_from_reg (REGNO (entry_parm),
3948 validize_mem (stack_parm), nregs,
3949 int_size_in_bytes (TREE_TYPE (parm)));
3950 }
3951 entry_parm = stack_parm;
3952 }
3953 }
3954 #endif
3955
3956 /* If we didn't decide this parm came in a register,
3957 by default it came on the stack. */
3958 if (entry_parm == 0)
3959 entry_parm = stack_parm;
3960
3961 /* Record permanently how this parm was passed. */
3962 if (! second_time)
3963 DECL_INCOMING_RTL (parm) = entry_parm;
3964
3965 /* If there is actually space on the stack for this parm,
3966 count it in stack_args_size; otherwise set stack_parm to 0
3967 to indicate there is no preallocated stack slot for the parm. */
3968
3969 if (entry_parm == stack_parm
3970 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
3971 /* On some machines, even if a parm value arrives in a register
3972 there is still an (uninitialized) stack slot allocated for it.
3973
3974 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
3975 whether this parameter already has a stack slot allocated,
3976 because an arg block exists only if current_function_args_size
3977 is larger than some threshold, and we haven't calculated that
3978 yet. So, for now, we just assume that stack slots never exist
3979 in this case. */
3980 || REG_PARM_STACK_SPACE (fndecl) > 0
3981 #endif
3982 )
3983 {
3984 stack_args_size.constant += arg_size.constant;
3985 if (arg_size.var)
3986 ADD_PARM_SIZE (stack_args_size, arg_size.var);
3987 }
3988 else
3989 /* No stack slot was pushed for this parm. */
3990 stack_parm = 0;
3991
3992 /* Update info on where next arg arrives in registers. */
3993
3994 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
3995 passed_type, named_arg);
3996
3997 /* If this is our second time through, we are done with this parm. */
3998 if (second_time)
3999 continue;
4000
4001 /* If we can't trust the parm stack slot to be aligned enough
4002 for its ultimate type, don't use that slot after entry.
4003 We'll make another stack slot, if we need one. */
4004 {
4005 int thisparm_boundary
4006 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4007
4008 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4009 stack_parm = 0;
4010 }
4011
4012 /* If parm was passed in memory, and we need to convert it on entry,
4013 don't store it back in that same slot. */
4014 if (entry_parm != 0
4015 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4016 stack_parm = 0;
4017
4018 #if 0
4019 /* Now adjust STACK_PARM to the mode and precise location
4020 where this parameter should live during execution,
4021 if we discover that it must live in the stack during execution.
4022 To make debuggers happier on big-endian machines, we store
4023 the value in the last bytes of the space available. */
4024
4025 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4026 && stack_parm != 0)
4027 {
4028 rtx offset_rtx;
4029
4030 if (BYTES_BIG_ENDIAN
4031 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4032 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4033 - GET_MODE_SIZE (nominal_mode));
4034
4035 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4036 if (offset_rtx == const0_rtx)
4037 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4038 else
4039 stack_parm = gen_rtx_MEM (nominal_mode,
4040 gen_rtx_PLUS (Pmode,
4041 internal_arg_pointer,
4042 offset_rtx));
4043
4044 /* If this is a memory ref that contains aggregate components,
4045 mark it as such for cse and loop optimize. */
4046 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4047 }
4048 #endif /* 0 */
4049
4050 #ifdef STACK_REGS
4051 /* We need this "use" info, because the gcc-register->stack-register
4052 converter in reg-stack.c needs to know which registers are active
4053 at the start of the function call. The actual parameter loading
4054 instructions are not always available by then, since they might
4055 have been optimized away. */
4056
4057 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4058 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4059 #endif
4060
4061 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4062 in the mode in which it arrives.
4063 STACK_PARM is an RTX for a stack slot where the parameter can live
4064 during the function (in case we want to put it there).
4065 STACK_PARM is 0 if no stack slot was pushed for it.
4066
4067 Now output code if necessary to convert ENTRY_PARM to
4068 the type in which this function declares it,
4069 and store that result in an appropriate place,
4070 which may be a pseudo reg, may be STACK_PARM,
4071 or may be a local stack slot if STACK_PARM is 0.
4072
4073 Set DECL_RTL to that place. */
4074
4075 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4076 {
4077 /* If a BLKmode arrives in registers, copy it to a stack slot.
4078 Handle calls that pass values in multiple non-contiguous
4079 locations. The Irix 6 ABI has examples of this. */
4080 if (GET_CODE (entry_parm) == REG
4081 || GET_CODE (entry_parm) == PARALLEL)
4082 {
4083 int size_stored
4084 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4085 UNITS_PER_WORD);
4086
4087 /* Note that we will be storing an integral number of words.
4088 So we have to be careful to ensure that we allocate an
4089 integral number of words. We do this below in the
4090 assign_stack_local if space was not allocated in the argument
4091 list. If it was, this will not work if PARM_BOUNDARY is not
4092 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4093 if it becomes a problem. */
4094
4095 if (stack_parm == 0)
4096 {
4097 stack_parm
4098 = assign_stack_local (GET_MODE (entry_parm),
4099 size_stored, 0);
4100
4101 /* If this is a memory ref that contains aggregate
4102 components, mark it as such for cse and loop optimize. */
4103 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4104 }
4105
4106 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4107 abort ();
4108
4109 if (TREE_READONLY (parm))
4110 RTX_UNCHANGING_P (stack_parm) = 1;
4111
4112 /* Handle calls that pass values in multiple non-contiguous
4113 locations. The Irix 6 ABI has examples of this. */
4114 if (GET_CODE (entry_parm) == PARALLEL)
4115 emit_group_store (validize_mem (stack_parm), entry_parm);
4116 else
4117 move_block_from_reg (REGNO (entry_parm),
4118 validize_mem (stack_parm),
4119 size_stored / UNITS_PER_WORD,
4120 int_size_in_bytes (TREE_TYPE (parm)));
4121 }
4122 DECL_RTL (parm) = stack_parm;
4123 }
4124 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4125 && ! DECL_INLINE (fndecl))
4126 /* layout_decl may set this. */
4127 || TREE_ADDRESSABLE (parm)
4128 || TREE_SIDE_EFFECTS (parm)
4129 /* If -ffloat-store specified, don't put explicit
4130 float variables into registers. */
4131 || (flag_float_store
4132 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4133 /* Always assign pseudo to structure return or item passed
4134 by invisible reference. */
4135 || passed_pointer || parm == function_result_decl)
4136 {
4137 /* Store the parm in a pseudoregister during the function, but we
4138 may need to do it in a wider mode. */
4139
4140 register rtx parmreg;
4141 int regno, regnoi = 0, regnor = 0;
4142
4143 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4144
4145 promoted_nominal_mode
4146 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4147
4148 parmreg = gen_reg_rtx (promoted_nominal_mode);
4149 mark_user_reg (parmreg);
4150
4151 /* If this was an item that we received a pointer to, set DECL_RTL
4152 appropriately. */
4153 if (passed_pointer)
4154 {
4155 DECL_RTL (parm)
4156 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4157 MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
4158 }
4159 else
4160 DECL_RTL (parm) = parmreg;
4161
4162 /* Copy the value into the register. */
4163 if (nominal_mode != passed_mode
4164 || promoted_nominal_mode != promoted_mode)
4165 {
4166 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4167 mode, by the caller. We now have to convert it to
4168 NOMINAL_MODE, if different. However, PARMREG may be in
4169 a different mode than NOMINAL_MODE if it is being stored
4170 promoted.
4171
4172 If ENTRY_PARM is a hard register, it might be in a register
4173 not valid for operating in its mode (e.g., an odd-numbered
4174 register for a DFmode). In that case, moves are the only
4175 thing valid, so we can't do a convert from there. This
4176 occurs when the calling sequence allows such misaligned
4177 usages.
4178
4179 In addition, the conversion may involve a call, which could
4180 clobber parameters which haven't been copied to pseudo
4181 registers yet. Therefore, we must first copy the parm to
4182 a pseudo reg here, and save the conversion until after all
4183 parameters have been moved. */
4184
4185 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4186
4187 emit_move_insn (tempreg, validize_mem (entry_parm));
4188
4189 push_to_sequence (conversion_insns);
4190 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4191
4192 expand_assignment (parm,
4193 make_tree (nominal_type, tempreg), 0, 0);
4194 conversion_insns = get_insns ();
4195 did_conversion = 1;
4196 end_sequence ();
4197 }
4198 else
4199 emit_move_insn (parmreg, validize_mem (entry_parm));
4200
4201 /* If we were passed a pointer but the actual value
4202 can safely live in a register, put it in one. */
4203 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4204 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4205 && ! DECL_INLINE (fndecl))
4206 /* layout_decl may set this. */
4207 || TREE_ADDRESSABLE (parm)
4208 || TREE_SIDE_EFFECTS (parm)
4209 /* If -ffloat-store specified, don't put explicit
4210 float variables into registers. */
4211 || (flag_float_store
4212 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4213 {
4214 /* We can't use nominal_mode, because it will have been set to
4215 Pmode above. We must use the actual mode of the parm. */
4216 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4217 mark_user_reg (parmreg);
4218 emit_move_insn (parmreg, DECL_RTL (parm));
4219 DECL_RTL (parm) = parmreg;
4220 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4221 now the parm. */
4222 stack_parm = 0;
4223 }
4224 #ifdef FUNCTION_ARG_CALLEE_COPIES
4225 /* If we are passed an arg by reference and it is our responsibility
4226 to make a copy, do it now.
4227 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4228 original argument, so we must recreate them in the call to
4229 FUNCTION_ARG_CALLEE_COPIES. */
4230 /* ??? Later add code to handle the case where, if the argument isn't
4231 modified, we need not do the copy. */
4232
4233 else if (passed_pointer
4234 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4235 TYPE_MODE (DECL_ARG_TYPE (parm)),
4236 DECL_ARG_TYPE (parm),
4237 named_arg)
4238 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4239 {
4240 rtx copy;
4241 tree type = DECL_ARG_TYPE (parm);
4242
4243 /* This sequence may involve a library call perhaps clobbering
4244 registers that haven't been copied to pseudos yet. */
4245
4246 push_to_sequence (conversion_insns);
4247
4248 if (TYPE_SIZE (type) == 0
4249 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4250 /* This is a variable-sized object. */
4251 copy = gen_rtx_MEM (BLKmode,
4252 allocate_dynamic_stack_space
4253 (expr_size (parm), NULL_RTX,
4254 TYPE_ALIGN (type)));
4255 else
4256 copy = assign_stack_temp (TYPE_MODE (type),
4257 int_size_in_bytes (type), 1);
4258 MEM_IN_STRUCT_P (copy) = AGGREGATE_TYPE_P (type);
4259 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4260
4261 store_expr (parm, copy, 0);
4262 emit_move_insn (parmreg, XEXP (copy, 0));
4263 if (flag_check_memory_usage)
4264 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4265 XEXP (copy, 0), ptr_mode,
4266 GEN_INT (int_size_in_bytes (type)),
4267 TYPE_MODE (sizetype),
4268 GEN_INT (MEMORY_USE_RW),
4269 TYPE_MODE (integer_type_node));
4270 conversion_insns = get_insns ();
4271 did_conversion = 1;
4272 end_sequence ();
4273 }
4274 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4275
4276 /* In any case, record the parm's desired stack location
4277 in case we later discover it must live in the stack.
4278
4279 If it is a COMPLEX value, store the stack location for both
4280 halves. */
4281
4282 if (GET_CODE (parmreg) == CONCAT)
4283 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4284 else
4285 regno = REGNO (parmreg);
4286
4287 if (regno >= max_parm_reg)
4288 {
4289 rtx *new;
4290 int old_max_parm_reg = max_parm_reg;
4291
4292 /* It's slow to expand this one register at a time,
4293 but it's also rare and we need max_parm_reg to be
4294 precisely correct. */
4295 max_parm_reg = regno + 1;
4296 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4297 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4298 old_max_parm_reg * sizeof (rtx));
4299 bzero ((char *) (new + old_max_parm_reg),
4300 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4301 parm_reg_stack_loc = new;
4302 }
4303
4304 if (GET_CODE (parmreg) == CONCAT)
4305 {
4306 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4307
4308 regnor = REGNO (gen_realpart (submode, parmreg));
4309 regnoi = REGNO (gen_imagpart (submode, parmreg));
4310
4311 if (stack_parm != 0)
4312 {
4313 parm_reg_stack_loc[regnor]
4314 = gen_realpart (submode, stack_parm);
4315 parm_reg_stack_loc[regnoi]
4316 = gen_imagpart (submode, stack_parm);
4317 }
4318 else
4319 {
4320 parm_reg_stack_loc[regnor] = 0;
4321 parm_reg_stack_loc[regnoi] = 0;
4322 }
4323 }
4324 else
4325 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4326
4327 /* Mark the register as eliminable if we did no conversion
4328 and it was copied from memory at a fixed offset,
4329 and the arg pointer was not copied to a pseudo-reg.
4330 If the arg pointer is a pseudo reg or the offset formed
4331 an invalid address, such memory-equivalences
4332 as we make here would screw up life analysis for it. */
4333 if (nominal_mode == passed_mode
4334 && ! did_conversion
4335 && stack_parm != 0
4336 && GET_CODE (stack_parm) == MEM
4337 && stack_offset.var == 0
4338 && reg_mentioned_p (virtual_incoming_args_rtx,
4339 XEXP (stack_parm, 0)))
4340 {
4341 rtx linsn = get_last_insn ();
4342 rtx sinsn, set;
4343
4344 /* Mark complex types separately. */
4345 if (GET_CODE (parmreg) == CONCAT)
4346 /* Scan backwards for the set of the real and
4347 imaginary parts. */
4348 for (sinsn = linsn; sinsn != 0;
4349 sinsn = prev_nonnote_insn (sinsn))
4350 {
4351 set = single_set (sinsn);
4352 if (set != 0
4353 && SET_DEST (set) == regno_reg_rtx [regnoi])
4354 REG_NOTES (sinsn)
4355 = gen_rtx_EXPR_LIST (REG_EQUIV,
4356 parm_reg_stack_loc[regnoi],
4357 REG_NOTES (sinsn));
4358 else if (set != 0
4359 && SET_DEST (set) == regno_reg_rtx [regnor])
4360 REG_NOTES (sinsn)
4361 = gen_rtx_EXPR_LIST (REG_EQUIV,
4362 parm_reg_stack_loc[regnor],
4363 REG_NOTES (sinsn));
4364 }
4365 else if ((set = single_set (linsn)) != 0
4366 && SET_DEST (set) == parmreg)
4367 REG_NOTES (linsn)
4368 = gen_rtx_EXPR_LIST (REG_EQUIV,
4369 stack_parm, REG_NOTES (linsn));
4370 }
4371
4372 /* For pointer data type, suggest pointer register. */
4373 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4374 mark_reg_pointer (parmreg,
4375 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4376 / BITS_PER_UNIT));
4377 }
4378 else
4379 {
4380 /* Value must be stored in the stack slot STACK_PARM
4381 during function execution. */
4382
4383 if (promoted_mode != nominal_mode)
4384 {
4385 /* Conversion is required. */
4386 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4387
4388 emit_move_insn (tempreg, validize_mem (entry_parm));
4389
4390 push_to_sequence (conversion_insns);
4391 entry_parm = convert_to_mode (nominal_mode, tempreg,
4392 TREE_UNSIGNED (TREE_TYPE (parm)));
4393 if (stack_parm)
4394 {
4395 /* ??? This may need a big-endian conversion on sparc64. */
4396 stack_parm = change_address (stack_parm, nominal_mode,
4397 NULL_RTX);
4398 }
4399 conversion_insns = get_insns ();
4400 did_conversion = 1;
4401 end_sequence ();
4402 }
4403
4404 if (entry_parm != stack_parm)
4405 {
4406 if (stack_parm == 0)
4407 {
4408 stack_parm
4409 = assign_stack_local (GET_MODE (entry_parm),
4410 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4411 /* If this is a memory ref that contains aggregate components,
4412 mark it as such for cse and loop optimize. */
4413 MEM_IN_STRUCT_P (stack_parm) = aggregate;
4414 }
4415
4416 if (promoted_mode != nominal_mode)
4417 {
4418 push_to_sequence (conversion_insns);
4419 emit_move_insn (validize_mem (stack_parm),
4420 validize_mem (entry_parm));
4421 conversion_insns = get_insns ();
4422 end_sequence ();
4423 }
4424 else
4425 emit_move_insn (validize_mem (stack_parm),
4426 validize_mem (entry_parm));
4427 }
4428 if (flag_check_memory_usage)
4429 {
4430 push_to_sequence (conversion_insns);
4431 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4432 XEXP (stack_parm, 0), ptr_mode,
4433 GEN_INT (GET_MODE_SIZE (GET_MODE
4434 (entry_parm))),
4435 TYPE_MODE (sizetype),
4436 GEN_INT (MEMORY_USE_RW),
4437 TYPE_MODE (integer_type_node));
4438
4439 conversion_insns = get_insns ();
4440 end_sequence ();
4441 }
4442 DECL_RTL (parm) = stack_parm;
4443 }
4444
4445 /* If this "parameter" was the place where we are receiving the
4446 function's incoming structure pointer, set up the result. */
4447 if (parm == function_result_decl)
4448 {
4449 tree result = DECL_RESULT (fndecl);
4450 tree restype = TREE_TYPE (result);
4451
4452 DECL_RTL (result)
4453 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4454
4455 MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
4456 }
4457
4458 if (TREE_THIS_VOLATILE (parm))
4459 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4460 if (TREE_READONLY (parm))
4461 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4462 }
4463
4464 /* Output all parameter conversion instructions (possibly including calls)
4465 now that all parameters have been copied out of hard registers. */
4466 emit_insns (conversion_insns);
4467
4468 last_parm_insn = get_last_insn ();
4469
4470 current_function_args_size = stack_args_size.constant;
4471
4472 /* Adjust function incoming argument size for alignment and
4473 minimum length. */
4474
4475 #ifdef REG_PARM_STACK_SPACE
4476 #ifndef MAYBE_REG_PARM_STACK_SPACE
4477 current_function_args_size = MAX (current_function_args_size,
4478 REG_PARM_STACK_SPACE (fndecl));
4479 #endif
4480 #endif
4481
4482 #ifdef STACK_BOUNDARY
4483 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
4484
4485 current_function_args_size
4486 = ((current_function_args_size + STACK_BYTES - 1)
4487 / STACK_BYTES) * STACK_BYTES;
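  /* E.g. (a sketch): STACK_BOUNDARY == 64 makes STACK_BYTES 8, so an
     args size of 9 is rounded up to (9 + 8 - 1) / 8 * 8 == 16.  */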
4488 #endif
4489
4490 #ifdef ARGS_GROW_DOWNWARD
4491 current_function_arg_offset_rtx
4492 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4493 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4494 size_int (-stack_args_size.constant)),
4495 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4496 #else
4497 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4498 #endif
4499
4500 /* See how many bytes, if any, of its args a function should try to pop
4501 on return. */
4502
4503 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4504 current_function_args_size);
4505
4506 /* For a stdarg.h function, save info about
4507 regs and stack space used by the named args. */
4508
4509 if (!hide_last_arg)
4510 current_function_args_info = args_so_far;
4511
4512 /* Set the rtx used for the function return value. Put this in its
4513 own variable so any optimizers that need this information don't have
4514 to include tree.h. Do this here so it gets done when an inlined
4515 function gets output. */
4516
4517 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4518 }
4519 \f
4520 /* Indicate whether REGNO is an incoming argument to the current function
4521 that was promoted to a wider mode. If so, return the RTX for the
4522 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4523 that REGNO is promoted from and whether the promotion was signed or
4524 unsigned. */
4525
4526 #ifdef PROMOTE_FUNCTION_ARGS
4527
4528 rtx
4529 promoted_input_arg (regno, pmode, punsignedp)
4530 int regno;
4531 enum machine_mode *pmode;
4532 int *punsignedp;
4533 {
4534 tree arg;
4535
4536 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4537 arg = TREE_CHAIN (arg))
4538 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4539 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4540 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4541 {
4542 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4543 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4544
4545 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4546 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4547 && mode != DECL_MODE (arg))
4548 {
4549 *pmode = DECL_MODE (arg);
4550 *punsignedp = unsignedp;
4551 return DECL_INCOMING_RTL (arg);
4552 }
4553 }
4554
4555 return 0;
4556 }
4557
4558 #endif
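/* Usage sketch (the modes are illustrative): on a target that promotes
   QImode args to SImode, a `char' parameter arriving in register 3 makes
   promoted_input_arg (3, &mode, &unsignedp) return the incoming REG,
   with MODE set to QImode and UNSIGNEDP to the type's signedness.  */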
4559 \f
4560 /* Compute the size and offset from the start of the stacked arguments for a
4561 parm passed in mode PASSED_MODE and with type TYPE.
4562
4563 INITIAL_OFFSET_PTR points to the current offset into the stacked
4564 arguments.
4565
4566 The starting offset and size for this parm are returned in *OFFSET_PTR
4567 and *ARG_SIZE_PTR, respectively.
4568
4569 IN_REGS is non-zero if the argument will be passed in registers. It will
4570 never be set if REG_PARM_STACK_SPACE is not defined.
4571
4572 FNDECL is the function in which the argument was defined.
4573
4574 There are two types of rounding that are done. The first, controlled by
4575 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4576 list to be aligned to the specific boundary (in bits). This rounding
4577 affects the initial and starting offsets, but not the argument size.
4578
4579 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4580 optionally rounds the size of the parm to PARM_BOUNDARY. The
4581 initial offset is not affected by this rounding, while the size always
4582 is and the starting offset may be. */
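/* A hedged example of the two roundings, using invented numbers: with an
   initial offset of 6, a FUNCTION_ARG_BOUNDARY of 32 bits and a 5-byte
   parm under PARM_BOUNDARY == 32, the first rounding moves the starting
   offset up to 8 and the second pads the size to 8, so the following
   parm begins at offset 16.  */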
4583
4584 /* offset_ptr will be negative in the ARGS_GROW_DOWNWARD case;
4585 initial_offset_ptr is positive because locate_and_pad_parm's
4586 callers pass in the total size of args so far as
4587 initial_offset_ptr. arg_size_ptr is always positive. */
4588
4589 void
4590 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4591 initial_offset_ptr, offset_ptr, arg_size_ptr)
4592 enum machine_mode passed_mode;
4593 tree type;
4594 int in_regs;
4595 tree fndecl;
4596 struct args_size *initial_offset_ptr;
4597 struct args_size *offset_ptr;
4598 struct args_size *arg_size_ptr;
4599 {
4600 tree sizetree
4601 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4602 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4603 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4604
4605 #ifdef REG_PARM_STACK_SPACE
4606 /* If we have found a stack parm before we reach the end of the
4607 area reserved for registers, skip that area. */
4608 if (! in_regs)
4609 {
4610 int reg_parm_stack_space = 0;
4611
4612 #ifdef MAYBE_REG_PARM_STACK_SPACE
4613 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4614 #else
4615 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4616 #endif
4617 if (reg_parm_stack_space > 0)
4618 {
4619 if (initial_offset_ptr->var)
4620 {
4621 initial_offset_ptr->var
4622 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4623 size_int (reg_parm_stack_space));
4624 initial_offset_ptr->constant = 0;
4625 }
4626 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4627 initial_offset_ptr->constant = reg_parm_stack_space;
4628 }
4629 }
4630 #endif /* REG_PARM_STACK_SPACE */
4631
4632 arg_size_ptr->var = 0;
4633 arg_size_ptr->constant = 0;
4634
4635 #ifdef ARGS_GROW_DOWNWARD
4636 if (initial_offset_ptr->var)
4637 {
4638 offset_ptr->constant = 0;
4639 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4640 initial_offset_ptr->var);
4641 }
4642 else
4643 {
4644 offset_ptr->constant = - initial_offset_ptr->constant;
4645 offset_ptr->var = 0;
4646 }
4647 if (where_pad != none
4648 && (TREE_CODE (sizetree) != INTEGER_CST
4649 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4650 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4651 SUB_PARM_SIZE (*offset_ptr, sizetree);
4652 if (where_pad != downward)
4653 pad_to_arg_alignment (offset_ptr, boundary);
4654 if (initial_offset_ptr->var)
4655 {
4656 arg_size_ptr->var = size_binop (MINUS_EXPR,
4657 size_binop (MINUS_EXPR,
4658 integer_zero_node,
4659 initial_offset_ptr->var),
4660 offset_ptr->var);
4661 }
4662 else
4663 {
4664 arg_size_ptr->constant = (- initial_offset_ptr->constant
4665 - offset_ptr->constant);
4666 }
4667 #else /* !ARGS_GROW_DOWNWARD */
4668 pad_to_arg_alignment (initial_offset_ptr, boundary);
4669 *offset_ptr = *initial_offset_ptr;
4670
4671 #ifdef PUSH_ROUNDING
4672 if (passed_mode != BLKmode)
4673 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4674 #endif
4675
4676 /* Pad_below needs the pre-rounded size to know how much to pad below,
4677 so this must be done before rounding up. */
4678 if (where_pad == downward
4679 /* However, BLKmode args passed in regs have their padding done elsewhere.
4680 The stack slot must be able to hold the entire register. */
4681 && !(in_regs && passed_mode == BLKmode))
4682 pad_below (offset_ptr, passed_mode, sizetree);
4683
4684 if (where_pad != none
4685 && (TREE_CODE (sizetree) != INTEGER_CST
4686 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4687 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4688
4689 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4690 #endif /* ARGS_GROW_DOWNWARD */
4691 }
4692
4693 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4694 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
4695
4696 static void
4697 pad_to_arg_alignment (offset_ptr, boundary)
4698 struct args_size *offset_ptr;
4699 int boundary;
4700 {
4701 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4702
4703 if (boundary > BITS_PER_UNIT)
4704 {
4705 if (offset_ptr->var)
4706 {
4707 offset_ptr->var =
4708 #ifdef ARGS_GROW_DOWNWARD
4709 round_down
4710 #else
4711 round_up
4712 #endif
4713 (ARGS_SIZE_TREE (*offset_ptr),
4714 boundary / BITS_PER_UNIT);
4715 offset_ptr->constant = 0; /*?*/
4716 }
4717 else
4718 offset_ptr->constant =
4719 #ifdef ARGS_GROW_DOWNWARD
4720 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4721 #else
4722 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4723 #endif
4724 }
4725 }
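/* For instance (a sketch): with BOUNDARY == 32, a constant offset of 5
   becomes CEIL_ROUND (5, 4) == 8 when args grow upward; under
   ARGS_GROW_DOWNWARD an offset of -5 becomes FLOOR_ROUND (-5, 4) == -8.  */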
4726
4727 #ifndef ARGS_GROW_DOWNWARD
4728 static void
4729 pad_below (offset_ptr, passed_mode, sizetree)
4730 struct args_size *offset_ptr;
4731 enum machine_mode passed_mode;
4732 tree sizetree;
4733 {
4734 if (passed_mode != BLKmode)
4735 {
4736 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4737 offset_ptr->constant
4738 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4739 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4740 - GET_MODE_SIZE (passed_mode));
4741 }
4742 else
4743 {
4744 if (TREE_CODE (sizetree) != INTEGER_CST
4745 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4746 {
4747 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4748 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4749 /* Add it in. */
4750 ADD_PARM_SIZE (*offset_ptr, s2);
4751 SUB_PARM_SIZE (*offset_ptr, sizetree);
4752 }
4753 }
4754 }
4755 #endif
4756
4757 #ifdef ARGS_GROW_DOWNWARD
4758 static tree
4759 round_down (value, divisor)
4760 tree value;
4761 int divisor;
4762 {
4763 return size_binop (MULT_EXPR,
4764 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4765 size_int (divisor));
4766 }
4767 #endif
4768 \f
4769 /* Walk the tree of blocks describing the binding levels within a function
4770 and warn about uninitialized variables.
4771 This is done after calling flow_analysis and before global_alloc
4772 clobbers the pseudo-regs to hard regs. */
4773
4774 void
4775 uninitialized_vars_warning (block)
4776 tree block;
4777 {
4778 register tree decl, sub;
4779 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4780 {
4781 if (TREE_CODE (decl) == VAR_DECL
4782 /* These warnings are unreliable for aggregates
4783 because assigning the fields one by one can fail to convince
4784 flow.c that the entire aggregate was initialized.
4785 Unions are troublesome because members may be shorter. */
4786 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
4787 && DECL_RTL (decl) != 0
4788 && GET_CODE (DECL_RTL (decl)) == REG
4789 && regno_uninitialized (REGNO (DECL_RTL (decl))))
4790 warning_with_decl (decl,
4791 "`%s' might be used uninitialized in this function");
4792 if (TREE_CODE (decl) == VAR_DECL
4793 && DECL_RTL (decl) != 0
4794 && GET_CODE (DECL_RTL (decl)) == REG
4795 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4796 warning_with_decl (decl,
4797 "variable `%s' might be clobbered by `longjmp' or `vfork'");
4798 }
4799 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4800 uninitialized_vars_warning (sub);
4801 }
4802
4803 /* Do the appropriate part of uninitialized_vars_warning
4804 but for arguments instead of local variables. */
4805
4806 void
4807 setjmp_args_warning ()
4808 {
4809 register tree decl;
4810 for (decl = DECL_ARGUMENTS (current_function_decl);
4811 decl; decl = TREE_CHAIN (decl))
4812 if (DECL_RTL (decl) != 0
4813 && GET_CODE (DECL_RTL (decl)) == REG
4814 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
4815 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
4816 }
4817
4818 /* If this function calls setjmp, put all vars into the stack
4819 unless they were declared `register'. */
4820
4821 void
4822 setjmp_protect (block)
4823 tree block;
4824 {
4825 register tree decl, sub;
4826 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4827 if ((TREE_CODE (decl) == VAR_DECL
4828 || TREE_CODE (decl) == PARM_DECL)
4829 && DECL_RTL (decl) != 0
4830 && (GET_CODE (DECL_RTL (decl)) == REG
4831 || (GET_CODE (DECL_RTL (decl)) == MEM
4832 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4833 /* If this variable came from an inline function, it must be
4834 that its life doesn't overlap the setjmp. If there was a
4835 setjmp in the function, it would already be in memory. We
4836 must exclude such variables because their DECL_RTL might be
4837 set to strange things such as virtual_stack_vars_rtx. */
4838 && ! DECL_FROM_INLINE (decl)
4839 && (
4840 #ifdef NON_SAVING_SETJMP
4841 /* If longjmp doesn't restore the registers,
4842 don't put anything in them. */
4843 NON_SAVING_SETJMP
4844 ||
4845 #endif
4846 ! DECL_REGISTER (decl)))
4847 put_var_into_stack (decl);
4848 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
4849 setjmp_protect (sub);
4850 }
4851 \f
4852 /* Like the previous function, but for args instead of local variables. */
4853
4854 void
4855 setjmp_protect_args ()
4856 {
4857 register tree decl;
4858 for (decl = DECL_ARGUMENTS (current_function_decl);
4859 decl; decl = TREE_CHAIN (decl))
4860 if ((TREE_CODE (decl) == VAR_DECL
4861 || TREE_CODE (decl) == PARM_DECL)
4862 && DECL_RTL (decl) != 0
4863 && (GET_CODE (DECL_RTL (decl)) == REG
4864 || (GET_CODE (DECL_RTL (decl)) == MEM
4865 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
4866 && (
4867 /* If longjmp doesn't restore the registers,
4868 don't put anything in them. */
4869 #ifdef NON_SAVING_SETJMP
4870 NON_SAVING_SETJMP
4871 ||
4872 #endif
4873 ! DECL_REGISTER (decl)))
4874 put_var_into_stack (decl);
4875 }
4876 \f
4877 /* Return the context-pointer register corresponding to DECL,
4878 or 0 if it does not need one. */
4879
4880 rtx
4881 lookup_static_chain (decl)
4882 tree decl;
4883 {
4884 tree context = decl_function_context (decl);
4885 tree link;
4886
4887 if (context == 0
4888 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
4889 return 0;
4890
4891 /* We treat inline_function_decl as an alias for the current function
4892 because that is the inline function whose vars, types, etc.
4893 are being merged into the current function.
4894 See expand_inline_function. */
4895 if (context == current_function_decl || context == inline_function_decl)
4896 return virtual_stack_vars_rtx;
4897
4898 for (link = context_display; link; link = TREE_CHAIN (link))
4899 if (TREE_PURPOSE (link) == context)
4900 return RTL_EXPR_RTL (TREE_VALUE (link));
4901
4902 abort ();
4903 }
4904 \f
4905 /* Convert a stack slot address ADDR for variable VAR
4906 (from a containing function)
4907 into an address valid in this function (using a static chain). */
4908
4909 rtx
4910 fix_lexical_addr (addr, var)
4911 rtx addr;
4912 tree var;
4913 {
4914 rtx basereg;
4915 HOST_WIDE_INT displacement;
4916 tree context = decl_function_context (var);
4917 struct function *fp;
4918 rtx base = 0;
4919
4920 /* If this is the present function, we need not do anything. */
4921 if (context == current_function_decl || context == inline_function_decl)
4922 return addr;
4923
4924 for (fp = outer_function_chain; fp; fp = fp->next)
4925 if (fp->decl == context)
4926 break;
4927
4928 if (fp == 0)
4929 abort ();
4930
4931 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
4932 addr = XEXP (XEXP (addr, 0), 0);
4933
4934 /* Decode given address as base reg plus displacement. */
4935 if (GET_CODE (addr) == REG)
4936 basereg = addr, displacement = 0;
4937 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4938 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
4939 else
4940 abort ();
4941
4942 /* We accept vars reached via the containing function's
4943 incoming arg pointer and via its stack variables pointer. */
4944 if (basereg == fp->internal_arg_pointer)
4945 {
4946 /* If reached via arg pointer, get the arg pointer value
4947 out of that function's stack frame.
4948
4949 There are two cases: If a separate ap is needed, allocate a
4950 slot in the outer function for it and dereference it that way.
4951 This is correct even if the real ap is actually a pseudo.
4952 Otherwise, just adjust the offset from the frame pointer to
4953 compensate. */
4954
4955 #ifdef NEED_SEPARATE_AP
4956 rtx addr;
4957
4958 if (fp->arg_pointer_save_area == 0)
4959 fp->arg_pointer_save_area
4960 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
4961
4962 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
4963 addr = memory_address (Pmode, addr);
4964
4965 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
4966 #else
4967 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
4968 base = lookup_static_chain (var);
4969 #endif
4970 }
4971
4972 else if (basereg == virtual_stack_vars_rtx)
4973 {
4974 /* This is the same code as lookup_static_chain, duplicated here to
4975 avoid an extra call to decl_function_context. */
4976 tree link;
4977
4978 for (link = context_display; link; link = TREE_CHAIN (link))
4979 if (TREE_PURPOSE (link) == context)
4980 {
4981 base = RTL_EXPR_RTL (TREE_VALUE (link));
4982 break;
4983 }
4984 }
4985
4986 if (base == 0)
4987 abort ();
4988
4989 /* Use same offset, relative to appropriate static chain or argument
4990 pointer. */
4991 return plus_constant (base, displacement);
4992 }
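/* Illustration only, with a made-up displacement: if VAR lives at
   (plus (reg virtual-stack-vars) (const_int -20)) in the containing
   function, the code above keeps the displacement and rebases it on the
   static chain found through context_display, giving
   (plus (reg chain) (const_int -20)).  */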
4993 \f
4994 /* Return the address of the trampoline for entering nested fn FUNCTION.
4995 If necessary, allocate a trampoline (in the stack frame)
4996 and emit rtl to initialize its contents (at entry to this function). */
4997
4998 rtx
4999 trampoline_address (function)
5000 tree function;
5001 {
5002 tree link;
5003 tree rtlexp;
5004 rtx tramp;
5005 struct function *fp;
5006 tree fn_context;
5007
5008 /* Find an existing trampoline and return it. */
5009 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5010 if (TREE_PURPOSE (link) == function)
5011 return
5012 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5013
5014 for (fp = outer_function_chain; fp; fp = fp->next)
5015 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5016 if (TREE_PURPOSE (link) == function)
5017 {
5018 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5019 function);
5020 return round_trampoline_addr (tramp);
5021 }
5022
5023 /* None exists; we must make one. */
5024
5025 /* Find the `struct function' for the function containing FUNCTION. */
5026 fp = 0;
5027 fn_context = decl_function_context (function);
5028 if (fn_context != current_function_decl
5029 && fn_context != inline_function_decl)
5030 for (fp = outer_function_chain; fp; fp = fp->next)
5031 if (fp->decl == fn_context)
5032 break;
5033
5034 /* Allocate run-time space for this trampoline
5035 (usually in the defining function's stack frame). */
5036 #ifdef ALLOCATE_TRAMPOLINE
5037 tramp = ALLOCATE_TRAMPOLINE (fp);
5038 #else
5039 /* If rounding is needed, allocate extra space
5040 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5041 #ifdef TRAMPOLINE_ALIGNMENT
5042 #define TRAMPOLINE_REAL_SIZE \
5043 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5044 #else
5045 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5046 #endif
5047 if (fp != 0)
5048 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5049 else
5050 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5051 #endif
5052
5053 /* Record the trampoline for reuse and note it for later initialization
5054 by expand_function_end. */
5055 if (fp != 0)
5056 {
5057 push_obstacks (fp->function_maybepermanent_obstack,
5058 fp->function_maybepermanent_obstack);
5059 rtlexp = make_node (RTL_EXPR);
5060 RTL_EXPR_RTL (rtlexp) = tramp;
5061 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5062 pop_obstacks ();
5063 }
5064 else
5065 {
5066 /* Make the RTL_EXPR node temporary, not momentary, so that the
5067 trampoline_list doesn't become garbage. */
5068 int momentary = suspend_momentary ();
5069 rtlexp = make_node (RTL_EXPR);
5070 resume_momentary (momentary);
5071
5072 RTL_EXPR_RTL (rtlexp) = tramp;
5073 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5074 }
5075
5076 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5077 return round_trampoline_addr (tramp);
5078 }
5079
5080 /* Given a trampoline address,
5081 round it to multiple of TRAMPOLINE_ALIGNMENT. */
5082
5083 static rtx
5084 round_trampoline_addr (tramp)
5085 rtx tramp;
5086 {
5087 #ifdef TRAMPOLINE_ALIGNMENT
5088 /* Round address up to desired boundary. */
5089 rtx temp = gen_reg_rtx (Pmode);
5090 temp = expand_binop (Pmode, add_optab, tramp,
5091 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5092 temp, 0, OPTAB_LIB_WIDEN);
5093 tramp = expand_binop (Pmode, and_optab, temp,
5094 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5095 temp, 0, OPTAB_LIB_WIDEN);
5096 #endif
5097 return tramp;
5098 }
5099 \f
5100 /* The functions identify_blocks and reorder_blocks provide a way to
5101 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5102 duplicate portions of the RTL code. Call identify_blocks before
5103 changing the RTL, and call reorder_blocks after. */
5104
5105 /* Put all this function's BLOCK nodes, including those that are chained
5106    onto the first block, into a vector, and return it.
5107 Also store in each NOTE for the beginning or end of a block
5108 the index of that block in the vector.
5109 The arguments are BLOCK, the chain of top-level blocks of the function,
5110 and INSNS, the insn chain of the function. */
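/* As a rough illustration: if the outermost block OUTER has subblocks
   A and C, A has subblock B, and C has subblock D, then the vector is
   [OUTER, A, B, C, D] (preorder), and the BLOCK_BEG/BLOCK_END notes for
   A, B, C, D are stamped with their vector indexes 1, 2, 3, 4. */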
5111
5112 tree *
5113 identify_blocks (block, insns)
5114 tree block;
5115 rtx insns;
5116 {
5117 int n_blocks;
5118 tree *block_vector;
5119 int *block_stack;
5120 int depth = 0;
5121 int next_block_number = 1;
5122 int current_block_number = 1;
5123 rtx insn;
5124
5125 if (block == 0)
5126 return 0;
5127
5128 n_blocks = all_blocks (block, 0);
5129 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5130 block_stack = (int *) alloca (n_blocks * sizeof (int));
5131
5132 all_blocks (block, block_vector);
5133
5134 for (insn = insns; insn; insn = NEXT_INSN (insn))
5135 if (GET_CODE (insn) == NOTE)
5136 {
5137 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5138 {
5139 block_stack[depth++] = current_block_number;
5140 current_block_number = next_block_number;
5141 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5142 }
5143 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5144 {
5145 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5146 current_block_number = block_stack[--depth];
5147 }
5148 }
5149
5150 if (n_blocks != next_block_number)
5151 abort ();
5152
5153 return block_vector;
5154 }
5155
5156 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5157 and a revised instruction chain, rebuild the tree structure
5158 of BLOCK nodes to correspond to the new order of RTL.
5159    The new block tree is inserted below BLOCK.
5160 Returns the current top-level block. */
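/* For example, if an optimizer duplicated an RTL region, the BLOCK_BEG
   note for that region's block now appears twice in the insn chain; the
   second occurrence gets a copy of the BLOCK node (see the
   TREE_ASM_WRITTEN test below), so that each copy of the region ends up
   with its own block in the rebuilt tree. */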
5161
5162 tree
5163 reorder_blocks (block_vector, block, insns)
5164 tree *block_vector;
5165 tree block;
5166 rtx insns;
5167 {
5168 tree current_block = block;
5169 rtx insn;
5170
5171 if (block_vector == 0)
5172 return block;
5173
5174   /* Prune the old tree away, so that it doesn't get in the way. */
5175 BLOCK_SUBBLOCKS (current_block) = 0;
5176 BLOCK_CHAIN (current_block) = 0;
5177
5178 for (insn = insns; insn; insn = NEXT_INSN (insn))
5179 if (GET_CODE (insn) == NOTE)
5180 {
5181 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5182 {
5183 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5184 /* If we have seen this block before, copy it. */
5185 if (TREE_ASM_WRITTEN (block))
5186 block = copy_node (block);
5187 BLOCK_SUBBLOCKS (block) = 0;
5188 TREE_ASM_WRITTEN (block) = 1;
5189 BLOCK_SUPERCONTEXT (block) = current_block;
5190 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5191 BLOCK_SUBBLOCKS (current_block) = block;
5192 current_block = block;
5193 NOTE_SOURCE_FILE (insn) = 0;
5194 }
5195 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5196 {
5197 BLOCK_SUBBLOCKS (current_block)
5198 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5199 current_block = BLOCK_SUPERCONTEXT (current_block);
5200 NOTE_SOURCE_FILE (insn) = 0;
5201 }
5202 }
5203
5204 BLOCK_SUBBLOCKS (current_block)
5205 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5206 return current_block;
5207 }
5208
5209 /* Reverse the order of elements in the chain T of blocks,
5210 and return the new head of the chain (old last element). */
5211
5212 static tree
5213 blocks_nreverse (t)
5214 tree t;
5215 {
5216 register tree prev = 0, decl, next;
5217 for (decl = t; decl; decl = next)
5218 {
5219 next = BLOCK_CHAIN (decl);
5220 BLOCK_CHAIN (decl) = prev;
5221 prev = decl;
5222 }
5223 return prev;
5224 }
5225
5226 /* Count the blocks in the list starting with BLOCK, together with all
5227    their subblocks, and store them into the vector VECTOR. Also clear
5228    TREE_ASM_WRITTEN in all of the blocks. */
5229
5230 static int
5231 all_blocks (block, vector)
5232 tree block;
5233 tree *vector;
5234 {
5235 int n_blocks = 0;
5236
5237 while (block)
5238 {
5239 TREE_ASM_WRITTEN (block) = 0;
5240
5241 /* Record this block. */
5242 if (vector)
5243 vector[n_blocks] = block;
5244
5245 ++n_blocks;
5246
5247 /* Record the subblocks, and their subblocks... */
5248 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5249 vector ? vector + n_blocks : 0);
5250 block = BLOCK_CHAIN (block);
5251 }
5252
5253 return n_blocks;
5254 }
5255 \f
5256 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5257 and initialize static variables for generating RTL for the statements
5258 of the function. */
5259
5260 void
5261 init_function_start (subr, filename, line)
5262 tree subr;
5263 char *filename;
5264 int line;
5265 {
5266 init_stmt_for_function ();
5267
5268 cse_not_expected = ! optimize;
5269
5270 /* Caller save not needed yet. */
5271 caller_save_needed = 0;
5272
5273 /* No stack slots have been made yet. */
5274 stack_slot_list = 0;
5275
5276 /* There is no stack slot for handling nonlocal gotos. */
5277 nonlocal_goto_handler_slot = 0;
5278 nonlocal_goto_stack_level = 0;
5279
5280 /* No labels have been declared for nonlocal use. */
5281 nonlocal_labels = 0;
5282
5283 /* No function calls so far in this function. */
5284 function_call_count = 0;
5285
5286 /* No parm regs have been allocated.
5287 (This is important for output_inline_function.) */
5288 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5289
5290 /* Initialize the RTL mechanism. */
5291 init_emit ();
5292
5293   /* Initialize the queue of pending postincrements and postdecrements,
5294 and some other info in expr.c. */
5295 init_expr ();
5296
5297 /* We haven't done register allocation yet. */
5298 reg_renumber = 0;
5299
5300 init_const_rtx_hash_table ();
5301
5302 current_function_name = (*decl_printable_name) (subr, 2);
5303
5304 /* Nonzero if this is a nested function that uses a static chain. */
5305
5306 current_function_needs_context
5307 = (decl_function_context (current_function_decl) != 0
5308 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5309
5310 /* Set if a call to setjmp is seen. */
5311 current_function_calls_setjmp = 0;
5312
5313 /* Set if a call to longjmp is seen. */
5314 current_function_calls_longjmp = 0;
5315
5316 current_function_calls_alloca = 0;
5317 current_function_has_nonlocal_label = 0;
5318 current_function_has_nonlocal_goto = 0;
5319 current_function_contains_functions = 0;
5320 current_function_is_thunk = 0;
5321
5322 current_function_returns_pcc_struct = 0;
5323 current_function_returns_struct = 0;
5324 current_function_epilogue_delay_list = 0;
5325 current_function_uses_const_pool = 0;
5326 current_function_uses_pic_offset_table = 0;
5327 current_function_cannot_inline = 0;
5328
5329 /* We have not yet needed to make a label to jump to for tail-recursion. */
5330 tail_recursion_label = 0;
5331
5332 /* We haven't had a need to make a save area for ap yet. */
5333
5334 arg_pointer_save_area = 0;
5335
5336 /* No stack slots allocated yet. */
5337 frame_offset = 0;
5338
5339 /* No SAVE_EXPRs in this function yet. */
5340 save_expr_regs = 0;
5341
5342 /* No RTL_EXPRs in this function yet. */
5343 rtl_expr_chain = 0;
5344
5345 /* Set up to allocate temporaries. */
5346 init_temp_slots ();
5347
5348   /* Within the function body, compute a type's size as soon as it is laid out. */
5349 immediate_size_expand++;
5350
5351 /* We haven't made any trampolines for this function yet. */
5352 trampoline_list = 0;
5353
5354 init_pending_stack_adjust ();
5355 inhibit_defer_pop = 0;
5356
5357 current_function_outgoing_args_size = 0;
5358
5359 /* Prevent ever trying to delete the first instruction of a function.
5360 Also tell final how to output a linenum before the function prologue.
5361 Note linenums could be missing, e.g. when compiling a Java .class file. */
5362 if (line > 0)
5363 emit_line_note (filename, line);
5364
5365 /* Make sure first insn is a note even if we don't want linenums.
5366 This makes sure the first insn will never be deleted.
5367 Also, final expects a note to appear there. */
5368 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5369
5370 /* Set flags used by final.c. */
5371 if (aggregate_value_p (DECL_RESULT (subr)))
5372 {
5373 #ifdef PCC_STATIC_STRUCT_RETURN
5374 current_function_returns_pcc_struct = 1;
5375 #endif
5376 current_function_returns_struct = 1;
5377 }
5378
5379 /* Warn if this value is an aggregate type,
5380 regardless of which calling convention we are using for it. */
5381 if (warn_aggregate_return
5382 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5383 warning ("function returns an aggregate");
5384
5385 current_function_returns_pointer
5386 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5387
5388 /* Indicate that we need to distinguish between the return value of the
5389 present function and the return value of a function being called. */
5390 rtx_equal_function_value_matters = 1;
5391
5392 /* Indicate that we have not instantiated virtual registers yet. */
5393 virtuals_instantiated = 0;
5394
5395 /* Indicate we have no need of a frame pointer yet. */
5396 frame_pointer_needed = 0;
5397
5398 /* By default assume not varargs or stdarg. */
5399 current_function_varargs = 0;
5400 current_function_stdarg = 0;
5401 }
5402
5403 /* Indicate that the current function uses, in some fashion, extra args
5404    not explicitly mentioned in its argument list. */
5405
5406 void
5407 mark_varargs ()
5408 {
5409 current_function_varargs = 1;
5410 }
5411
5412 /* Expand a call to __main at the beginning of a possible main function. */
5413
5414 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5415 #undef HAS_INIT_SECTION
5416 #define HAS_INIT_SECTION
5417 #endif
5418
5419 void
5420 expand_main_function ()
5421 {
5422 #if !defined (HAS_INIT_SECTION)
5423 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5424 VOIDmode, 0);
5425 #endif /* not HAS_INIT_SECTION */
5426 }
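/* In effect, on a target with no init section, the code emitted for a
   main function begins with a call of __main (which is expected to do
   global-constructor and similar startup work), roughly:

       main:
           call __main
           ...user code...

   Targets that define INIT_SECTION_ASM_OP, and do not define
   INVOKE__main, handle that work in the init section instead, so the
   call is omitted. */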
5427 \f
5428 extern struct obstack permanent_obstack;
5429
5430 /* Start the RTL for a new function, and set variables used for
5431 emitting RTL.
5432 SUBR is the FUNCTION_DECL node.
5433 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5434 the function's parameters, which must be run at any return statement. */
5435
5436 void
5437 expand_function_start (subr, parms_have_cleanups)
5438 tree subr;
5439 int parms_have_cleanups;
5440 {
5441 register int i;
5442 tree tem;
5443 rtx last_ptr = NULL_RTX;
5444
5445 /* Make sure volatile mem refs aren't considered
5446 valid operands of arithmetic insns. */
5447 init_recog_no_volatile ();
5448
5449 /* If function gets a static chain arg, store it in the stack frame.
5450 Do this first, so it gets the first stack slot offset. */
5451 if (current_function_needs_context)
5452 {
5453 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5454
5455       /* Delay copying the static chain if it is not a register, to avoid
5456          conflicts with regs used for parameters. */
5457 if (! SMALL_REGISTER_CLASSES
5458 || GET_CODE (static_chain_incoming_rtx) == REG)
5459 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5460 }
5461
5462 /* If the parameters of this function need cleaning up, get a label
5463 for the beginning of the code which executes those cleanups. This must
5464 be done before doing anything with return_label. */
5465 if (parms_have_cleanups)
5466 cleanup_label = gen_label_rtx ();
5467 else
5468 cleanup_label = 0;
5469
5470 /* Make the label for return statements to jump to, if this machine
5471 does not have a one-instruction return and uses an epilogue,
5472 or if it returns a structure, or if it has parm cleanups. */
5473 #ifdef HAVE_return
5474 if (cleanup_label == 0 && HAVE_return
5475 && ! current_function_returns_pcc_struct
5476 && ! (current_function_returns_struct && ! optimize))
5477 return_label = 0;
5478 else
5479 return_label = gen_label_rtx ();
5480 #else
5481 return_label = gen_label_rtx ();
5482 #endif
5483
5484 /* Initialize rtx used to return the value. */
5485 /* Do this before assign_parms so that we copy the struct value address
5486 before any library calls that assign parms might generate. */
5487
5488 /* Decide whether to return the value in memory or in a register. */
5489 if (aggregate_value_p (DECL_RESULT (subr)))
5490 {
5491 /* Returning something that won't go in a register. */
5492 register rtx value_address = 0;
5493
5494 #ifdef PCC_STATIC_STRUCT_RETURN
5495 if (current_function_returns_pcc_struct)
5496 {
5497 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5498 value_address = assemble_static_space (size);
5499 }
5500 else
5501 #endif
5502 {
5503 /* Expect to be passed the address of a place to store the value.
5504 If it is passed as an argument, assign_parms will take care of
5505 it. */
5506 if (struct_value_incoming_rtx)
5507 {
5508 value_address = gen_reg_rtx (Pmode);
5509 emit_move_insn (value_address, struct_value_incoming_rtx);
5510 }
5511 }
5512 if (value_address)
5513 {
5514 DECL_RTL (DECL_RESULT (subr))
5515 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5516 MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
5517 = AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5518 }
5519 }
5520 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5521 /* If return mode is void, this decl rtl should not be used. */
5522 DECL_RTL (DECL_RESULT (subr)) = 0;
5523 else if (parms_have_cleanups)
5524 {
5525       /* If the function will end with cleanup code for parms,
5526          compute the return value into a pseudo reg,
5527          which we will copy into the true return register
5528          after the cleanups are done. */
5529
5530 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5531
5532 #ifdef PROMOTE_FUNCTION_RETURN
5533 tree type = TREE_TYPE (DECL_RESULT (subr));
5534 int unsignedp = TREE_UNSIGNED (type);
5535
5536 mode = promote_mode (type, mode, &unsignedp, 1);
5537 #endif
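      /* For example, on a machine whose PROMOTE_FUNCTION_RETURN says that
         integral values are returned in full-width registers, a `short'
         result would be computed in SImode here rather than HImode. */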
5538
5539 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5540 }
5541 else
5542 /* Scalar, returned in a register. */
5543 {
5544 #ifdef FUNCTION_OUTGOING_VALUE
5545 DECL_RTL (DECL_RESULT (subr))
5546 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5547 #else
5548 DECL_RTL (DECL_RESULT (subr))
5549 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5550 #endif
5551
5552 /* Mark this reg as the function's return value. */
5553 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5554 {
5555 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5556 /* Needed because we may need to move this to memory
5557 in case it's a named return value whose address is taken. */
5558 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5559 }
5560 }
5561
5562 /* Initialize rtx for parameters and local variables.
5563 In some cases this requires emitting insns. */
5564
5565 assign_parms (subr, 0);
5566
5567 /* Copy the static chain now if it wasn't a register. The delay is to
5568 avoid conflicts with the parameter passing registers. */
5569
5570 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5571 if (GET_CODE (static_chain_incoming_rtx) != REG)
5572 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5573
5574 /* The following was moved from init_function_start.
5575 The move is supposed to make sdb output more accurate. */
5576 /* Indicate the beginning of the function body,
5577 as opposed to parm setup. */
5578 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5579
5580 /* If doing stupid allocation, mark parms as born here. */
5581
5582 if (GET_CODE (get_last_insn ()) != NOTE)
5583 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5584 parm_birth_insn = get_last_insn ();
5585
5586 if (obey_regdecls)
5587 {
5588 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5589 use_variable (regno_reg_rtx[i]);
5590
5591 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5592 use_variable (current_function_internal_arg_pointer);
5593 }
5594
5595 context_display = 0;
5596 if (current_function_needs_context)
5597 {
5598 /* Fetch static chain values for containing functions. */
5599 tem = decl_function_context (current_function_decl);
5600       /* If not doing stupid register allocation, copy the static chain
5601 pointer into a pseudo. If we have small register classes, copy
5602 the value from memory if static_chain_incoming_rtx is a REG. If
5603 we do stupid register allocation, we use the stack address
5604 generated above. */
5605 if (tem && ! obey_regdecls)
5606 {
5607 /* If the static chain originally came in a register, put it back
5608 there, then move it out in the next insn. The reason for
5609 this peculiar code is to satisfy function integration. */
5610 if (SMALL_REGISTER_CLASSES
5611 && GET_CODE (static_chain_incoming_rtx) == REG)
5612 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5613 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5614 }
5615
5616 while (tem)
5617 {
5618 tree rtlexp = make_node (RTL_EXPR);
5619
5620 RTL_EXPR_RTL (rtlexp) = last_ptr;
5621 context_display = tree_cons (tem, rtlexp, context_display);
5622 tem = decl_function_context (tem);
5623 if (tem == 0)
5624 break;
5625 /* Chain thru stack frames, assuming pointer to next lexical frame
5626 is found at the place we always store it. */
5627 #ifdef FRAME_GROWS_DOWNWARD
5628 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5629 #endif
5630 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5631 memory_address (Pmode, last_ptr)));
5632
5633 /* If we are not optimizing, ensure that we know that this
5634 piece of context is live over the entire function. */
5635 if (! optimize)
5636 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5637 save_expr_regs);
5638 }
5639 }
5640
5641   /* This point, after the display initializations, is where the tail-recursion
5642      label should go, if we end up needing one. Ensure we have a NOTE here,
5643      since some things (like trampolines) get placed before this point. */
5644 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5645
5646 /* Evaluate now the sizes of any types declared among the arguments. */
5647 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5648 {
5649 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5650 EXPAND_MEMORY_USE_BAD);
5651 /* Flush the queue in case this parameter declaration has
5652 side-effects. */
5653 emit_queue ();
5654 }
5655
5656 /* Make sure there is a line number after the function entry setup code. */
5657 force_next_line_note ();
5658 }
5659 \f
5660 /* Generate RTL for the end of the current function.
5661 FILENAME and LINE are the current position in the source file.
5662
5663    It is up to language-specific callers to do cleanups for parameters;
5664 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5665
5666 void
5667 expand_function_end (filename, line, end_bindings)
5668 char *filename;
5669 int line;
5670 int end_bindings;
5671 {
5672 register int i;
5673 tree link;
5674
5675 #ifdef TRAMPOLINE_TEMPLATE
5676 static rtx initial_trampoline;
5677 #endif
5678
5679 #ifdef NON_SAVING_SETJMP
5680 /* Don't put any variables in registers if we call setjmp
5681 on a machine that fails to restore the registers. */
5682 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5683 {
5684 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5685 setjmp_protect (DECL_INITIAL (current_function_decl));
5686
5687 setjmp_protect_args ();
5688 }
5689 #endif
5690
5691 /* Save the argument pointer if a save area was made for it. */
5692 if (arg_pointer_save_area)
5693 {
5694 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5695 emit_insn_before (x, tail_recursion_reentry);
5696 }
5697
5698 /* Initialize any trampolines required by this function. */
5699 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5700 {
5701 tree function = TREE_PURPOSE (link);
5702 rtx context = lookup_static_chain (function);
5703 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5704 #ifdef TRAMPOLINE_TEMPLATE
5705 rtx blktramp;
5706 #endif
5707 rtx seq;
5708
5709 #ifdef TRAMPOLINE_TEMPLATE
5710 /* First make sure this compilation has a template for
5711 initializing trampolines. */
5712 if (initial_trampoline == 0)
5713 {
5714 end_temporary_allocation ();
5715 initial_trampoline
5716 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5717 resume_temporary_allocation ();
5718 }
5719 #endif
5720
5721 /* Generate insns to initialize the trampoline. */
5722 start_sequence ();
5723 tramp = round_trampoline_addr (XEXP (tramp, 0));
5724 #ifdef TRAMPOLINE_TEMPLATE
5725 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5726 emit_block_move (blktramp, initial_trampoline,
5727 GEN_INT (TRAMPOLINE_SIZE),
5728 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5729 #endif
5730 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5731 seq = get_insns ();
5732 end_sequence ();
5733
5734 /* Put those insns at entry to the containing function (this one). */
5735 emit_insns_before (seq, tail_recursion_reentry);
5736 }
5737
5738 /* If we are doing stack checking and this function makes calls,
5739 do a stack probe at the start of the function to ensure we have enough
5740 space for another stack frame. */
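/* The probe covers the STACK_CHECK_MAX_FRAME_SIZE bytes that lie
   STACK_CHECK_PROTECT bytes beyond the stack pointer, so that an
   overflow is detected at function entry rather than somewhere inside a
   callee; the insns are emitted just before the tail-recursion reentry
   note, i.e. near the start of the function. */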
5741 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5742 {
5743 rtx insn, seq;
5744
5745 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5746 if (GET_CODE (insn) == CALL_INSN)
5747 {
5748 start_sequence ();
5749 probe_stack_range (STACK_CHECK_PROTECT,
5750 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
5751 seq = get_insns ();
5752 end_sequence ();
5753 emit_insns_before (seq, tail_recursion_reentry);
5754 break;
5755 }
5756 }
5757
5758 /* Warn about unused parms if extra warnings were specified. */
5759 if (warn_unused && extra_warnings)
5760 {
5761 tree decl;
5762
5763 for (decl = DECL_ARGUMENTS (current_function_decl);
5764 decl; decl = TREE_CHAIN (decl))
5765 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
5766 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
5767 warning_with_decl (decl, "unused parameter `%s'");
5768 }
5769
5770 /* Delete handlers for nonlocal gotos if nothing uses them. */
5771 if (nonlocal_goto_handler_slot != 0 && !current_function_has_nonlocal_label)
5772 delete_handlers ();
5773
5774 /* End any sequences that failed to be closed due to syntax errors. */
5775 while (in_sequence_p ())
5776 end_sequence ();
5777
5778   /* Outside the function body, we can't compute a type's actual size
5779      until the next function's body starts. */
5780 immediate_size_expand--;
5781
5782 /* If doing stupid register allocation,
5783 mark register parms as dying here. */
5784
5785 if (obey_regdecls)
5786 {
5787 rtx tem;
5788 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5789 use_variable (regno_reg_rtx[i]);
5790
5791 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
5792
5793 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
5794 {
5795 use_variable (XEXP (tem, 0));
5796 use_variable_after (XEXP (tem, 0), parm_birth_insn);
5797 }
5798
5799 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5800 use_variable (current_function_internal_arg_pointer);
5801 }
5802
5803 clear_pending_stack_adjust ();
5804 do_pending_stack_adjust ();
5805
5806 /* Mark the end of the function body.
5807 If control reaches this insn, the function can drop through
5808 without returning a value. */
5809 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
5810
5811 /* Must mark the last line number note in the function, so that the test
5812 coverage code can avoid counting the last line twice. This just tells
5813 the code to ignore the immediately following line note, since there
5814 already exists a copy of this note somewhere above. This line number
5815 note is still needed for debugging though, so we can't delete it. */
5816 if (flag_test_coverage)
5817 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
5818
5819 /* Output a linenumber for the end of the function.
5820 SDB depends on this. */
5821 emit_line_note_force (filename, line);
5822
5823 /* Output the label for the actual return from the function,
5824 if one is expected. This happens either because a function epilogue
5825 is used instead of a return instruction, or because a return was done
5826 with a goto in order to run local cleanups, or because of pcc-style
5827 structure returning. */
5828
5829 if (return_label)
5830 emit_label (return_label);
5831
5832 /* C++ uses this. */
5833 if (end_bindings)
5834 expand_end_bindings (0, 0, 0);
5835
5836 /* Now handle any leftover exception regions that may have been
5837 created for the parameters. */
5838 {
5839 rtx last = get_last_insn ();
5840 rtx label;
5841
5842 expand_leftover_cleanups ();
5843
5844     /* If the above emitted any code, make sure we jump around it. */
5845 if (last != get_last_insn ())
5846 {
5847 label = gen_label_rtx ();
5848 last = emit_jump_insn_after (gen_jump (label), last);
5849 last = emit_barrier_after (last);
5850 emit_label (label);
5851 }
5852 }
5853
5854 /* If we had calls to alloca, and this machine needs
5855 an accurate stack pointer to exit the function,
5856 insert some code to save and restore the stack pointer. */
5857 #ifdef EXIT_IGNORE_STACK
5858 if (! EXIT_IGNORE_STACK)
5859 #endif
5860 if (current_function_calls_alloca)
5861 {
5862 rtx tem = 0;
5863
5864 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
5865 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
5866 }
5867
5868 /* If scalar return value was computed in a pseudo-reg,
5869 copy that to the hard return register. */
5870 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
5871 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
5872 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
5873 >= FIRST_PSEUDO_REGISTER))
5874 {
5875 rtx real_decl_result;
5876
5877 #ifdef FUNCTION_OUTGOING_VALUE
5878 real_decl_result
5879 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5880 current_function_decl);
5881 #else
5882 real_decl_result
5883 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
5884 current_function_decl);
5885 #endif
5886 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
5887 /* If this is a BLKmode structure being returned in registers, then use
5888 the mode computed in expand_return. */
5889 if (GET_MODE (real_decl_result) == BLKmode)
5890 PUT_MODE (real_decl_result,
5891 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
5892 emit_move_insn (real_decl_result,
5893 DECL_RTL (DECL_RESULT (current_function_decl)));
5894 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
5895
5896 /* The delay slot scheduler assumes that current_function_return_rtx
5897 holds the hard register containing the return value, not a temporary
5898 pseudo. */
5899 current_function_return_rtx = real_decl_result;
5900 }
5901
5902 /* If returning a structure, arrange to return the address of the value
5903 in a place where debuggers expect to find it.
5904
5905 If returning a structure PCC style,
5906 the caller also depends on this value.
5907 And current_function_returns_pcc_struct is not necessarily set. */
5908 if (current_function_returns_struct
5909 || current_function_returns_pcc_struct)
5910 {
5911 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5912 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
5913 #ifdef FUNCTION_OUTGOING_VALUE
5914 rtx outgoing
5915 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
5916 current_function_decl);
5917 #else
5918 rtx outgoing
5919 = FUNCTION_VALUE (build_pointer_type (type),
5920 current_function_decl);
5921 #endif
5922
5923 /* Mark this as a function return value so integrate will delete the
5924 assignment and USE below when inlining this function. */
5925 REG_FUNCTION_VALUE_P (outgoing) = 1;
5926
5927 emit_move_insn (outgoing, value_address);
5928 use_variable (outgoing);
5929 }
5930
5931 /* Output a return insn if we are using one.
5932 Otherwise, let the rtl chain end here, to drop through
5933 into the epilogue. */
5934
5935 #ifdef HAVE_return
5936 if (HAVE_return)
5937 {
5938 emit_jump_insn (gen_return ());
5939 emit_barrier ();
5940 }
5941 #endif
5942
5943 /* Fix up any gotos that jumped out to the outermost
5944 binding level of the function.
5945 Must follow emitting RETURN_LABEL. */
5946
5947 /* If you have any cleanups to do at this point,
5948 and they need to create temporary variables,
5949 then you will lose. */
5950 expand_fixups (get_insns ());
5951 }
5952 \f
5953 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
5954
5955 static int *prologue;
5956 static int *epilogue;
5957
5958 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
5959 or a single insn). */
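/* The returned vector is zero-terminated; e.g. recording a SEQUENCE of
   two insns whose UIDs are 7 and 9 yields {7, 9, 0}, which `contains'
   below can scan without a separate length. */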
5960
5961 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5962 static int *
5963 record_insns (insns)
5964 rtx insns;
5965 {
5966 int *vec;
5967
5968 if (GET_CODE (insns) == SEQUENCE)
5969 {
5970 int len = XVECLEN (insns, 0);
5971 vec = (int *) oballoc ((len + 1) * sizeof (int));
5972 vec[len] = 0;
5973 while (--len >= 0)
5974 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
5975 }
5976 else
5977 {
5978 vec = (int *) oballoc (2 * sizeof (int));
5979 vec[0] = INSN_UID (insns);
5980 vec[1] = 0;
5981 }
5982 return vec;
5983 }
5984
5985 /* Determine how many INSN_UIDs in VEC are part of INSN. */
5986
5987 static int
5988 contains (insn, vec)
5989 rtx insn;
5990 int *vec;
5991 {
5992 register int i, j;
5993
5994 if (GET_CODE (insn) == INSN
5995 && GET_CODE (PATTERN (insn)) == SEQUENCE)
5996 {
5997 int count = 0;
5998 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
5999 for (j = 0; vec[j]; j++)
6000 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6001 count++;
6002 return count;
6003 }
6004 else
6005 {
6006 for (j = 0; vec[j]; j++)
6007 if (INSN_UID (insn) == vec[j])
6008 return 1;
6009 }
6010 return 0;
6011 }
6012 #endif /* HAVE_prologue || HAVE_epilogue */
6013
6014 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6015 this into place with notes indicating where the prologue ends and where
6016 the epilogue begins. Update the basic block information when possible. */
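/* Schematically, when the machine provides both patterns, the chain
   afterward reads

       NOTE_INSN_DELETED
       prologue insns
       NOTE_INSN_PROLOGUE_END
       ...function body...
       NOTE_INSN_EPILOGUE_BEG
       epilogue insns, USE insns, return jump
       BARRIER

   as described in more detail in the code below. */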
6017
6018 void
6019 thread_prologue_and_epilogue_insns (f)
6020 rtx f;
6021 {
6022 #ifdef HAVE_prologue
6023 if (HAVE_prologue)
6024 {
6025 rtx head, seq;
6026
6027 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6028 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6029 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6030 seq = gen_prologue ();
6031 head = emit_insn_after (seq, f);
6032
6033 /* Include the new prologue insns in the first block. Ignore them
6034 if they form a basic block unto themselves. */
6035 if (basic_block_head && n_basic_blocks
6036 && GET_CODE (basic_block_head[0]) != CODE_LABEL)
6037 basic_block_head[0] = NEXT_INSN (f);
6038
6039 /* Retain a map of the prologue insns. */
6040 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6041 }
6042 else
6043 #endif
6044 prologue = 0;
6045
6046 #ifdef HAVE_epilogue
6047 if (HAVE_epilogue)
6048 {
6049 rtx insn = get_last_insn ();
6050 rtx prev = prev_nonnote_insn (insn);
6051
6052 /* If we end with a BARRIER, we don't need an epilogue. */
6053 if (! (prev && GET_CODE (prev) == BARRIER))
6054 {
6055 rtx tail, seq, tem;
6056 rtx first_use = 0;
6057 rtx last_use = 0;
6058
6059 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6060 epilogue insns, the USE insns at the end of a function,
6061 the jump insn that returns, and then a BARRIER. */
6062
6063 /* Move the USE insns at the end of a function onto a list. */
6064 while (prev
6065 && GET_CODE (prev) == INSN
6066 && GET_CODE (PATTERN (prev)) == USE)
6067 {
6068 tem = prev;
6069 prev = prev_nonnote_insn (prev);
6070
6071 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6072 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6073 if (first_use)
6074 {
6075 NEXT_INSN (tem) = first_use;
6076 PREV_INSN (first_use) = tem;
6077 }
6078 first_use = tem;
6079 if (!last_use)
6080 last_use = tem;
6081 }
6082
6083 emit_barrier_after (insn);
6084
6085 seq = gen_epilogue ();
6086 tail = emit_jump_insn_after (seq, insn);
6087
6088 /* Insert the USE insns immediately before the return insn, which
6089 must be the first instruction before the final barrier. */
6090 if (first_use)
6091 {
6092 tem = prev_nonnote_insn (get_last_insn ());
6093 NEXT_INSN (PREV_INSN (tem)) = first_use;
6094 PREV_INSN (first_use) = PREV_INSN (tem);
6095 PREV_INSN (tem) = last_use;
6096 NEXT_INSN (last_use) = tem;
6097 }
6098
6099 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6100
6101 /* Include the new epilogue insns in the last block. Ignore
6102 them if they form a basic block unto themselves. */
6103 if (basic_block_end && n_basic_blocks
6104 && GET_CODE (basic_block_end[n_basic_blocks - 1]) != JUMP_INSN)
6105 basic_block_end[n_basic_blocks - 1] = tail;
6106
6107 /* Retain a map of the epilogue insns. */
6108 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6109 return;
6110 }
6111 }
6112 #endif
6113 epilogue = 0;
6114 }
6115
6116 /* Reposition the prologue-end and epilogue-begin notes after instruction
6117 scheduling and delayed branch scheduling. */
6118
6119 void
6120 reposition_prologue_and_epilogue_notes (f)
6121 rtx f;
6122 {
6123 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6124 /* Reposition the prologue and epilogue notes. */
6125 if (n_basic_blocks)
6126 {
6127 rtx next, prev;
6128 int len;
6129
6130 if (prologue)
6131 {
6132 register rtx insn, note = 0;
6133
6134 /* Scan from the beginning until we reach the last prologue insn.
6135 We apparently can't depend on basic_block_{head,end} after
6136 reorg has run. */
6137 for (len = 0; prologue[len]; len++)
6138 ;
6139 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6140 {
6141 if (GET_CODE (insn) == NOTE)
6142 {
6143 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6144 note = insn;
6145 }
6146 else if ((len -= contains (insn, prologue)) == 0)
6147 {
6148 /* Find the prologue-end note if we haven't already, and
6149 move it to just after the last prologue insn. */
6150 if (note == 0)
6151 {
6152 for (note = insn; (note = NEXT_INSN (note));)
6153 if (GET_CODE (note) == NOTE
6154 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6155 break;
6156 }
6157 next = NEXT_INSN (note);
6158 prev = PREV_INSN (note);
6159 if (prev)
6160 NEXT_INSN (prev) = next;
6161 if (next)
6162 PREV_INSN (next) = prev;
6163 add_insn_after (note, insn);
6164 }
6165 }
6166 }
6167
6168 if (epilogue)
6169 {
6170 register rtx insn, note = 0;
6171
6172 /* Scan from the end until we reach the first epilogue insn.
6173 We apparently can't depend on basic_block_{head,end} after
6174 reorg has run. */
6175 for (len = 0; epilogue[len]; len++)
6176 ;
6177 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6178 {
6179 if (GET_CODE (insn) == NOTE)
6180 {
6181 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6182 note = insn;
6183 }
6184 else if ((len -= contains (insn, epilogue)) == 0)
6185 {
6186 /* Find the epilogue-begin note if we haven't already, and
6187 move it to just before the first epilogue insn. */
6188 if (note == 0)
6189 {
6190 for (note = insn; (note = PREV_INSN (note));)
6191 if (GET_CODE (note) == NOTE
6192 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6193 break;
6194 }
6195 next = NEXT_INSN (note);
6196 prev = PREV_INSN (note);
6197 if (prev)
6198 NEXT_INSN (prev) = next;
6199 if (next)
6200 PREV_INSN (next) = prev;
6201 add_insn_after (note, PREV_INSN (insn));
6202 }
6203 }
6204 }
6205 }
6206 #endif /* HAVE_prologue or HAVE_epilogue */
6207 }