gcc/function.c
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING. If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA. */


/* This file handles the generation of rtl code from tree structure
   at the level of the function as a whole.
   It creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them. */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "expr.h"
#include "insn-codes.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "basic-block.h"
#include "obstack.h"
#include "toplev.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

#ifndef TRAMPOLINE_ALIGNMENT
#define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
#endif

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point. You
   must define both, or neither. */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#define SYMBOL__MAIN __main
#endif

/* Round a value down to the largest multiple of the required alignment
   that does not exceed it. Avoid using division in case the value is
   negative. Assume the alignment is a power of two. */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of the alignment. */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN) - 1))
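
/* Worked examples (illustrative only), with ALIGN == 8:

       FLOOR_ROUND (29, 8) == 24      CEIL_ROUND (29, 8) == 32
       FLOOR_ROUND (-29, 8) == -32    CEIL_ROUND (-29, 8) == -24

   The masking forms round toward minus and plus infinity respectively,
   even for negative values, whereas (VALUE / ALIGN) * ALIGN would be
   implementation-defined for negative VALUE in C89. */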

/* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
   during rtl generation. If they are different register numbers, this is
   always true. It may also be true if
   FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
   generation. See fix_lexical_addr for details. */

#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
#define NEED_SEPARATE_AP
#endif

/* Number of bytes of args popped by function being compiled on its return.
   Zero if no bytes are to be popped.
   May affect compilation of return insn or of function epilogue. */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored. */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value. */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain. */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp. */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call longjmp. */

int current_function_calls_longjmp;

/* Nonzero if function being compiled receives nonlocal gotos
   from nested functions. */

int current_function_has_nonlocal_label;

/* Nonzero if function being compiled has nonlocal gotos to parent
   function. */

int current_function_has_nonlocal_goto;

/* Nonzero if this function has a computed goto.

   It is computed during find_basic_blocks or during stupid life
   analysis. */

int current_function_has_computed_jump;

/* Nonzero if function being compiled contains nested functions. */

int current_function_contains_functions;

/* Nonzero if function being compiled doesn't modify the stack pointer
   (ignoring the prologue and epilogue). This is only valid after
   life_analysis has run. */

int current_function_sp_is_unchanging;

/* Nonzero if the current function is a thunk (a lightweight function that
   just adjusts one of its arguments and forwards to another function), so
   we should try to cut corners where we can. */
int current_function_is_thunk;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin. */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type. */

int current_function_returns_pointer;

/* If some insns can be deferred to the delay slots of the epilogue, the
   delay list for them is recorded here. */

rtx current_function_epilogue_delay_list;

/* If the function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue. */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers. */

int current_function_pretend_args_size;

/* # of bytes of outgoing arguments. If ACCUMULATE_OUTGOING_ARGS is
   defined, the needed space is pushed by the prologue. */

int current_function_outgoing_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one. */

rtx current_function_arg_offset_rtx;

/* Nonzero if current function uses varargs.h or equivalent.
   Zero for functions that use stdarg.h. */

int current_function_varargs;

/* Nonzero if current function uses stdarg.h or equivalent.
   Zero for functions that use varargs.h. */

int current_function_stdarg;

/* Quantities of various kinds of registers
   used for the current function's args. */

CUMULATIVE_ARGS current_function_args_info;

/* Name of function now being compiled. */

char *current_function_name;

/* If non-zero, an RTL expression for the location at which the current
   function returns its result. If the current function returns its
   result in a register, current_function_return_rtx will always be
   the hard register containing the result. */

rtx current_function_return_rtx;

/* Nonzero if the current function uses the constant pool. */

int current_function_uses_const_pool;

/* Nonzero if the current function uses pic_offset_table_rtx. */
int current_function_uses_pic_offset_table;

/* The arg pointer hard register, or the pseudo into which it was copied. */
rtx current_function_internal_arg_pointer;

/* Language-specific reason why the current function cannot be made inline. */
char *current_function_cannot_inline;

/* Nonzero if instrumentation calls for function entry and exit should be
   generated. */
int current_function_instrument_entry_exit;

/* Nonzero if memory access checking should be enabled in the current
   function. */
int current_function_check_memory_usage;

/* The FUNCTION_DECL for an inline function currently being expanded. */
tree inline_function_decl;

/* Number of function calls seen so far in current function. */

int function_call_count;

/* List (chain of TREE_LIST) of LABEL_DECLs for all nonlocal labels
   (labels to which there can be nonlocal gotos from nested functions)
   in this function. */

tree nonlocal_labels;

/* List (chain of EXPR_LIST) of stack slots that hold the current handlers
   for nonlocal gotos. There is one for every nonlocal label in the function;
   this list matches the one in nonlocal_labels.
   Zero when function does not have nonlocal labels. */

rtx nonlocal_goto_handler_slots;

/* RTX for stack slot that holds the stack pointer value to restore
   for a nonlocal goto.
   Zero when function does not have nonlocal labels. */

rtx nonlocal_goto_stack_level;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run. Following this code is the logical return label. */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns. */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt. */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl. */
rtx stack_slot_list;

/* Chain of all RTL_EXPRs that have insns in them. */
tree rtl_expr_chain;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function. */
rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one. */
rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced. There are two cases where this is done: if nonlocal gotos
   exist, or if vars stored at an offset from the argument pointer will be
   needed by inner routines. */

rtx arg_pointer_save_area;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot. */
HOST_WIDE_INT frame_offset;

/* List (chain of TREE_LISTs) of static chains for containing functions.
   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE. */
static tree context_display;

/* List (chain of TREE_LISTs) of trampolines for nested functions.
   The trampoline sets up the static chain and jumps to the function.
   We supply the trampoline's address when the function's address is requested.

   Each link has a FUNCTION_DECL in the TREE_PURPOSE and a reg rtx
   in an RTL_EXPR in the TREE_VALUE. */
static tree trampoline_list;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt. */
static rtx parm_birth_insn;

#if 0
/* Nonzero if a stack slot has been generated whose address is not
   actually valid. It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur. */
static int invalid_stack_slot;
#endif

/* Last insn of those whose job was to put parms into their nominal homes. */
static rtx last_parm_insn;

/* 1 + last pseudo register number possibly used for loading a copy
   of a parameter of this function. */
int max_parm_reg;

/* Vector indexed by REGNO, containing location on stack in which
   to put the parm which is nominally in pseudo register REGNO,
   if we discover that that parm must go in the stack. The highest
   element in this vector is one less than MAX_PARM_REG, above. */
rtx *parm_reg_stack_loc;

/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero. */
static int virtuals_instantiated;

/* These variables hold pointers to functions to
   save and restore machine-specific data,
   in push_function_context and pop_function_context. */
void (*save_machine_status) PROTO((struct function *));
void (*restore_machine_status) PROTO((struct function *));

/* Nonzero if we need to distinguish between the return value of this function
   and the return value of a function called by this function. This helps
   integrate.c. */

extern int rtx_equal_function_value_matters;
extern tree sequence_rtl_expr;
\f
/* In order to evaluate some expressions, such as function calls returning
   structures in memory, we need to temporarily allocate stack locations.
   We record each allocated temporary in the following structure.

   Associated with each temporary slot is a nesting level. When we pop up
   one level, all temporaries associated with the previous level are freed.
   Normally, all temporaries are freed after the execution of the statement
   in which they were created. However, if we are inside a ({...}) grouping,
   the result may be in a temporary and hence must be preserved. If the
   result could be in a temporary, we preserve it if we can determine which
   one it is in. If we cannot determine which temporary may contain the
   result, all temporaries are preserved. A temporary is preserved by
   pretending it was allocated at the previous nesting level.

   Automatic variables are also assigned temporary slots, at the nesting
   level where they are defined. They are marked as "kept" so that
   free_temp_slots will not free them. */

struct temp_slot
{
  /* Points to next temporary slot. */
  struct temp_slot *next;
  /* The rtx used to reference the slot. */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above. May be an EXPR_LIST if multiple addresses exist. */
  rtx address;
  /* The size, in units, of the slot. */
  HOST_WIDE_INT size;
  /* The value of `sequence_rtl_expr' when this temporary is allocated. */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use. */
  char in_use;
  /* Non-zero if this temporary has its address taken. */
  char addr_taken;
  /* Nesting level at which this slot is being used. */
  int level;
  /* Non-zero if this should survive a call to free_temp_slots. */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment. This info is for combine_temp_slots. */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment. This
     info is for combine_temp_slots. */
  HOST_WIDE_INT full_size;
};

/* List of all temporaries allocated, both available and in use. */

struct temp_slot *temp_slots;

/* Current nesting level for temporaries. */

int temp_slot_level;

/* Current nesting level for variables in a block. */

int var_temp_slot_level;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs. */
int target_temp_slot_level;
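
/* A sketch of the usual calling pattern around statement expansion
   (illustrative only; the real callers live elsewhere, e.g. in stmt.c):

       push_temp_slots ();
       ... expand the statement, possibly calling assign_stack_temp ...
       preserve_temp_slots (result);   -- only if a value may live in a temp
       free_temp_slots ();
       pop_temp_slots ();

   Raising temp_slot_level and lowering it again bounds the lifetime of
   every temporary made in between, unless it was preserved or marked
   as kept. */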
\f
/* This structure is used to record MEMs or pseudos used to replace VAR, any
   SUBREGs of VAR, and any MEMs containing VAR as an address. We need to
   maintain this list in case two operands of an insn were required to match;
   in that case we must ensure we use the same replacement. */

struct fixup_replacement
{
  rtx old;
  rtx new;
  struct fixup_replacement *next;
};

/* Forward declarations. */

static rtx assign_outer_stack_local PROTO ((enum machine_mode, HOST_WIDE_INT,
                                            int, struct function *));
static struct temp_slot *find_temp_slot_from_address PROTO((rtx));
static void put_reg_into_stack PROTO((struct function *, rtx, tree,
                                      enum machine_mode, enum machine_mode,
                                      int, int, int));
static void fixup_var_refs PROTO((rtx, enum machine_mode, int));
static struct fixup_replacement
  *find_fixup_replacement PROTO((struct fixup_replacement **, rtx));
static void fixup_var_refs_insns PROTO((rtx, enum machine_mode, int,
                                        rtx, int));
static void fixup_var_refs_1 PROTO((rtx, enum machine_mode, rtx *, rtx,
                                    struct fixup_replacement **));
static rtx fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx walk_fixup_memory_subreg PROTO((rtx, rtx, int));
static rtx fixup_stack_1 PROTO((rtx, rtx));
static void optimize_bit_field PROTO((rtx, rtx, rtx *));
static void instantiate_decls PROTO((tree, int));
static void instantiate_decls_1 PROTO((tree, int));
static void instantiate_decl PROTO((rtx, int, int));
static int instantiate_virtual_regs_1 PROTO((rtx *, rtx, int));
static void delete_handlers PROTO((void));
static void pad_to_arg_alignment PROTO((struct args_size *, int));
#ifndef ARGS_GROW_DOWNWARD
static void pad_below PROTO((struct args_size *, enum machine_mode,
                             tree));
#endif
#ifdef ARGS_GROW_DOWNWARD
static tree round_down PROTO((tree, int));
#endif
static rtx round_trampoline_addr PROTO((rtx));
static tree blocks_nreverse PROTO((tree));
static int all_blocks PROTO((tree, tree *));
#if defined (HAVE_prologue) || defined (HAVE_epilogue)
static int *record_insns PROTO((rtx));
static int contains PROTO((rtx, int *));
#endif /* HAVE_prologue || HAVE_epilogue */
static void put_addressof_into_stack PROTO((rtx));
static void purge_addressof_1 PROTO((rtx *, rtx, int, int));
\f
/* Pointer to chain of `struct function' for containing functions. */
struct function *outer_function_chain;

/* Given a function decl for a containing function,
   return the `struct function' for it. */

struct function *
find_function_data (decl)
     tree decl;
{
  struct function *p;

  for (p = outer_function_chain; p; p = p->next)
    if (p->decl == decl)
      return p;

  abort ();
}

/* Save the current context for compilation of a nested function.
   This is called from language-specific code.
   The caller is responsible for saving any language-specific status,
   since this function knows only about language-independent variables. */

void
push_function_context_to (context)
     tree context;
{
  struct function *p = (struct function *) xmalloc (sizeof (struct function));

  p->next = outer_function_chain;
  outer_function_chain = p;

  p->name = current_function_name;
  p->decl = current_function_decl;
  p->pops_args = current_function_pops_args;
  p->returns_struct = current_function_returns_struct;
  p->returns_pcc_struct = current_function_returns_pcc_struct;
  p->returns_pointer = current_function_returns_pointer;
  p->needs_context = current_function_needs_context;
  p->calls_setjmp = current_function_calls_setjmp;
  p->calls_longjmp = current_function_calls_longjmp;
  p->calls_alloca = current_function_calls_alloca;
  p->has_nonlocal_label = current_function_has_nonlocal_label;
  p->has_nonlocal_goto = current_function_has_nonlocal_goto;
  p->contains_functions = current_function_contains_functions;
  p->is_thunk = current_function_is_thunk;
  p->args_size = current_function_args_size;
  p->pretend_args_size = current_function_pretend_args_size;
  p->arg_offset_rtx = current_function_arg_offset_rtx;
  p->varargs = current_function_varargs;
  p->stdarg = current_function_stdarg;
  p->uses_const_pool = current_function_uses_const_pool;
  p->uses_pic_offset_table = current_function_uses_pic_offset_table;
  p->internal_arg_pointer = current_function_internal_arg_pointer;
  p->cannot_inline = current_function_cannot_inline;
  p->max_parm_reg = max_parm_reg;
  p->parm_reg_stack_loc = parm_reg_stack_loc;
  p->outgoing_args_size = current_function_outgoing_args_size;
  p->return_rtx = current_function_return_rtx;
  p->nonlocal_goto_handler_slots = nonlocal_goto_handler_slots;
  p->nonlocal_goto_stack_level = nonlocal_goto_stack_level;
  p->nonlocal_labels = nonlocal_labels;
  p->cleanup_label = cleanup_label;
  p->return_label = return_label;
  p->save_expr_regs = save_expr_regs;
  p->stack_slot_list = stack_slot_list;
  p->parm_birth_insn = parm_birth_insn;
  p->frame_offset = frame_offset;
  p->tail_recursion_label = tail_recursion_label;
  p->tail_recursion_reentry = tail_recursion_reentry;
  p->arg_pointer_save_area = arg_pointer_save_area;
  p->rtl_expr_chain = rtl_expr_chain;
  p->last_parm_insn = last_parm_insn;
  p->context_display = context_display;
  p->trampoline_list = trampoline_list;
  p->function_call_count = function_call_count;
  p->temp_slots = temp_slots;
  p->temp_slot_level = temp_slot_level;
  p->target_temp_slot_level = target_temp_slot_level;
  p->var_temp_slot_level = var_temp_slot_level;
  p->fixup_var_refs_queue = 0;
  p->epilogue_delay_list = current_function_epilogue_delay_list;
  p->args_info = current_function_args_info;
  p->check_memory_usage = current_function_check_memory_usage;
  p->instrument_entry_exit = current_function_instrument_entry_exit;

  save_tree_status (p, context);
  save_storage_status (p);
  save_emit_status (p);
  save_expr_status (p);
  save_stmt_status (p);
  save_varasm_status (p, context);
  if (save_machine_status)
    (*save_machine_status) (p);
}

void
push_function_context ()
{
  push_function_context_to (current_function_decl);
}

/* Restore the last saved context, at the end of a nested function.
   This function is called from language-specific code. */

void
pop_function_context_from (context)
     tree context;
{
  struct function *p = outer_function_chain;
  struct var_refs_queue *queue;

  outer_function_chain = p->next;

  current_function_contains_functions
    = p->contains_functions || p->inline_obstacks
      || context == current_function_decl;
  current_function_name = p->name;
  current_function_decl = p->decl;
  current_function_pops_args = p->pops_args;
  current_function_returns_struct = p->returns_struct;
  current_function_returns_pcc_struct = p->returns_pcc_struct;
  current_function_returns_pointer = p->returns_pointer;
  current_function_needs_context = p->needs_context;
  current_function_calls_setjmp = p->calls_setjmp;
  current_function_calls_longjmp = p->calls_longjmp;
  current_function_calls_alloca = p->calls_alloca;
  current_function_has_nonlocal_label = p->has_nonlocal_label;
  current_function_has_nonlocal_goto = p->has_nonlocal_goto;
  current_function_is_thunk = p->is_thunk;
  current_function_args_size = p->args_size;
  current_function_pretend_args_size = p->pretend_args_size;
  current_function_arg_offset_rtx = p->arg_offset_rtx;
  current_function_varargs = p->varargs;
  current_function_stdarg = p->stdarg;
  current_function_uses_const_pool = p->uses_const_pool;
  current_function_uses_pic_offset_table = p->uses_pic_offset_table;
  current_function_internal_arg_pointer = p->internal_arg_pointer;
  current_function_cannot_inline = p->cannot_inline;
  max_parm_reg = p->max_parm_reg;
  parm_reg_stack_loc = p->parm_reg_stack_loc;
  current_function_outgoing_args_size = p->outgoing_args_size;
  current_function_return_rtx = p->return_rtx;
  nonlocal_goto_handler_slots = p->nonlocal_goto_handler_slots;
  nonlocal_goto_stack_level = p->nonlocal_goto_stack_level;
  nonlocal_labels = p->nonlocal_labels;
  cleanup_label = p->cleanup_label;
  return_label = p->return_label;
  save_expr_regs = p->save_expr_regs;
  stack_slot_list = p->stack_slot_list;
  parm_birth_insn = p->parm_birth_insn;
  frame_offset = p->frame_offset;
  tail_recursion_label = p->tail_recursion_label;
  tail_recursion_reentry = p->tail_recursion_reentry;
  arg_pointer_save_area = p->arg_pointer_save_area;
  rtl_expr_chain = p->rtl_expr_chain;
  last_parm_insn = p->last_parm_insn;
  context_display = p->context_display;
  trampoline_list = p->trampoline_list;
  function_call_count = p->function_call_count;
  temp_slots = p->temp_slots;
  temp_slot_level = p->temp_slot_level;
  target_temp_slot_level = p->target_temp_slot_level;
  var_temp_slot_level = p->var_temp_slot_level;
  current_function_epilogue_delay_list = p->epilogue_delay_list;
  reg_renumber = 0;
  current_function_args_info = p->args_info;
  current_function_check_memory_usage = p->check_memory_usage;
  current_function_instrument_entry_exit = p->instrument_entry_exit;

  restore_tree_status (p, context);
  restore_storage_status (p);
  restore_expr_status (p);
  restore_emit_status (p);
  restore_stmt_status (p);
  restore_varasm_status (p);

  if (restore_machine_status)
    (*restore_machine_status) (p);

  /* Finish doing put_var_into_stack for any of our variables
     which became addressable during the nested function. */
  for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
    fixup_var_refs (queue->modified, queue->promoted_mode, queue->unsignedp);

  free (p);

  /* Reset variables that have known state during rtx generation. */
  rtx_equal_function_value_matters = 1;
  virtuals_instantiated = 0;
}

void
pop_function_context ()
{
  pop_function_context_from (current_function_decl);
}
\f
/* Allocate fixed slots in the stack frame of the current function. */

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
   the caller may have to do that. */

HOST_WIDE_INT
get_frame_size ()
{
#ifdef FRAME_GROWS_DOWNWARD
  return -frame_offset;
#else
  return frame_offset;
#endif
}

/* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
   with machine mode MODE.

   ALIGN controls the amount of alignment for the address of the slot:
   0 means according to MODE,
   -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
   positive specifies alignment boundary in bits.

   We do not round to stack_boundary here. */

rtx
assign_stack_local (mode, size, align)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment.
     We must be careful here, since FRAME_OFFSET might be negative and
     division with a negative dividend isn't as well defined as we might
     like. So we instead assume that ALIGNMENT is a power of two and
     use logical operations which are unambiguous. */
#ifdef FRAME_GROWS_DOWNWARD
  frame_offset = FLOOR_ROUND (frame_offset, alignment);
#else
  frame_offset = CEIL_ROUND (frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated. */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  frame_offset -= size;
#endif

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer. */
  if (virtuals_instantiated)
    addr = plus_constant (frame_pointer_rtx,
                          (frame_offset + bigend_correction
                           + STARTING_FRAME_OFFSET));
  else
    addr = plus_constant (virtual_stack_vars_rtx,
                          frame_offset + bigend_correction);

#ifndef FRAME_GROWS_DOWNWARD
  frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);

  return x;
}
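
/* A minimal usage sketch (illustrative only): allocating a word-sized
   spill slot with the natural alignment of its mode:

       rtx slot = assign_stack_local (SImode, GET_MODE_SIZE (SImode), 0);

   Until instantiate_virtual_regs runs, the MEM's address is expressed
   in terms of virtual_stack_vars_rtx; afterwards it is rewritten as a
   frame-pointer-relative address. */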

/* Assign a stack slot in a containing function.
   First three arguments are same as in preceding function.
   The last argument specifies the function to allocate in. */

static rtx
assign_outer_stack_local (mode, size, align, function)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int align;
     struct function *function;
{
  register rtx x, addr;
  int bigend_correction = 0;
  int alignment;

  /* Allocate in the memory associated with the function in whose frame
     we are assigning. */
  push_obstacks (function->function_obstack,
                 function->function_maybepermanent_obstack);

  if (align == 0)
    {
      alignment = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (mode == BLKmode)
        alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = CEIL_ROUND (size, alignment);
    }
  else
    alignment = align / BITS_PER_UNIT;

  /* Round frame offset to that alignment. */
#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset = FLOOR_ROUND (function->frame_offset, alignment);
#else
  function->frame_offset = CEIL_ROUND (function->frame_offset, alignment);
#endif

  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated. */
  if (BYTES_BIG_ENDIAN && mode != BLKmode)
    bigend_correction = size - GET_MODE_SIZE (mode);

#ifdef FRAME_GROWS_DOWNWARD
  function->frame_offset -= size;
#endif
  addr = plus_constant (virtual_stack_vars_rtx,
                        function->frame_offset + bigend_correction);
#ifndef FRAME_GROWS_DOWNWARD
  function->frame_offset += size;
#endif

  x = gen_rtx_MEM (mode, addr);

  function->stack_slot_list
    = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);

  pop_obstacks ();

  return x;
}
\f
/* Allocate a temporary stack slot and record it for possible later
   reuse.

   MODE is the machine mode to be given to the returned rtx.

   SIZE is the size in units of the space required. We do no rounding here
   since assign_stack_local will do any required rounding.

   KEEP is 1 if this slot is to be retained after a call to
   free_temp_slots. Automatic variables for a block are allocated
   with this flag. KEEP is 2 if we allocate a longer term temporary,
   whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
   if we are to allocate something at an inner level to be treated as
   a variable in the block (e.g., a SAVE_EXPR). */

rtx
assign_stack_temp (mode, size, keep)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
{
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size. */
  if (size == -1)
    abort ();

  /* First try to find an available, already-allocated temporary that is the
     exact size we require. */
  for (p = temp_slots; p; p = p->next)
    if (p->size == size && GET_MODE (p->slot) == mode && ! p->in_use)
      break;

  /* If we didn't find one, try one that is larger than what we want. We
     find the smallest such. */
  if (p == 0)
    for (p = temp_slots; p; p = p->next)
      if (p->size > size && GET_MODE (p->slot) == mode && ! p->in_use
          && (best_p == 0 || best_p->size > p->size))
        best_p = p;

  /* Make our best, if any, the one to use. */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
         temp_slot so that the extra bytes don't get wasted. Do this only
         for BLKmode slots, so that we can be sure of the alignment. */
      if (GET_MODE (best_p->slot) == BLKmode)
        {
          int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
          HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

          if (best_p->size - rounded_size >= alignment)
            {
              p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));
              p->in_use = p->addr_taken = 0;
              p->size = best_p->size - rounded_size;
              p->base_offset = best_p->base_offset + rounded_size;
              p->full_size = best_p->full_size - rounded_size;
              p->slot = gen_rtx_MEM (BLKmode,
                                     plus_constant (XEXP (best_p->slot, 0),
                                                    rounded_size));
              p->address = 0;
              p->rtl_expr = 0;
              p->next = temp_slots;
              temp_slots = p;

              stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
                                                   stack_slot_list);

              best_p->size = rounded_size;
              best_p->full_size = rounded_size;
            }
        }

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary. */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) oballoc (sizeof (struct temp_slot));

      /* If the temp slot mode doesn't indicate the alignment,
         use the largest possible, so no one will be disappointed. */
      p->slot = assign_stack_local (mode, size, mode == BLKmode ? -1 : 0);

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local
         has performed all the frame alignment and size rounding for the
         requested temporary. Note that extra space added for alignment
         can be either above or below this stack slot depending on which
         way the frame grows. We include the extra space if and only if it
         is above this slot. */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots. */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      p->next = temp_slots;
      temp_slots = p;
    }

  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = sequence_rtl_expr;

  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 0;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }

  /* We may be reusing an old slot, so clear any MEM flags that may have been
     set from before. */
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = 0;
  return p->slot;
}
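
/* Illustrative calls (not taken from real callers), showing the effect
   of KEEP:

       assign_stack_temp (DFmode, 8, 0);    -- freed by the next
                                               free_temp_slots at this level
       assign_stack_temp (BLKmode, 32, 1);  -- survives free_temp_slots
       assign_stack_temp (BLKmode, 32, 2);  -- allocated at
                                               target_temp_slot_level

   A BLKmode request passes align == -1 to assign_stack_local above, so
   such slots always get BIGGEST_ALIGNMENT. */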
\f
/* Assign a temporary of given TYPE.
   KEEP is as for assign_stack_temp.
   MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
   it is 0 if a register is OK.
   DONT_PROMOTE is 1 if we should not promote values in register
   to wider modes. */

rtx
assign_temp (type, keep, memory_required, dont_promote)
     tree type;
     int keep;
     int memory_required;
     int dont_promote;
{
  enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);

  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Unfortunately, we don't yet know how to allocate variable-sized
         temporaries. However, sometimes we have a fixed upper limit on
         the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
         instead. This is the case for Chill variable-sized strings. */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
          && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
          && TREE_CODE (TYPE_ARRAY_MAX_SIZE (type)) == INTEGER_CST)
        size = TREE_INT_CST_LOW (TYPE_ARRAY_MAX_SIZE (type));

      tmp = assign_stack_temp (mode, size, keep);
      MEM_SET_IN_STRUCT_P (tmp, AGGREGATE_TYPE_P (type));
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
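
/* For example (illustrative only), to obtain an addressable temporary
   for some aggregate type TYPE:

       rtx t = assign_temp (type, 0, 1, 0);

   Because memory_required is 1, the result is always a stack MEM, with
   MEM_IN_STRUCT_P set whenever TYPE is an aggregate; with
   memory_required == 0 and a scalar TYPE, a (possibly promoted) pseudo
   register is returned instead. */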
\f
/* Combine temporary stack slots which are adjacent on the stack.

   This allows for better use of already allocated stack space. This is only
   done for BLKmode slots because we can be sure that we won't have alignment
   problems in this case. */

void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization are enabled. */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
        return;

  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
        for (q = p->next, prev_q = p; q; q = prev_q->next)
          {
            int delete_q = 0;
            if (! q->in_use && GET_MODE (q->slot) == BLKmode)
              {
                if (p->base_offset + p->full_size == q->base_offset)
                  {
                    /* Q comes after P; combine Q into P. */
                    p->size += q->size;
                    p->full_size += q->full_size;
                    delete_q = 1;
                  }
                else if (q->base_offset + q->full_size == p->base_offset)
                  {
                    /* P comes after Q; combine P into Q. */
                    q->size += p->size;
                    q->full_size += p->full_size;
                    delete_p = 1;
                    break;
                  }
              }
            /* Either delete Q or advance past it. */
            if (delete_q)
              prev_q->next = q->next;
            else
              prev_q = q;
          }
      /* Either delete P or advance past it. */
      if (delete_p)
        {
          if (prev_p)
            prev_p->next = p->next;
          else
            temp_slots = p->next;
        }
      else
        prev_p = p;
    }
}
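
/* For example, two free BLKmode slots recorded as

       p: base_offset == 0,  full_size == 16
       q: base_offset == 16, full_size == 8

   satisfy p->base_offset + p->full_size == q->base_offset, so Q is
   merged into P, leaving one 24-byte slot that a later, larger request
   can reuse. */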
\f
/* Find the temp slot corresponding to the object at address X. */

static struct temp_slot *
find_temp_slot_from_address (x)
     rtx x;
{
  struct temp_slot *p;
  rtx next;

  for (p = temp_slots; p; p = p->next)
    {
      if (! p->in_use)
        continue;

      else if (XEXP (p->slot, 0) == x
               || p->address == x
               || (GET_CODE (x) == PLUS
                   && XEXP (x, 0) == virtual_stack_vars_rtx
                   && GET_CODE (XEXP (x, 1)) == CONST_INT
                   && INTVAL (XEXP (x, 1)) >= p->base_offset
                   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
        return p;

      else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
        for (next = p->address; next; next = XEXP (next, 1))
          if (XEXP (next, 0) == x)
            return p;
    }

  return 0;
}

/* Indicate that NEW is an alternate way of referring to the temp slot
   that previously was known by OLD. */

void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p = find_temp_slot_from_address (old);

  /* If none, return. Else add NEW as an alias. */
  if (p == 0)
    return;
  else if (p->address == 0)
    p->address = new;
  else
    {
      if (GET_CODE (p->address) != EXPR_LIST)
        p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}

/* If X could be a reference to a temporary slot, mark the fact that its
   address was taken. */

void
mark_temp_addr_taken (x)
     rtx x;
{
  struct temp_slot *p;

  if (x == 0)
    return;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot. */
  if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    p->addr_taken = 1;
}

/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level. If X
   matched one of our slots, just mark that one. Otherwise, we can't
   easily predict which it is, so upgrade all of them. Kept slots
   need not be touched.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory. */

void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* If there is no result, we still might have some objects whose addresses
     were taken, so we need to make sure they stay around. */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to. To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly. */
  if (GET_CODE (x) == REG && REGNO_POINTER_FLAG (REGNO (x)))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken. */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
        if (p->in_use && p->level == temp_slot_level && p->addr_taken)
          p->level--;

      return;
    }

  /* First see if we can find a match. */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
         level in case we used its address. */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
        {
          for (q = temp_slots; q; q = q->next)
            if (q != p && q->addr_taken && q->level == p->level)
              q->level--;

          p->level--;
          p->addr_taken = 0;
        }
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level. */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
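
/* For example, in the GNU statement expression

       z = ({ struct S s; s = f (); s; });

   the value of the construct may be sitting in a temporary slot
   allocated while expanding the inner statements. Calling
   preserve_temp_slots with that value keeps the slot alive one level
   up instead of letting free_temp_slots release it with the rest of
   the statement's temporaries. */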

/* X is the result of an RTL_EXPR. If it is a temporary slot associated
   with that RTL_EXPR, promote it into a temporary slot at the present
   level so it will not be freed when we free slots made in the
   RTL_EXPR. */

void
preserve_rtl_expr_result (x)
     rtx x;
{
  struct temp_slot *p;

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot. */
  if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
    return;

  /* If we can find a match, move it to our level unless it is already at
     an upper level. */
  p = find_temp_slot_from_address (XEXP (x, 0));
  if (p != 0)
    {
      p->level = MIN (p->level, temp_slot_level);
      p->rtl_expr = 0;
    }

  return;
}

/* Free all temporaries used so far. This is normally called at the end
   of generating code for a statement. Don't free any temporaries
   currently in use for an RTL_EXPR that hasn't yet been emitted.
   We could eventually do better than this since it can be reused while
   generating the same RTL_EXPR, but this is complex and probably not
   worthwhile. */

void
free_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep
        && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Free all temporary slots used in T, an RTL_EXPR node. */

void
free_temps_for_rtl_expr (t)
     tree t;
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->rtl_expr == t)
      p->in_use = 0;

  combine_temp_slots ();
}

/* Mark all temporaries ever allocated in this function as not suitable
   for reuse until the current level is exited. */

void
mark_all_temps_used ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    {
      p->in_use = p->keep = 1;
      p->level = MIN (p->level, temp_slot_level);
    }
}

/* Push deeper into the nesting level for stack temporaries. */

void
push_temp_slots ()
{
  temp_slot_level++;
}

/* Likewise, but save the new level as the place to allocate variables
   for blocks. */

void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs. */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level. The only
   permitted use of these functions is to save and restore this value. */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}

/* Pop a temporary nesting level. All slots in use in the current level
   are freed. */

void
pop_temp_slots ()
{
  struct temp_slot *p;

  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
      p->in_use = 0;

  combine_temp_slots ();

  temp_slot_level--;
}

/* Initialize temporary slots. */

void
init_temp_slots ()
{
  /* We have not allocated any temporaries yet. */
  temp_slots = 0;
  temp_slot_level = 0;
  var_temp_slot_level = 0;
  target_temp_slot_level = 0;
}
\f
/* Retroactively move an auto variable from a register to a stack slot.
   This is done when an address-reference to the variable is seen. */

void
put_var_into_stack (decl)
     tree decl;
{
  register rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof;

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode. */
  reg = TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl) : DECL_RTL (decl);

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made. */
  if (reg == 0)
    return;

  /* Get the declared mode for this object. */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
               : DECL_MODE (decl));
  /* Get the mode it's actually stored in. */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function,
     find that function's saved context. */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->next)
      if (function->decl == context)
        break;

  /* If this is a variable-size object with a pseudo to address it,
     put that pseudo into the stack, if the var is nonlocal. */
  if (DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  can_use_addressof
    = (function == 0
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated. */
  if (! can_use_addressof && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs. */

  if (GET_CODE (reg) == REG)
    {
      /* If this variable lives in the current function and we don't need
         to put things in the stack for the sake of setjmp, try to keep it
         in a register until we know we actually need the address. */
      if (can_use_addressof)
        gen_mem_addressof (reg, decl);
      else
        put_reg_into_stack (function, reg, TREE_TYPE (decl),
                            promoted_mode, decl_mode,
                            TREE_SIDE_EFFECTS (decl), 0,
                            TREE_USED (decl)
                            || DECL_INITIAL (decl) != 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
         We do it so they end up consecutive. */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = TREE_TYPE (TREE_TYPE (decl));
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second. */
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#else
      put_reg_into_stack (function, XEXP (reg, 0), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
      put_reg_into_stack (function, XEXP (reg, 1), part_type, part_mode,
                          part_mode, TREE_SIDE_EFFECTS (decl), 0,
                          TREE_USED (decl) || DECL_INITIAL (decl) != 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts. */
      PUT_CODE (reg, MEM);
      MEM_VOLATILE_P (reg) = MEM_VOLATILE_P (XEXP (reg, 0));
      MEM_ALIAS_SET (reg) = get_alias_set (decl);

      /* The two parts are in memory order already.
         Use the lower part's address as ours. */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose. */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
        XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
    }
  else
    return;

  if (current_function_check_memory_usage)
    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                       XEXP (reg, 0), ptr_mode,
                       GEN_INT (GET_MODE_SIZE (GET_MODE (reg))),
                       TYPE_MODE (sizetype),
                       GEN_INT (MEMORY_USE_RW),
                       TYPE_MODE (integer_type_node));
}
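
/* For example, compiling

       int i;
       int *p = &i;

   the front end may first give `i' a pseudo register; when the `&i' is
   seen, put_var_into_stack is called on the decl for `i', which either
   wraps the register in an ADDRESSOF (when can_use_addressof holds) or
   rewrites the rtl to a stack MEM and queues the insn fixups handled
   below. */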

/* Subroutine of put_var_into_stack. This puts a single pseudo reg REG
   into the stack frame of FUNCTION (0 means the current function).
   DECL_MODE is the machine mode of the user-level data type.
   PROMOTED_MODE is the machine mode of the register.
   VOLATILE_P is nonzero if this is for a "volatile" decl.
   USED_P is nonzero if this reg might have already been used in an insn. */

static void
put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
                    original_regno, used_p)
     struct function *function;
     rtx reg;
     tree type;
     enum machine_mode promoted_mode, decl_mode;
     int volatile_p;
     int original_regno;
     int used_p;
{
  rtx new = 0;
  int regno = original_regno;

  if (regno == 0)
    regno = REGNO (reg);

  if (function)
    {
      if (regno < function->max_parm_reg)
        new = function->parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_outer_stack_local (decl_mode, GET_MODE_SIZE (decl_mode),
                                        0, function);
    }
  else
    {
      if (regno < max_parm_reg)
        new = parm_reg_stack_loc[regno];
      if (new == 0)
        new = assign_stack_local (decl_mode, GET_MODE_SIZE (decl_mode), 0);
    }

  PUT_MODE (reg, decl_mode);
  XEXP (reg, 0) = XEXP (new, 0);
  /* `volatil' bit means one thing for MEMs, another entirely for REGs. */
  MEM_VOLATILE_P (reg) = volatile_p;
  PUT_CODE (reg, MEM);

  /* If this is a memory ref that contains aggregate components,
     mark it as such for cse and loop optimize. If we are reusing a
     previously generated stack slot, then we need to copy the bit in
     case it was set for other reasons. For instance, it is set for
     __builtin_va_alist. */
  MEM_SET_IN_STRUCT_P (reg,
                       AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
  MEM_ALIAS_SET (reg) = get_alias_set (type);

  /* Now make sure that all refs to the variable, previously made
     when it was a register, are fixed up to be valid again. */

  if (used_p && function != 0)
    {
      struct var_refs_queue *temp;

      /* Variable is inherited; fix it up when we get back to its function. */
      push_obstacks (function->function_obstack,
                     function->function_maybepermanent_obstack);

      /* See comment in restore_tree_status in tree.c for why this needs to be
         on the saveable obstack. */
1583 temp
1584 = (struct var_refs_queue *) savealloc (sizeof (struct var_refs_queue));
1585 temp->modified = reg;
1586 temp->promoted_mode = promoted_mode;
1587 temp->unsignedp = TREE_UNSIGNED (type);
1588 temp->next = function->fixup_var_refs_queue;
1589 function->fixup_var_refs_queue = temp;
1590 pop_obstacks ();
1591 }
1592 else if (used_p)
1593 /* Variable is local; fix it up now. */
1594 fixup_var_refs (reg, promoted_mode, TREE_UNSIGNED (type));
1595 }
1596 \f
1597 static void
1598 fixup_var_refs (var, promoted_mode, unsignedp)
1599 rtx var;
1600 enum machine_mode promoted_mode;
1601 int unsignedp;
1602 {
1603 tree pending;
1604 rtx first_insn = get_insns ();
1605 struct sequence_stack *stack = sequence_stack;
1606 tree rtl_exps = rtl_expr_chain;
1607
1608 /* Must scan all insns for stack-refs that exceed the limit. */
1609 fixup_var_refs_insns (var, promoted_mode, unsignedp, first_insn, stack == 0);
1610
1611 /* Scan all pending sequences too. */
1612 for (; stack; stack = stack->next)
1613 {
1614 push_to_sequence (stack->first);
1615 fixup_var_refs_insns (var, promoted_mode, unsignedp,
1616 stack->first, stack->next != 0);
1617 /* Update remembered end of sequence
1618 in case we added an insn at the end. */
1619 stack->last = get_last_insn ();
1620 end_sequence ();
1621 }
1622
1623 /* Scan all waiting RTL_EXPRs too. */
1624 for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
1625 {
1626 rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
1627 if (seq != const0_rtx && seq != 0)
1628 {
1629 push_to_sequence (seq);
1630 fixup_var_refs_insns (var, promoted_mode, unsignedp, seq, 0);
1631 end_sequence ();
1632 }
1633 }
1634 }
1635 \f
1636 /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1637 some part of an insn. Return a struct fixup_replacement whose OLD
1638 value is equal to X. Allocate a new structure if no such entry exists. */
1639
1640 static struct fixup_replacement *
1641 find_fixup_replacement (replacements, x)
1642 struct fixup_replacement **replacements;
1643 rtx x;
1644 {
1645 struct fixup_replacement *p;
1646
1647 /* See if we have already replaced this. */
1648 for (p = *replacements; p && p->old != x; p = p->next)
1649 ;
1650
1651 if (p == 0)
1652 {
1653 p = (struct fixup_replacement *) oballoc (sizeof (struct fixup_replacement));
1654 p->old = x;
1655 p->new = 0;
1656 p->next = *replacements;
1657 *replacements = p;
1658 }
1659
1660 return p;
1661 }
1662
1663 /* Scan the insn-chain starting with INSN for refs to VAR
1664 and fix them up. TOPLEVEL is nonzero if this chain is the
1665 main chain of insns for the current function. */
1666
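/* A sketch of the cases handled below: a (clobber VAR) insn is deleted
   outright, as is a toplevel insn that merely copies VAR back into its
   own stack slot (possibly through an intermediate register); any other
   insn mentioning VAR is rewritten via fixup_var_refs_1.  */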
1667 static void
1668 fixup_var_refs_insns (var, promoted_mode, unsignedp, insn, toplevel)
1669 rtx var;
1670 enum machine_mode promoted_mode;
1671 int unsignedp;
1672 rtx insn;
1673 int toplevel;
1674 {
1675 rtx call_dest = 0;
1676
1677 while (insn)
1678 {
1679 rtx next = NEXT_INSN (insn);
1680 rtx set, prev, prev_set;
1681 rtx note;
1682
1683 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1684 {
1685 /* If this is a CLOBBER of VAR, delete it.
1686
1687 If it has a REG_LIBCALL note, delete the REG_LIBCALL
1688 and REG_RETVAL notes too. */
1689 if (GET_CODE (PATTERN (insn)) == CLOBBER
1690 && (XEXP (PATTERN (insn), 0) == var
1691 || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
1692 && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
1693 || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
1694 {
1695 if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
1696 /* The REG_LIBCALL note will go away since we are going to
1697 turn INSN into a NOTE, so just delete the
1698 corresponding REG_RETVAL note. */
1699 remove_note (XEXP (note, 0),
1700 find_reg_note (XEXP (note, 0), REG_RETVAL,
1701 NULL_RTX));
1702
1703 /* In unoptimized compilation, we shouldn't call delete_insn
1704 except when jump.c is emitting warnings. */
1705 PUT_CODE (insn, NOTE);
1706 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1707 NOTE_SOURCE_FILE (insn) = 0;
1708 }
1709
1710 /* The insn to load VAR from a home in the arglist
1711 is now a no-op. When we see it, just delete it.
1712 Similarly if this is storing VAR from a register from which
1713 it was loaded in the previous insn. This will occur
1714 when an ADDRESSOF was made for an arglist slot. */
1715 else if (toplevel
1716 && (set = single_set (insn)) != 0
1717 && SET_DEST (set) == var
1718 /* If this represents the result of an insn group,
1719 don't delete the insn. */
1720 && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
1721 && (rtx_equal_p (SET_SRC (set), var)
1722 || (GET_CODE (SET_SRC (set)) == REG
1723 && (prev = prev_nonnote_insn (insn)) != 0
1724 && (prev_set = single_set (prev)) != 0
1725 && SET_DEST (prev_set) == SET_SRC (set)
1726 && rtx_equal_p (SET_SRC (prev_set), var))))
1727 {
1728 /* In unoptimized compilation, we shouldn't call delete_insn
1729 except when jump.c is emitting warnings. */
1730 PUT_CODE (insn, NOTE);
1731 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1732 NOTE_SOURCE_FILE (insn) = 0;
1733 if (insn == last_parm_insn)
1734 last_parm_insn = PREV_INSN (next);
1735 }
1736 else
1737 {
1738 struct fixup_replacement *replacements = 0;
1739 rtx next_insn = NEXT_INSN (insn);
1740
1741 if (SMALL_REGISTER_CLASSES)
1742 {
1743 /* If the insn that copies the results of a CALL_INSN
1744 into a pseudo now references VAR, we have to use an
1745 intermediate pseudo since we want the life of the
1746 return value register to be only a single insn.
1747
1748 If we don't use an intermediate pseudo, such things as
1749 address computations to make the address of VAR valid
1750 if it is not can be placed between the CALL_INSN and INSN.
1751
1752 To make sure this doesn't happen, we record the destination
1753 of the CALL_INSN and see if the next insn uses both that
1754 and VAR. */
1755
1756 if (call_dest != 0 && GET_CODE (insn) == INSN
1757 && reg_mentioned_p (var, PATTERN (insn))
1758 && reg_mentioned_p (call_dest, PATTERN (insn)))
1759 {
1760 rtx temp = gen_reg_rtx (GET_MODE (call_dest));
1761
1762 emit_insn_before (gen_move_insn (temp, call_dest), insn);
1763
1764 PATTERN (insn) = replace_rtx (PATTERN (insn),
1765 call_dest, temp);
1766 }
1767
1768 if (GET_CODE (insn) == CALL_INSN
1769 && GET_CODE (PATTERN (insn)) == SET)
1770 call_dest = SET_DEST (PATTERN (insn));
1771 else if (GET_CODE (insn) == CALL_INSN
1772 && GET_CODE (PATTERN (insn)) == PARALLEL
1773 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1774 call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1775 else
1776 call_dest = 0;
1777 }
1778
1779 /* See if we have to do anything to INSN now that VAR is in
1780 memory. If it needs to be loaded into a pseudo, use a single
1781 pseudo for the entire insn in case there is a MATCH_DUP
1782 between two operands. We pass a pointer to the head of
1783 a list of struct fixup_replacements. If fixup_var_refs_1
1784 needs to allocate pseudos or replacement MEMs (for SUBREGs),
1785 it will record them in this list.
1786
1787 If it allocated a pseudo for any replacement, we copy into
1788 it here. */
1789
1790 fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
1791 &replacements);
1792
1793 /* If this is last_parm_insn, and any instructions were output
1794 after it to fix it up, then we must set last_parm_insn to
1795 the last such instruction emitted. */
1796 if (insn == last_parm_insn)
1797 last_parm_insn = PREV_INSN (next_insn);
1798
1799 while (replacements)
1800 {
1801 if (GET_CODE (replacements->new) == REG)
1802 {
1803 rtx insert_before;
1804 rtx seq;
1805
1806 /* OLD might be a (subreg (mem)). */
1807 if (GET_CODE (replacements->old) == SUBREG)
1808 replacements->old
1809 = fixup_memory_subreg (replacements->old, insn, 0);
1810 else
1811 replacements->old
1812 = fixup_stack_1 (replacements->old, insn);
1813
1814 insert_before = insn;
1815
1816 /* If we are changing the mode, do a conversion.
1817 This might be wasteful, but combine.c will
1818 eliminate much of the waste. */
1819
1820 if (GET_MODE (replacements->new)
1821 != GET_MODE (replacements->old))
1822 {
1823 start_sequence ();
1824 convert_move (replacements->new,
1825 replacements->old, unsignedp);
1826 seq = gen_sequence ();
1827 end_sequence ();
1828 }
1829 else
1830 seq = gen_move_insn (replacements->new,
1831 replacements->old);
1832
1833 emit_insn_before (seq, insert_before);
1834 }
1835
1836 replacements = replacements->next;
1837 }
1838 }
1839
1840 /* Also fix up any invalid exprs in the REG_NOTES of this insn.
1841 But don't touch other insns referred to by reg-notes;
1842 we will get them elsewhere. */
1843 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1844 if (GET_CODE (note) != INSN_LIST)
1845 XEXP (note, 0)
1846 = walk_fixup_memory_subreg (XEXP (note, 0), insn, 1);
1847 }
1848 insn = next;
1849 }
1850 }
1851 \f
1852 /* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
1853 See if the rtx expression at *LOC in INSN needs to be changed.
1854
1855 REPLACEMENTS is a pointer to a list head that starts out zero, but may
1856 contain a list of original rtx's and replacements. If we find that we need
1857 to modify this insn by replacing a memory reference with a pseudo or by
1858 making a new MEM to implement a SUBREG, we consult that list to see if
1859 we have already chosen a replacement. If none has already been allocated,
1860 we allocate it and update the list. fixup_var_refs_insns will copy VAR
1861 or the SUBREG, as appropriate, to the pseudo. */
1862
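/* For instance, suppose VAR's new home is (mem:SI (plus (reg fp)
   (const_int -20))) and that address is not valid in INSN (the -20 is
   only illustrative).  The MEM case below then allocates a pseudo in
   PROMOTED_MODE as the replacement and records it on REPLACEMENTS;
   fixup_var_refs_insns later emits the copy from VAR's memory location
   into that pseudo just before INSN.  */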
1863 static void
1864 fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements)
1865 register rtx var;
1866 enum machine_mode promoted_mode;
1867 register rtx *loc;
1868 rtx insn;
1869 struct fixup_replacement **replacements;
1870 {
1871 register int i;
1872 register rtx x = *loc;
1873 RTX_CODE code = GET_CODE (x);
1874 register char *fmt;
1875 register rtx tem, tem1;
1876 struct fixup_replacement *replacement;
1877
1878 switch (code)
1879 {
1880 case ADDRESSOF:
1881 if (XEXP (x, 0) == var)
1882 {
1883 /* Prevent sharing of rtl that might lose. */
1884 rtx sub = copy_rtx (XEXP (var, 0));
1885
1886 start_sequence ();
1887
1888 if (! validate_change (insn, loc, sub, 0))
1889 {
1890 rtx y = force_operand (sub, NULL_RTX);
1891
1892 if (! validate_change (insn, loc, y, 0))
1893 *loc = copy_to_reg (y);
1894 }
1895
1896 emit_insn_before (gen_sequence (), insn);
1897 end_sequence ();
1898 }
1899 return;
1900
1901 case MEM:
1902 if (var == x)
1903 {
1904 /* If we already have a replacement, use it. Otherwise,
1905 try to fix up this address in case it is invalid. */
1906
1907 replacement = find_fixup_replacement (replacements, var);
1908 if (replacement->new)
1909 {
1910 *loc = replacement->new;
1911 return;
1912 }
1913
1914 *loc = replacement->new = x = fixup_stack_1 (x, insn);
1915
1916 /* Unless we are forcing memory to register or we changed the mode,
1917 we can leave things the way they are if the insn is valid. */
1918
1919 INSN_CODE (insn) = -1;
1920 if (! flag_force_mem && GET_MODE (x) == promoted_mode
1921 && recog_memoized (insn) >= 0)
1922 return;
1923
1924 *loc = replacement->new = gen_reg_rtx (promoted_mode);
1925 return;
1926 }
1927
1928 /* If X contains VAR, we need to unshare it here so that we update
1929 each occurrence separately. But all identical MEMs in one insn
1930 must be replaced with the same rtx because of the possibility of
1931 MATCH_DUPs. */
1932
1933 if (reg_mentioned_p (var, x))
1934 {
1935 replacement = find_fixup_replacement (replacements, x);
1936 if (replacement->new == 0)
1937 replacement->new = copy_most_rtx (x, var);
1938
1939 *loc = x = replacement->new;
1940 }
1941 break;
1942
1943 case REG:
1944 case CC0:
1945 case PC:
1946 case CONST_INT:
1947 case CONST:
1948 case SYMBOL_REF:
1949 case LABEL_REF:
1950 case CONST_DOUBLE:
1951 return;
1952
1953 case SIGN_EXTRACT:
1954 case ZERO_EXTRACT:
1955 /* Note that in some cases those types of expressions are altered
1956 by optimize_bit_field, and do not survive to get here. */
1957 if (XEXP (x, 0) == var
1958 || (GET_CODE (XEXP (x, 0)) == SUBREG
1959 && SUBREG_REG (XEXP (x, 0)) == var))
1960 {
1961 /* Get TEM as a valid MEM in the mode presently in the insn.
1962
1963 We don't worry about the possibility of MATCH_DUP here; it
1964 is highly unlikely and would be tricky to handle. */
1965
1966 tem = XEXP (x, 0);
1967 if (GET_CODE (tem) == SUBREG)
1968 {
1969 if (GET_MODE_BITSIZE (GET_MODE (tem))
1970 > GET_MODE_BITSIZE (GET_MODE (var)))
1971 {
1972 replacement = find_fixup_replacement (replacements, var);
1973 if (replacement->new == 0)
1974 replacement->new = gen_reg_rtx (GET_MODE (var));
1975 SUBREG_REG (tem) = replacement->new;
1976 }
1977 else
1978 tem = fixup_memory_subreg (tem, insn, 0);
1979 }
1980 else
1981 tem = fixup_stack_1 (tem, insn);
1982
1983 /* Unless we want to load from memory, get TEM into the proper mode
1984 for an extract from memory. This can only be done if the
1985 extract is at a constant position and length. */
1986
1987 if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
1988 && GET_CODE (XEXP (x, 2)) == CONST_INT
1989 && ! mode_dependent_address_p (XEXP (tem, 0))
1990 && ! MEM_VOLATILE_P (tem))
1991 {
1992 enum machine_mode wanted_mode = VOIDmode;
1993 enum machine_mode is_mode = GET_MODE (tem);
1994 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
1995
1996 #ifdef HAVE_extzv
1997 if (GET_CODE (x) == ZERO_EXTRACT)
1998 {
1999 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
2000 if (wanted_mode == VOIDmode)
2001 wanted_mode = word_mode;
2002 }
2003 #endif
2004 #ifdef HAVE_extv
2005 if (GET_CODE (x) == SIGN_EXTRACT)
2006 {
2007 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
2008 if (wanted_mode == VOIDmode)
2009 wanted_mode = word_mode;
2010 }
2011 #endif
2012 /* If we have a narrower mode, we can do something. */
2013 if (wanted_mode != VOIDmode
2014 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2015 {
2016 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2017 rtx old_pos = XEXP (x, 2);
2018 rtx newmem;
2019
2020 /* If the bytes and bits are counted differently, we
2021 must adjust the offset. */
2022 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2023 offset = (GET_MODE_SIZE (is_mode)
2024 - GET_MODE_SIZE (wanted_mode) - offset);
2025
2026 pos %= GET_MODE_BITSIZE (wanted_mode);
2027
2028 newmem = gen_rtx_MEM (wanted_mode,
2029 plus_constant (XEXP (tem, 0), offset));
2030 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2031 MEM_COPY_ATTRIBUTES (newmem, tem);
2032
2033 /* Make the change and see if the insn remains valid. */
2034 INSN_CODE (insn) = -1;
2035 XEXP (x, 0) = newmem;
2036 XEXP (x, 2) = GEN_INT (pos);
2037
2038 if (recog_memoized (insn) >= 0)
2039 return;
2040
2041 /* Otherwise, restore old position. XEXP (x, 0) will be
2042 restored later. */
2043 XEXP (x, 2) = old_pos;
2044 }
2045 }
2046
2047 /* If we get here, the bitfield extract insn can't accept a memory
2048 reference. Copy the input into a register. */
2049
2050 tem1 = gen_reg_rtx (GET_MODE (tem));
2051 emit_insn_before (gen_move_insn (tem1, tem), insn);
2052 XEXP (x, 0) = tem1;
2053 return;
2054 }
2055 break;
2056
2057 case SUBREG:
2058 if (SUBREG_REG (x) == var)
2059 {
2060 /* If this is a special SUBREG made because VAR was promoted
2061 from a wider mode, replace it with VAR and call ourself
2062 recursively, this time saying that the object previously
2063 had its current mode (by virtue of the SUBREG). */
2064
2065 if (SUBREG_PROMOTED_VAR_P (x))
2066 {
2067 *loc = var;
2068 fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements);
2069 return;
2070 }
2071
2072 /* If this SUBREG makes VAR wider, it has become a paradoxical
2073 SUBREG with VAR in memory, but these aren't allowed at this
2074 stage of the compilation. So load VAR into a pseudo and take
2075 a SUBREG of that pseudo. */
2076 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
2077 {
2078 replacement = find_fixup_replacement (replacements, var);
2079 if (replacement->new == 0)
2080 replacement->new = gen_reg_rtx (GET_MODE (var));
2081 SUBREG_REG (x) = replacement->new;
2082 return;
2083 }
2084
2085 /* See if we have already found a replacement for this SUBREG.
2086 If so, use it. Otherwise, make a MEM and see if the insn
2087 is recognized. If not, or if we should force MEM into a register,
2088 make a pseudo for this SUBREG. */
2089 replacement = find_fixup_replacement (replacements, x);
2090 if (replacement->new)
2091 {
2092 *loc = replacement->new;
2093 return;
2094 }
2095
2096 replacement->new = *loc = fixup_memory_subreg (x, insn, 0);
2097
2098 INSN_CODE (insn) = -1;
2099 if (! flag_force_mem && recog_memoized (insn) >= 0)
2100 return;
2101
2102 *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
2103 return;
2104 }
2105 break;
2106
2107 case SET:
2108 /* First do special simplification of bit-field references. */
2109 if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
2110 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2111 optimize_bit_field (x, insn, 0);
2112 if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
2113 || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
2114 optimize_bit_field (x, insn, NULL_PTR);
2115
2116 /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
2117 into a register and then store it back out. */
2118 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2119 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
2120 && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
2121 && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2122 > GET_MODE_SIZE (GET_MODE (var))))
2123 {
2124 replacement = find_fixup_replacement (replacements, var);
2125 if (replacement->new == 0)
2126 replacement->new = gen_reg_rtx (GET_MODE (var));
2127
2128 SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
2129 emit_insn_after (gen_move_insn (var, replacement->new), insn);
2130 }
2131
2132 /* If SET_DEST is now a paradoxical SUBREG, put the result of this
2133 insn into a pseudo and store the low part of the pseudo into VAR. */
2134 if (GET_CODE (SET_DEST (x)) == SUBREG
2135 && SUBREG_REG (SET_DEST (x)) == var
2136 && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2137 > GET_MODE_SIZE (GET_MODE (var))))
2138 {
2139 SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
2140 emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
2141 tem)),
2142 insn);
2143 break;
2144 }
2145
2146 {
2147 rtx dest = SET_DEST (x);
2148 rtx src = SET_SRC (x);
2149 #ifdef HAVE_insv
2150 rtx outerdest = dest;
2151 #endif
2152
2153 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
2154 || GET_CODE (dest) == SIGN_EXTRACT
2155 || GET_CODE (dest) == ZERO_EXTRACT)
2156 dest = XEXP (dest, 0);
2157
2158 if (GET_CODE (src) == SUBREG)
2159 src = XEXP (src, 0);
2160
2161 /* If VAR does not appear at the top level of the SET,
2162 just scan the lower levels of the tree. */
2163
2164 if (src != var && dest != var)
2165 break;
2166
2167 /* We will need to rerecognize this insn. */
2168 INSN_CODE (insn) = -1;
2169
2170 #ifdef HAVE_insv
2171 if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var)
2172 {
2173 /* Since this case will return, ensure we fixup all the
2174 operands here. */
2175 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
2176 insn, replacements);
2177 fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
2178 insn, replacements);
2179 fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
2180 insn, replacements);
2181
2182 tem = XEXP (outerdest, 0);
2183
2184 /* Clean up (SUBREG:SI (MEM:mode ...) 0)
2185 that may appear inside a ZERO_EXTRACT.
2186 This was legitimate when the MEM was a REG. */
2187 if (GET_CODE (tem) == SUBREG
2188 && SUBREG_REG (tem) == var)
2189 tem = fixup_memory_subreg (tem, insn, 0);
2190 else
2191 tem = fixup_stack_1 (tem, insn);
2192
2193 if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
2194 && GET_CODE (XEXP (outerdest, 2)) == CONST_INT
2195 && ! mode_dependent_address_p (XEXP (tem, 0))
2196 && ! MEM_VOLATILE_P (tem))
2197 {
2198 enum machine_mode wanted_mode;
2199 enum machine_mode is_mode = GET_MODE (tem);
2200 HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));
2201
2202 wanted_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
2203 if (wanted_mode == VOIDmode)
2204 wanted_mode = word_mode;
2205
2206 /* If we have a narrower mode, we can do something. */
2207 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
2208 {
2209 HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
2210 rtx old_pos = XEXP (outerdest, 2);
2211 rtx newmem;
2212
2213 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
2214 offset = (GET_MODE_SIZE (is_mode)
2215 - GET_MODE_SIZE (wanted_mode) - offset);
2216
2217 pos %= GET_MODE_BITSIZE (wanted_mode);
2218
2219 newmem = gen_rtx_MEM (wanted_mode,
2220 plus_constant (XEXP (tem, 0), offset));
2221 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
2222 MEM_COPY_ATTRIBUTES (newmem, tem);
2223
2224 /* Make the change and see if the insn remains valid. */
2225 INSN_CODE (insn) = -1;
2226 XEXP (outerdest, 0) = newmem;
2227 XEXP (outerdest, 2) = GEN_INT (pos);
2228
2229 if (recog_memoized (insn) >= 0)
2230 return;
2231
2232 /* Otherwise, restore the old position. XEXP (outerdest, 0) will
2233 be restored later. */
2234 XEXP (outerdest, 2) = old_pos;
2235 }
2236 }
2237
2238 /* If we get here, the bit-field store doesn't allow memory
2239 or isn't located at a constant position. Load the value into
2240 a register, do the store, and put it back into memory. */
2241
2242 tem1 = gen_reg_rtx (GET_MODE (tem));
2243 emit_insn_before (gen_move_insn (tem1, tem), insn);
2244 emit_insn_after (gen_move_insn (tem, tem1), insn);
2245 XEXP (outerdest, 0) = tem1;
2246 return;
2247 }
2248 #endif
2249
2250 /* STRICT_LOW_PART is a no-op on memory references
2251 and it can cause combinations to be unrecognizable,
2252 so eliminate it. */
2253
2254 if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2255 SET_DEST (x) = XEXP (SET_DEST (x), 0);
2256
2257 /* A valid insn to copy VAR into or out of a register
2258 must be left alone, to avoid an infinite loop here.
2259 If the reference to VAR is by a subreg, fix that up,
2260 since SUBREG is not valid for a memref.
2261 Also fix up the address of the stack slot.
2262
2263 Note that we must not try to recognize the insn until
2264 after we know that we have valid addresses and no
2265 (subreg (mem ...) ...) constructs, since these interfere
2266 with determining the validity of the insn. */
2267
2268 if ((SET_SRC (x) == var
2269 || (GET_CODE (SET_SRC (x)) == SUBREG
2270 && SUBREG_REG (SET_SRC (x)) == var))
2271 && (GET_CODE (SET_DEST (x)) == REG
2272 || (GET_CODE (SET_DEST (x)) == SUBREG
2273 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
2274 && GET_MODE (var) == promoted_mode
2275 && x == single_set (insn))
2276 {
2277 rtx pat;
2278
2279 replacement = find_fixup_replacement (replacements, SET_SRC (x));
2280 if (replacement->new)
2281 SET_SRC (x) = replacement->new;
2282 else if (GET_CODE (SET_SRC (x)) == SUBREG)
2283 SET_SRC (x) = replacement->new
2284 = fixup_memory_subreg (SET_SRC (x), insn, 0);
2285 else
2286 SET_SRC (x) = replacement->new
2287 = fixup_stack_1 (SET_SRC (x), insn);
2288
2289 if (recog_memoized (insn) >= 0)
2290 return;
2291
2292 /* INSN is not valid, but we know that we want to
2293 copy SET_SRC (x) to SET_DEST (x) in some way. So
2294 we generate the move and see whether it requires more
2295 than one insn. If it does, we emit those insns and
2296 delete INSN. Otherwise, we can just replace the pattern
2297 of INSN; we have already verified above that INSN has
2298 no function other than to do X. */
2299
2300 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2301 if (GET_CODE (pat) == SEQUENCE)
2302 {
2303 emit_insn_after (pat, insn);
2304 PUT_CODE (insn, NOTE);
2305 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2306 NOTE_SOURCE_FILE (insn) = 0;
2307 }
2308 else
2309 PATTERN (insn) = pat;
2310
2311 return;
2312 }
2313
2314 if ((SET_DEST (x) == var
2315 || (GET_CODE (SET_DEST (x)) == SUBREG
2316 && SUBREG_REG (SET_DEST (x)) == var))
2317 && (GET_CODE (SET_SRC (x)) == REG
2318 || (GET_CODE (SET_SRC (x)) == SUBREG
2319 && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
2320 && GET_MODE (var) == promoted_mode
2321 && x == single_set (insn))
2322 {
2323 rtx pat;
2324
2325 if (GET_CODE (SET_DEST (x)) == SUBREG)
2326 SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn, 0);
2327 else
2328 SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);
2329
2330 if (recog_memoized (insn) >= 0)
2331 return;
2332
2333 pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
2334 if (GET_CODE (pat) == SEQUENCE)
2335 {
2336 emit_insn_after (pat, insn);
2337 PUT_CODE (insn, NOTE);
2338 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2339 NOTE_SOURCE_FILE (insn) = 0;
2340 }
2341 else
2342 PATTERN (insn) = pat;
2343
2344 return;
2345 }
2346
2347 /* Otherwise, storing into VAR must be handled specially
2348 by storing into a temporary and copying that into VAR
2349 with a new insn after this one. Note that this case
2350 will be used when storing into a promoted scalar since
2351 the insn will now have different modes on the input
2352 and output and hence will be invalid (except for the case
2353 of setting it to a constant, which does not need any
2354 change if it is valid). We generate extra code in that case,
2355 but combine.c will eliminate it. */
2356
2357 if (dest == var)
2358 {
2359 rtx temp;
2360 rtx fixeddest = SET_DEST (x);
2361
2362 /* STRICT_LOW_PART can be discarded around a MEM. */
2363 if (GET_CODE (fixeddest) == STRICT_LOW_PART)
2364 fixeddest = XEXP (fixeddest, 0);
2365 /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */
2366 if (GET_CODE (fixeddest) == SUBREG)
2367 {
2368 fixeddest = fixup_memory_subreg (fixeddest, insn, 0);
2369 promoted_mode = GET_MODE (fixeddest);
2370 }
2371 else
2372 fixeddest = fixup_stack_1 (fixeddest, insn);
2373
2374 temp = gen_reg_rtx (promoted_mode);
2375
2376 emit_insn_after (gen_move_insn (fixeddest,
2377 gen_lowpart (GET_MODE (fixeddest),
2378 temp)),
2379 insn);
2380
2381 SET_DEST (x) = temp;
2382 }
2383 }
2384
2385 default:
2386 break;
2387 }
2388
2389 /* Nothing special about this RTX; fix its operands. */
2390
2391 fmt = GET_RTX_FORMAT (code);
2392 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2393 {
2394 if (fmt[i] == 'e')
2395 fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements);
2396 if (fmt[i] == 'E')
2397 {
2398 register int j;
2399 for (j = 0; j < XVECLEN (x, i); j++)
2400 fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
2401 insn, replacements);
2402 }
2403 }
2404 }
2405 \f
2406 /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)),
2407 return an rtx (MEM:m1 newaddr) which is equivalent.
2408 If any insns must be emitted to compute NEWADDR, put them before INSN.
2409
2410 UNCRITICAL nonzero means accept paradoxical subregs.
2411 This is used for subregs found inside REG_NOTES. */
2412
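/* For example, with 4-byte words, (SUBREG:QI (MEM:SI addr) 0) refers to
   the low-order byte of the word.  On a little-endian target that byte
   is at the lowest address, so the result is (MEM:QI addr); on a
   big-endian target it is at the highest, giving
   (MEM:QI (plus addr 3)).  (The 4-byte word size is assumed just for
   this example.)  */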
2413 static rtx
2414 fixup_memory_subreg (x, insn, uncritical)
2415 rtx x;
2416 rtx insn;
2417 int uncritical;
2418 {
2419 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2420 rtx addr = XEXP (SUBREG_REG (x), 0);
2421 enum machine_mode mode = GET_MODE (x);
2422 rtx result;
2423
2424 /* Paradoxical SUBREGs are usually invalid during RTL generation. */
2425 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2426 && ! uncritical)
2427 abort ();
2428
2429 if (BYTES_BIG_ENDIAN)
2430 offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2431 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2432 addr = plus_constant (addr, offset);
2433 if (!flag_force_addr && memory_address_p (mode, addr))
2434 /* Shortcut if no insns need be emitted. */
2435 return change_address (SUBREG_REG (x), mode, addr);
2436 start_sequence ();
2437 result = change_address (SUBREG_REG (x), mode, addr);
2438 emit_insn_before (gen_sequence (), insn);
2439 end_sequence ();
2440 return result;
2441 }
2442
2443 /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2444 Replace subexpressions of X in place.
2445 If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2446 Otherwise return X, with its contents possibly altered.
2447
2448 If any insns must be emitted to compute NEWADDR, put them before INSN.
2449
2450 UNCRITICAL is as in fixup_memory_subreg. */
2451
2452 static rtx
2453 walk_fixup_memory_subreg (x, insn, uncritical)
2454 register rtx x;
2455 rtx insn;
2456 int uncritical;
2457 {
2458 register enum rtx_code code;
2459 register char *fmt;
2460 register int i;
2461
2462 if (x == 0)
2463 return 0;
2464
2465 code = GET_CODE (x);
2466
2467 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2468 return fixup_memory_subreg (x, insn, uncritical);
2469
2470 /* Nothing special about this RTX; fix its operands. */
2471
2472 fmt = GET_RTX_FORMAT (code);
2473 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2474 {
2475 if (fmt[i] == 'e')
2476 XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn, uncritical);
2477 if (fmt[i] == 'E')
2478 {
2479 register int j;
2480 for (j = 0; j < XVECLEN (x, i); j++)
2481 XVECEXP (x, i, j)
2482 = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn, uncritical);
2483 }
2484 }
2485 return x;
2486 }
2487 \f
2488 /* For each memory ref within X, if it refers to a stack slot
2489 with an out of range displacement, put the address in a temp register
2490 (emitting new insns before INSN to load these registers)
2491 and alter the memory ref to use that register.
2492 Replace each such MEM rtx with a copy, to avoid clobberage. */
2493
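/* For instance, if X contains (mem:SI (plus (reg fp) (const_int 100000)))
   and a displacement of 100000 is out of range for the machine's
   addressing modes (the constant is only illustrative), the sum is
   computed into a new pseudo and the reference becomes
   (mem:SI (reg pseudo)).  */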
2494 static rtx
2495 fixup_stack_1 (x, insn)
2496 rtx x;
2497 rtx insn;
2498 {
2499 register int i;
2500 register RTX_CODE code = GET_CODE (x);
2501 register char *fmt;
2502
2503 if (code == MEM)
2504 {
2505 register rtx ad = XEXP (x, 0);
2506 /* If we have the address of a stack slot but it's not valid
2507 (displacement is too large), compute the sum in a register. */
2508 if (GET_CODE (ad) == PLUS
2509 && GET_CODE (XEXP (ad, 0)) == REG
2510 && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2511 && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2512 || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2513 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2514 || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2515 #endif
2516 || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2517 || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2518 || XEXP (ad, 0) == current_function_internal_arg_pointer)
2519 && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2520 {
2521 rtx temp, seq;
2522 if (memory_address_p (GET_MODE (x), ad))
2523 return x;
2524
2525 start_sequence ();
2526 temp = copy_to_reg (ad);
2527 seq = gen_sequence ();
2528 end_sequence ();
2529 emit_insn_before (seq, insn);
2530 return change_address (x, VOIDmode, temp);
2531 }
2532 return x;
2533 }
2534
2535 fmt = GET_RTX_FORMAT (code);
2536 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2537 {
2538 if (fmt[i] == 'e')
2539 XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2540 if (fmt[i] == 'E')
2541 {
2542 register int j;
2543 for (j = 0; j < XVECLEN (x, i); j++)
2544 XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2545 }
2546 }
2547 return x;
2548 }
2549 \f
2550 /* Optimization: a bit-field instruction whose field
2551 happens to be a byte or halfword in memory
2552 can be changed to a move instruction.
2553
2554 We call here when INSN is an insn to examine or store into a bit-field.
2555 BODY is the SET-rtx to be altered.
2556
2557 EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2558 (Currently this is called only from function.c, and EQUIV_MEM
2559 is always 0.) */
2560
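/* As a concrete example (assuming BITS_BIG_ENDIAN == BYTES_BIG_ENDIAN):
   (set (zero_extract:SI (mem:SI addr) (const_int 8) (const_int 8))
        (reg:SI r))
   stores an aligned 8-bit field, so it can be rewritten as a plain
   QImode move, (set (mem:QI (plus addr 1)) ...), with R narrowed via
   gen_lowpart.  */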
2561 static void
2562 optimize_bit_field (body, insn, equiv_mem)
2563 rtx body;
2564 rtx insn;
2565 rtx *equiv_mem;
2566 {
2567 register rtx bitfield;
2568 int destflag;
2569 rtx seq = 0;
2570 enum machine_mode mode;
2571
2572 if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
2573 || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
2574 bitfield = SET_DEST (body), destflag = 1;
2575 else
2576 bitfield = SET_SRC (body), destflag = 0;
2577
2578 /* First check that the field being stored has constant size and position
2579 and is in fact a byte or halfword suitably aligned. */
2580
2581 if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
2582 && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
2583 && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
2584 != BLKmode)
2585 && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
2586 {
2587 register rtx memref = 0;
2588
2589 /* Now check that the containing word is memory, not a register,
2590 and that it is safe to change the machine mode. */
2591
2592 if (GET_CODE (XEXP (bitfield, 0)) == MEM)
2593 memref = XEXP (bitfield, 0);
2594 else if (GET_CODE (XEXP (bitfield, 0)) == REG
2595 && equiv_mem != 0)
2596 memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
2597 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2598 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
2599 memref = SUBREG_REG (XEXP (bitfield, 0));
2600 else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
2601 && equiv_mem != 0
2602 && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
2603 memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];
2604
2605 if (memref
2606 && ! mode_dependent_address_p (XEXP (memref, 0))
2607 && ! MEM_VOLATILE_P (memref))
2608 {
2609 /* Now adjust the address, first for any subreg'ing
2610 that we are now getting rid of,
2611 and then for which byte of the word is wanted. */
2612
2613 HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
2614 rtx insns;
2615
2616 /* Adjust OFFSET to count bits from low-address byte. */
2617 if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2618 offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
2619 - offset - INTVAL (XEXP (bitfield, 1)));
2620
2621 /* Adjust OFFSET to count bytes from low-address byte. */
2622 offset /= BITS_PER_UNIT;
2623 if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
2624 {
2625 offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD;
2626 if (BYTES_BIG_ENDIAN)
2627 offset -= (MIN (UNITS_PER_WORD,
2628 GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
2629 - MIN (UNITS_PER_WORD,
2630 GET_MODE_SIZE (GET_MODE (memref))));
2631 }
2632
2633 start_sequence ();
2634 memref = change_address (memref, mode,
2635 plus_constant (XEXP (memref, 0), offset));
2636 insns = get_insns ();
2637 end_sequence ();
2638 emit_insns_before (insns, insn);
2639
2640 /* Store this memory reference where
2641 we found the bit field reference. */
2642
2643 if (destflag)
2644 {
2645 validate_change (insn, &SET_DEST (body), memref, 1);
2646 if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
2647 {
2648 rtx src = SET_SRC (body);
2649 while (GET_CODE (src) == SUBREG
2650 && SUBREG_WORD (src) == 0)
2651 src = SUBREG_REG (src);
2652 if (GET_MODE (src) != GET_MODE (memref))
2653 src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
2654 validate_change (insn, &SET_SRC (body), src, 1);
2655 }
2656 else if (GET_MODE (SET_SRC (body)) != VOIDmode
2657 && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
2658 /* This shouldn't happen because anything that didn't have
2659 one of these modes should have got converted explicitly
2660 and then referenced through a subreg.
2661 This is so because the original bit-field was
2662 handled by agg_mode and so its tree structure had
2663 the same mode that memref now has. */
2664 abort ();
2665 }
2666 else
2667 {
2668 rtx dest = SET_DEST (body);
2669
2670 while (GET_CODE (dest) == SUBREG
2671 && SUBREG_WORD (dest) == 0
2672 && (GET_MODE_CLASS (GET_MODE (dest))
2673 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest)))))
2674 dest = SUBREG_REG (dest);
2675
2676 validate_change (insn, &SET_DEST (body), dest, 1);
2677
2678 if (GET_MODE (dest) == GET_MODE (memref))
2679 validate_change (insn, &SET_SRC (body), memref, 1);
2680 else
2681 {
2682 /* Convert the mem ref to the destination mode. */
2683 rtx newreg = gen_reg_rtx (GET_MODE (dest));
2684
2685 start_sequence ();
2686 convert_move (newreg, memref,
2687 GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
2688 seq = get_insns ();
2689 end_sequence ();
2690
2691 validate_change (insn, &SET_SRC (body), newreg, 1);
2692 }
2693 }
2694
2695 /* See if we can convert this extraction or insertion into
2696 a simple move insn. We might not be able to do so if this
2697 was, for example, part of a PARALLEL.
2698
2699 If we succeed, write out any needed conversions. If we fail,
2700 it is hard to guess why we failed, so don't do anything
2701 special; just let the optimization be suppressed. */
2702
2703 if (apply_change_group () && seq)
2704 emit_insns_before (seq, insn);
2705 }
2706 }
2707 }
2708 \f
2709 /* These routines are responsible for converting virtual register references
2710 to the actual hard register references once RTL generation is complete.
2711
2712 The following five variables are used for communication between the
2713 routines. They contain the offsets of the virtual registers from their
2714 respective hard registers. */
2715
2716 static int in_arg_offset;
2717 static int var_offset;
2718 static int dynamic_offset;
2719 static int out_arg_offset;
2720 static int cfa_offset;
2721
2722 /* In most machines, the stack pointer register is equivalent to the bottom
2723 of the stack. */
2724
2725 #ifndef STACK_POINTER_OFFSET
2726 #define STACK_POINTER_OFFSET 0
2727 #endif
2728
2729 /* If not defined, pick an appropriate default for the offset of dynamically
2730 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2731 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
2732
2733 #ifndef STACK_DYNAMIC_OFFSET
2734
2735 #ifdef ACCUMULATE_OUTGOING_ARGS
2736 /* The bottom of the stack points to the actual arguments. If
2737 REG_PARM_STACK_SPACE is defined, this includes the space for the register
2738 parameters. However, if OUTGOING_REG_PARM_STACK_SPACE is not defined,
2739 stack space for register parameters is not pushed by the caller, but
2740 is rather part of the fixed stack areas and hence not included in
2741 `current_function_outgoing_args_size'. Nevertheless, we must allow
2742 for it when allocating stack dynamic objects. */
2743
2744 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2745 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2746 (current_function_outgoing_args_size \
2747 + REG_PARM_STACK_SPACE (FNDECL) + (STACK_POINTER_OFFSET))
2748
2749 #else
2750 #define STACK_DYNAMIC_OFFSET(FNDECL) \
2751 (current_function_outgoing_args_size + (STACK_POINTER_OFFSET))
2752 #endif
2753
2754 #else
2755 #define STACK_DYNAMIC_OFFSET(FNDECL) STACK_POINTER_OFFSET
2756 #endif
2757 #endif
2758
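/* For example, under ACCUMULATE_OUTGOING_ARGS with 32 bytes of outgoing
   argument space, a REG_PARM_STACK_SPACE of 16 and a STACK_POINTER_OFFSET
   of 0 (all figures hypothetical), dynamically allocated objects would
   start 48 bytes above the stack pointer.  */
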
2759 /* On a few machines, the CFA coincides with the arg pointer. */
2760
2761 #ifndef ARG_POINTER_CFA_OFFSET
2762 #define ARG_POINTER_CFA_OFFSET 0
2763 #endif
2764
2765
2766 /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just had
2767 its address taken. DECL is the decl for the object stored in the
2768 register, for later use if we do need to force REG into the stack.
2769 REG is overwritten by the MEM like in put_reg_into_stack. */
2770
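/* For example, if `int x' lived in (reg:SI 100) (register numbers
   hypothetical), REG afterwards reads
   (mem:SI (addressof:SI (reg:SI 101) 100)), where 101 is a fresh
   pseudo and Pmode is assumed to be SImode.  */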
2771 rtx
2772 gen_mem_addressof (reg, decl)
2773 rtx reg;
2774 tree decl;
2775 {
2776 tree type = TREE_TYPE (decl);
2777 rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
2778 SET_ADDRESSOF_DECL (r, decl);
2779 /* If the original REG was a user-variable, then so is the REG whose
2780 address is being taken. */
2781 REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
2782
2783 XEXP (reg, 0) = r;
2784 PUT_CODE (reg, MEM);
2785 PUT_MODE (reg, DECL_MODE (decl));
2786 MEM_VOLATILE_P (reg) = TREE_SIDE_EFFECTS (decl);
2787 MEM_SET_IN_STRUCT_P (reg, AGGREGATE_TYPE_P (type));
2788 MEM_ALIAS_SET (reg) = get_alias_set (decl);
2789
2790 if (TREE_USED (decl) || DECL_INITIAL (decl) != 0)
2791 fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type));
2792
2793 return reg;
2794 }
2795
2796 /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack. */
2797
2798 void
2799 flush_addressof (decl)
2800 tree decl;
2801 {
2802 if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
2803 && DECL_RTL (decl) != 0
2804 && GET_CODE (DECL_RTL (decl)) == MEM
2805 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
2806 && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
2807 put_addressof_into_stack (XEXP (DECL_RTL (decl), 0));
2808 }
2809
2810 /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack. */
2811
2812 static void
2813 put_addressof_into_stack (r)
2814 rtx r;
2815 {
2816 tree decl = ADDRESSOF_DECL (r);
2817 rtx reg = XEXP (r, 0);
2818
2819 if (GET_CODE (reg) != REG)
2820 abort ();
2821
2822 put_reg_into_stack (0, reg, TREE_TYPE (decl), GET_MODE (reg),
2823 DECL_MODE (decl), TREE_SIDE_EFFECTS (decl),
2824 ADDRESSOF_REGNO (r),
2825 TREE_USED (decl) || DECL_INITIAL (decl) != 0);
2826 }
2827
2828 /* List of replacements made below in purge_addressof_1 when creating
2829 bitfield insertions. */
2830 static rtx purge_addressof_replacements;
2831
2832 /* Helper function for purge_addressof. See if the rtx expression at *LOC
2833 in INSN needs to be changed. If FORCE, always put any ADDRESSOFs into
2834 the stack. */
2835
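/* E.g. (mem:SI (addressof:SI (reg:SI 105) 100)) collapses to
   (reg:SI 105) when the modes agree; if instead the MEM is narrower,
   say QImode, the matching bit-field extract or insert on the register
   is emitted below.  (The register numbers are hypothetical.)  */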
2836 static void
2837 purge_addressof_1 (loc, insn, force, store)
2838 rtx *loc;
2839 rtx insn;
2840 int force, store;
2841 {
2842 rtx x;
2843 RTX_CODE code;
2844 int i, j;
2845 char *fmt;
2846
2847 /* Re-start here to avoid recursion in common cases. */
2848 restart:
2849
2850 x = *loc;
2851 if (x == 0)
2852 return;
2853
2854 code = GET_CODE (x);
2855
2856 if (code == ADDRESSOF && GET_CODE (XEXP (x, 0)) == MEM)
2857 {
2858 rtx insns;
2859 /* We must create a copy of the rtx because it was created by
2860 overwriting a REG rtx which is always shared. */
2861 rtx sub = copy_rtx (XEXP (XEXP (x, 0), 0));
2862
2863 if (validate_change (insn, loc, sub, 0))
2864 return;
2865
2866 start_sequence ();
2867 if (! validate_change (insn, loc,
2868 force_operand (sub, NULL_RTX),
2869 0))
2870 abort ();
2871
2872 insns = gen_sequence ();
2873 end_sequence ();
2874 emit_insn_before (insns, insn);
2875 return;
2876 }
2877 else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
2878 {
2879 rtx sub = XEXP (XEXP (x, 0), 0);
2880
2881 if (GET_CODE (sub) == MEM)
2882 sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
2883
2884 if (GET_CODE (sub) == REG
2885 && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
2886 {
2887 put_addressof_into_stack (XEXP (x, 0));
2888 return;
2889 }
2890 else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
2891 {
2892 int size_x, size_sub;
2893
2894 if (!insn)
2895 {
2896 /* When processing REG_NOTES look at the list of
2897 replacements done on the insn to find the register that X
2898 was replaced by. */
2899 rtx tem;
2900
2901 for (tem = purge_addressof_replacements; tem != NULL_RTX;
2902 tem = XEXP (XEXP (tem, 1), 1))
2903 {
2904 rtx y = XEXP (tem, 0);
2905 if (GET_CODE (y) == MEM
2906 && rtx_equal_p (XEXP (x, 0), XEXP (y, 0)))
2907 {
2908 /* It can happen that the note speaks of things in
2909 a wider (or just different) mode than the code did.
2910 This is especially true of REG_RETVAL. */
2911
2912 rtx z = XEXP (XEXP (tem, 1), 0);
2913 if (GET_MODE (x) != GET_MODE (y))
2914 {
2915 if (GET_CODE (z) == SUBREG && SUBREG_WORD (z) == 0)
2916 z = SUBREG_REG (z);
2917
2918 /* ??? If we'd gotten into any of the really complex
2919 cases below, I'm not sure we can do a proper
2920 replacement. Might we be able to delete the
2921 note in some cases? */
2922 if (GET_MODE_SIZE (GET_MODE (x))
2923 < GET_MODE_SIZE (GET_MODE (y)))
2924 abort ();
2925
2926 z = gen_lowpart (GET_MODE (x), z);
2927 }
2928
2929 *loc = z;
2930 return;
2931 }
2932 }
2933
2934 /* There should always be such a replacement. */
2935 abort ();
2936 }
2937
2938 size_x = GET_MODE_BITSIZE (GET_MODE (x));
2939 size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
2940
2941 /* Don't even consider working with paradoxical subregs,
2942 or the moral equivalent seen here. */
2943 if (size_x <= size_sub
2944 && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
2945 {
2946 /* Do a bitfield insertion to mirror what would happen
2947 in memory. */
2948
2949 rtx val, seq;
2950
2951 if (store)
2952 {
2953 rtx p;
2954
2955 start_sequence ();
2956 val = gen_reg_rtx (GET_MODE (x));
2957 if (! validate_change (insn, loc, val, 0))
2958 {
2959 /* Discard the current sequence and put the
2960 ADDRESSOF on stack. */
2961 end_sequence ();
2962 goto give_up;
2963 }
2964 seq = gen_sequence ();
2965 end_sequence ();
2966 emit_insn_before (seq, insn);
2967
2968 start_sequence ();
2969 store_bit_field (sub, size_x, 0, GET_MODE (x),
2970 val, GET_MODE_SIZE (GET_MODE (sub)),
2971 GET_MODE_SIZE (GET_MODE (sub)));
2972
2973 /* Make sure to unshare any shared rtl that store_bit_field
2974 might have created. */
2975 for (p = get_insns (); p; p = NEXT_INSN (p))
2976 {
2977 reset_used_flags (PATTERN (p));
2978 reset_used_flags (REG_NOTES (p));
2979 reset_used_flags (LOG_LINKS (p));
2980 }
2981 unshare_all_rtl (get_insns ());
2982
2983 seq = gen_sequence ();
2984 end_sequence ();
2985 emit_insn_after (seq, insn);
2986 }
2987 else
2988 {
2989 start_sequence ();
2990 val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
2991 GET_MODE (x), GET_MODE (x),
2992 GET_MODE_SIZE (GET_MODE (sub)),
2993 GET_MODE_SIZE (GET_MODE (sub)));
2994
2995 if (! validate_change (insn, loc, val, 0))
2996 {
2997 /* Discard the current sequence and put the
2998 ADDRESSOF on stack. */
2999 end_sequence ();
3000 goto give_up;
3001 }
3002
3003 seq = gen_sequence ();
3004 end_sequence ();
3005 emit_insn_before (seq, insn);
3006 }
3007
3008 /* Remember the replacement so that the same one can be done
3009 on the REG_NOTES. */
3010 purge_addressof_replacements
3011 = gen_rtx_EXPR_LIST (VOIDmode, x,
3012 gen_rtx_EXPR_LIST (VOIDmode, val,
3013 purge_addressof_replacements));
3014
3015 /* We replaced with a reg -- all done. */
3016 return;
3017 }
3018 }
3019 else if (validate_change (insn, loc, sub, 0))
3020 {
3021 /* Remember the replacement so that the same one can be done
3022 on the REG_NOTES. */
3023 purge_addressof_replacements
3024 = gen_rtx_EXPR_LIST (VOIDmode, x,
3025 gen_rtx_EXPR_LIST (VOIDmode, sub,
3026 purge_addressof_replacements));
3027 goto restart;
3028 }
3029 give_up:;
3030 /* else give up and put it into the stack */
3031 }
3032 else if (code == ADDRESSOF)
3033 {
3034 put_addressof_into_stack (x);
3035 return;
3036 }
3037 else if (code == SET)
3038 {
3039 purge_addressof_1 (&SET_DEST (x), insn, force, 1);
3040 purge_addressof_1 (&SET_SRC (x), insn, force, 0);
3041 return;
3042 }
3043
3044 /* Scan all subexpressions. */
3045 fmt = GET_RTX_FORMAT (code);
3046 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3047 {
3048 if (*fmt == 'e')
3049 purge_addressof_1 (&XEXP (x, i), insn, force, 0);
3050 else if (*fmt == 'E')
3051 for (j = 0; j < XVECLEN (x, i); j++)
3052 purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0);
3053 }
3054 }
3055
3056 /* Eliminate all occurrences of ADDRESSOF from INSNS. Elide any remaining
3057 (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3058 stack. */
3059
3060 void
3061 purge_addressof (insns)
3062 rtx insns;
3063 {
3064 rtx insn;
3065 for (insn = insns; insn; insn = NEXT_INSN (insn))
3066 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3067 || GET_CODE (insn) == CALL_INSN)
3068 {
3069 purge_addressof_1 (&PATTERN (insn), insn,
3070 asm_noperands (PATTERN (insn)) > 0, 0);
3071 purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0);
3072 }
3073 purge_addressof_replacements = 0;
3074 }
3075 \f
3076 /* Pass through the INSNS of function FNDECL and convert virtual register
3077 references to hard register references. */
3078
3079 void
3080 instantiate_virtual_regs (fndecl, insns)
3081 tree fndecl;
3082 rtx insns;
3083 {
3084 rtx insn;
3085 int i;
3086
3087 /* Compute the offsets to use for this function. */
3088 in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3089 var_offset = STARTING_FRAME_OFFSET;
3090 dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3091 out_arg_offset = STACK_POINTER_OFFSET;
3092 cfa_offset = ARG_POINTER_CFA_OFFSET;
3093
3094 /* Scan all variables and parameters of this function. For each that is
3095 in memory, instantiate all virtual registers if the result is a valid
3096 address. If not, we do it later. That will handle most uses of virtual
3097 regs on many machines. */
3098 instantiate_decls (fndecl, 1);
3099
3100 /* Initialize recognition, indicating that volatile is OK. */
3101 init_recog ();
3102
3103 /* Scan through all the insns, instantiating every virtual register still
3104 present. */
3105 for (insn = insns; insn; insn = NEXT_INSN (insn))
3106 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3107 || GET_CODE (insn) == CALL_INSN)
3108 {
3109 instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3110 instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3111 }
3112
3113 /* Instantiate the stack slots for the parm registers, for later use in
3114 addressof elimination. */
3115 for (i = 0; i < max_parm_reg; ++i)
3116 if (parm_reg_stack_loc[i])
3117 instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3118
3119 /* Now instantiate the remaining register equivalences for debugging info.
3120 These will not be valid addresses. */
3121 instantiate_decls (fndecl, 0);
3122
3123 /* Indicate that, from now on, assign_stack_local should use
3124 frame_pointer_rtx. */
3125 virtuals_instantiated = 1;
3126 }
3127
3128 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3129 all virtual registers in their DECL_RTL's.
3130
3131 If VALID_ONLY, do this only if the resulting address is still valid.
3132 Otherwise, always do it. */
3133
3134 static void
3135 instantiate_decls (fndecl, valid_only)
3136 tree fndecl;
3137 int valid_only;
3138 {
3139 tree decl;
3140
3141 if (DECL_SAVED_INSNS (fndecl))
3142 /* When compiling an inline function, the obstack used for
3143 rtl allocation is the maybepermanent_obstack. Calling
3144 `resume_temporary_allocation' switches us back to that
3145 obstack while we process this function's parameters. */
3146 resume_temporary_allocation ();
3147
3148 /* Process all parameters of the function. */
3149 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3150 {
3151 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3152
3153 instantiate_decl (DECL_RTL (decl), size, valid_only);
3154
3155 /* If the parameter was promoted, then the incoming RTL mode may be
3156 larger than the declared type size. We must use the larger of
3157 the two sizes. */
3158 size = MAX (GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl))), size);
3159 instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3160 }
3161
3162 /* Now process all variables defined in the function or its subblocks. */
3163 instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3164
3165 if (DECL_INLINE (fndecl) || DECL_DEFER_OUTPUT (fndecl))
3166 {
3167 /* Save all rtl allocated for this function by raising the
3168 high-water mark on the maybepermanent_obstack. */
3169 preserve_data ();
3170 /* All further rtl allocation is now done in the current_obstack. */
3171 rtl_in_current_obstack ();
3172 }
3173 }
3174
3175 /* Subroutine of instantiate_decls: Process all decls in the given
3176 BLOCK node and all its subblocks. */
3177
3178 static void
3179 instantiate_decls_1 (let, valid_only)
3180 tree let;
3181 int valid_only;
3182 {
3183 tree t;
3184
3185 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3186 instantiate_decl (DECL_RTL (t), int_size_in_bytes (TREE_TYPE (t)),
3187 valid_only);
3188
3189 /* Process all subblocks. */
3190 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3191 instantiate_decls_1 (t, valid_only);
3192 }
3193
3194 /* Subroutine of the preceding procedures: Given RTL representing a
3195 decl and the size of the object, do any instantiation required.
3196
3197 If VALID_ONLY is non-zero, it means that the RTL should only be
3198 changed if the new address is valid. */
3199
3200 static void
3201 instantiate_decl (x, size, valid_only)
3202 rtx x;
3203 int size;
3204 int valid_only;
3205 {
3206 enum machine_mode mode;
3207 rtx addr;
3208
3209 /* If this is not a MEM, no need to do anything. Similarly if the
3210 address is a constant or a register that is not a virtual register. */
3211
3212 if (x == 0 || GET_CODE (x) != MEM)
3213 return;
3214
3215 addr = XEXP (x, 0);
3216 if (CONSTANT_P (addr)
3217 || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3218 || (GET_CODE (addr) == REG
3219 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3220 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3221 return;
3222
3223 /* If we should only do this if the address is valid, copy the address.
3224 We need to do this so we can undo any changes that might make the
3225 address invalid. This copy is unfortunate, but probably can't be
3226 avoided. */
3227
3228 if (valid_only)
3229 addr = copy_rtx (addr);
3230
3231 instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3232
3233 if (valid_only)
3234 {
3235 /* Now verify that the resulting address is valid for every integer or
3236 floating-point mode up to and including SIZE bytes long. We do this
3237 since the object might be accessed in any mode and frame addresses
3238 are shared. */
3239
3240 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3241 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3242 mode = GET_MODE_WIDER_MODE (mode))
3243 if (! memory_address_p (mode, addr))
3244 return;
3245
3246 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3247 mode != VOIDmode && GET_MODE_SIZE (mode) <= size;
3248 mode = GET_MODE_WIDER_MODE (mode))
3249 if (! memory_address_p (mode, addr))
3250 return;
3251 }
3252
3253 /* Put back the address now that we have updated it and we either know
3254 it is valid or we don't care whether it is valid. */
3255
3256 XEXP (x, 0) = addr;
3257 }
3258 \f
3259 /* Given a pointer to a piece of rtx and an optional pointer to the
3260 containing object, instantiate any virtual registers present in it.
3261
3262 If EXTRA_INSNS, we always do the replacement and generate
3263 any extra insns before OBJECT. If it is zero, we do nothing if the
3264 replacement is not valid.
3265
3266 Return 1 if we either had nothing to do or if we were able to do the
3267 needed replacement. Return 0 otherwise; we only return zero if
3268 EXTRA_INSNS is zero.
3269
3270 We first try some simple transformations to avoid the creation of extra
3271 pseudos. */
3272
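/* For example, (plus:SI (reg virtual_stack_vars) (const_int 4)) is
   rewritten as (plus:SI (reg frame_pointer) (const_int N + 4)), where N
   is var_offset; if N + 4 happens to be zero, the PLUS collapses to the
   bare frame pointer register.  */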
3273 static int
3274 instantiate_virtual_regs_1 (loc, object, extra_insns)
3275 rtx *loc;
3276 rtx object;
3277 int extra_insns;
3278 {
3279 rtx x;
3280 RTX_CODE code;
3281 rtx new = 0;
3282 HOST_WIDE_INT offset;
3283 rtx temp;
3284 rtx seq;
3285 int i, j;
3286 char *fmt;
3287
3288 /* Re-start here to avoid recursion in common cases. */
3289 restart:
3290
3291 x = *loc;
3292 if (x == 0)
3293 return 1;
3294
3295 code = GET_CODE (x);
3296
3297 /* Check for some special cases. */
3298 switch (code)
3299 {
3300 case CONST_INT:
3301 case CONST_DOUBLE:
3302 case CONST:
3303 case SYMBOL_REF:
3304 case CODE_LABEL:
3305 case PC:
3306 case CC0:
3307 case ASM_INPUT:
3308 case ADDR_VEC:
3309 case ADDR_DIFF_VEC:
3310 case RETURN:
3311 return 1;
3312
3313 case SET:
3314 /* We are allowed to set the virtual registers. This means that
3315 the actual register should receive the source minus the
3316 appropriate offset. This is used, for example, in the handling
3317 of non-local gotos. */
3318 if (SET_DEST (x) == virtual_incoming_args_rtx)
3319 new = arg_pointer_rtx, offset = - in_arg_offset;
3320 else if (SET_DEST (x) == virtual_stack_vars_rtx)
3321 new = frame_pointer_rtx, offset = - var_offset;
3322 else if (SET_DEST (x) == virtual_stack_dynamic_rtx)
3323 new = stack_pointer_rtx, offset = - dynamic_offset;
3324 else if (SET_DEST (x) == virtual_outgoing_args_rtx)
3325 new = stack_pointer_rtx, offset = - out_arg_offset;
3326 else if (SET_DEST (x) == virtual_cfa_rtx)
3327 new = arg_pointer_rtx, offset = - cfa_offset;
3328
3329 if (new)
3330 {
3331 /* The only valid sources here are PLUS or REG. Just do
3332 the simplest possible thing to handle them. */
3333 if (GET_CODE (SET_SRC (x)) != REG
3334 && GET_CODE (SET_SRC (x)) != PLUS)
3335 abort ();
3336
3337 start_sequence ();
3338 if (GET_CODE (SET_SRC (x)) != REG)
3339 temp = force_operand (SET_SRC (x), NULL_RTX);
3340 else
3341 temp = SET_SRC (x);
3342 temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3343 seq = get_insns ();
3344 end_sequence ();
3345
3346 emit_insns_before (seq, object);
3347 SET_DEST (x) = new;
3348
3349 if (! validate_change (object, &SET_SRC (x), temp, 0)
3350 || ! extra_insns)
3351 abort ();
3352
3353 return 1;
3354 }
3355
3356 instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3357 loc = &SET_SRC (x);
3358 goto restart;
3359
3360 case PLUS:
3361 /* Handle special case of virtual register plus constant. */
3362 if (CONSTANT_P (XEXP (x, 1)))
3363 {
3364 rtx old, new_offset;
3365
3366 /* Check for (plus (plus VIRT foo) (const_int)) first. */
3367 if (GET_CODE (XEXP (x, 0)) == PLUS)
3368 {
3369 rtx inner = XEXP (XEXP (x, 0), 0);
3370
3371 if (inner == virtual_incoming_args_rtx)
3372 new = arg_pointer_rtx, offset = in_arg_offset;
3373 else if (inner == virtual_stack_vars_rtx)
3374 new = frame_pointer_rtx, offset = var_offset;
3375 else if (inner == virtual_stack_dynamic_rtx)
3376 new = stack_pointer_rtx, offset = dynamic_offset;
3377 else if (inner == virtual_outgoing_args_rtx)
3378 new = stack_pointer_rtx, offset = out_arg_offset;
3379 else if (inner == virtual_cfa_rtx)
3380 new = arg_pointer_rtx, offset = cfa_offset;
3381 else
3382 {
3383 loc = &XEXP (x, 0);
3384 goto restart;
3385 }
3386
3387 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3388 extra_insns);
3389 new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3390 }
3391
3392 else if (XEXP (x, 0) == virtual_incoming_args_rtx)
3393 new = arg_pointer_rtx, offset = in_arg_offset;
3394 else if (XEXP (x, 0) == virtual_stack_vars_rtx)
3395 new = frame_pointer_rtx, offset = var_offset;
3396 else if (XEXP (x, 0) == virtual_stack_dynamic_rtx)
3397 new = stack_pointer_rtx, offset = dynamic_offset;
3398 else if (XEXP (x, 0) == virtual_outgoing_args_rtx)
3399 new = stack_pointer_rtx, offset = out_arg_offset;
3400 else if (XEXP (x, 0) == virtual_cfa_rtx)
3401 new = arg_pointer_rtx, offset = cfa_offset;
3402 else
3403 {
3404 /* We know the second operand is a constant. Unless the
3405 first operand is a REG (which has already been checked),
3406 it needs to be checked. */
3407 if (GET_CODE (XEXP (x, 0)) != REG)
3408 {
3409 loc = &XEXP (x, 0);
3410 goto restart;
3411 }
3412 return 1;
3413 }
3414
3415 new_offset = plus_constant (XEXP (x, 1), offset);
3416
3417 /* If the new constant is zero, try to replace the sum with just
3418 the register. */
3419 if (new_offset == const0_rtx
3420 && validate_change (object, loc, new, 0))
3421 return 1;
3422
3423 /* Next try to replace the register and new offset.
3424 There are two changes to validate here and we can't assume that,
3425 when the old offset equals the new one, just changing the register
3426 will yield a valid insn. In the interests of a little efficiency,
3427 however, we only call validate change once (we don't queue up the
3428 changes and then call apply_change_group). */
3429
3430 old = XEXP (x, 0);
3431 if (offset == 0
3432 ? ! validate_change (object, &XEXP (x, 0), new, 0)
3433 : (XEXP (x, 0) = new,
3434 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
3435 {
3436 if (! extra_insns)
3437 {
3438 XEXP (x, 0) = old;
3439 return 0;
3440 }
3441
3442 	      /* Otherwise copy the new constant into a register and replace
3443 		 the constant with that register. */
3444 temp = gen_reg_rtx (Pmode);
3445 XEXP (x, 0) = new;
3446 if (validate_change (object, &XEXP (x, 1), temp, 0))
3447 emit_insn_before (gen_move_insn (temp, new_offset), object);
3448 else
3449 {
3450 /* If that didn't work, replace this expression with a
3451 register containing the sum. */
3452
3453 XEXP (x, 0) = old;
3454 new = gen_rtx_PLUS (Pmode, new, new_offset);
3455
3456 start_sequence ();
3457 temp = force_operand (new, NULL_RTX);
3458 seq = get_insns ();
3459 end_sequence ();
3460
3461 emit_insns_before (seq, object);
3462 if (! validate_change (object, loc, temp, 0)
3463 && ! validate_replace_rtx (x, temp, object))
3464 abort ();
3465 }
3466 }
3467
3468 return 1;
3469 }
3470
3471 /* Fall through to generic two-operand expression case. */
3472 case EXPR_LIST:
3473 case CALL:
3474 case COMPARE:
3475 case MINUS:
3476 case MULT:
3477 case DIV: case UDIV:
3478 case MOD: case UMOD:
3479 case AND: case IOR: case XOR:
3480 case ROTATERT: case ROTATE:
3481 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3482 case NE: case EQ:
3483 case GE: case GT: case GEU: case GTU:
3484 case LE: case LT: case LEU: case LTU:
3485 if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
3486 instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
3487 loc = &XEXP (x, 0);
3488 goto restart;
3489
3490 case MEM:
3491 /* Most cases of MEM that convert to valid addresses have already been
3492 handled by our scan of decls. The only special handling we
3493 need here is to make a copy of the rtx to ensure it isn't being
3494 shared if we have to change it to a pseudo.
3495
3496 If the rtx is a simple reference to an address via a virtual register,
3497 it can potentially be shared. In such cases, first try to make it
3498 a valid address, which can also be shared. Otherwise, copy it and
3499 proceed normally.
3500
3501 First check for common cases that need no processing. These are
3502 usually due to instantiation already being done on a previous instance
3503 of a shared rtx. */
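	 /* For instance, an address that is already
	    (plus:SI (reg:SI frame-pointer) (const_int 4))
	    falls out here with no further work (example for
	    illustration only).  */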
3504
3505 temp = XEXP (x, 0);
3506 if (CONSTANT_ADDRESS_P (temp)
3507 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3508 || temp == arg_pointer_rtx
3509 #endif
3510 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3511 || temp == hard_frame_pointer_rtx
3512 #endif
3513 || temp == frame_pointer_rtx)
3514 return 1;
3515
3516 if (GET_CODE (temp) == PLUS
3517 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3518 && (XEXP (temp, 0) == frame_pointer_rtx
3519 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3520 || XEXP (temp, 0) == hard_frame_pointer_rtx
3521 #endif
3522 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3523 || XEXP (temp, 0) == arg_pointer_rtx
3524 #endif
3525 ))
3526 return 1;
3527
3528 if (temp == virtual_stack_vars_rtx
3529 || temp == virtual_incoming_args_rtx
3530 || (GET_CODE (temp) == PLUS
3531 && CONSTANT_ADDRESS_P (XEXP (temp, 1))
3532 && (XEXP (temp, 0) == virtual_stack_vars_rtx
3533 || XEXP (temp, 0) == virtual_incoming_args_rtx)))
3534 {
3535 /* This MEM may be shared. If the substitution can be done without
3536 the need to generate new pseudos, we want to do it in place
3537 so all copies of the shared rtx benefit. The call below will
3538 only make substitutions if the resulting address is still
3539 valid.
3540
3541 Note that we cannot pass X as the object in the recursive call
3542 since the insn being processed may not allow all valid
3543 	     addresses. However, if we were not passed an object, we can
3544 only modify X without copying it if X will have a valid
3545 address.
3546
3547 ??? Also note that this can still lose if OBJECT is an insn that
3548 	     has fewer restrictions on an address than some other insn.
3549 In that case, we will modify the shared address. This case
3550 doesn't seem very likely, though. One case where this could
3551 happen is in the case of a USE or CLOBBER reference, but we
3552 take care of that below. */
3553
3554 if (instantiate_virtual_regs_1 (&XEXP (x, 0),
3555 object ? object : x, 0))
3556 return 1;
3557
3558 /* Otherwise make a copy and process that copy. We copy the entire
3559 RTL expression since it might be a PLUS which could also be
3560 shared. */
3561 *loc = x = copy_rtx (x);
3562 }
3563
3564 /* Fall through to generic unary operation case. */
3565 case SUBREG:
3566 case STRICT_LOW_PART:
3567 case NEG: case NOT:
3568 case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
3569 case SIGN_EXTEND: case ZERO_EXTEND:
3570 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3571 case FLOAT: case FIX:
3572 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3573 case ABS:
3574 case SQRT:
3575 case FFS:
3576       /* These cases either have just one operand or we know that we need not
3577 check the rest of the operands. */
3578 loc = &XEXP (x, 0);
3579 goto restart;
3580
3581 case USE:
3582 case CLOBBER:
3583       /* If the operand is a MEM, see if the change yields a valid MEM. If not,
3584 	 go ahead and make the invalid change anyway, but apply it to a copy. For a REG,
3585 just make the recursive call, since there's no chance of a problem. */
3586
3587 if ((GET_CODE (XEXP (x, 0)) == MEM
3588 && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
3589 0))
3590 || (GET_CODE (XEXP (x, 0)) == REG
3591 && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
3592 return 1;
3593
3594 XEXP (x, 0) = copy_rtx (XEXP (x, 0));
3595 loc = &XEXP (x, 0);
3596 goto restart;
3597
3598 case REG:
3599 /* Try to replace with a PLUS. If that doesn't work, compute the sum
3600 in front of this insn and substitute the temporary. */
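      /* For example, assuming in_arg_offset is 8 (illustrative):
	 (reg virtual-incoming-args) first becomes
	 (plus (reg arg-pointer) (const_int 8)); if the insn will not
	 accept that form, the sum is computed into a fresh pseudo before
	 the insn and the pseudo is substituted instead.  */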
3601 if (x == virtual_incoming_args_rtx)
3602 new = arg_pointer_rtx, offset = in_arg_offset;
3603 else if (x == virtual_stack_vars_rtx)
3604 new = frame_pointer_rtx, offset = var_offset;
3605 else if (x == virtual_stack_dynamic_rtx)
3606 new = stack_pointer_rtx, offset = dynamic_offset;
3607 else if (x == virtual_outgoing_args_rtx)
3608 new = stack_pointer_rtx, offset = out_arg_offset;
3609 else if (x == virtual_cfa_rtx)
3610 new = arg_pointer_rtx, offset = cfa_offset;
3611
3612 if (new)
3613 {
3614 temp = plus_constant (new, offset);
3615 if (!validate_change (object, loc, temp, 0))
3616 {
3617 if (! extra_insns)
3618 return 0;
3619
3620 start_sequence ();
3621 temp = force_operand (temp, NULL_RTX);
3622 seq = get_insns ();
3623 end_sequence ();
3624
3625 emit_insns_before (seq, object);
3626 if (! validate_change (object, loc, temp, 0)
3627 && ! validate_replace_rtx (x, temp, object))
3628 abort ();
3629 }
3630 }
3631
3632 return 1;
3633
3634 case ADDRESSOF:
3635 if (GET_CODE (XEXP (x, 0)) == REG)
3636 return 1;
3637
3638 else if (GET_CODE (XEXP (x, 0)) == MEM)
3639 {
3640 /* If we have a (addressof (mem ..)), do any instantiation inside
3641 since we know we'll be making the inside valid when we finally
3642 remove the ADDRESSOF. */
3643 instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
3644 return 1;
3645 }
3646 break;
3647
3648 default:
3649 break;
3650 }
3651
3652 /* Scan all subexpressions. */
3653 fmt = GET_RTX_FORMAT (code);
3654 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3655 if (*fmt == 'e')
3656 {
3657 if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
3658 return 0;
3659 }
3660 else if (*fmt == 'E')
3661 for (j = 0; j < XVECLEN (x, i); j++)
3662 if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
3663 extra_insns))
3664 return 0;
3665
3666 return 1;
3667 }
3668 \f
3669 /* Optimization: assuming this function does not receive nonlocal gotos,
3670 delete the handlers for such, as well as the insns to establish
3671 and disestablish them. */
3672
3673 static void
3674 delete_handlers ()
3675 {
3676 rtx insn;
3677 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3678 {
3679 /* Delete the handler by turning off the flag that would
3680 prevent jump_optimize from deleting it.
3681 Also permit deletion of the nonlocal labels themselves
3682 if nothing local refers to them. */
3683 if (GET_CODE (insn) == CODE_LABEL)
3684 {
3685 tree t, last_t;
3686
3687 LABEL_PRESERVE_P (insn) = 0;
3688
3689 /* Remove it from the nonlocal_label list, to avoid confusing
3690 flow. */
3691 for (t = nonlocal_labels, last_t = 0; t;
3692 last_t = t, t = TREE_CHAIN (t))
3693 if (DECL_RTL (TREE_VALUE (t)) == insn)
3694 break;
3695 if (t)
3696 {
3697 if (! last_t)
3698 nonlocal_labels = TREE_CHAIN (nonlocal_labels);
3699 else
3700 TREE_CHAIN (last_t) = TREE_CHAIN (t);
3701 }
3702 }
3703 if (GET_CODE (insn) == INSN)
3704 {
3705 int can_delete = 0;
3706 rtx t;
3707 for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
3708 if (reg_mentioned_p (t, PATTERN (insn)))
3709 {
3710 can_delete = 1;
3711 break;
3712 }
3713 if (can_delete
3714 || (nonlocal_goto_stack_level != 0
3715 && reg_mentioned_p (nonlocal_goto_stack_level,
3716 PATTERN (insn))))
3717 delete_insn (insn);
3718 }
3719 }
3720 }
3721
3722 /* Return a list (chain of EXPR_LIST nodes) for the nonlocal labels
3723 of the current function. */
3724
3725 rtx
3726 nonlocal_label_rtx_list ()
3727 {
3728 tree t;
3729 rtx x = 0;
3730
3731 for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
3732 x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
3733
3734 return x;
3735 }
3736 \f
3737 /* Output a USE for any register use in RTL.
3738    This is used with -noreg to mark the extent of the lifespan
3739 of any registers used in a user-visible variable's DECL_RTL. */
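/* E.g. for a variable living in pseudo 42 this emits (use (reg:SI 42));
   the register number and mode here are purely illustrative.  */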
3740
3741 void
3742 use_variable (rtl)
3743 rtx rtl;
3744 {
3745 if (GET_CODE (rtl) == REG)
3746 /* This is a register variable. */
3747 emit_insn (gen_rtx_USE (VOIDmode, rtl));
3748 else if (GET_CODE (rtl) == MEM
3749 && GET_CODE (XEXP (rtl, 0)) == REG
3750 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3751 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3752 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3753 /* This is a variable-sized structure. */
3754 emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
3755 }
3756
3757 /* Like use_variable except that it outputs the USEs after INSN
3758 instead of at the end of the insn-chain. */
3759
3760 void
3761 use_variable_after (rtl, insn)
3762 rtx rtl, insn;
3763 {
3764 if (GET_CODE (rtl) == REG)
3765 /* This is a register variable. */
3766 emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
3767 else if (GET_CODE (rtl) == MEM
3768 && GET_CODE (XEXP (rtl, 0)) == REG
3769 && (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
3770 || REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
3771 && XEXP (rtl, 0) != current_function_internal_arg_pointer)
3772 /* This is a variable-sized structure. */
3773 emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
3774 }
3775 \f
3776 int
3777 max_parm_reg_num ()
3778 {
3779 return max_parm_reg;
3780 }
3781
3782 /* Return the first insn following those generated by `assign_parms'. */
3783
3784 rtx
3785 get_first_nonparm_insn ()
3786 {
3787 if (last_parm_insn)
3788 return NEXT_INSN (last_parm_insn);
3789 return get_insns ();
3790 }
3791
3792 /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
3793 Crash if there is none. */
3794
3795 rtx
3796 get_first_block_beg ()
3797 {
3798 register rtx searcher;
3799 register rtx insn = get_first_nonparm_insn ();
3800
3801 for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
3802 if (GET_CODE (searcher) == NOTE
3803 && NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
3804 return searcher;
3805
3806 abort (); /* Invalid call to this function. (See comments above.) */
3807 return NULL_RTX;
3808 }
3809
3810 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
3811 This means a type for which function calls must pass an address to the
3812 function or get an address back from the function.
3813 EXP may be a type node or an expression (whose type is tested). */
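/* For example, with -fpcc-struct-return every structure or union type
   yields 1 here, while a plain `int' normally yields 0; the precise
   answer is target-dependent.  */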
3814
3815 int
3816 aggregate_value_p (exp)
3817 tree exp;
3818 {
3819 int i, regno, nregs;
3820 rtx reg;
3821 tree type;
3822 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 't')
3823 type = exp;
3824 else
3825 type = TREE_TYPE (exp);
3826
3827 if (RETURN_IN_MEMORY (type))
3828 return 1;
3829 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
3830 and thus can't be returned in registers. */
3831 if (TREE_ADDRESSABLE (type))
3832 return 1;
3833 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
3834 return 1;
3835 /* Make sure we have suitable call-clobbered regs to return
3836 the value in; if not, we must return it in memory. */
3837 reg = hard_function_value (type, 0);
3838
3839 /* If we have something other than a REG (e.g. a PARALLEL), then assume
3840 it is OK. */
3841 if (GET_CODE (reg) != REG)
3842 return 0;
3843
3844 regno = REGNO (reg);
3845 nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
3846 for (i = 0; i < nregs; i++)
3847 if (! call_used_regs[regno + i])
3848 return 1;
3849 return 0;
3850 }
3851 \f
3852 /* Assign RTL expressions to the function's parameters.
3853 This may involve copying them into registers and using
3854 those registers as the RTL for them.
3855
3856 If SECOND_TIME is non-zero it means that this function is being
3857 called a second time. This is done by integrate.c when a function's
3858 compilation is deferred. We need to come back here in case the
3859 FUNCTION_ARG macro computes items needed for the rest of the compilation
3860 (such as changing which registers are fixed or caller-saved). But suppress
3861 writing any insns or setting DECL_RTL of anything in this case. */
3862
3863 void
3864 assign_parms (fndecl, second_time)
3865 tree fndecl;
3866 int second_time;
3867 {
3868 register tree parm;
3869 register rtx entry_parm = 0;
3870 register rtx stack_parm = 0;
3871 CUMULATIVE_ARGS args_so_far;
3872 enum machine_mode promoted_mode, passed_mode;
3873 enum machine_mode nominal_mode, promoted_nominal_mode;
3874 int unsignedp;
3875 /* Total space needed so far for args on the stack,
3876 given as a constant and a tree-expression. */
3877 struct args_size stack_args_size;
3878 tree fntype = TREE_TYPE (fndecl);
3879 tree fnargs = DECL_ARGUMENTS (fndecl);
3880 /* This is used for the arg pointer when referring to stack args. */
3881 rtx internal_arg_pointer;
3882 /* This is a dummy PARM_DECL that we used for the function result if
3883 the function returns a structure. */
3884 tree function_result_decl = 0;
3885 int varargs_setup = 0;
3886 rtx conversion_insns = 0;
3887
3888 /* Nonzero if the last arg is named `__builtin_va_alist',
3889 which is used on some machines for old-fashioned non-ANSI varargs.h;
3890 this should be stuck onto the stack as if it had arrived there. */
3891 int hide_last_arg
3892 = (current_function_varargs
3893 && fnargs
3894 && (parm = tree_last (fnargs)) != 0
3895 && DECL_NAME (parm)
3896 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
3897 "__builtin_va_alist")));
3898
3899 /* Nonzero if function takes extra anonymous args.
3900 This means the last named arg must be on the stack
3901 right before the anonymous ones. */
3902 int stdarg
3903 = (TYPE_ARG_TYPES (fntype) != 0
3904 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3905 != void_type_node));
3906
3907 current_function_stdarg = stdarg;
3908
3909 /* If the reg that the virtual arg pointer will be translated into is
3910 not a fixed reg or is the stack pointer, make a copy of the virtual
3911 arg pointer, and address parms via the copy. The frame pointer is
3912 considered fixed even though it is not marked as such.
3913
3914 The second time through, simply use ap to avoid generating rtx. */
3915
3916 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
3917 || ! (fixed_regs[ARG_POINTER_REGNUM]
3918 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM))
3919 && ! second_time)
3920 internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
3921 else
3922 internal_arg_pointer = virtual_incoming_args_rtx;
3923 current_function_internal_arg_pointer = internal_arg_pointer;
3924
3925 stack_args_size.constant = 0;
3926 stack_args_size.var = 0;
3927
3928 /* If struct value address is treated as the first argument, make it so. */
3929 if (aggregate_value_p (DECL_RESULT (fndecl))
3930 && ! current_function_returns_pcc_struct
3931 && struct_value_incoming_rtx == 0)
3932 {
3933 tree type = build_pointer_type (TREE_TYPE (fntype));
3934
3935 function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
3936
3937 DECL_ARG_TYPE (function_result_decl) = type;
3938 TREE_CHAIN (function_result_decl) = fnargs;
3939 fnargs = function_result_decl;
3940 }
3941
3942 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
3943 parm_reg_stack_loc = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
3944 bzero ((char *) parm_reg_stack_loc, max_parm_reg * sizeof (rtx));
3945
3946 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
3947 INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
3948 #else
3949 INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
3950 #endif
3951
3952 /* We haven't yet found an argument that we must push and pretend the
3953 caller did. */
3954 current_function_pretend_args_size = 0;
3955
3956 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3957 {
3958 int aggregate = AGGREGATE_TYPE_P (TREE_TYPE (parm));
3959 struct args_size stack_offset;
3960 struct args_size arg_size;
3961 int passed_pointer = 0;
3962 int did_conversion = 0;
3963 tree passed_type = DECL_ARG_TYPE (parm);
3964 tree nominal_type = TREE_TYPE (parm);
3965
3966       /* Set LAST_NAMED if this is the last named arg before some
3967 anonymous args. */
3968 int last_named = ((TREE_CHAIN (parm) == 0
3969 || DECL_NAME (TREE_CHAIN (parm)) == 0)
3970 && (stdarg || current_function_varargs));
3971 /* Set NAMED_ARG if this arg should be treated as a named arg. For
3972 most machines, if this is a varargs/stdarg function, then we treat
3973 the last named arg as if it were anonymous too. */
3974 int named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
3975
3976 if (TREE_TYPE (parm) == error_mark_node
3977 /* This can happen after weird syntax errors
3978 or if an enum type is defined among the parms. */
3979 || TREE_CODE (parm) != PARM_DECL
3980 || passed_type == NULL)
3981 {
3982 DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
3983 = gen_rtx_MEM (BLKmode, const0_rtx);
3984 TREE_USED (parm) = 1;
3985 continue;
3986 }
3987
3988       /* For a varargs.h function, save info about regs and stack space
3989 used by the individual args, not including the va_alist arg. */
3990 if (hide_last_arg && last_named)
3991 current_function_args_info = args_so_far;
3992
3993 /* Find mode of arg as it is passed, and mode of arg
3994 as it should be during execution of this function. */
3995 passed_mode = TYPE_MODE (passed_type);
3996 nominal_mode = TYPE_MODE (nominal_type);
3997
3998 /* If the parm's mode is VOID, its value doesn't matter,
3999 	 so we avoid the usual things like emit_move_insn that could crash. */
4000 if (nominal_mode == VOIDmode)
4001 {
4002 DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = const0_rtx;
4003 continue;
4004 }
4005
4006 /* If the parm is to be passed as a transparent union, use the
4007 type of the first field for the tests below. We have already
4008 verified that the modes are the same. */
4009 if (DECL_TRANSPARENT_UNION (parm)
4010 || TYPE_TRANSPARENT_UNION (passed_type))
4011 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4012
4013 /* See if this arg was passed by invisible reference. It is if
4014 it is an object whose size depends on the contents of the
4015 object itself or if the machine requires these objects be passed
4016 that way. */
4017
4018 if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4019 && contains_placeholder_p (TYPE_SIZE (passed_type)))
4020 || TREE_ADDRESSABLE (passed_type)
4021 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4022 || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4023 passed_type, named_arg)
4024 #endif
4025 )
4026 {
4027 passed_type = nominal_type = build_pointer_type (passed_type);
4028 passed_pointer = 1;
4029 passed_mode = nominal_mode = Pmode;
4030 }
4031
4032 promoted_mode = passed_mode;
4033
4034 #ifdef PROMOTE_FUNCTION_ARGS
4035       /* Compute the mode to which the arg is actually extended. */
4036 unsignedp = TREE_UNSIGNED (passed_type);
4037 promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4038 #endif
4039
4040 /* Let machine desc say which reg (if any) the parm arrives in.
4041 0 means it arrives on the stack. */
4042 #ifdef FUNCTION_INCOMING_ARG
4043 entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4044 passed_type, named_arg);
4045 #else
4046 entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4047 passed_type, named_arg);
4048 #endif
4049
4050 if (entry_parm == 0)
4051 promoted_mode = passed_mode;
4052
4053 #ifdef SETUP_INCOMING_VARARGS
4054 /* If this is the last named parameter, do any required setup for
4055 varargs or stdargs. We need to know about the case of this being an
4056 addressable type, in which case we skip the registers it
4057 would have arrived in.
4058
4059 For stdargs, LAST_NAMED will be set for two parameters, the one that
4060 is actually the last named, and the dummy parameter. We only
4061 want to do this action once.
4062
4063 Also, indicate when RTL generation is to be suppressed. */
4064 if (last_named && !varargs_setup)
4065 {
4066 SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4067 current_function_pretend_args_size,
4068 second_time);
4069 varargs_setup = 1;
4070 }
4071 #endif
4072
4073 /* Determine parm's home in the stack,
4074 in case it arrives in the stack or we should pretend it did.
4075
4076 Compute the stack position and rtx where the argument arrives
4077 and its size.
4078
4079 There is one complexity here: If this was a parameter that would
4080 have been passed in registers, but wasn't only because it is
4081 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4082 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4083 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4084 0 as it was the previous time. */
4085
4086 locate_and_pad_parm (promoted_mode, passed_type,
4087 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4088 1,
4089 #else
4090 #ifdef FUNCTION_INCOMING_ARG
4091 FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4092 passed_type,
4093 (named_arg
4094 || varargs_setup)) != 0,
4095 #else
4096 FUNCTION_ARG (args_so_far, promoted_mode,
4097 passed_type,
4098 named_arg || varargs_setup) != 0,
4099 #endif
4100 #endif
4101 fndecl, &stack_args_size, &stack_offset, &arg_size);
4102
4103 if (! second_time)
4104 {
4105 rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4106
4107 if (offset_rtx == const0_rtx)
4108 stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4109 else
4110 stack_parm = gen_rtx_MEM (promoted_mode,
4111 gen_rtx_PLUS (Pmode,
4112 internal_arg_pointer,
4113 offset_rtx));
4114
4115 /* If this is a memory ref that contains aggregate components,
4116 mark it as such for cse and loop optimize. Likewise if it
4117 is readonly. */
4118 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4119 RTX_UNCHANGING_P (stack_parm) = TREE_READONLY (parm);
4120 MEM_ALIAS_SET (stack_parm) = get_alias_set (parm);
4121 }
4122
4123 /* If this parameter was passed both in registers and in the stack,
4124 use the copy on the stack. */
4125 if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4126 entry_parm = 0;
4127
4128 #ifdef FUNCTION_ARG_PARTIAL_NREGS
4129 /* If this parm was passed part in regs and part in memory,
4130 pretend it arrived entirely in memory
4131 by pushing the register-part onto the stack.
4132
4133 In the special case of a DImode or DFmode that is split,
4134 we could put it together in a pseudoreg directly,
4135 but for now that's not worth bothering with. */
4136
4137 if (entry_parm)
4138 {
4139 int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4140 passed_type, named_arg);
4141
4142 if (nregs > 0)
4143 {
4144 current_function_pretend_args_size
4145 = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4146 / (PARM_BOUNDARY / BITS_PER_UNIT)
4147 * (PARM_BOUNDARY / BITS_PER_UNIT));
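	      /* That is, nregs * UNITS_PER_WORD rounded up to a multiple
		 of PARM_BOUNDARY bytes: e.g. three 4-byte regs against a
		 64-bit PARM_BOUNDARY give 16 rather than 12
		 (illustrative values).  */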
4148
4149 if (! second_time)
4150 {
4151 /* Handle calls that pass values in multiple non-contiguous
4152 locations. The Irix 6 ABI has examples of this. */
4153 if (GET_CODE (entry_parm) == PARALLEL)
4154 emit_group_store (validize_mem (stack_parm), entry_parm,
4155 int_size_in_bytes (TREE_TYPE (parm)),
4156 (TYPE_ALIGN (TREE_TYPE (parm))
4157 / BITS_PER_UNIT));
4158 else
4159 move_block_from_reg (REGNO (entry_parm),
4160 validize_mem (stack_parm), nregs,
4161 int_size_in_bytes (TREE_TYPE (parm)));
4162 }
4163 entry_parm = stack_parm;
4164 }
4165 }
4166 #endif
4167
4168 /* If we didn't decide this parm came in a register,
4169 by default it came on the stack. */
4170 if (entry_parm == 0)
4171 entry_parm = stack_parm;
4172
4173 /* Record permanently how this parm was passed. */
4174 if (! second_time)
4175 DECL_INCOMING_RTL (parm) = entry_parm;
4176
4177 /* If there is actually space on the stack for this parm,
4178 count it in stack_args_size; otherwise set stack_parm to 0
4179 to indicate there is no preallocated stack slot for the parm. */
4180
4181 if (entry_parm == stack_parm
4182 #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4183 /* On some machines, even if a parm value arrives in a register
4184 there is still an (uninitialized) stack slot allocated for it.
4185
4186 ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4187 whether this parameter already has a stack slot allocated,
4188 because an arg block exists only if current_function_args_size
4189 is larger than some threshold, and we haven't calculated that
4190 yet. So, for now, we just assume that stack slots never exist
4191 in this case. */
4192 || REG_PARM_STACK_SPACE (fndecl) > 0
4193 #endif
4194 )
4195 {
4196 stack_args_size.constant += arg_size.constant;
4197 if (arg_size.var)
4198 ADD_PARM_SIZE (stack_args_size, arg_size.var);
4199 }
4200 else
4201 /* No stack slot was pushed for this parm. */
4202 stack_parm = 0;
4203
4204 /* Update info on where next arg arrives in registers. */
4205
4206 FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4207 passed_type, named_arg);
4208
4209 /* If this is our second time through, we are done with this parm. */
4210 if (second_time)
4211 continue;
4212
4213 /* If we can't trust the parm stack slot to be aligned enough
4214 for its ultimate type, don't use that slot after entry.
4215 We'll make another stack slot, if we need one. */
4216 {
4217 int thisparm_boundary
4218 = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4219
4220 if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4221 stack_parm = 0;
4222 }
4223
4224 /* If parm was passed in memory, and we need to convert it on entry,
4225 don't store it back in that same slot. */
4226 if (entry_parm != 0
4227 && nominal_mode != BLKmode && nominal_mode != passed_mode)
4228 stack_parm = 0;
4229
4230 #if 0
4231 /* Now adjust STACK_PARM to the mode and precise location
4232 where this parameter should live during execution,
4233 if we discover that it must live in the stack during execution.
4234 To make debuggers happier on big-endian machines, we store
4235 the value in the last bytes of the space available. */
4236
4237 if (nominal_mode != BLKmode && nominal_mode != passed_mode
4238 && stack_parm != 0)
4239 {
4240 rtx offset_rtx;
4241
4242 if (BYTES_BIG_ENDIAN
4243 && GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD)
4244 stack_offset.constant += (GET_MODE_SIZE (passed_mode)
4245 - GET_MODE_SIZE (nominal_mode));
4246
4247 offset_rtx = ARGS_SIZE_RTX (stack_offset);
4248 if (offset_rtx == const0_rtx)
4249 stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
4250 else
4251 stack_parm = gen_rtx_MEM (nominal_mode,
4252 gen_rtx_PLUS (Pmode,
4253 internal_arg_pointer,
4254 offset_rtx));
4255
4256 /* If this is a memory ref that contains aggregate components,
4257 mark it as such for cse and loop optimize. */
4258 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4259 }
4260 #endif /* 0 */
4261
4262 #ifdef STACK_REGS
4263 /* We need this "use" info, because the gcc-register->stack-register
4264 converter in reg-stack.c needs to know which registers are active
4265 at the start of the function call. The actual parameter loading
4266 	 instructions are not always still available at that point, since
4267 	 they might have been optimized away. */
4268
4269 if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
4270 emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
4271 #endif
4272
4273 /* ENTRY_PARM is an RTX for the parameter as it arrives,
4274 in the mode in which it arrives.
4275 STACK_PARM is an RTX for a stack slot where the parameter can live
4276 during the function (in case we want to put it there).
4277 STACK_PARM is 0 if no stack slot was pushed for it.
4278
4279 Now output code if necessary to convert ENTRY_PARM to
4280 the type in which this function declares it,
4281 and store that result in an appropriate place,
4282 which may be a pseudo reg, may be STACK_PARM,
4283 or may be a local stack slot if STACK_PARM is 0.
4284
4285 Set DECL_RTL to that place. */
4286
4287 if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4288 {
4289 /* If a BLKmode arrives in registers, copy it to a stack slot.
4290 Handle calls that pass values in multiple non-contiguous
4291 locations. The Irix 6 ABI has examples of this. */
4292 if (GET_CODE (entry_parm) == REG
4293 || GET_CODE (entry_parm) == PARALLEL)
4294 {
4295 int size_stored
4296 = CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4297 UNITS_PER_WORD);
4298
4299 /* Note that we will be storing an integral number of words.
4300 So we have to be careful to ensure that we allocate an
4301 integral number of words. We do this below in the
4302 assign_stack_local if space was not allocated in the argument
4303 list. If it was, this will not work if PARM_BOUNDARY is not
4304 a multiple of BITS_PER_WORD. It isn't clear how to fix this
4305 if it becomes a problem. */
4306
4307 if (stack_parm == 0)
4308 {
4309 stack_parm
4310 = assign_stack_local (GET_MODE (entry_parm),
4311 size_stored, 0);
4312
4313 /* If this is a memory ref that contains aggregate
4314 components, mark it as such for cse and loop optimize. */
4315 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4316 }
4317
4318 else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4319 abort ();
4320
4321 if (TREE_READONLY (parm))
4322 RTX_UNCHANGING_P (stack_parm) = 1;
4323
4324 /* Handle calls that pass values in multiple non-contiguous
4325 locations. The Irix 6 ABI has examples of this. */
4326 if (GET_CODE (entry_parm) == PARALLEL)
4327 emit_group_store (validize_mem (stack_parm), entry_parm,
4328 int_size_in_bytes (TREE_TYPE (parm)),
4329 (TYPE_ALIGN (TREE_TYPE (parm))
4330 / BITS_PER_UNIT));
4331 else
4332 move_block_from_reg (REGNO (entry_parm),
4333 validize_mem (stack_parm),
4334 size_stored / UNITS_PER_WORD,
4335 int_size_in_bytes (TREE_TYPE (parm)));
4336 }
4337 DECL_RTL (parm) = stack_parm;
4338 }
4339 else if (! ((obey_regdecls && ! DECL_REGISTER (parm)
4340 && ! DECL_INLINE (fndecl))
4341 /* layout_decl may set this. */
4342 || TREE_ADDRESSABLE (parm)
4343 || TREE_SIDE_EFFECTS (parm)
4344 /* If -ffloat-store specified, don't put explicit
4345 float variables into registers. */
4346 || (flag_float_store
4347 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4348 /* Always assign pseudo to structure return or item passed
4349 by invisible reference. */
4350 || passed_pointer || parm == function_result_decl)
4351 {
4352 /* Store the parm in a pseudoregister during the function, but we
4353 may need to do it in a wider mode. */
4354
4355 register rtx parmreg;
4356 int regno, regnoi = 0, regnor = 0;
4357
4358 unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4359
4360 promoted_nominal_mode
4361 = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4362
4363 parmreg = gen_reg_rtx (promoted_nominal_mode);
4364 mark_user_reg (parmreg);
4365
4366 /* If this was an item that we received a pointer to, set DECL_RTL
4367 appropriately. */
4368 if (passed_pointer)
4369 {
4370 DECL_RTL (parm)
4371 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
4372 MEM_SET_IN_STRUCT_P (DECL_RTL (parm), aggregate);
4373 }
4374 else
4375 DECL_RTL (parm) = parmreg;
4376
4377 /* Copy the value into the register. */
4378 if (nominal_mode != passed_mode
4379 || promoted_nominal_mode != promoted_mode)
4380 {
4381 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4382 mode, by the caller. We now have to convert it to
4383 NOMINAL_MODE, if different. However, PARMREG may be in
4384 a different mode than NOMINAL_MODE if it is being stored
4385 promoted.
4386
4387 If ENTRY_PARM is a hard register, it might be in a register
4388 not valid for operating in its mode (e.g., an odd-numbered
4389 register for a DFmode). In that case, moves are the only
4390 thing valid, so we can't do a convert from there. This
4391 	     occurs when the calling sequence allows such misaligned
4392 usages.
4393
4394 In addition, the conversion may involve a call, which could
4395 clobber parameters which haven't been copied to pseudo
4396 registers yet. Therefore, we must first copy the parm to
4397 a pseudo reg here, and save the conversion until after all
4398 parameters have been moved. */
4399
4400 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4401
4402 emit_move_insn (tempreg, validize_mem (entry_parm));
4403
4404 push_to_sequence (conversion_insns);
4405 tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4406
4407 expand_assignment (parm,
4408 make_tree (nominal_type, tempreg), 0, 0);
4409 conversion_insns = get_insns ();
4410 did_conversion = 1;
4411 end_sequence ();
4412 }
4413 else
4414 emit_move_insn (parmreg, validize_mem (entry_parm));
4415
4416 /* If we were passed a pointer but the actual value
4417 can safely live in a register, put it in one. */
4418 if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4419 && ! ((obey_regdecls && ! DECL_REGISTER (parm)
4420 && ! DECL_INLINE (fndecl))
4421 /* layout_decl may set this. */
4422 || TREE_ADDRESSABLE (parm)
4423 || TREE_SIDE_EFFECTS (parm)
4424 /* If -ffloat-store specified, don't put explicit
4425 float variables into registers. */
4426 || (flag_float_store
4427 && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE)))
4428 {
4429 /* We can't use nominal_mode, because it will have been set to
4430 Pmode above. We must use the actual mode of the parm. */
4431 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4432 mark_user_reg (parmreg);
4433 emit_move_insn (parmreg, DECL_RTL (parm));
4434 DECL_RTL (parm) = parmreg;
4435 /* STACK_PARM is the pointer, not the parm, and PARMREG is
4436 now the parm. */
4437 stack_parm = 0;
4438 }
4439 #ifdef FUNCTION_ARG_CALLEE_COPIES
4440 /* If we are passed an arg by reference and it is our responsibility
4441 to make a copy, do it now.
4442 	 PASSED_TYPE and PASSED_MODE now refer to the pointer, not the
4443 original argument, so we must recreate them in the call to
4444 FUNCTION_ARG_CALLEE_COPIES. */
4445       /* ??? Later, add code to skip the copy when the argument isn't
4446 	 modified. */
4447
4448 else if (passed_pointer
4449 && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4450 TYPE_MODE (DECL_ARG_TYPE (parm)),
4451 DECL_ARG_TYPE (parm),
4452 named_arg)
4453 && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4454 {
4455 rtx copy;
4456 tree type = DECL_ARG_TYPE (parm);
4457
4458 /* This sequence may involve a library call perhaps clobbering
4459 registers that haven't been copied to pseudos yet. */
4460
4461 push_to_sequence (conversion_insns);
4462
4463 if (TYPE_SIZE (type) == 0
4464 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4465 	    /* This is a variable-sized object. */
4466 copy = gen_rtx_MEM (BLKmode,
4467 allocate_dynamic_stack_space
4468 (expr_size (parm), NULL_RTX,
4469 TYPE_ALIGN (type)));
4470 else
4471 copy = assign_stack_temp (TYPE_MODE (type),
4472 int_size_in_bytes (type), 1);
4473 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
4474 RTX_UNCHANGING_P (copy) = TREE_READONLY (parm);
4475
4476 store_expr (parm, copy, 0);
4477 emit_move_insn (parmreg, XEXP (copy, 0));
4478 if (current_function_check_memory_usage)
4479 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4480 XEXP (copy, 0), ptr_mode,
4481 GEN_INT (int_size_in_bytes (type)),
4482 TYPE_MODE (sizetype),
4483 GEN_INT (MEMORY_USE_RW),
4484 TYPE_MODE (integer_type_node));
4485 conversion_insns = get_insns ();
4486 did_conversion = 1;
4487 end_sequence ();
4488 }
4489 #endif /* FUNCTION_ARG_CALLEE_COPIES */
4490
4491 /* In any case, record the parm's desired stack location
4492 in case we later discover it must live in the stack.
4493
4494 If it is a COMPLEX value, store the stack location for both
4495 halves. */
4496
4497 if (GET_CODE (parmreg) == CONCAT)
4498 regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4499 else
4500 regno = REGNO (parmreg);
4501
4502 if (regno >= max_parm_reg)
4503 {
4504 rtx *new;
4505 int old_max_parm_reg = max_parm_reg;
4506
4507 /* It's slow to expand this one register at a time,
4508 but it's also rare and we need max_parm_reg to be
4509 precisely correct. */
4510 max_parm_reg = regno + 1;
4511 new = (rtx *) savealloc (max_parm_reg * sizeof (rtx));
4512 bcopy ((char *) parm_reg_stack_loc, (char *) new,
4513 old_max_parm_reg * sizeof (rtx));
4514 bzero ((char *) (new + old_max_parm_reg),
4515 (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
4516 parm_reg_stack_loc = new;
4517 }
4518
4519 if (GET_CODE (parmreg) == CONCAT)
4520 {
4521 enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
4522
4523 regnor = REGNO (gen_realpart (submode, parmreg));
4524 regnoi = REGNO (gen_imagpart (submode, parmreg));
4525
4526 if (stack_parm != 0)
4527 {
4528 parm_reg_stack_loc[regnor]
4529 = gen_realpart (submode, stack_parm);
4530 parm_reg_stack_loc[regnoi]
4531 = gen_imagpart (submode, stack_parm);
4532 }
4533 else
4534 {
4535 parm_reg_stack_loc[regnor] = 0;
4536 parm_reg_stack_loc[regnoi] = 0;
4537 }
4538 }
4539 else
4540 parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
4541
4542 /* Mark the register as eliminable if we did no conversion
4543 and it was copied from memory at a fixed offset,
4544 and the arg pointer was not copied to a pseudo-reg.
4545 If the arg pointer is a pseudo reg or the offset formed
4546 an invalid address, such memory-equivalences
4547 as we make here would screw up life analysis for it. */
4548 if (nominal_mode == passed_mode
4549 && ! did_conversion
4550 && stack_parm != 0
4551 && GET_CODE (stack_parm) == MEM
4552 && stack_offset.var == 0
4553 && reg_mentioned_p (virtual_incoming_args_rtx,
4554 XEXP (stack_parm, 0)))
4555 {
4556 rtx linsn = get_last_insn ();
4557 rtx sinsn, set;
4558
4559 /* Mark complex types separately. */
4560 if (GET_CODE (parmreg) == CONCAT)
4561 /* Scan backwards for the set of the real and
4562 imaginary parts. */
4563 for (sinsn = linsn; sinsn != 0;
4564 sinsn = prev_nonnote_insn (sinsn))
4565 {
4566 set = single_set (sinsn);
4567 if (set != 0
4568 && SET_DEST (set) == regno_reg_rtx [regnoi])
4569 REG_NOTES (sinsn)
4570 = gen_rtx_EXPR_LIST (REG_EQUIV,
4571 parm_reg_stack_loc[regnoi],
4572 REG_NOTES (sinsn));
4573 else if (set != 0
4574 && SET_DEST (set) == regno_reg_rtx [regnor])
4575 REG_NOTES (sinsn)
4576 = gen_rtx_EXPR_LIST (REG_EQUIV,
4577 parm_reg_stack_loc[regnor],
4578 REG_NOTES (sinsn));
4579 }
4580 else if ((set = single_set (linsn)) != 0
4581 && SET_DEST (set) == parmreg)
4582 REG_NOTES (linsn)
4583 = gen_rtx_EXPR_LIST (REG_EQUIV,
4584 stack_parm, REG_NOTES (linsn));
4585 }
4586
4587 /* For pointer data type, suggest pointer register. */
4588 if (POINTER_TYPE_P (TREE_TYPE (parm)))
4589 mark_reg_pointer (parmreg,
4590 (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))
4591 / BITS_PER_UNIT));
4592 }
4593 else
4594 {
4595 /* Value must be stored in the stack slot STACK_PARM
4596 during function execution. */
4597
4598 if (promoted_mode != nominal_mode)
4599 {
4600 /* Conversion is required. */
4601 rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4602
4603 emit_move_insn (tempreg, validize_mem (entry_parm));
4604
4605 push_to_sequence (conversion_insns);
4606 entry_parm = convert_to_mode (nominal_mode, tempreg,
4607 TREE_UNSIGNED (TREE_TYPE (parm)));
4608 if (stack_parm)
4609 {
4610 /* ??? This may need a big-endian conversion on sparc64. */
4611 stack_parm = change_address (stack_parm, nominal_mode,
4612 NULL_RTX);
4613 }
4614 conversion_insns = get_insns ();
4615 did_conversion = 1;
4616 end_sequence ();
4617 }
4618
4619 if (entry_parm != stack_parm)
4620 {
4621 if (stack_parm == 0)
4622 {
4623 stack_parm
4624 = assign_stack_local (GET_MODE (entry_parm),
4625 GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
4626 /* If this is a memory ref that contains aggregate components,
4627 mark it as such for cse and loop optimize. */
4628 MEM_SET_IN_STRUCT_P (stack_parm, aggregate);
4629 }
4630
4631 if (promoted_mode != nominal_mode)
4632 {
4633 push_to_sequence (conversion_insns);
4634 emit_move_insn (validize_mem (stack_parm),
4635 validize_mem (entry_parm));
4636 conversion_insns = get_insns ();
4637 end_sequence ();
4638 }
4639 else
4640 emit_move_insn (validize_mem (stack_parm),
4641 validize_mem (entry_parm));
4642 }
4643 if (current_function_check_memory_usage)
4644 {
4645 push_to_sequence (conversion_insns);
4646 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
4647 XEXP (stack_parm, 0), ptr_mode,
4648 GEN_INT (GET_MODE_SIZE (GET_MODE
4649 (entry_parm))),
4650 TYPE_MODE (sizetype),
4651 GEN_INT (MEMORY_USE_RW),
4652 TYPE_MODE (integer_type_node));
4653
4654 conversion_insns = get_insns ();
4655 end_sequence ();
4656 }
4657 DECL_RTL (parm) = stack_parm;
4658 }
4659
4660 /* If this "parameter" was the place where we are receiving the
4661 function's incoming structure pointer, set up the result. */
4662 if (parm == function_result_decl)
4663 {
4664 tree result = DECL_RESULT (fndecl);
4665 tree restype = TREE_TYPE (result);
4666
4667 DECL_RTL (result)
4668 = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
4669
4670 MEM_SET_IN_STRUCT_P (DECL_RTL (result),
4671 AGGREGATE_TYPE_P (restype));
4672 }
4673
4674 if (TREE_THIS_VOLATILE (parm))
4675 MEM_VOLATILE_P (DECL_RTL (parm)) = 1;
4676 if (TREE_READONLY (parm))
4677 RTX_UNCHANGING_P (DECL_RTL (parm)) = 1;
4678 }
4679
4680 /* Output all parameter conversion instructions (possibly including calls)
4681 now that all parameters have been copied out of hard registers. */
4682 emit_insns (conversion_insns);
4683
4684 last_parm_insn = get_last_insn ();
4685
4686 current_function_args_size = stack_args_size.constant;
4687
4688 /* Adjust function incoming argument size for alignment and
4689 minimum length. */
4690
4691 #ifdef REG_PARM_STACK_SPACE
4692 #ifndef MAYBE_REG_PARM_STACK_SPACE
4693 current_function_args_size = MAX (current_function_args_size,
4694 REG_PARM_STACK_SPACE (fndecl));
4695 #endif
4696 #endif
4697
4698 #ifdef PREFERRED_STACK_BOUNDARY
4699 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
4700
4701 current_function_args_size
4702 = ((current_function_args_size + STACK_BYTES - 1)
4703 / STACK_BYTES) * STACK_BYTES;
4704 #endif
4705
4706 #ifdef ARGS_GROW_DOWNWARD
4707 current_function_arg_offset_rtx
4708 = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
4709 : expand_expr (size_binop (MINUS_EXPR, stack_args_size.var,
4710 size_int (-stack_args_size.constant)),
4711 NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD));
4712 #else
4713 current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
4714 #endif
4715
4716 /* See how many bytes, if any, of its args a function should try to pop
4717 on return. */
4718
4719 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
4720 current_function_args_size);
4721
4722   /* For a stdarg.h function, save info about
4723 regs and stack space used by the named args. */
4724
4725 if (!hide_last_arg)
4726 current_function_args_info = args_so_far;
4727
4728 /* Set the rtx used for the function return value. Put this in its
4729 own variable so any optimizers that need this information don't have
4730 to include tree.h. Do this here so it gets done when an inlined
4731 function gets output. */
4732
4733 current_function_return_rtx = DECL_RTL (DECL_RESULT (fndecl));
4734 }
4735 \f
4736 /* Indicate whether REGNO is an incoming argument to the current function
4737 that was promoted to a wider mode. If so, return the RTX for the
4738 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
4739 that REGNO is promoted from and whether the promotion was signed or
4740 unsigned. */
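/* For instance, on a hypothetical target whose PROMOTE_FUNCTION_ARGS
   widens a `short' argument to SImode, calling this with that argument's
   regno returns the incoming (reg:SI ...) and sets *pmode to HImode.  */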
4741
4742 #ifdef PROMOTE_FUNCTION_ARGS
4743
4744 rtx
4745 promoted_input_arg (regno, pmode, punsignedp)
4746 int regno;
4747 enum machine_mode *pmode;
4748 int *punsignedp;
4749 {
4750 tree arg;
4751
4752 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
4753 arg = TREE_CHAIN (arg))
4754 if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
4755 && REGNO (DECL_INCOMING_RTL (arg)) == regno
4756 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
4757 {
4758 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
4759 int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
4760
4761 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
4762 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
4763 && mode != DECL_MODE (arg))
4764 {
4765 *pmode = DECL_MODE (arg);
4766 *punsignedp = unsignedp;
4767 return DECL_INCOMING_RTL (arg);
4768 }
4769 }
4770
4771 return 0;
4772 }
4773
4774 #endif
4775 \f
4776 /* Compute the size and offset from the start of the stacked arguments for a
4777 parm passed in mode PASSED_MODE and with type TYPE.
4778
4779 INITIAL_OFFSET_PTR points to the current offset into the stacked
4780 arguments.
4781
4782 The starting offset and size for this parm are returned in *OFFSET_PTR
4783 and *ARG_SIZE_PTR, respectively.
4784
4785 IN_REGS is non-zero if the argument will be passed in registers. It will
4786 never be set if REG_PARM_STACK_SPACE is not defined.
4787
4788 FNDECL is the function in which the argument was defined.
4789
4790 There are two types of rounding that are done. The first, controlled by
4791 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
4792 list to be aligned to the specific boundary (in bits). This rounding
4793 affects the initial and starting offsets, but not the argument size.
4794
4795 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
4796 optionally rounds the size of the parm to PARM_BOUNDARY. The
4797 initial offset is not affected by this rounding, while the size always
4798 is and the starting offset may be. */
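/* A worked example with made-up target values: if the offset so far is 6,
   FUNCTION_ARG_BOUNDARY is 32 bits, and the parm is 3 bytes wide with
   upward padding, the starting offset is rounded to 8 and the size is
   rounded to 4, so the following parm begins at offset 12.  */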
4799
4800 /* offset_ptr will be negative for the ARGS_GROW_DOWNWARD case;
4801    initial_offset_ptr is positive because locate_and_pad_parm's
4802    callers pass in the total size of args so far as
4803    initial_offset_ptr. arg_size_ptr is always positive. */
4804
4805 void
4806 locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
4807 initial_offset_ptr, offset_ptr, arg_size_ptr)
4808 enum machine_mode passed_mode;
4809 tree type;
4810 int in_regs;
4811 tree fndecl;
4812 struct args_size *initial_offset_ptr;
4813 struct args_size *offset_ptr;
4814 struct args_size *arg_size_ptr;
4815 {
4816 tree sizetree
4817 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
4818 enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
4819 int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
4820
4821 #ifdef REG_PARM_STACK_SPACE
4822 /* If we have found a stack parm before we reach the end of the
4823 area reserved for registers, skip that area. */
4824 if (! in_regs)
4825 {
4826 int reg_parm_stack_space = 0;
4827
4828 #ifdef MAYBE_REG_PARM_STACK_SPACE
4829 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
4830 #else
4831 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
4832 #endif
4833 if (reg_parm_stack_space > 0)
4834 {
4835 if (initial_offset_ptr->var)
4836 {
4837 initial_offset_ptr->var
4838 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
4839 size_int (reg_parm_stack_space));
4840 initial_offset_ptr->constant = 0;
4841 }
4842 else if (initial_offset_ptr->constant < reg_parm_stack_space)
4843 initial_offset_ptr->constant = reg_parm_stack_space;
4844 }
4845 }
4846 #endif /* REG_PARM_STACK_SPACE */
4847
4848 arg_size_ptr->var = 0;
4849 arg_size_ptr->constant = 0;
4850
4851 #ifdef ARGS_GROW_DOWNWARD
4852 if (initial_offset_ptr->var)
4853 {
4854 offset_ptr->constant = 0;
4855 offset_ptr->var = size_binop (MINUS_EXPR, integer_zero_node,
4856 initial_offset_ptr->var);
4857 }
4858 else
4859 {
4860 offset_ptr->constant = - initial_offset_ptr->constant;
4861 offset_ptr->var = 0;
4862 }
4863 if (where_pad != none
4864 && (TREE_CODE (sizetree) != INTEGER_CST
4865 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4866 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4867 SUB_PARM_SIZE (*offset_ptr, sizetree);
4868 if (where_pad != downward)
4869 pad_to_arg_alignment (offset_ptr, boundary);
4870 if (initial_offset_ptr->var)
4871 {
4872 arg_size_ptr->var = size_binop (MINUS_EXPR,
4873 size_binop (MINUS_EXPR,
4874 integer_zero_node,
4875 initial_offset_ptr->var),
4876 offset_ptr->var);
4877 }
4878 else
4879 {
4880 arg_size_ptr->constant = (- initial_offset_ptr->constant
4881 - offset_ptr->constant);
4882 }
4883 #else /* !ARGS_GROW_DOWNWARD */
4884 pad_to_arg_alignment (initial_offset_ptr, boundary);
4885 *offset_ptr = *initial_offset_ptr;
4886
4887 #ifdef PUSH_ROUNDING
4888 if (passed_mode != BLKmode)
4889 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
4890 #endif
4891
4892   /* Pad_below needs the pre-rounded size to know how much to pad below,
4893 so this must be done before rounding up. */
4894 if (where_pad == downward
4895 /* However, BLKmode args passed in regs have their padding done elsewhere.
4896 The stack slot must be able to hold the entire register. */
4897 && !(in_regs && passed_mode == BLKmode))
4898 pad_below (offset_ptr, passed_mode, sizetree);
4899
4900 if (where_pad != none
4901 && (TREE_CODE (sizetree) != INTEGER_CST
4902 || ((TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)))
4903 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4904
4905 ADD_PARM_SIZE (*arg_size_ptr, sizetree);
4906 #endif /* ARGS_GROW_DOWNWARD */
4907 }
4908
4909 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
4910 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
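/* E.g. with BOUNDARY == 64 a constant offset of 20 becomes 24 here,
   or 16 when the arguments grow downward (illustrative numbers).  */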
4911
4912 static void
4913 pad_to_arg_alignment (offset_ptr, boundary)
4914 struct args_size *offset_ptr;
4915 int boundary;
4916 {
4917 int boundary_in_bytes = boundary / BITS_PER_UNIT;
4918
4919 if (boundary > BITS_PER_UNIT)
4920 {
4921 if (offset_ptr->var)
4922 {
4923 offset_ptr->var =
4924 #ifdef ARGS_GROW_DOWNWARD
4925 round_down
4926 #else
4927 round_up
4928 #endif
4929 (ARGS_SIZE_TREE (*offset_ptr),
4930 boundary / BITS_PER_UNIT);
4931 offset_ptr->constant = 0; /*?*/
4932 }
4933 else
4934 offset_ptr->constant =
4935 #ifdef ARGS_GROW_DOWNWARD
4936 FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
4937 #else
4938 CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
4939 #endif
4940 }
4941 }
4942
4943 #ifndef ARGS_GROW_DOWNWARD
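/* Adjust *OFFSET_PTR so that a parm narrower than PARM_BOUNDARY is padded
   below, i.e. toward lower addresses.  For example, an HImode parm with a
   32-bit PARM_BOUNDARY gets 2 bytes of padding below it (illustrative
   values).  */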
4944 static void
4945 pad_below (offset_ptr, passed_mode, sizetree)
4946 struct args_size *offset_ptr;
4947 enum machine_mode passed_mode;
4948 tree sizetree;
4949 {
4950 if (passed_mode != BLKmode)
4951 {
4952 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
4953 offset_ptr->constant
4954 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
4955 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
4956 - GET_MODE_SIZE (passed_mode));
4957 }
4958 else
4959 {
4960 if (TREE_CODE (sizetree) != INTEGER_CST
4961 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
4962 {
4963 /* Round the size up to multiple of PARM_BOUNDARY bits. */
4964 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
4965 /* Add it in. */
4966 ADD_PARM_SIZE (*offset_ptr, s2);
4967 SUB_PARM_SIZE (*offset_ptr, sizetree);
4968 }
4969 }
4970 }
4971 #endif
4972
4973 #ifdef ARGS_GROW_DOWNWARD
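/* Round VALUE down to the nearest multiple of DIVISOR, returned as a tree;
   e.g. a VALUE of 23 rounds down to 16 for DIVISOR == 8 (illustrative
   numbers).  */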
4974 static tree
4975 round_down (value, divisor)
4976 tree value;
4977 int divisor;
4978 {
4979 return size_binop (MULT_EXPR,
4980 size_binop (FLOOR_DIV_EXPR, value, size_int (divisor)),
4981 size_int (divisor));
4982 }
4983 #endif
4984 \f
4985 /* Walk the tree of blocks describing the binding levels within a function
4986 and warn about uninitialized variables.
4987 This is done after calling flow_analysis and before global_alloc
4988 clobbers the pseudo-regs to hard regs. */
4989
4990 void
4991 uninitialized_vars_warning (block)
4992 tree block;
4993 {
4994 register tree decl, sub;
4995 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
4996 {
4997 if (TREE_CODE (decl) == VAR_DECL
4998 	/* These warnings are unreliable for aggregates
4999 because assigning the fields one by one can fail to convince
5000 flow.c that the entire aggregate was initialized.
5001 Unions are troublesome because members may be shorter. */
5002 && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
5003 && DECL_RTL (decl) != 0
5004 && GET_CODE (DECL_RTL (decl)) == REG
5005 /* Global optimizations can make it difficult to determine if a
5006 particular variable has been initialized. However, a VAR_DECL
5007 with a nonzero DECL_INITIAL had an initializer, so do not
5008 claim it is potentially uninitialized.
5009
5010 We do not care about the actual value in DECL_INITIAL, so we do
5011 not worry that it may be a dangling pointer. */
5012 && DECL_INITIAL (decl) == NULL_TREE
5013 && regno_uninitialized (REGNO (DECL_RTL (decl))))
5014 warning_with_decl (decl,
5015 "`%s' might be used uninitialized in this function");
5016 if (TREE_CODE (decl) == VAR_DECL
5017 && DECL_RTL (decl) != 0
5018 && GET_CODE (DECL_RTL (decl)) == REG
5019 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5020 warning_with_decl (decl,
5021 "variable `%s' might be clobbered by `longjmp' or `vfork'");
5022 }
5023 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5024 uninitialized_vars_warning (sub);
5025 }
5026
5027 /* Do the appropriate part of uninitialized_vars_warning
5028 but for arguments instead of local variables. */
5029
5030 void
5031 setjmp_args_warning ()
5032 {
5033 register tree decl;
5034 for (decl = DECL_ARGUMENTS (current_function_decl);
5035 decl; decl = TREE_CHAIN (decl))
5036 if (DECL_RTL (decl) != 0
5037 && GET_CODE (DECL_RTL (decl)) == REG
5038 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5039 warning_with_decl (decl, "argument `%s' might be clobbered by `longjmp' or `vfork'");
5040 }
5041
5042 /* If this function calls setjmp, put all vars into the stack
5043 unless they were declared `register'. */
5044
5045 void
5046 setjmp_protect (block)
5047 tree block;
5048 {
5049 register tree decl, sub;
5050 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5051 if ((TREE_CODE (decl) == VAR_DECL
5052 || TREE_CODE (decl) == PARM_DECL)
5053 && DECL_RTL (decl) != 0
5054 && (GET_CODE (DECL_RTL (decl)) == REG
5055 || (GET_CODE (DECL_RTL (decl)) == MEM
5056 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5057 /* If this variable came from an inline function, it must be
5058 that its life doesn't overlap the setjmp. If there was a
5059 setjmp in the function, it would already be in memory. We
5060 	   must exclude such variables because their DECL_RTL might be
5061 set to strange things such as virtual_stack_vars_rtx. */
5062 && ! DECL_FROM_INLINE (decl)
5063 && (
5064 #ifdef NON_SAVING_SETJMP
5065 /* If longjmp doesn't restore the registers,
5066 don't put anything in them. */
5067 NON_SAVING_SETJMP
5068 ||
5069 #endif
5070 ! DECL_REGISTER (decl)))
5071 put_var_into_stack (decl);
5072 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5073 setjmp_protect (sub);
5074 }
5075 \f
5076 /* Like the previous function, but for args instead of local variables. */
5077
5078 void
5079 setjmp_protect_args ()
5080 {
5081 register tree decl;
5082 for (decl = DECL_ARGUMENTS (current_function_decl);
5083 decl; decl = TREE_CHAIN (decl))
5084 if ((TREE_CODE (decl) == VAR_DECL
5085 || TREE_CODE (decl) == PARM_DECL)
5086 && DECL_RTL (decl) != 0
5087 && (GET_CODE (DECL_RTL (decl)) == REG
5088 || (GET_CODE (DECL_RTL (decl)) == MEM
5089 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5090 && (
5091 /* If longjmp doesn't restore the registers,
5092 don't put anything in them. */
5093 #ifdef NON_SAVING_SETJMP
5094 NON_SAVING_SETJMP
5095 ||
5096 #endif
5097 ! DECL_REGISTER (decl)))
5098 put_var_into_stack (decl);
5099 }
5100 \f
5101 /* Return the context-pointer register corresponding to DECL,
5102 or 0 if it does not need one. */
5103
5104 rtx
5105 lookup_static_chain (decl)
5106 tree decl;
5107 {
5108 tree context = decl_function_context (decl);
5109 tree link;
5110
5111 if (context == 0
5112 || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5113 return 0;
5114
5115 /* We treat inline_function_decl as an alias for the current function
5116 because that is the inline function whose vars, types, etc.
5117 are being merged into the current function.
5118 See expand_inline_function. */
5119 if (context == current_function_decl || context == inline_function_decl)
5120 return virtual_stack_vars_rtx;
5121
5122 for (link = context_display; link; link = TREE_CHAIN (link))
5123 if (TREE_PURPOSE (link) == context)
5124 return RTL_EXPR_RTL (TREE_VALUE (link));
5125
5126 abort ();
5127 }
5128 \f
5129 /* Convert a stack slot address ADDR for variable VAR
5130 (from a containing function)
5131 into an address valid in this function (using a static chain). */
5132
5133 rtx
5134 fix_lexical_addr (addr, var)
5135 rtx addr;
5136 tree var;
5137 {
5138 rtx basereg;
5139 HOST_WIDE_INT displacement;
5140 tree context = decl_function_context (var);
5141 struct function *fp;
5142 rtx base = 0;
5143
5144 /* If this is the present function, we need not do anything. */
5145 if (context == current_function_decl || context == inline_function_decl)
5146 return addr;
5147
5148 for (fp = outer_function_chain; fp; fp = fp->next)
5149 if (fp->decl == context)
5150 break;
5151
5152 if (fp == 0)
5153 abort ();
5154
5155 if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
5156 addr = XEXP (XEXP (addr, 0), 0);
5157
5158 /* Decode given address as base reg plus displacement. */
5159 if (GET_CODE (addr) == REG)
5160 basereg = addr, displacement = 0;
5161 else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5162 basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
5163 else
5164 abort ();
5165
5166 /* We accept vars reached via the containing function's
5167 incoming arg pointer and via its stack variables pointer. */
5168 if (basereg == fp->internal_arg_pointer)
5169 {
5170 /* If reached via arg pointer, get the arg pointer value
5171 out of that function's stack frame.
5172
5173 There are two cases: If a separate ap is needed, allocate a
5174 slot in the outer function for it and dereference it that way.
5175 This is correct even if the real ap is actually a pseudo.
5176 Otherwise, just adjust the offset from the frame pointer to
5177 compensate. */
5178
5179 #ifdef NEED_SEPARATE_AP
5180 rtx addr;
5181
5182 if (fp->arg_pointer_save_area == 0)
5183 fp->arg_pointer_save_area
5184 = assign_outer_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0, fp);
5185
5186 addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
5187 addr = memory_address (Pmode, addr);
5188
5189 base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
5190 #else
5191 displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
5192 base = lookup_static_chain (var);
5193 #endif
5194 }
5195
5196 else if (basereg == virtual_stack_vars_rtx)
5197 {
5198 /* This is the same code as lookup_static_chain, duplicated here to
5199 avoid an extra call to decl_function_context. */
5200 tree link;
5201
5202 for (link = context_display; link; link = TREE_CHAIN (link))
5203 if (TREE_PURPOSE (link) == context)
5204 {
5205 base = RTL_EXPR_RTL (TREE_VALUE (link));
5206 break;
5207 }
5208 }
5209
5210 if (base == 0)
5211 abort ();
5212
5213 /* Use same offset, relative to appropriate static chain or argument
5214 pointer. */
5215 return plus_constant (base, displacement);
5216 }
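/* For illustration only -- a worked example of the decoding above,
   with invented numbers.  If a variable of the containing function
   lives at (plus (reg virtual-stack-vars) (const_int 12)), we decode
   BASEREG as the stack-variables pointer and DISPLACEMENT as 12,
   obtain the containing frame's base through the static chain (or,
   for parms, through the saved arg pointer), and return that base
   plus the same displacement of 12.  */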
5217 \f
5218 /* Return the address of the trampoline for entering nested fn FUNCTION.
5219 If necessary, allocate a trampoline (in the stack frame)
5220 and emit rtl to initialize its contents (at entry to this function). */
5221
5222 rtx
5223 trampoline_address (function)
5224 tree function;
5225 {
5226 tree link;
5227 tree rtlexp;
5228 rtx tramp;
5229 struct function *fp;
5230 tree fn_context;
5231
5232 /* Find an existing trampoline and return it. */
5233 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5234 if (TREE_PURPOSE (link) == function)
5235 return
5236 round_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));
5237
5238 for (fp = outer_function_chain; fp; fp = fp->next)
5239 for (link = fp->trampoline_list; link; link = TREE_CHAIN (link))
5240 if (TREE_PURPOSE (link) == function)
5241 {
5242 tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
5243 function);
5244 return round_trampoline_addr (tramp);
5245 }
5246
5247 /* None exists; we must make one. */
5248
5249 /* Find the `struct function' for the function containing FUNCTION. */
5250 fp = 0;
5251 fn_context = decl_function_context (function);
5252 if (fn_context != current_function_decl
5253 && fn_context != inline_function_decl)
5254 for (fp = outer_function_chain; fp; fp = fp->next)
5255 if (fp->decl == fn_context)
5256 break;
5257
5258 /* Allocate run-time space for this trampoline
5259 (usually in the defining function's stack frame). */
5260 #ifdef ALLOCATE_TRAMPOLINE
5261 tramp = ALLOCATE_TRAMPOLINE (fp);
5262 #else
5263 /* If rounding needed, allocate extra space
5264 to ensure we have TRAMPOLINE_SIZE bytes left after rounding up. */
5265 #ifdef TRAMPOLINE_ALIGNMENT
5266 #define TRAMPOLINE_REAL_SIZE \
5267 (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
5268 #else
5269 #define TRAMPOLINE_REAL_SIZE (TRAMPOLINE_SIZE)
5270 #endif
5271 if (fp != 0)
5272 tramp = assign_outer_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0, fp);
5273 else
5274 tramp = assign_stack_local (BLKmode, TRAMPOLINE_REAL_SIZE, 0);
5275 #endif
5276
5277 /* Record the trampoline for reuse and note it for later initialization
5278 by expand_function_end. */
5279 if (fp != 0)
5280 {
5281 push_obstacks (fp->function_maybepermanent_obstack,
5282 fp->function_maybepermanent_obstack);
5283 rtlexp = make_node (RTL_EXPR);
5284 RTL_EXPR_RTL (rtlexp) = tramp;
5285 fp->trampoline_list = tree_cons (function, rtlexp, fp->trampoline_list);
5286 pop_obstacks ();
5287 }
5288 else
5289 {
5290 /* Make the RTL_EXPR node temporary, not momentary, so that the
5291 trampoline_list doesn't become garbage. */
5292 int momentary = suspend_momentary ();
5293 rtlexp = make_node (RTL_EXPR);
5294 resume_momentary (momentary);
5295
5296 RTL_EXPR_RTL (rtlexp) = tramp;
5297 trampoline_list = tree_cons (function, rtlexp, trampoline_list);
5298 }
5299
5300 tramp = fix_lexical_addr (XEXP (tramp, 0), function);
5301 return round_trampoline_addr (tramp);
5302 }
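/* For illustration only -- a sketch of GNU C user code that forces a
   trampoline; `apply' is a hypothetical consumer invented for the
   example.  Because the address of the nested function escapes, the
   call may arrive through a plain function pointer, so the static
   chain must be materialized in an executable trampoline in the
   containing function's frame.  */
#if 0
extern void apply (int (*) (int));	/* hypothetical */

void
outer (int x)
{
  int inner (int y) { return x + y; }	/* uses the parent's `x' */
  apply (inner);			/* address escapes; trampoline needed */
}
#endif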
5303
5304 /* Given a trampoline address,
5305    round it to a multiple of TRAMPOLINE_ALIGNMENT. */
5306
5307 static rtx
5308 round_trampoline_addr (tramp)
5309 rtx tramp;
5310 {
5311 #ifdef TRAMPOLINE_ALIGNMENT
5312 /* Round address up to desired boundary. */
5313 rtx temp = gen_reg_rtx (Pmode);
5314 temp = expand_binop (Pmode, add_optab, tramp,
5315 GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1),
5316 temp, 0, OPTAB_LIB_WIDEN);
5317 tramp = expand_binop (Pmode, and_optab, temp,
5318 GEN_INT (- TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT),
5319 temp, 0, OPTAB_LIB_WIDEN);
5320 #endif
5321 return tramp;
5322 }
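/* For illustration only -- the rounding arithmetic above with
   invented numbers.  With a TRAMPOLINE_ALIGNMENT of 128 bits (16
   bytes), an address of 0x1003 becomes (0x1003 + 15) & ~15 = 0x1010,
   the next 16-byte boundary; GEN_INT (-16) supplies the ~15 mask in
   two's complement.  */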
5323 \f
5324 /* The functions identify_blocks and reorder_blocks provide a way to
5325 reorder the tree of BLOCK nodes, for optimizers that reshuffle or
5326 duplicate portions of the RTL code. Call identify_blocks before
5327 changing the RTL, and call reorder_blocks after. */
5328
5329 /* Put all this function's BLOCK nodes, including those that are chained
5330    onto the first block, into a vector, and return it.
5331 Also store in each NOTE for the beginning or end of a block
5332 the index of that block in the vector.
5333 The arguments are BLOCK, the chain of top-level blocks of the function,
5334 and INSNS, the insn chain of the function. */
5335
5336 tree *
5337 identify_blocks (block, insns)
5338 tree block;
5339 rtx insns;
5340 {
5341 int n_blocks;
5342 tree *block_vector;
5343 int *block_stack;
5344 int depth = 0;
5345 int next_block_number = 1;
5346 int current_block_number = 1;
5347 rtx insn;
5348
5349 if (block == 0)
5350 return 0;
5351
5352 n_blocks = all_blocks (block, 0);
5353 block_vector = (tree *) xmalloc (n_blocks * sizeof (tree));
5354 block_stack = (int *) alloca (n_blocks * sizeof (int));
5355
5356 all_blocks (block, block_vector);
5357
5358 for (insn = insns; insn; insn = NEXT_INSN (insn))
5359 if (GET_CODE (insn) == NOTE)
5360 {
5361 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5362 {
5363 block_stack[depth++] = current_block_number;
5364 current_block_number = next_block_number;
5365 NOTE_BLOCK_NUMBER (insn) = next_block_number++;
5366 }
5367 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5368 {
5369 NOTE_BLOCK_NUMBER (insn) = current_block_number;
5370 current_block_number = block_stack[--depth];
5371 }
5372 }
5373
5374 if (n_blocks != next_block_number)
5375 abort ();
5376
5377 return block_vector;
5378 }
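/* For illustration only -- how the numbering above plays out on a
   small invented nest of blocks.  Slot 0 of the vector holds the
   outermost function block, which has no notes of its own.  For a
   body shaped like { A { B } { C } }, the BLOCK_BEG notes are
   numbered depth-first: A = 1, B = 2, C = 3; each BLOCK_END note
   gets the number of the block it closes, and the enclosing block's
   number is then restored from block_stack.  */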
5379
5380 /* Given BLOCK_VECTOR which was returned by identify_blocks,
5381 and a revised instruction chain, rebuild the tree structure
5382 of BLOCK nodes to correspond to the new order of RTL.
5383    The new block tree is inserted below BLOCK.
5384 Returns the current top-level block. */
5385
5386 tree
5387 reorder_blocks (block_vector, block, insns)
5388 tree *block_vector;
5389 tree block;
5390 rtx insns;
5391 {
5392 tree current_block = block;
5393 rtx insn;
5394
5395 if (block_vector == 0)
5396 return block;
5397
5398   /* Prune the old trees away, so that they don't get in the way. */
5399 BLOCK_SUBBLOCKS (current_block) = 0;
5400 BLOCK_CHAIN (current_block) = 0;
5401
5402 for (insn = insns; insn; insn = NEXT_INSN (insn))
5403 if (GET_CODE (insn) == NOTE)
5404 {
5405 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
5406 {
5407 tree block = block_vector[NOTE_BLOCK_NUMBER (insn)];
5408 /* If we have seen this block before, copy it. */
5409 if (TREE_ASM_WRITTEN (block))
5410 block = copy_node (block);
5411 BLOCK_SUBBLOCKS (block) = 0;
5412 TREE_ASM_WRITTEN (block) = 1;
5413 BLOCK_SUPERCONTEXT (block) = current_block;
5414 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
5415 BLOCK_SUBBLOCKS (current_block) = block;
5416 current_block = block;
5417 NOTE_SOURCE_FILE (insn) = 0;
5418 }
5419 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
5420 {
5421 BLOCK_SUBBLOCKS (current_block)
5422 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5423 current_block = BLOCK_SUPERCONTEXT (current_block);
5424 NOTE_SOURCE_FILE (insn) = 0;
5425 }
5426 }
5427
5428 BLOCK_SUBBLOCKS (current_block)
5429 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
5430 return current_block;
5431 }
5432
5433 /* Reverse the order of elements in the chain T of blocks,
5434 and return the new head of the chain (old last element). */
5435
5436 static tree
5437 blocks_nreverse (t)
5438 tree t;
5439 {
5440 register tree prev = 0, decl, next;
5441 for (decl = t; decl; decl = next)
5442 {
5443 next = BLOCK_CHAIN (decl);
5444 BLOCK_CHAIN (decl) = prev;
5445 prev = decl;
5446 }
5447 return prev;
5448 }
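/* For illustration only: given a chain A -> B -> C, the loop above
   returns C, with each BLOCK_CHAIN pointer flipped in place so the
   chain reads C -> B -> A.  */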
5449
5450 /* Count the subblocks of the list starting with BLOCK, and store them
5451    all in the vector VECTOR.  Also clear TREE_ASM_WRITTEN in all
5452 blocks. */
5453
5454 static int
5455 all_blocks (block, vector)
5456 tree block;
5457 tree *vector;
5458 {
5459 int n_blocks = 0;
5460
5461 while (block)
5462 {
5463 TREE_ASM_WRITTEN (block) = 0;
5464
5465 /* Record this block. */
5466 if (vector)
5467 vector[n_blocks] = block;
5468
5469 ++n_blocks;
5470
5471 /* Record the subblocks, and their subblocks... */
5472 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
5473 vector ? vector + n_blocks : 0);
5474 block = BLOCK_CHAIN (block);
5475 }
5476
5477 return n_blocks;
5478 }
5479 \f
5480 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
5481 and initialize static variables for generating RTL for the statements
5482 of the function. */
5483
5484 void
5485 init_function_start (subr, filename, line)
5486 tree subr;
5487 char *filename;
5488 int line;
5489 {
5490 init_stmt_for_function ();
5491
5492 cse_not_expected = ! optimize;
5493
5494 /* Caller save not needed yet. */
5495 caller_save_needed = 0;
5496
5497 /* No stack slots have been made yet. */
5498 stack_slot_list = 0;
5499
5500 /* There is no stack slot for handling nonlocal gotos. */
5501 nonlocal_goto_handler_slots = 0;
5502 nonlocal_goto_stack_level = 0;
5503
5504 /* No labels have been declared for nonlocal use. */
5505 nonlocal_labels = 0;
5506
5507 /* No function calls so far in this function. */
5508 function_call_count = 0;
5509
5510 /* No parm regs have been allocated.
5511 (This is important for output_inline_function.) */
5512 max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
5513
5514 /* Initialize the RTL mechanism. */
5515 init_emit ();
5516
5517   /* Initialize the queue of pending postincrements and postdecrements,
5518 and some other info in expr.c. */
5519 init_expr ();
5520
5521 /* We haven't done register allocation yet. */
5522 reg_renumber = 0;
5523
5524 init_const_rtx_hash_table ();
5525
5526 current_function_name = (*decl_printable_name) (subr, 2);
5527
5528 /* Nonzero if this is a nested function that uses a static chain. */
5529
5530 current_function_needs_context
5531 = (decl_function_context (current_function_decl) != 0
5532 && ! DECL_NO_STATIC_CHAIN (current_function_decl));
5533
5534 /* Set if a call to setjmp is seen. */
5535 current_function_calls_setjmp = 0;
5536
5537 /* Set if a call to longjmp is seen. */
5538 current_function_calls_longjmp = 0;
5539
5540 current_function_calls_alloca = 0;
5541 current_function_has_nonlocal_label = 0;
5542 current_function_has_nonlocal_goto = 0;
5543 current_function_contains_functions = 0;
5544 current_function_sp_is_unchanging = 0;
5545 current_function_is_thunk = 0;
5546
5547 current_function_returns_pcc_struct = 0;
5548 current_function_returns_struct = 0;
5549 current_function_epilogue_delay_list = 0;
5550 current_function_uses_const_pool = 0;
5551 current_function_uses_pic_offset_table = 0;
5552 current_function_cannot_inline = 0;
5553
5554 /* We have not yet needed to make a label to jump to for tail-recursion. */
5555 tail_recursion_label = 0;
5556
5557 /* We haven't had a need to make a save area for ap yet. */
5558
5559 arg_pointer_save_area = 0;
5560
5561 /* No stack slots allocated yet. */
5562 frame_offset = 0;
5563
5564 /* No SAVE_EXPRs in this function yet. */
5565 save_expr_regs = 0;
5566
5567 /* No RTL_EXPRs in this function yet. */
5568 rtl_expr_chain = 0;
5569
5570 /* Set up to allocate temporaries. */
5571 init_temp_slots ();
5572
5573   /* Within function body, compute a type's size as soon as it is laid out. */
5574 immediate_size_expand++;
5575
5576 /* We haven't made any trampolines for this function yet. */
5577 trampoline_list = 0;
5578
5579 init_pending_stack_adjust ();
5580 inhibit_defer_pop = 0;
5581
5582 current_function_outgoing_args_size = 0;
5583
5584 /* Prevent ever trying to delete the first instruction of a function.
5585 Also tell final how to output a linenum before the function prologue.
5586 Note linenums could be missing, e.g. when compiling a Java .class file. */
5587 if (line > 0)
5588 emit_line_note (filename, line);
5589
5590 /* Make sure first insn is a note even if we don't want linenums.
5591 This makes sure the first insn will never be deleted.
5592 Also, final expects a note to appear there. */
5593 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5594
5595 /* Set flags used by final.c. */
5596 if (aggregate_value_p (DECL_RESULT (subr)))
5597 {
5598 #ifdef PCC_STATIC_STRUCT_RETURN
5599 current_function_returns_pcc_struct = 1;
5600 #endif
5601 current_function_returns_struct = 1;
5602 }
5603
5604 /* Warn if this value is an aggregate type,
5605 regardless of which calling convention we are using for it. */
5606 if (warn_aggregate_return
5607 && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
5608 warning ("function returns an aggregate");
5609
5610 current_function_returns_pointer
5611 = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
5612
5613 /* Indicate that we need to distinguish between the return value of the
5614 present function and the return value of a function being called. */
5615 rtx_equal_function_value_matters = 1;
5616
5617 /* Indicate that we have not instantiated virtual registers yet. */
5618 virtuals_instantiated = 0;
5619
5620 /* Indicate we have no need of a frame pointer yet. */
5621 frame_pointer_needed = 0;
5622
5623 /* By default assume not varargs or stdarg. */
5624 current_function_varargs = 0;
5625 current_function_stdarg = 0;
5626 }
5627
5628 /* Indicate that the current function uses extra args
5629 not explicitly mentioned in the argument list in any fashion. */
5630
5631 void
5632 mark_varargs ()
5633 {
5634 current_function_varargs = 1;
5635 }
5636
5637 /* Expand a call to __main at the beginning of a possible main function. */
5638
5639 #if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
5640 #undef HAS_INIT_SECTION
5641 #define HAS_INIT_SECTION
5642 #endif
5643
5644 void
5645 expand_main_function ()
5646 {
5647 #if !defined (HAS_INIT_SECTION)
5648 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
5649 VOIDmode, 0);
5650 #endif /* not HAS_INIT_SECTION */
5651 }
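/* For illustration only -- a sketch of the effect on targets without
   an init section: the emitted call makes `main' behave roughly as if
   the user had written

     int main (int argc, char **argv)
     {
       __main ();	-- run global constructors first
       ... user code ...
     }

   On targets with INIT_SECTION_ASM_OP (and no INVOKE__main), the
   startup code runs the constructors instead and no call is
   emitted.  */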
5652 \f
5653 extern struct obstack permanent_obstack;
5654
5655 /* Start the RTL for a new function, and set variables used for
5656 emitting RTL.
5657 SUBR is the FUNCTION_DECL node.
5658 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
5659 the function's parameters, which must be run at any return statement. */
5660
5661 void
5662 expand_function_start (subr, parms_have_cleanups)
5663 tree subr;
5664 int parms_have_cleanups;
5665 {
5666 register int i;
5667 tree tem;
5668 rtx last_ptr = NULL_RTX;
5669
5670 /* Make sure volatile mem refs aren't considered
5671 valid operands of arithmetic insns. */
5672 init_recog_no_volatile ();
5673
5674 /* Set this before generating any memory accesses. */
5675 current_function_check_memory_usage
5676 = (flag_check_memory_usage
5677 && ! DECL_NO_CHECK_MEMORY_USAGE (current_function_decl));
5678
5679 current_function_instrument_entry_exit
5680 = (flag_instrument_function_entry_exit
5681 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
5682
5683 /* If function gets a static chain arg, store it in the stack frame.
5684 Do this first, so it gets the first stack slot offset. */
5685 if (current_function_needs_context)
5686 {
5687 last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
5688
5689 /* Delay copying static chain if it is not a register to avoid
5690 conflicts with regs used for parameters. */
5691 if (! SMALL_REGISTER_CLASSES
5692 || GET_CODE (static_chain_incoming_rtx) == REG)
5693 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5694 }
5695
5696 /* If the parameters of this function need cleaning up, get a label
5697 for the beginning of the code which executes those cleanups. This must
5698 be done before doing anything with return_label. */
5699 if (parms_have_cleanups)
5700 cleanup_label = gen_label_rtx ();
5701 else
5702 cleanup_label = 0;
5703
5704 /* Make the label for return statements to jump to, if this machine
5705 does not have a one-instruction return and uses an epilogue,
5706 or if it returns a structure, or if it has parm cleanups. */
5707 #ifdef HAVE_return
5708 if (cleanup_label == 0 && HAVE_return
5709 && ! current_function_instrument_entry_exit
5710 && ! current_function_returns_pcc_struct
5711 && ! (current_function_returns_struct && ! optimize))
5712 return_label = 0;
5713 else
5714 return_label = gen_label_rtx ();
5715 #else
5716 return_label = gen_label_rtx ();
5717 #endif
5718
5719 /* Initialize rtx used to return the value. */
5720 /* Do this before assign_parms so that we copy the struct value address
5721 before any library calls that assign parms might generate. */
5722
5723 /* Decide whether to return the value in memory or in a register. */
5724 if (aggregate_value_p (DECL_RESULT (subr)))
5725 {
5726 /* Returning something that won't go in a register. */
5727 register rtx value_address = 0;
5728
5729 #ifdef PCC_STATIC_STRUCT_RETURN
5730 if (current_function_returns_pcc_struct)
5731 {
5732 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
5733 value_address = assemble_static_space (size);
5734 }
5735 else
5736 #endif
5737 {
5738 /* Expect to be passed the address of a place to store the value.
5739 If it is passed as an argument, assign_parms will take care of
5740 it. */
5741 if (struct_value_incoming_rtx)
5742 {
5743 value_address = gen_reg_rtx (Pmode);
5744 emit_move_insn (value_address, struct_value_incoming_rtx);
5745 }
5746 }
5747 if (value_address)
5748 {
5749 DECL_RTL (DECL_RESULT (subr))
5750 = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
5751 MEM_SET_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)),
5752 AGGREGATE_TYPE_P (TREE_TYPE
5753 (DECL_RESULT
5754 (subr))));
5755 }
5756 }
5757 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
5758 /* If return mode is void, this decl rtl should not be used. */
5759 DECL_RTL (DECL_RESULT (subr)) = 0;
5760 else if (parms_have_cleanups || current_function_instrument_entry_exit)
5761 {
5762       /* If the function will end with cleanup code for parms,
5763 	 compute the return value into a pseudo reg,
5764 which we will copy into the true return register
5765 after the cleanups are done. */
5766
5767 enum machine_mode mode = DECL_MODE (DECL_RESULT (subr));
5768
5769 #ifdef PROMOTE_FUNCTION_RETURN
5770 tree type = TREE_TYPE (DECL_RESULT (subr));
5771 int unsignedp = TREE_UNSIGNED (type);
5772
5773 mode = promote_mode (type, mode, &unsignedp, 1);
5774 #endif
5775
5776 DECL_RTL (DECL_RESULT (subr)) = gen_reg_rtx (mode);
5777 }
5778 else
5779 /* Scalar, returned in a register. */
5780 {
5781 #ifdef FUNCTION_OUTGOING_VALUE
5782 DECL_RTL (DECL_RESULT (subr))
5783 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5784 #else
5785 DECL_RTL (DECL_RESULT (subr))
5786 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr);
5787 #endif
5788
5789 /* Mark this reg as the function's return value. */
5790 if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG)
5791 {
5792 REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1;
5793 /* Needed because we may need to move this to memory
5794 in case it's a named return value whose address is taken. */
5795 DECL_REGISTER (DECL_RESULT (subr)) = 1;
5796 }
5797 }
5798
5799 /* Initialize rtx for parameters and local variables.
5800 In some cases this requires emitting insns. */
5801
5802 assign_parms (subr, 0);
5803
5804 /* Copy the static chain now if it wasn't a register. The delay is to
5805 avoid conflicts with the parameter passing registers. */
5806
5807 if (SMALL_REGISTER_CLASSES && current_function_needs_context)
5808 if (GET_CODE (static_chain_incoming_rtx) != REG)
5809 emit_move_insn (last_ptr, static_chain_incoming_rtx);
5810
5811 /* The following was moved from init_function_start.
5812 The move is supposed to make sdb output more accurate. */
5813 /* Indicate the beginning of the function body,
5814 as opposed to parm setup. */
5815 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_BEG);
5816
5817 /* If doing stupid allocation, mark parms as born here. */
5818
5819 if (GET_CODE (get_last_insn ()) != NOTE)
5820 emit_note (NULL_PTR, NOTE_INSN_DELETED);
5821 parm_birth_insn = get_last_insn ();
5822
5823 if (obey_regdecls)
5824 {
5825 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
5826 use_variable (regno_reg_rtx[i]);
5827
5828 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
5829 use_variable (current_function_internal_arg_pointer);
5830 }
5831
5832 context_display = 0;
5833 if (current_function_needs_context)
5834 {
5835 /* Fetch static chain values for containing functions. */
5836 tem = decl_function_context (current_function_decl);
5837 /* If not doing stupid register allocation copy the static chain
5838 pointer into a pseudo. If we have small register classes, copy
5839 the value from memory if static_chain_incoming_rtx is a REG. If
5840 we do stupid register allocation, we use the stack address
5841 generated above. */
5842 if (tem && ! obey_regdecls)
5843 {
5844 /* If the static chain originally came in a register, put it back
5845 there, then move it out in the next insn. The reason for
5846 this peculiar code is to satisfy function integration. */
5847 if (SMALL_REGISTER_CLASSES
5848 && GET_CODE (static_chain_incoming_rtx) == REG)
5849 emit_move_insn (static_chain_incoming_rtx, last_ptr);
5850 last_ptr = copy_to_reg (static_chain_incoming_rtx);
5851 }
5852
5853 while (tem)
5854 {
5855 tree rtlexp = make_node (RTL_EXPR);
5856
5857 RTL_EXPR_RTL (rtlexp) = last_ptr;
5858 context_display = tree_cons (tem, rtlexp, context_display);
5859 tem = decl_function_context (tem);
5860 if (tem == 0)
5861 break;
5862 	  /* Chain through stack frames, assuming pointer to next lexical frame
5863 is found at the place we always store it. */
5864 #ifdef FRAME_GROWS_DOWNWARD
5865 last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
5866 #endif
5867 last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
5868 memory_address (Pmode, last_ptr)));
5869
5870 /* If we are not optimizing, ensure that we know that this
5871 piece of context is live over the entire function. */
5872 if (! optimize)
5873 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
5874 save_expr_regs);
5875 }
5876 }
5877
5878 if (current_function_instrument_entry_exit)
5879 {
5880 rtx fun = DECL_RTL (current_function_decl);
5881 if (GET_CODE (fun) == MEM)
5882 fun = XEXP (fun, 0);
5883 else
5884 abort ();
5885 emit_library_call (profile_function_entry_libfunc, 0, VOIDmode, 2,
5886 fun, Pmode,
5887 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5888 0,
5889 hard_frame_pointer_rtx),
5890 Pmode);
5891 }
5892
5893 /* After the display initializations is where the tail-recursion label
5894 should go, if we end up needing one. Ensure we have a NOTE here
5895 since some things (like trampolines) get placed before this. */
5896 tail_recursion_reentry = emit_note (NULL_PTR, NOTE_INSN_DELETED);
5897
5898 /* Evaluate now the sizes of any types declared among the arguments. */
5899 for (tem = nreverse (get_pending_sizes ()); tem; tem = TREE_CHAIN (tem))
5900 {
5901 expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode,
5902 EXPAND_MEMORY_USE_BAD);
5903 /* Flush the queue in case this parameter declaration has
5904 side-effects. */
5905 emit_queue ();
5906 }
5907
5908 /* Make sure there is a line number after the function entry setup code. */
5909 force_next_line_note ();
5910 }
5911 \f
5912 /* Generate RTL for the end of the current function.
5913 FILENAME and LINE are the current position in the source file.
5914
5915 It is up to language-specific callers to do cleanups for parameters--
5916 or else, supply 1 for END_BINDINGS and we will call expand_end_bindings. */
5917
5918 void
5919 expand_function_end (filename, line, end_bindings)
5920 char *filename;
5921 int line;
5922 int end_bindings;
5923 {
5924 register int i;
5925 tree link;
5926
5927 #ifdef TRAMPOLINE_TEMPLATE
5928 static rtx initial_trampoline;
5929 #endif
5930
5931 #ifdef NON_SAVING_SETJMP
5932 /* Don't put any variables in registers if we call setjmp
5933 on a machine that fails to restore the registers. */
5934 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
5935 {
5936 if (DECL_INITIAL (current_function_decl) != error_mark_node)
5937 setjmp_protect (DECL_INITIAL (current_function_decl));
5938
5939 setjmp_protect_args ();
5940 }
5941 #endif
5942
5943 /* Save the argument pointer if a save area was made for it. */
5944 if (arg_pointer_save_area)
5945 {
5946 rtx x = gen_move_insn (arg_pointer_save_area, virtual_incoming_args_rtx);
5947 emit_insn_before (x, tail_recursion_reentry);
5948 }
5949
5950 /* Initialize any trampolines required by this function. */
5951 for (link = trampoline_list; link; link = TREE_CHAIN (link))
5952 {
5953 tree function = TREE_PURPOSE (link);
5954 rtx context = lookup_static_chain (function);
5955 rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
5956 #ifdef TRAMPOLINE_TEMPLATE
5957 rtx blktramp;
5958 #endif
5959 rtx seq;
5960
5961 #ifdef TRAMPOLINE_TEMPLATE
5962 /* First make sure this compilation has a template for
5963 initializing trampolines. */
5964 if (initial_trampoline == 0)
5965 {
5966 end_temporary_allocation ();
5967 initial_trampoline
5968 = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
5969 resume_temporary_allocation ();
5970 }
5971 #endif
5972
5973 /* Generate insns to initialize the trampoline. */
5974 start_sequence ();
5975 tramp = round_trampoline_addr (XEXP (tramp, 0));
5976 #ifdef TRAMPOLINE_TEMPLATE
5977 blktramp = change_address (initial_trampoline, BLKmode, tramp);
5978 emit_block_move (blktramp, initial_trampoline,
5979 GEN_INT (TRAMPOLINE_SIZE),
5980 TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5981 #endif
5982 INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
5983 seq = get_insns ();
5984 end_sequence ();
5985
5986 /* Put those insns at entry to the containing function (this one). */
5987 emit_insns_before (seq, tail_recursion_reentry);
5988 }
5989
5990 /* If we are doing stack checking and this function makes calls,
5991 do a stack probe at the start of the function to ensure we have enough
5992 space for another stack frame. */
5993 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
5994 {
5995 rtx insn, seq;
5996
5997 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5998 if (GET_CODE (insn) == CALL_INSN)
5999 {
6000 start_sequence ();
6001 probe_stack_range (STACK_CHECK_PROTECT,
6002 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6003 seq = get_insns ();
6004 end_sequence ();
6005 emit_insns_before (seq, tail_recursion_reentry);
6006 break;
6007 }
6008 }
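/* For illustration only, and roughly speaking: the probe above
   touches addresses from STACK_CHECK_PROTECT through
   STACK_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE beyond the stack
   pointer at entry, so that running out of stack is caught at the
   probe while enough room still remains for one more maximal frame
   plus the protection area.  */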
6009
6010 /* Warn about unused parms if extra warnings were specified. */
6011 if (warn_unused && extra_warnings)
6012 {
6013 tree decl;
6014
6015 for (decl = DECL_ARGUMENTS (current_function_decl);
6016 decl; decl = TREE_CHAIN (decl))
6017 if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6018 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6019 warning_with_decl (decl, "unused parameter `%s'");
6020 }
6021
6022 /* Delete handlers for nonlocal gotos if nothing uses them. */
6023 if (nonlocal_goto_handler_slots != 0
6024 && ! current_function_has_nonlocal_label)
6025 delete_handlers ();
6026
6027 /* End any sequences that failed to be closed due to syntax errors. */
6028 while (in_sequence_p ())
6029 end_sequence ();
6030
6031 /* Outside function body, can't compute type's actual size
6032 until next function's body starts. */
6033 immediate_size_expand--;
6034
6035 /* If doing stupid register allocation,
6036 mark register parms as dying here. */
6037
6038 if (obey_regdecls)
6039 {
6040 rtx tem;
6041 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_parm_reg; i++)
6042 use_variable (regno_reg_rtx[i]);
6043
6044 /* Likewise for the regs of all the SAVE_EXPRs in the function. */
6045
6046 for (tem = save_expr_regs; tem; tem = XEXP (tem, 1))
6047 {
6048 use_variable (XEXP (tem, 0));
6049 use_variable_after (XEXP (tem, 0), parm_birth_insn);
6050 }
6051
6052 if (current_function_internal_arg_pointer != virtual_incoming_args_rtx)
6053 use_variable (current_function_internal_arg_pointer);
6054 }
6055
6056 clear_pending_stack_adjust ();
6057 do_pending_stack_adjust ();
6058
6059 /* Mark the end of the function body.
6060 If control reaches this insn, the function can drop through
6061 without returning a value. */
6062 emit_note (NULL_PTR, NOTE_INSN_FUNCTION_END);
6063
6064 /* Must mark the last line number note in the function, so that the test
6065 coverage code can avoid counting the last line twice. This just tells
6066 the code to ignore the immediately following line note, since there
6067 already exists a copy of this note somewhere above. This line number
6068 note is still needed for debugging though, so we can't delete it. */
6069 if (flag_test_coverage)
6070 emit_note (NULL_PTR, NOTE_REPEATED_LINE_NUMBER);
6071
6072 /* Output a linenumber for the end of the function.
6073 SDB depends on this. */
6074 emit_line_note_force (filename, line);
6075
6076 /* Output the label for the actual return from the function,
6077 if one is expected. This happens either because a function epilogue
6078 is used instead of a return instruction, or because a return was done
6079 with a goto in order to run local cleanups, or because of pcc-style
6080 structure returning. */
6081
6082 if (return_label)
6083 emit_label (return_label);
6084
6085 /* C++ uses this. */
6086 if (end_bindings)
6087 expand_end_bindings (0, 0, 0);
6088
6089 /* Now handle any leftover exception regions that may have been
6090 created for the parameters. */
6091 {
6092 rtx last = get_last_insn ();
6093 rtx label;
6094
6095 expand_leftover_cleanups ();
6096
6097     /* If the above emitted any code, make sure we jump around it. */
6098 if (last != get_last_insn ())
6099 {
6100 label = gen_label_rtx ();
6101 last = emit_jump_insn_after (gen_jump (label), last);
6102 last = emit_barrier_after (last);
6103 emit_label (label);
6104 }
6105 }
6106
6107 if (current_function_instrument_entry_exit)
6108 {
6109 rtx fun = DECL_RTL (current_function_decl);
6110 if (GET_CODE (fun) == MEM)
6111 fun = XEXP (fun, 0);
6112 else
6113 abort ();
6114 emit_library_call (profile_function_exit_libfunc, 0, VOIDmode, 2,
6115 fun, Pmode,
6116 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
6117 0,
6118 hard_frame_pointer_rtx),
6119 Pmode);
6120 }
6121
6122 /* If we had calls to alloca, and this machine needs
6123 an accurate stack pointer to exit the function,
6124 insert some code to save and restore the stack pointer. */
6125 #ifdef EXIT_IGNORE_STACK
6126 if (! EXIT_IGNORE_STACK)
6127 #endif
6128 if (current_function_calls_alloca)
6129 {
6130 rtx tem = 0;
6131
6132 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
6133 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
6134 }
6135
6136 /* If scalar return value was computed in a pseudo-reg,
6137 copy that to the hard return register. */
6138 if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0
6139 && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG
6140 && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl)))
6141 >= FIRST_PSEUDO_REGISTER))
6142 {
6143 rtx real_decl_result;
6144
6145 #ifdef FUNCTION_OUTGOING_VALUE
6146 real_decl_result
6147 = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6148 current_function_decl);
6149 #else
6150 real_decl_result
6151 = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)),
6152 current_function_decl);
6153 #endif
6154 REG_FUNCTION_VALUE_P (real_decl_result) = 1;
6155 /* If this is a BLKmode structure being returned in registers, then use
6156 the mode computed in expand_return. */
6157 if (GET_MODE (real_decl_result) == BLKmode)
6158 PUT_MODE (real_decl_result,
6159 GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
6160 emit_move_insn (real_decl_result,
6161 DECL_RTL (DECL_RESULT (current_function_decl)));
6162 emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
6163
6164 /* The delay slot scheduler assumes that current_function_return_rtx
6165 holds the hard register containing the return value, not a temporary
6166 pseudo. */
6167 current_function_return_rtx = real_decl_result;
6168 }
6169
6170 /* If returning a structure, arrange to return the address of the value
6171 in a place where debuggers expect to find it.
6172
6173 If returning a structure PCC style,
6174 the caller also depends on this value.
6175 And current_function_returns_pcc_struct is not necessarily set. */
6176 if (current_function_returns_struct
6177 || current_function_returns_pcc_struct)
6178 {
6179 rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6180 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
6181 #ifdef FUNCTION_OUTGOING_VALUE
6182 rtx outgoing
6183 = FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
6184 current_function_decl);
6185 #else
6186 rtx outgoing
6187 = FUNCTION_VALUE (build_pointer_type (type),
6188 current_function_decl);
6189 #endif
6190
6191 /* Mark this as a function return value so integrate will delete the
6192 assignment and USE below when inlining this function. */
6193 REG_FUNCTION_VALUE_P (outgoing) = 1;
6194
6195 emit_move_insn (outgoing, value_address);
6196 use_variable (outgoing);
6197 }
6198
6199 /* If this is an implementation of __throw, do what's necessary to
6200 communicate between __builtin_eh_return and the epilogue. */
6201 expand_eh_return ();
6202
6203 /* Output a return insn if we are using one.
6204 Otherwise, let the rtl chain end here, to drop through
6205 into the epilogue. */
6206
6207 #ifdef HAVE_return
6208 if (HAVE_return)
6209 {
6210 emit_jump_insn (gen_return ());
6211 emit_barrier ();
6212 }
6213 #endif
6214
6215 /* Fix up any gotos that jumped out to the outermost
6216 binding level of the function.
6217 Must follow emitting RETURN_LABEL. */
6218
6219 /* If you have any cleanups to do at this point,
6220 and they need to create temporary variables,
6221 then you will lose. */
6222 expand_fixups (get_insns ());
6223 }
6224 \f
6225 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
6226
6227 static int *prologue;
6228 static int *epilogue;
6229
6230 /* Create an array that records the INSN_UIDs of INSNS (either a sequence
6231 or a single insn). */
6232
6233 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6234 static int *
6235 record_insns (insns)
6236 rtx insns;
6237 {
6238 int *vec;
6239
6240 if (GET_CODE (insns) == SEQUENCE)
6241 {
6242 int len = XVECLEN (insns, 0);
6243 vec = (int *) oballoc ((len + 1) * sizeof (int));
6244 vec[len] = 0;
6245 while (--len >= 0)
6246 vec[len] = INSN_UID (XVECEXP (insns, 0, len));
6247 }
6248 else
6249 {
6250 vec = (int *) oballoc (2 * sizeof (int));
6251 vec[0] = INSN_UID (insns);
6252 vec[1] = 0;
6253 }
6254 return vec;
6255 }
6256
6257 /* Determine how many INSN_UIDs in VEC are part of INSN. */
6258
6259 static int
6260 contains (insn, vec)
6261 rtx insn;
6262 int *vec;
6263 {
6264 register int i, j;
6265
6266 if (GET_CODE (insn) == INSN
6267 && GET_CODE (PATTERN (insn)) == SEQUENCE)
6268 {
6269 int count = 0;
6270 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6271 for (j = 0; vec[j]; j++)
6272 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == vec[j])
6273 count++;
6274 return count;
6275 }
6276 else
6277 {
6278 for (j = 0; vec[j]; j++)
6279 if (INSN_UID (insn) == vec[j])
6280 return 1;
6281 }
6282 return 0;
6283 }
6284 #endif /* HAVE_prologue || HAVE_epilogue */
6285
6286 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
6287 this into place with notes indicating where the prologue ends and where
6288 the epilogue begins. Update the basic block information when possible. */
6289
6290 void
6291 thread_prologue_and_epilogue_insns (f)
6292 rtx f ATTRIBUTE_UNUSED;
6293 {
6294 #ifdef HAVE_prologue
6295 if (HAVE_prologue)
6296 {
6297 rtx head, seq;
6298
6299 /* The first insn (a NOTE_INSN_DELETED) is followed by zero or more
6300 prologue insns and a NOTE_INSN_PROLOGUE_END. */
6301 emit_note_after (NOTE_INSN_PROLOGUE_END, f);
6302 seq = gen_prologue ();
6303 head = emit_insn_after (seq, f);
6304
6305 /* Include the new prologue insns in the first block. Ignore them
6306 if they form a basic block unto themselves. */
6307 if (x_basic_block_head && n_basic_blocks
6308 && GET_CODE (BLOCK_HEAD (0)) != CODE_LABEL)
6309 BLOCK_HEAD (0) = NEXT_INSN (f);
6310
6311 /* Retain a map of the prologue insns. */
6312 prologue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : head);
6313 }
6314 else
6315 #endif
6316 prologue = 0;
6317
6318 #ifdef HAVE_epilogue
6319 if (HAVE_epilogue)
6320 {
6321 rtx insn = get_last_insn ();
6322 rtx prev = prev_nonnote_insn (insn);
6323
6324 /* If we end with a BARRIER, we don't need an epilogue. */
6325 if (! (prev && GET_CODE (prev) == BARRIER))
6326 {
6327 rtx tail, seq, tem;
6328 rtx first_use = 0;
6329 rtx last_use = 0;
6330
6331 /* The last basic block ends with a NOTE_INSN_EPILOGUE_BEG, the
6332 epilogue insns, the USE insns at the end of a function,
6333 the jump insn that returns, and then a BARRIER. */
6334
6335 /* Move the USE insns at the end of a function onto a list. */
6336 while (prev
6337 && GET_CODE (prev) == INSN
6338 && GET_CODE (PATTERN (prev)) == USE)
6339 {
6340 tem = prev;
6341 prev = prev_nonnote_insn (prev);
6342
6343 NEXT_INSN (PREV_INSN (tem)) = NEXT_INSN (tem);
6344 PREV_INSN (NEXT_INSN (tem)) = PREV_INSN (tem);
6345 if (first_use)
6346 {
6347 NEXT_INSN (tem) = first_use;
6348 PREV_INSN (first_use) = tem;
6349 }
6350 first_use = tem;
6351 if (!last_use)
6352 last_use = tem;
6353 }
6354
6355 emit_barrier_after (insn);
6356
6357 seq = gen_epilogue ();
6358 tail = emit_jump_insn_after (seq, insn);
6359
6360 /* Insert the USE insns immediately before the return insn, which
6361 must be the first instruction before the final barrier. */
6362 if (first_use)
6363 {
6364 tem = prev_nonnote_insn (get_last_insn ());
6365 NEXT_INSN (PREV_INSN (tem)) = first_use;
6366 PREV_INSN (first_use) = PREV_INSN (tem);
6367 PREV_INSN (tem) = last_use;
6368 NEXT_INSN (last_use) = tem;
6369 }
6370
6371 emit_note_after (NOTE_INSN_EPILOGUE_BEG, insn);
6372
6373 /* Include the new epilogue insns in the last block. Ignore
6374 them if they form a basic block unto themselves. */
6375 if (x_basic_block_end && n_basic_blocks
6376 && GET_CODE (BLOCK_END (n_basic_blocks - 1)) != JUMP_INSN)
6377 BLOCK_END (n_basic_blocks - 1) = tail;
6378
6379 /* Retain a map of the epilogue insns. */
6380 epilogue = record_insns (GET_CODE (seq) == SEQUENCE ? seq : tail);
6381 return;
6382 }
6383 }
6384 #endif
6385 epilogue = 0;
6386 }
6387
6388 /* Reposition the prologue-end and epilogue-begin notes after instruction
6389 scheduling and delayed branch scheduling. */
6390
6391 void
6392 reposition_prologue_and_epilogue_notes (f)
6393 rtx f ATTRIBUTE_UNUSED;
6394 {
6395 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
6396 /* Reposition the prologue and epilogue notes. */
6397 if (n_basic_blocks)
6398 {
6399 rtx next, prev;
6400 int len;
6401
6402 if (prologue)
6403 {
6404 register rtx insn, note = 0;
6405
6406 /* Scan from the beginning until we reach the last prologue insn.
6407 We apparently can't depend on basic_block_{head,end} after
6408 reorg has run. */
6409 for (len = 0; prologue[len]; len++)
6410 ;
6411 for (insn = f; len && insn; insn = NEXT_INSN (insn))
6412 {
6413 if (GET_CODE (insn) == NOTE)
6414 {
6415 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
6416 note = insn;
6417 }
6418 else if ((len -= contains (insn, prologue)) == 0)
6419 {
6420 /* Find the prologue-end note if we haven't already, and
6421 move it to just after the last prologue insn. */
6422 if (note == 0)
6423 {
6424 for (note = insn; (note = NEXT_INSN (note));)
6425 if (GET_CODE (note) == NOTE
6426 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
6427 break;
6428 }
6429
6430 next = NEXT_INSN (note);
6431 prev = PREV_INSN (note);
6432 if (prev)
6433 NEXT_INSN (prev) = next;
6434 if (next)
6435 PREV_INSN (next) = prev;
6436
6437 /* Whether or not we can depend on BLOCK_HEAD,
6438 attempt to keep it up-to-date. */
6439 if (BLOCK_HEAD (0) == note)
6440 BLOCK_HEAD (0) = next;
6441
6442 add_insn_after (note, insn);
6443 }
6444 }
6445 }
6446
6447 if (epilogue)
6448 {
6449 register rtx insn, note = 0;
6450
6451 /* Scan from the end until we reach the first epilogue insn.
6452 We apparently can't depend on basic_block_{head,end} after
6453 reorg has run. */
6454 for (len = 0; epilogue[len]; len++)
6455 ;
6456 for (insn = get_last_insn (); len && insn; insn = PREV_INSN (insn))
6457 {
6458 if (GET_CODE (insn) == NOTE)
6459 {
6460 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
6461 note = insn;
6462 }
6463 else if ((len -= contains (insn, epilogue)) == 0)
6464 {
6465 /* Find the epilogue-begin note if we haven't already, and
6466 move it to just before the first epilogue insn. */
6467 if (note == 0)
6468 {
6469 for (note = insn; (note = PREV_INSN (note));)
6470 if (GET_CODE (note) == NOTE
6471 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
6472 break;
6473 }
6474 next = NEXT_INSN (note);
6475 prev = PREV_INSN (note);
6476 if (prev)
6477 NEXT_INSN (prev) = next;
6478 if (next)
6479 PREV_INSN (next) = prev;
6480
6481 /* Whether or not we can depend on BLOCK_HEAD,
6482 attempt to keep it up-to-date. */
6483 if (n_basic_blocks
6484 && BLOCK_HEAD (n_basic_blocks-1) == insn)
6485 BLOCK_HEAD (n_basic_blocks-1) = note;
6486
6487 add_insn_before (note, insn);
6488 }
6489 }
6490 }
6491 }
6492 #endif /* HAVE_prologue or HAVE_epilogue */
6493 }